diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index d76a5319e965e5f877e66a99c71f92d08e50d67f..fcf228ca0c1b6b6c4cfe344b7fab829dfdc94cb4 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -3,8 +3,8 @@ stages:
   - test
 
 variables:
-  BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:24.12
-  SPACK_PATH_GITLAB: /mnt/spack_v0.23.0
+  BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:devel
+  SPACK_PATH_GITLAB: /mnt/spack_v0.23.1
   SYSTEMNAME: ebrainslab
   GIT_SUBMODULE_STRATEGY: recursive
   GIT_CLEAN_FLAGS: -ffdxq
@@ -102,7 +102,9 @@ variables:
     SPACK_ENV: test
     RELEASE_NAME: EBRAINS-test
   rules:
-    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE == "push"
+    # - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE == "push"
+    - if: $CI_COMMIT_BRANCH == "spack_v0.23.1" && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE == "push"
+      when: manual
 
 # deploy the experimental release of tools once a week from latest working version of int release 
 .deploy-exp-release:
@@ -197,9 +199,9 @@ build-spack-env-on-runner:
   after_script:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
       # for succesfully installed packages: keep the spack logs for any package modified during this CI job
-    - shopt -s globstar
-    - PKG_DIR=$CI_PROJECT_DIR/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0
-    - if cd $PKG_DIR; then find . \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi
+    - . $CI_PROJECT_DIR/spack/share/spack/setup-env.sh
+    - cd $(spack-python -c "print(spack.store.parse_install_tree(spack.config.get('config'))[0])")
+    - find . -mindepth 4 -maxdepth 4 \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;
       # for not succesfully installed packages: also keep the spack logs for any packages that failed
     - if cd /tmp/$(whoami)/spack-stage/; then find . -maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi
     # - if [ -d /tmp/spack_tests ]; then mv /tmp/spack_tests $CI_PROJECT_DIR; fi
@@ -250,7 +252,7 @@ sync-esd-image:
   after_script:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
       # for succesfully installed packages: keep the spack logs for any package modified during this CI job
-    - PKG_DIR=${SANDBOX_ROOT}/${INSTALLATION_ROOT}/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0
+    - PKG_DIR=${SANDBOX_ROOT}/${INSTALLATION_ROOT}/spack/opt/spack/**/linux-*/gcc-13.3.0
     - if cd $PKG_DIR; then find . \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi
       # for not succesfully installed packages: also keep the spack logs for any packages that failed
     - if cd /tmp/$(whoami)/spack-stage/; then find . -maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi
@@ -259,10 +261,11 @@ sync-esd-image:
       - spack_logs
     when: always
   timeout: 2 days
-  resource_group: registry-esd-master-image
+  resource_group: registry-esd-spack-23-image
   rules:
     # branches that update the gitlab-runner upstream (read-only) installation and the spack OCI caches
-    - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_PATH =~ /platform\/esd\/ebrains-spack-builds/ && $CI_PIPELINE_SOURCE == "push"
+    #- if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_PATH =~ /platform\/esd\/ebrains-spack-builds/ && $CI_PIPELINE_SOURCE == "push"
+    - if: $CI_PROJECT_PATH =~ /platform\/esd\/ebrains-spack-builds/ && $CI_PIPELINE_SOURCE == "push"
       when: manual
 
 # update gitlab-runner upstream (read-only) installation
@@ -274,7 +277,7 @@ sync-gitlab-spack-instance:
   image: $BUILD_ENV_DOCKER_IMAGE
   variables:
     SPACK_REPO_PATH: $SPACK_PATH_GITLAB/ebrains-spack-builds
-    SPACK_JOBS: 4
+    SPACK_JOBS: 16
     OCI_CACHE_PREFIX: ""
     UPDATE_SPACK_OCI_CACHES: false
   script:
@@ -296,9 +299,9 @@ sync-gitlab-spack-instance:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
       # for succesfully installed packages: keep the spack logs for any package modified during this CI job
       # (we use repo.yaml, that is modified at each start of the pipeline, as a reference file)
-    - shopt -s globstar
-    - PKG_DIR=$SPACK_PATH_GITLAB/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0
-    - if cd $PKG_DIR; then find . -newer $SPACK_REPO_PATH/repo.yaml \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi
+    - . $SPACK_PATH_GITLAB/spack/share/spack/setup-env.sh
+    - cd $(spack-python -c "print(spack.store.parse_install_tree(spack.config.get('config'))[0])")
+    - find . -mindepth 4 -maxdepth 4 -newer $SPACK_REPO_PATH/repo.yaml \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;
       # for not succesfully installed packages: also keep the spack logs for any packages that failed
     - if cd /tmp/$(whoami)/spack-stage/; then find . -maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi
   artifacts:
@@ -307,7 +310,8 @@ sync-gitlab-spack-instance:
     when: always
   rules:
     # branches that update the gitlab-runner upstream (read-only) installation
-    - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE == "push"
+    #- if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE == "push"
+    - if: $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE == "push"
       when: manual
 
 # run (scheduled) standalone tests for environment
diff --git a/README.md b/README.md
index 36bce891f97ba77d298c27bfe99a6e93beb569dc..cc0ed81ec2a0c972386d1d43ade5519a0a7ee74e 100644
--- a/README.md
+++ b/README.md
@@ -38,9 +38,9 @@ Clone this repository. You can use the `ebrains-yy-mm` branches to install the E
 git clone --branch {branch-name} https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds.git
 ```
 
-Clone Spack. We currently use version v0.23.0:
+Clone Spack. We currently use version v0.23.1:
 ```
-git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch v0.23.0 https://github.com/spack/spack
+git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch v0.23.1 https://github.com/spack/spack
 ```
 
 Activate Spack:
diff --git a/create_job.sh b/create_job.sh
index 60958e3bb43aaa28238f519ff53d535fd830b008..56d80c8fc0aaa7337b340b00c30bd61945a1c883 100644
--- a/create_job.sh
+++ b/create_job.sh
@@ -36,11 +36,11 @@ spec:
         imagePullPolicy: Always
         resources:
           limits:
-            cpu: '6'
-            memory: '18Gi'
+            cpu: '8'
+            memory: '32Gi'
           requests:
             cpu: '4'
-            memory: '12Gi'
+            memory: '20Gi'
         volumeMounts:
           - name: sharedbin
             mountPath: /srv
@@ -99,7 +99,7 @@ spec:
           - name: EBRAINS_REPO_PATH
             value: $INSTALLATION_ROOT/ebrains-spack-builds
           - name: SPACK_JOBS
-            value: '4'
+            value: '6'
       volumes:
         - name: sharedbin
           persistentVolumeClaim:
diff --git a/install_spack_env.sh b/install_spack_env.sh
index faf7ef8be89ef584b576093c19dbef0c5ab79a42..a0bd61a9d35c38b0394ccdb5f2b7a8d16b1c5705 100644
--- a/install_spack_env.sh
+++ b/install_spack_env.sh
@@ -21,10 +21,9 @@ export OCI_CACHE_PREFIX=$7
 # make sure spack uses the symlinked folder as path
 export CI_SPACK_ROOT=${INSTALLATION_ROOT}/spack
 
-# specify location of .spack dir (by default in ~)
-# this is where cache and configuration settings are stored
-export SPACK_USER_CACHE_PATH=${CI_SPACK_ROOT}/.spack
-export SPACK_USER_CONFIG_PATH=${CI_SPACK_ROOT}/.spack
+# disable local configuration and cache directories
+export SPACK_DISABLE_LOCAL_CONFIG=true
+export SPACK_USER_CACHE_PATH=/tmp/spack
 
 # define SYSTEMNAME variable in sites where it's not already defined
 export SYSTEMNAME=${SYSTEMNAME:-${HPC_SYSTEM:-$BSC_MACHINE}}
@@ -46,19 +45,14 @@ if [ ! -d ${CI_SPACK_ROOT} ]; then
   SPACK_ROOT_EXISTED=0
 fi
 
-if [[ $UPSTREAM_INSTANCE ]]
-then
-  UPSTREAM_PREFIX=$(find $UPSTREAM_INSTANCE/spack/opt/spack/ -type d -name ".spack-db" 2>/dev/null | xargs -I {} dirname {})
-  cat <<EOF > ${CI_SPACK_ROOT}/etc/spack/defaults/upstreams.yaml
-upstreams:
-  upstream-spack-instance:
-    install_tree: $UPSTREAM_PREFIX
-EOF
-fi
-
 # activate Spack
 source ${CI_SPACK_ROOT}/share/spack/setup-env.sh
 
+if [[ $UPSTREAM_INSTANCE ]]; then
+    UPSTREAM_PREFIX=$(find $UPSTREAM_INSTANCE/spack/opt/spack/ -type d -name ".spack-db" 2>/dev/null | xargs -I {} dirname {})
+    spack config add upstreams:upstream-spack-instance:install_tree:$UPSTREAM_PREFIX
+fi
+
 if [ "${SPACK_ROOT_EXISTED}" -eq 0 ]; then
     # for caching purposes it's nice if we can relocate into long paths, but we
     # can't do that for existing installations -> else path
@@ -85,52 +79,6 @@ if [[ ! $(spack mirror list | grep local_cache) ]]; then
     spack mirror add local_cache ${SPACK_CACHE_BUILD}
 fi
 
-# install platform compiler (extract version from packages.yaml)
-if [ $SYSTEMNAME == ebrainslab ]
-then
-    EBRAINS_SPACK_COMPILER=$(grep 'compiler' $EBRAINS_REPO/site-config/$SYSTEMNAME/packages.yaml | awk -F'[][]' '{ print $2 }')
-    spack compiler find
-    spack load $EBRAINS_SPACK_COMPILER || {
-        # dump dag to file
-        spack spec -y $EBRAINS_SPACK_COMPILER arch=x86_64 > "/tmp/req_compiler.yaml"
-        if [ -n "${OCI_CACHE_PREFIX}" ]; then
-            # fetch missing sources (if packages not yet installed)
-            python3 ${YASHCHIKI_HOME}/fetch_cached_sources.py \
-                --local-cache=${SPACK_CACHE_SOURCE} \
-                --remote-cache-type=oci \
-                --remote-cache=${OCI_CACHE_PREFIX}/source_cache \
-                --yashchiki-home=${YASHCHIKI_HOME} \
-                /tmp/compiler_missing_paths_sources.dat /tmp/req_compiler.yaml
-            # fetch missing build results (if packages not yet installed)
-            python3 ${YASHCHIKI_HOME}/fetch_cached_buildresults.py \
-                --local-cache=${SPACK_CACHE_BUILD}/build_cache \
-                --remote-cache-type=oci \
-                --remote-cache=${OCI_CACHE_PREFIX}/build_cache \
-                --yashchiki-home=${YASHCHIKI_HOME} \
-                /tmp/compiler_missing_paths_buildresults.dat /tmp/req_compiler.yaml
-        fi
-        spack install --no-check-signature -y -j$SPACK_JOBS $EBRAINS_SPACK_COMPILER arch=x86_64
-        if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then
-            echo "Performing update of the source cache (for base compiler)"
-            python3 ${YASHCHIKI_HOME}/update_cached_sources.py \
-                --local-cache=${SPACK_CACHE_SOURCE} \
-                --remote-cache-type=oci \
-                --remote-cache=${OCI_CACHE_PREFIX}/source_cache \
-                /tmp/compiler_missing_paths_sources.dat
-            # push previously missing (but now installed) packages to the local cache
-            spack buildcache create --unsigned ${SPACK_CACHE_BUILD} ${EBRAINS_SPACK_COMPILER} && ret=$? || ret=$?
-            # upload packages from local to remote cache
-            echo "Performing update of the build cache (for base compiler)"
-            python3 ${YASHCHIKI_HOME}/update_cached_buildresults.py \
-                --local-cache=${SPACK_CACHE_BUILD}/build_cache \
-                --remote-cache-type=oci \
-                --remote-cache=${OCI_CACHE_PREFIX}/build_cache \
-                /tmp/compiler_missing_paths_buildresults.dat
-        fi
-        spack load $EBRAINS_SPACK_COMPILER
-    }
-fi
-
 spack compiler find
 
 # create environment if it does not exist
@@ -170,7 +118,7 @@ if [ -n "${OCI_CACHE_PREFIX}" ]; then
 fi
 spack-python -c "exit(not len(spack.environment.active_environment().uninstalled_specs()))" && (
     # fetch all sources but delay exit code handling
-    spack fetch --dependencies --missing && ret=$? || ret=$?;
+    #spack fetch --dependencies --missing && ret=$? || ret=$?;
     if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then
         # push freshly fetched sources to remote cache
         echo "Performing update of the source cache"
@@ -192,6 +140,9 @@ if [ -n "${OCI_CACHE_PREFIX}" ]; then
     dag_hashes_pre_install=$(spack-python ${YASHCHIKI_HOME}/specfile_dag_hash.py ${CACHE_SPECFILE})
 fi
 
+# temporarily skip packages that fail
+spack rm hxtorch jaxsnn pynn-brainscales wf-brainscales2-demos py-snudda r-sbtabvfgen
+
 # install the environment, use 2 jobs to reduce the amount of required RAM
 # delay exit code until we have updated the cache below
 spack install --no-check-signature -y -j$SPACK_JOBS --fresh --test root && spack_install_ret=$? || spack_install_ret=$?
diff --git a/packages/arbor/package.py b/packages/arbor/package.py
index 7a4a2c25a7fa8a81a22e1daa7b3e300bd78e6c5c..8cf2887bb0005b43585d65f10ab10092722ce472 100644
--- a/packages/arbor/package.py
+++ b/packages/arbor/package.py
@@ -13,10 +13,15 @@ class Arbor(CMakePackage, CudaPackage):
     homepage = "https://arbor-sim.org"
     git = "https://github.com/arbor-sim/arbor.git"
     url = "https://github.com/arbor-sim/arbor/releases/download/v0.9.0/arbor-v0.9.0-full.tar.gz"
-    maintainers = ["thorstenhater", "brenthuisman", "haampie"]
+    maintainers = ("thorstenhater", "ErbB4", "haampie")
 
-    version("master", branch="master")
-    version("develop")
+    version("master", branch="master", submodules=True)
+    version("develop", branch="master", submodules=True)
+    version(
+        "0.10.0",
+        sha256="72966b7a2f45ce259b8ba167ca3e4f5ab9f212136a300267aaac0c04ed3fe3fc",
+        url="https://github.com/arbor-sim/arbor/releases/download/v0.10.1/arbor-v0.10.0-full.tar.gz",
+    )
     version(
         "0.9.0",
         sha256="5f9740955c821aca81e23298c17ad64f33f635756ad9b4a0c1444710f564306a",
@@ -75,36 +80,50 @@ class Arbor(CMakePackage, CudaPackage):
     conflicts("%cce@:9.1")
     conflicts("%intel")
 
+    # begin EBRAINS (modified: added run dep)
     depends_on("cmake@3.19:", type=("build", "run"))
+    # end EBRAINS
 
     # misc dependencies
     depends_on("fmt@7.1:", when="@0.5.3:")  # required by the modcc compiler
     depends_on("fmt@9.1:", when="@0.7.1:")
-    depends_on("googletest@1.12.1", when="@0.7.1:")
+    # begin EBRAINS (modified: relaxed (upstream gave no info about update))
+    # upstream adds: depends_on("fmt@10.1:", when="@0.9.1:")
+    depends_on("googletest@1.12.1:", when="@0.7.1:")
     depends_on("pugixml@1.11:", when="@0.7.1:")
-    depends_on("nlohmann-json@3.11.2")
+    # upstream adds: depends_on("pugixml@1.13:", when="@0.9.1:")
+    depends_on("nlohmann-json@3.11.2:")
     depends_on("random123")
+    # upstream adds: depends_on("random123@1.14.0:", when="@0.10:")
+    # end EBRAINS (modified)
     with when("+cuda"):
         depends_on("cuda@10:")
         depends_on("cuda@11:", when="@0.7.1:")
+        depends_on("cuda@12:", when="@0.9.1:")
 
     # mpi
+    # begin EBRAINS (modified: added run dep)
     depends_on("mpi", when="+mpi", type=("build", "run"))
+    # end EBRAINS (modified)
     depends_on("py-mpi4py", when="+mpi+python", type=("build", "run"))
 
     # python (bindings)
-    extends("python", when="+python")
-    depends_on("python@3.7:", when="+python", type=("build", "run"))
-    depends_on("py-numpy", when="+python", type=("build", "run"))
     with when("+python"):
+        extends("python")
+        depends_on("python@3.7:", type=("build", "run"))
+        depends_on("python@3.9:", when="@0.9.1:", type=("build", "run"))
+        depends_on("py-numpy", type=("build", "run"))
         depends_on("py-pybind11@2.6:", type="build")
         depends_on("py-pybind11@2.8.1:", when="@0.5.3:", type="build")
         depends_on("py-pybind11@2.10.1:", when="@0.7.1:", type="build")
+        depends_on("py-pandas", type="test")
+        depends_on("py-seaborn", type="test")
 
     # sphinx based documentation
-    depends_on("python@3.7:", when="+doc", type="build")
-    depends_on("py-sphinx", when="+doc", type="build")
-    depends_on("py-svgwrite", when="+doc", type="build")
+    with when("+doc"):
+        depends_on("python@3.10:", type="build")
+        depends_on("py-sphinx", type="build")
+        depends_on("py-svgwrite", type="build")
 
     @property
     def build_targets(self):
@@ -127,7 +146,11 @@ class Arbor(CMakePackage, CudaPackage):
         opt_flags = spack.build_environment.optimization_flags(
             self.compiler, self.spec.target
         )
-        args.append("-DARB_CXX_FLAGS_TARGET=" + opt_flags)
+        # optimization_flags() may return an empty string
+        if opt_flags:
+            args.append("-DARB_CXX_FLAGS_TARGET=" + opt_flags)
+        # Needed because Spack has no units package
+        args.append("-DARB_USE_BUNDLED_UNITS=ON")
 
         return args
 
@@ -135,3 +158,4 @@ class Arbor(CMakePackage, CudaPackage):
     @on_package_attributes(run_tests=True)
     def install_test(self):
         python("-c", "import arbor")
+        python("python/example/single_cell_model.py")
diff --git a/packages/py-annarchy/package.py b/packages/py-annarchy/package.py
index bf672bb8aae2b0b1697eccde4f9bbb919dbbdcd8..4368d1e01bff0165b9c77550190780df1d4b3690 100644
--- a/packages/py-annarchy/package.py
+++ b/packages/py-annarchy/package.py
@@ -13,14 +13,16 @@ class PyAnnarchy(PythonPackage):
     """
 
     homepage = "https://annarchy.readthedocs.io/en/latest/"
-    pypi = 'ANNarchy/ANNarchy-4.7.2.5.tar.gz'
+    pypi = 'ANNarchy/annarchy-4.8.2.3.tar.gz'
 
     maintainers = ['dionperd', 'paulapopa', "ldomide"]
 
+    version('4.8.2.3', '25a4d09905983ce27f7c6b4dd67a54831ea233b6b28943cb67dafd3c351d1dde')
     version('4.7.2.5', 'b7ef91cc4415e078e386eb30e595922c9f0ef90ad1340a12dc5ca46e728a7bb2')
 
     # python_requires
-    depends_on('python@3.8:3.10', type=('build', 'run'))
+    depends_on('python@3.8:3.10', when='@:4.7.2', type=('build', 'run'))
+    depends_on('python@3.10:', when='@4.7.3:', type=('build', 'run'))
 
     # setup_requires
     depends_on('py-pip', type='build')
@@ -34,6 +36,8 @@ class PyAnnarchy(PythonPackage):
     depends_on('py-sympy', type=('build', 'run'))  # >= 1.6
     depends_on('py-matplotlib', type=('build', 'run'))  # >= 2.0
     depends_on('py-cython', type=('build', 'run'))  # >= 0.20
+    depends_on('py-tqdm', when='@4.8:', type=('build', 'run'))
+    depends_on('py-h5py', when='@4.8.2:', type=('build', 'run'))
 
     # Highly recommended:
     # pyqtgraph >= 0.9.8 (to visualize some of the provided examples. The OpenGL backend can also be needed)
diff --git a/packages/py-arborize/package.py b/packages/py-arborize/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..f897963fe137fb405143b9b74a20374543640185
--- /dev/null
+++ b/packages/py-arborize/package.py
@@ -0,0 +1,26 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyArborize(PythonPackage):
+    """
+    A framework to package NEURON cell models following a clear separation between cell model description and its implementation in NEURON.
+    Cell models described using this framework are highly portable, testable and distributable.
+    """
+
+    homepage = "https://github.com/dbbs-lab/arborize"
+    pypi = "arborize/arborize-4.1.0.tar.gz"
+
+    license("GPL-3.0-only")
+    maintainers = ["helveg","filimarc","drodarie"]
+
+    version("4.1.0", sha256="2cb88b890fa69de42a49b38ea4defd0caa4ee91b34f9b75216e1536d9058f57f")
+
+    depends_on("py-flit-core@3.2:4.0", type="build")
+    depends_on("py-numpy@1.21:")
+    depends_on("py-errr@1.2:")
+    depends_on("py-morphio@3.3.6:4")
diff --git a/packages/py-arviz/package.py b/packages/py-arviz/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..c127d5f422b32fcd271fd38e90a8a883cb6e75f6
--- /dev/null
+++ b/packages/py-arviz/package.py
@@ -0,0 +1,36 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyArviz(PythonPackage):
+    """ArviZ (pronounced "AR-vees") is a Python package for exploratory
+    analysis of Bayesian models. Includes functions for posterior analysis,
+    model checking, comparison and diagnostics."""
+
+    homepage = "https://github.com/arviz-devs/arviz"
+    pypi = "arviz/arviz-0.6.1.tar.gz"
+
+    license("Apache-2.0")
+
+    # begin EBRAINS (added): added version
+    version("0.20.0", sha256="a2704e0c141410fcaea1973a90cabf280f5aed5c1e10f44381ebd6c144c10a9c")
+    # end EBRAINS
+    version("0.6.1", sha256="435edf8db49c41a8fa198f959e7581063006c49a4efdef4755bb778db6fd4f72")
+
+    depends_on("py-setuptools", type="build")
+    depends_on("py-matplotlib@3.0:", type=("build", "run"))
+    depends_on("py-numpy@1.12:", type=("build", "run"))
+    depends_on("py-scipy@0.19:", type=("build", "run"))
+    depends_on("py-packaging", type=("build", "run"))
+    depends_on("py-pandas@0.23:", type=("build", "run"))
+    depends_on("py-xarray@0.11:", type=("build", "run"))
+    # begin EBRAINS (modified): update dependencies
+    depends_on("py-netcdf4", when="@:0.13", type=("build", "run"))
+    depends_on("py-h5netcdf", when="@0.15:", type=("build", "run"))
+    depends_on("py-typing-extensions", when="@0.11:", type=("build", "run"))
+    depends_on("py-xarray-einstats", type=("build", "run"))
+    # end EBRAINS
diff --git a/packages/py-astropy/package.py b/packages/py-astropy/package.py
index e780f21c93192d5a60535e9140fe625ff7797628..d62abe730f8e60db3a94959f794d25eeb5500782 100644
--- a/packages/py-astropy/package.py
+++ b/packages/py-astropy/package.py
@@ -40,7 +40,9 @@ class PyAstropy(PythonPackage):
 #  TODO: probably fix, unrealistic
     depends_on("py-cython@0.29.13:", type="build")
     depends_on("py-cython@0.29.30", when="@5.1:6.0", type="build")
-    depends_on("py-cython@3.0.0", when="@6.1.0:", type="build")
+    # begin EBRAINS (modified): relax dependency version constraint
+    depends_on("py-cython@3.0", when="@6:", type="build")
+    # end EBRAINS
 
     # in newer pip versions --install-option does not exist
     depends_on("py-pip@:23.0", type="build")
diff --git a/packages/py-blessed/package.py b/packages/py-blessed/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..c40b13ca210622b062cc0ff07767fdbefd99694c
--- /dev/null
+++ b/packages/py-blessed/package.py
@@ -0,0 +1,42 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class PyBlessed(PythonPackage):
+    """Blessed is a thin, practical wrapper around terminal capabilities in
+    Python."""
+
+    homepage = "https://github.com/jquast/blessed"
+    pypi = "blessed/blessed-1.15.0.tar.gz"
+
+    license("MIT")
+
+    # begin EBRAINS (added): add version
+    version("1.20.0", sha256="2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680")
+    # end EBRAINS
+    version("1.19.0", sha256="4db0f94e5761aea330b528e84a250027ffe996b5a94bf03e502600c9a5ad7a61")
+    version("1.18.1", sha256="8b09936def6bc06583db99b65636b980075733e13550cb6af262ce724a55da23")
+    version("1.18.0", sha256="1312879f971330a1b7f2c6341f2ae7e2cbac244bfc9d0ecfbbecd4b0293bc755")
+    version("1.17.12", sha256="580429e7e0c6f6a42ea81b0ae5a4993b6205c6ccbb635d034b4277af8175753e")
+    version("1.17.11", sha256="7d4914079a6e8e14fbe080dcaf14dee596a088057cdc598561080e3266123b48")
+    version("1.17.10", sha256="58b9464609f54e2eca5f5926db590a5b01fefef882844ce05064f483b8f96c26")
+    version("1.17.9", sha256="0d497a5be8a808b7300c00bf8303e7ba9fd11f6063a67bb924a475e5bfa7a9bb")
+    version("1.17.8", sha256="7671d057b2df6ddbefd809009fb08feb2f8d2d163d240b5e765088a90519b2f1")
+    version("1.17.7", sha256="0329a3d1db91328986a6dfd36475dbc498c867090f0433cdcc1a45a5eb2067e4")
+    version("1.17.6", sha256="a9a774fc6eda05248735b0d86e866d640ca2fef26038878f7e4d23f7749a1e40")
+    version("1.17.5", sha256="926916492220af741657ec4668aba95f54a8c32445e765cfa38c7ccd3343cc6f")
+    version("1.17.4", sha256="320a619c83298a9c9d632dbd8fafbb90ba9a38b83c7e64726c572fb186dd0781")
+    version("1.17.3", sha256="cc38547175ae0a3a3d4e5dcc7e7478a5a6bf0a6b5f4d9c6b2e5eadbe4475cb0e")
+    version("1.17.0", sha256="38632d60dd384de9e9be0ee5b6e1c6130f96efd0767c6ca530a453da36238c25")
+    version("1.16.1", sha256="a222783b09f266cf76f5a01f4dfd9de79650f07cbefe2cbc67ec7bb9577c1dfa")
+    version("1.16.0", sha256="34b78e9b56c2ba2f6a9a625cc989d6cf4ae8ae87dcc4ed8ad144660ae4cf7784")
+    version("1.15.0", sha256="777b0b6b5ce51f3832e498c22bc6a093b6b5f99148c7cbf866d26e2dec51ef21")
+
+    depends_on("py-setuptools", type="build")
+    depends_on("py-wcwidth@0.1.4:", type=("build", "run"))
+    depends_on("py-six@1.9.0:", type=("build", "run"))
diff --git a/packages/py-bsb-core/package.py b/packages/py-bsb-core/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..eacb83a4a7303df7c19502402ac069c14430834c
--- /dev/null
+++ b/packages/py-bsb-core/package.py
@@ -0,0 +1,59 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyBsbCore(PythonPackage):
+    """
+    The BSB is a component framework for neural modeling, which focuses on component
+    declarations to piece together a model.
+    """
+
+    homepage = "https://bsb.readthedocs.io"
+    pypi = "bsb-core/bsb_core-5.0.0.tar.gz"
+
+    license("GPL-3.0-only")
+    maintainers = ["helveg","filimarc","drodarie"]
+
+    version("5.0.2", sha256="414be0f3ba72b2f656b89f8e4636e4a1d19b1f4dc9ba9360cc984020cb1859dc")
+    version("5.0.1", sha256="7cb905ee38419709b4ead2ffb40e1005d813d2c6780706b3f5eb2696aabeb983")
+    version("5.0.0", sha256="08e1776d351a8bb5c056ffbd8108d0bd941f71518b475aecbad9f22050b7cc91")
+
+    variant('parallel', default=True,
+            description='Build with MPI bindings')
+
+    depends_on("python@3.9:3.12", type=("build", "run"))
+    depends_on("py-flit-core@3.2:4.0", type="build")
+    depends_on("py-numpy@1.19:")
+    depends_on("py-scipy@1.5:")
+    depends_on("py-scikit-learn@1.0:")
+    depends_on("py-rtree@1.0:")
+    depends_on("py-psutil@5.8:")
+    depends_on("py-pynrrd@1.0:")
+    depends_on("py-toml@0.10:")
+    depends_on("py-requests")
+    depends_on("py-urllib3@2:")
+    depends_on("py-appdirs@1.4:")
+    depends_on("py-neo")
+    depends_on("py-tqdm@4.50:")
+    depends_on("py-shortuuid")
+    depends_on("py-quantities@0.15.0:")
+    depends_on("py-morphio@3.3:")
+    depends_on("py-errr@1.2.0:")
+    depends_on("py-dashing@0.1.0:")
+    depends_on("py-exceptiongroup")
+
+    depends_on('mpi', when='+parallel')
+    depends_on('py-mpi4py', when='+parallel')
+    depends_on('py-mpipool@2.2.1:3', when='+parallel')
+    depends_on('py-mpilock@1.1:', when='+parallel')
+
+
+    def setup_build_environment(self, env):
+        env.set("SPATIALINDEX_C_LIBRARY", self.spec["libspatialindex"].libs[0])
+
+    def setup_run_environment(self, env):
+        self.setup_build_environment(env)
diff --git a/packages/py-bsb-hdf5/package.py b/packages/py-bsb-hdf5/package.py
index 059f2df3b1c74c9e70b91c3d0ed8e5ac47a2d6a8..36fbc5d08f22ef5f7c02e8e9cf68530d7e3fffc2 100644
--- a/packages/py-bsb-hdf5/package.py
+++ b/packages/py-bsb-hdf5/package.py
@@ -10,13 +10,13 @@ class PyBsbHdf5(PythonPackage):
     """An HDF-5 based storage engine for the BSB framework."""
 
     homepage = "https://github.com/dbbs-lab/bsb-hdf5"
-    url = "https://pypi.org/packages/py3/b/bsb_hdf5/bsb_hdf5-0.8.3-py3-none-any.whl"
+    pypi = "bsb-hdf5/bsb_hdf5-5.0.4.tar.gz"
 
-    maintainers = ["helveg"]
+    license("GPL-3.0-only")
+    maintainers = ["helveg","filimarc","drodarie"]
 
-    version('0.8.3', sha256="38162bfe9470b87cb30a2bff78dce68fc1b97f2df7d7e3b288c16b671f7579e5", expand=False)
+    version('5.0.2', sha256='ed11177887848a3f177982201e1adb5770131bd541055a96935af38b39439fac')
 
-    depends_on("py-setuptools", type="build")
-    # depends_on("py-bsb@4.0.0a57:")
+    depends_on("py-flit-core@3.2:4.0", type="build")
+    depends_on("py-bsb-core@5.0.0:",when='@5.0.2')
     depends_on("py-shortuuid")
-    depends_on("py-h5py@3.0:")
diff --git a/packages/py-bsb-json/package.py b/packages/py-bsb-json/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..c692a2c5a6f0b09c4989a59566453a53a20baefa
--- /dev/null
+++ b/packages/py-bsb-json/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyBsbJson(PythonPackage):
+    """A plugin that allows the user to write their models' configuration in the json format, for the BSB framework."""
+
+    homepage = "https://github.com/dbbs-lab/bsb-json"
+    pypi = "bsb-json/bsb_json-4.2.2.tar.gz"
+
+    license("GPL-3.0-only")
+    maintainers = ["helveg","filimarc","drodarie"]
+
+    version("4.2.2", sha256="0c9e0af2a50f8ebbce353ba19bd11bafaf2536d74f0a79af3b0b6d8241fa6937")
+
+    depends_on("py-flit-core@3.2:4.0", type="build")
+    depends_on("py-bsb-core@5.0.0:")
+    depends_on("py-shortuuid")
diff --git a/packages/py-bsb-nest/package.py b/packages/py-bsb-nest/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d81556a860aad45d834cec30b09136fdb0b09be
--- /dev/null
+++ b/packages/py-bsb-nest/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyBsbNest(PythonPackage):
+    """
+    The BSB-NEST package is a component of the BSB neural modeling framework, used to simulate SNNs with the NEST software.
+    """
+
+    homepage = "https://github.com/dbbs-lab/bsb-nest"
+    pypi = "bsb-nest/bsb_nest-4.3.2.tar.gz"
+
+    license("GPL-3.0-only")
+    maintainers = ["helveg","filimarc","drodarie"]
+
+    version("4.3.2", sha256="478aa2937ca554ff291ce726cc69e1c1b283d7353a56e3b6878b585ed0684041")
+
+    depends_on("py-flit-core@3.2:4.0", type="build")
+    depends_on("py-bsb-core@5.0.2:")
+    depends_on("nest")
diff --git a/packages/py-bsb-neuron/package.py b/packages/py-bsb-neuron/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e4ec130bb75ded76917d248d43f3e01db0bbfe5
--- /dev/null
+++ b/packages/py-bsb-neuron/package.py
@@ -0,0 +1,26 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyBsbNeuron(PythonPackage):
+    """
+    The BSB-NEURON package is a component of the BSB neural modeling framework, used to run simulations with the NEURON software.
+    """
+
+    homepage = "https://github.com/dbbs-lab/bsb-neuron"
+    pypi = "bsb-neuron/bsb_neuron-4.2.2.tar.gz"
+
+    license("GPL-3.0-only")
+    maintainers = ["helveg","filimarc","drodarie"]
+
+    version("4.2.2", sha256="e7570c0cb17d31349eb8e88487e8ba48653f0fad0d7c232df8815cadde34a941")
+
+    depends_on("py-flit-core@3.2:4.0", type="build")
+    depends_on("py-bsb-core@5.0.2:")
+    depends_on("neuron")
+    depends_on("py-arborize@4.1:")
+    depends_on("py-nrn-patch@4:")
diff --git a/packages/py-bsb-yaml/package.py b/packages/py-bsb-yaml/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..d91b3301d65e361289df2301c3d9f1b399a9259b
--- /dev/null
+++ b/packages/py-bsb-yaml/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyBsbYaml(PythonPackage):
+    """A plugin that allows the user to write their models' configuration in the yaml format, for the BSB framework."""
+
+    homepage = "https://github.com/dbbs-lab/bsb-yaml"
+    pypi = "bsb-yaml/bsb_yaml-4.2.2.tar.gz"
+
+    license("GPL-3.0-only")
+    maintainers = ["helveg","filimarc","drodarie"]
+
+    version("4.2.2", sha256="c5614bc5fe57b78a445303756819a8d4ba032924484f88a07f6c26dd7e5afbec")
+
+    depends_on("py-flit-core@3.2:4.0", type="build")
+    depends_on("py-bsb-core@5.0.0:")
+    depends_on("py-pyyaml@6.0:")
+    depends_on("py-shortuuid")
diff --git a/packages/py-bsb/package.py b/packages/py-bsb/package.py
deleted file mode 100644
index 397bdbd3af00464bb89db6c2de7e5cd2a7529041..0000000000000000000000000000000000000000
--- a/packages/py-bsb/package.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-from spack.package import *
-
-
-class PyBsb(PythonPackage):
-    """
-    The BSB is a component framework for neural modeling, which focuses on component
-    declarations to piece together a model.
-    """
-
-    homepage = "https://bsb.readthedocs.io"
-    url = "https://pypi.org/packages/py3/b/bsb/bsb-4.0.0a57-py3-none-any.whl"
-
-    maintainers = ["helveg"]
-
-    version("4.0.0a57", sha256="5da15799aa8994894ff5371561d534b43beffaa79461189c94080071359f4076", expand=False)
-
-    depends_on("python@3.8:", type=("build", "run"))
-    depends_on("py-setuptools", type="build")
-    depends_on("py-numpy@1.19:")
-    depends_on("py-scipy@1.5:")
-    depends_on("py-scikit-learn@1.0:")
-    depends_on("py-plotly")
-    depends_on("py-rtree@1.0:")
-    depends_on("py-psutil@5.8:")
-    depends_on("py-pynrrd@1.0:")
-    depends_on("py-toml")
-    depends_on("py-requests")
-    depends_on("py-appdirs@1.4:")
-    depends_on("py-neo")
-    depends_on("py-tqdm@4.50:")
-    depends_on("py-shortuuid")
-    depends_on("py-quantities")
-    depends_on("py-pyyaml@6.0:")
-    depends_on("py-morphio@3.3:")
-    depends_on("py-bsb-hdf5@0.8.3:")
-    depends_on("py-errr@1.2.0:")
-    depends_on("py-colour@0.1.5:")
-
-    def setup_build_environment(self, env):
-        env.set("SPATIALINDEX_C_LIBRARY", self.spec["libspatialindex"].libs[0])
-
-    def setup_run_environment(self, env):
-        self.setup_build_environment(env)
-
-    skip_modules = ['bsb.simulators.arbor', 'bsb.simulators.arbor.devices']
-    
diff --git a/packages/py-cons/package.py b/packages/py-cons/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..77b2e1ab75a93b90f833d2a7106680c558c8d2de
--- /dev/null
+++ b/packages/py-cons/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyCons(PythonPackage):
+    """An implementation of cons in Python."""
+
+    homepage = "https://github.com/pythological/python-cons"
+    pypi = "cons/cons-0.4.6.tar.gz"
+
+    version("0.4.6", "669fe9d5ee916d5e42b9cac6acc911df803d04f2e945c1604982a04d27a29b47")
+
+    depends_on("python@3.6:", type=("build", "run"))
+    depends_on("py-setuptools", type="build")
+    depends_on("py-logical-unification@0.4.0:", type=("build", "run"))
diff --git a/packages/py-dashing/package.py b/packages/py-dashing/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..48d63dc4d1f3dccbba02dae152f5f9d131ab9c05
--- /dev/null
+++ b/packages/py-dashing/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class PyDashing(PythonPackage):
+    """Dashing is a library to quickly create terminal-based dashboards in Python."""
+
+    homepage = "https://github.com/FedericoCeratto/dashing"
+    pypi = "dashing-next/dashing_next-0.1.0.tar.gz"
+
+    version("0.1.0", sha256="9d48e97fce430a9cfb47d5627041b001ab306b65e97d6967fe86e2c25e324612")
+
+    depends_on("py-flit-core@3.2:4.0", type="build")
+    depends_on("py-blessed@1.20.0:")
diff --git a/packages/py-elephant/package.py b/packages/py-elephant/package.py
index 1ceebcfb54280464e069f86e2e4bf29972b42ff3..5b3a984d4fa904f1d1fba74618bb8ae3d181e86a 100644
--- a/packages/py-elephant/package.py
+++ b/packages/py-elephant/package.py
@@ -16,6 +16,8 @@ class PyElephant(PythonPackage, CudaPackage):
     maintainers = ["moritzkern"]
 
     version('develop', branch='master')
+
+    version("1.1.1", sha256="c08b89358b52e826bd081ee5a530728fe487c45f5d0539ec97ebdaed9c106e89")
     version("1.1.0", sha256="4085a8fcac3ab855f2585d017d17a1589c74adfbd930f7a1e012f2f5bd994e71")
     version("1.0.0", sha256="b1471228821a5b8f3a3137f9facc1a7f2dc355b8e3300490bdc05f0466b80b27")
     version("0.14.0", sha256="02ce3b2a8d08dc19828f95384551339ea0946bc405c1db9aace54135417c2b0f")
@@ -61,18 +63,20 @@ class PyElephant(PythonPackage, CudaPackage):
     depends_on("py-statsmodels@0.12.1:", type=("build", "run"), when="+extras")
     depends_on("py-jinja2@2.11.2:", type=("build", "run"), when="+extras")
     depends_on("py-neo@0.10.0:", type=("build", "run"), when="@0.11.0:1.0.0")
+    depends_on("py-neo@0.13.1:", type=("build", "run"), when="@1.1.1:")
     depends_on("py-neo@0.13.0:", type=("build", "run"), when="@0.11.0:1.1.0")
     depends_on("py-neo@0.9.0", type=("build", "run"), when="@0.9.0:0.10.0")
     depends_on("py-neo@0.8.0", type=("build", "run"), when="@0.6.4:0.8.0")
     depends_on("py-numpy@1.18.1:1.23.5", type=("build", "run"), when="@0.6.4:0.11.2")
-    depends_on("py-numpy@1.18.1:", type=("build", "run"), when="@0.12.0:")
+    depends_on("py-numpy@1.18.1:1.26.4", type=("build", "run"), when="@0.12.0:")
     depends_on("py-numpy@1.18.1:", type=("build", "run"), when="@develop")
     depends_on("py-quantities@0.12.1:0.13.0", type=("build", "run"), when="@0.6.4:0.11.2")
     depends_on("py-quantities@0.14.1:", type=("build", "run"), when="@develop")
     depends_on("py-quantities@0.14.1:", type=("build", "run"), when="@0.12.0:")
     depends_on("py-scikit-learn", type=("build", "run"), when="@0.3:")
     depends_on("py-scipy@1.5.4:", type=("build", "run"), when="@0.6.4:1.0.0")
-    depends_on("py-scipy@1.10.0:", type=("build", "run"), when="@1.1.0:")
+    depends_on("py-scipy@1.10.0:", type=("build", "run"), when="@1.1.0")
+    depends_on("py-scipy@1.14.0:", type=("build", "run"), when="@1.1.1:")
     depends_on("py-six@1.10.0:", type=("build", "run"), when="@0.6.4:")
     depends_on("py-tqdm", type=("build", "run"), when="@0.6.4:")
     depends_on("py-pycuda", type=("build", "run"), when="@0.10.0:+cuda")
@@ -86,7 +90,6 @@ class PyElephant(PythonPackage, CudaPackage):
         # skip some tests that seem to fail on HPC deployments,
         # see https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds/-/issues/38
         # test_WPLI_ground_truth_consistency_real_LFP_dataset, test_multitaper_cohere_perfect_cohere
-        # skip the following due to issue with neo > 0.13.0 https://github.com/NeuralEnsemble/elephant/pull/634
         # ECM (2025-02-05): also disable "test_parallel" test due to some test hang, cf. ESD issue 86
         # https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds/-/issues/86
-        pytest('-k', 'not test_WPLI_ground_truth_consistency_real_LFP_dataset and not test_multitaper_cohere_perfect_cohere and not test_neo_tools and not test_statistics and not test_trials and not test_parallel')
+        pytest('-k', 'not test_WPLI_ground_truth_consistency_real_LFP_dataset and not test_multitaper_cohere_perfect_cohere and not test_parallel')
diff --git a/packages/py-etuples/package.py b/packages/py-etuples/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3462437bf2ef7a81b7dc483c235230960249f98
--- /dev/null
+++ b/packages/py-etuples/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyEtuples(PythonPackage):
+    """Python library that allows one to define, optimize, and efficiently evaluate mathematical expressions involving
+    multi-dimensional arrays. It provides the computational backend for PyMC."""
+
+    homepage = "http://github.com/pythological/etuples"
+    pypi = "etuples/etuples-0.3.9.tar.gz"
+
+    version("0.3.9", "a474e586683d8ba8d842ba29305005ceed1c08371a4b4b0e0e232527137e5ea3")
+
+    depends_on("python@3.8:", type=("build", "run"))
+    depends_on("py-setuptools", type="build")
+    depends_on("py-cons", type=("build", "run"))
+    depends_on("py-multipledispatch", type=("build", "run"))
diff --git a/packages/py-exceptiongroup/package.py b/packages/py-exceptiongroup/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..1fe03cd45fd240264e40bf6109e9787332d72638
--- /dev/null
+++ b/packages/py-exceptiongroup/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class PyExceptiongroup(PythonPackage):
+    """A backport of the BaseExceptionGroup and ExceptionGroup classes from Python 3.11."""
+
+    homepage = "https://github.com/agronholm/exceptiongroup"
+    pypi = "exceptiongroup/exceptiongroup-1.0.4.tar.gz"
+
+    # begin EBRAINS (added): add version
+    version("1.2.0", sha256="91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68")
+    # end EBRAINS
+    version("1.1.1", sha256="d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785")
+    version("1.0.4", sha256="bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec")
+
+    depends_on("py-flit-scm", type="build")
diff --git a/packages/py-joblib/package.py b/packages/py-joblib/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..41d5602ad1bb43608fd6db46b5a6a896b01d643d
--- /dev/null
+++ b/packages/py-joblib/package.py
@@ -0,0 +1,44 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyJoblib(PythonPackage):
+    """Lightweight pipelining with Python functions."""
+
+    homepage = "https://joblib.readthedocs.io/"
+    pypi = "joblib/joblib-0.14.0.tar.gz"
+    git = "https://github.com/joblib/joblib"
+
+    # 'joblib.test' requires 'pytest'. Leave out of 'import_modules' to avoid
+    # unnecessary dependencies.
+    skip_modules = ["joblib.test"]
+
+    license("BSD-3-Clause")
+
+    # begin EBRAINS (added): add version
+    version("1.4.2", sha256="2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e")
+    # end EBRAINS
+    version("1.2.0", sha256="e1cee4a79e4af22881164f218d4311f60074197fb707e082e803b61f6d137018")
+    version("1.1.0", sha256="4158fcecd13733f8be669be0683b96ebdbbd38d23559f54dca7205aea1bf1e35")
+    version("1.0.1", sha256="9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7")
+    version("1.0.0", sha256="7ad866067ac1fdec27d51c8678ea760601b70e32ff1881d4dc8e1171f2b64b24")
+    version("0.17.0", sha256="9e284edd6be6b71883a63c9b7f124738a3c16195513ad940eae7e3438de885d5")
+    version("0.16.0", sha256="8f52bf24c64b608bf0b2563e0e47d6fcf516abc8cfafe10cfd98ad66d94f92d6")
+    version("0.15.1", sha256="61e49189c84b3c5d99a969d314853f4d1d263316cc694bec17548ebaa9c47b6e")
+    version("0.15.0", sha256="f8f84dcef519233be4ede1c64fd1f2d48b1e8bbb632d1013ebca75f8b678ee72")
+    version("0.14.1", sha256="0630eea4f5664c463f23fbf5dcfc54a2bc6168902719fa8e19daf033022786c8")
+    version("0.14.0", sha256="6fcc57aacb4e89451fd449e9412687c51817c3f48662c3d8f38ba3f8a0a193ff")
+    version("0.13.2", sha256="315d6b19643ec4afd4c41c671f9f2d65ea9d787da093487a81ead7b0bac94524")
+    version("0.11", sha256="7b8fd56df36d9731a83729395ccb85a3b401f62a96255deb1a77220c00ed4085")
+    version("0.10.3", sha256="29b2965a9efbc90a5fe66a389ae35ac5b5b0c1feabfc7cab7fd5d19f429a071d")
+    version("0.10.2", sha256="3123553bdad83b143428033537c9e1939caf4a4d8813dade6a2246948c94494b")
+    version("0.10.0", sha256="49b3a0ba956eaa2f077e1ebd230b3c8d7b98afc67520207ada20a4d8b8efd071")
+
+    depends_on("python@3.7:", when="@1.2:", type=("build", "run"))
+    depends_on("python@3.6:", when="@0.15:", type=("build", "run"))
+    depends_on("python@2.7:2.8,3.4:", type=("build", "run"))
+    depends_on("py-setuptools", type=("build", "run"))
diff --git a/packages/py-llvmlite/package.py b/packages/py-llvmlite/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4389d9c6c946e64111872656ca8f1f169159a4f
--- /dev/null
+++ b/packages/py-llvmlite/package.py
@@ -0,0 +1,86 @@
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyLlvmlite(PythonPackage):
+    """A lightweight LLVM python binding for writing JIT compilers"""
+
+    homepage = "https://llvmlite.readthedocs.io/en/latest/index.html"
+    pypi = "llvmlite/llvmlite-0.23.0.tar.gz"
+    git = "https://github.com/numba/llvmlite.git"
+
+    license("BSD-2-Clause")
+
+    version("0.44.0", sha256="07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4")
+    version("0.43.0", sha256="ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5")
+    version("0.42.0", sha256="f92b09243c0cc3f457da8b983f67bd8e1295d0f5b3746c7a1861d7a99403854a")
+    version("0.41.1", sha256="f19f767a018e6ec89608e1f6b13348fa2fcde657151137cb64e56d48598a92db")
+    version("0.41.0", sha256="7d41db345d76d2dfa31871178ce0d8e9fd8aa015aa1b7d4dab84b5cb393901e0")
+    version("0.40.1", sha256="5cdb0d45df602099d833d50bd9e81353a5e036242d3c003c5b294fc61d1986b4")
+    version("0.40.0", sha256="c910b8fbfd67b8e9d0b10ebc012b23cd67cbecef1b96f00d391ddd298d71671c")
+    version("0.39.1", sha256="b43abd7c82e805261c425d50335be9a6c4f84264e34d6d6e475207300005d572")
+    version("0.39.0", sha256="01098be54f1aa25e391cebba8ea71cd1533f8cd1f50e34c7dd7540c2560a93af")
+    version("0.38.1", sha256="0622a86301fcf81cc50d7ed5b4bebe992c030580d413a8443b328ed4f4d82561")
+    version("0.38.0", sha256="a99d166ccf3b116f3b9ed23b9b70ba2415640a9c978f3aaa13fad49c58f4965c")
+    version("0.37.0", sha256="6392b870cd018ec0c645d6bbb918d6aa0eeca8c62674baaee30862d6b6865b15")
+    version(
+        "0.34.0",
+        sha256="f03ee0d19bca8f2fe922bb424a909d05c28411983b0c2bc58b020032a0d11f63",
+        deprecated=True,
+    )
+    version(
+        "0.33.0",
+        sha256="9c8aae96f7fba10d9ac864b443d1e8c7ee4765c31569a2b201b3d0b67d8fc596",
+        deprecated=True,
+    )
+    version(
+        "0.31.0",
+        sha256="22ab2b9d7ec79fab66ac8b3d2133347de86addc2e2df1b3793e523ac84baa3c8",
+        deprecated=True,
+    )
+
+    depends_on("cxx", type="build")  # generated
+
+    depends_on("py-setuptools", type="build")
+    depends_on("python@3.9:3.12", when="@0.42:", type=("build", "run"))
+    depends_on("python@3.8:3.11", when="@0.40:0.41", type=("build", "run"))
+    depends_on("python@:3.10", when="@0.38:0.39", type=("build", "run"))
+    depends_on("python@:3.9", when="@0.36:0.37", type=("build", "run"))
+    depends_on("python@:3.8", when="@0.31:0.35", type=("build", "run"))
+
+    # https://github.com/numba/llvmlite#compatibility
+    depends_on("llvm@15", when="@0.44:")
+    depends_on("llvm@14", when="@0.41:0.43")
+    depends_on("llvm@11:14", when="@0.40")
+    depends_on("llvm@11", when="@0.37:0.39")
+    for t in [
+        "arm:",
+        "ppc:",
+        "ppc64:",
+        "ppc64le:",
+        "ppcle:",
+        "sparc:",
+        "sparc64:",
+        "x86:",
+        "x86_64:",
+    ]:
+        depends_on("llvm@10.0", when=f"@0.34:0.36 target={t}")
+
+    depends_on("llvm@9.0", when="@0.34:0.36 target=aarch64:")
+    depends_on("llvm@9.0", when="@0.33")
+    depends_on("llvm@7.0:7.1,8.0", when="@0.29:0.32")
+    depends_on("binutils", type="build")
+
+    # TODO: investigate
+    conflicts("%apple-clang@15:")
+
+    def setup_build_environment(self, env):
+        if self.spec.satisfies("%fj"):
+            env.set("CXX_FLTO_FLAGS", "{0}".format(self.compiler.cxx_pic_flag))
+            env.set("LD_FLTO_FLAGS", "-Wl,--exclude-libs=ALL")
+        else:
+            # Need to set PIC flag since this is linking statically with LLVM
+            env.set("CXX_FLTO_FLAGS", "-flto {0}".format(self.compiler.cxx_pic_flag))
diff --git a/packages/py-logical-unification/package.py b/packages/py-logical-unification/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..7ba1620e4eac73cede7806e73f3648bf6a99a53b
--- /dev/null
+++ b/packages/py-logical-unification/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyLogicalUnification(PythonPackage):
+    """Logical unification in Python, extensible via dispatch."""
+
+    homepage = "http://github.com/pythological/unification/"
+    pypi = "logical-unification/logical-unification-0.4.6.tar.gz"
+
+    version("0.4.6", "908435123f8a106fa4dcf9bf1b75c7beb309fa2bbecf277868af8f1c212650a0")
+
+    depends_on("python@3.6:", type=("build", "run"))
+    depends_on("py-setuptools", type="build")
+    depends_on("py-toolz", type=("build", "run"))
+    depends_on("py-multipledispatch", type=("build", "run"))
diff --git a/packages/py-mini-kanren/package.py b/packages/py-mini-kanren/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d24eb02cdfad669bc3213da38ded284d27701b9
--- /dev/null
+++ b/packages/py-mini-kanren/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyMiniKanren(PythonPackage):
+    """Logic/relational programming in Python with miniKanren."""
+
+    homepage = "http://github.com/pythological/kanren"
+    pypi = "miniKanren/miniKanren-1.0.3.tar.gz"
+
+    version("1.0.3", "1ec8bdb01144ad5e8752c7c297fb8a122db920f859276d25a72d164e998d7f6e")
+
+    depends_on("python@3.6:", type=("build", "run"))
+    depends_on("py-setuptools", type="build")
+    depends_on("py-toolz", type=("build", "run"))
+    depends_on("py-cons@0.4.0:", type=("build", "run"))
+    depends_on("py-multipledispatch", type=("build", "run"))
+    depends_on("py-etuples@0.3.1:", type=("build", "run"))
+    depends_on("py-logical-unification@0.4.1:", type=("build", "run"))
+    depends_on("py-typing-extensions", type=("build", "run"))
diff --git a/packages/py-mpilock/package.py b/packages/py-mpilock/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..16e9a60cb3d3b6f027c1622bdec1b45523eb1428
--- /dev/null
+++ b/packages/py-mpilock/package.py
@@ -0,0 +1,21 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyMpilock(PythonPackage):
+    """mpilock offers a WindowController class with a high-level API for parallel access to resources. """
+
+    homepage = "https://github.com/Helveg/mpilock"
+    pypi = "mpilock/mpilock-1.1.0-py3-none-any.whl"
+
+    version("1.1.0", sha256="0902ef859a7b3dfb4312a3c46332302493aa14fa398b610554706b0b9e7cb57c", expand=False)
+
+    maintainers=["helveg"]
+
+    depends_on("py-setuptools", type="build")
+    depends_on("py-mpi4py@3.0.3:")
+    depends_on("py-numpy@1.20.0:")
diff --git a/packages/py-mpipool/package.py b/packages/py-mpipool/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb6f112a8b70caecb251795805f035b6953f8e30
--- /dev/null
+++ b/packages/py-mpipool/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class PyMpipool(PythonPackage):
+    """mpipool offers MPI based parallel execution of tasks through implementations of Python's standard library interfaces"""
+
+    homepage = "https://github.com/mpipool/mpipool"
+    pypi = "mpipool/mpipool-2.2.1.tar.gz"
+
+    version("2.2.1", sha256="dc735b994349ae3e06fce7c3601523ba062125ffa6dd4c6c51a94c168c9ff92c")
+
+    maintainers=["helveg"]
+
+    depends_on("py-flit-core@3.2:4", type="build")
+    depends_on("py-mpi4py@3.0.3:")
+    depends_on("py-errr@1.0:")
+    depends_on("py-tblib@1.7.0:")
+    depends_on("py-dill@0.3.3:")
diff --git a/packages/py-nflows/package.py b/packages/py-nflows/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..72f82caf53201553d401c127bdff0cffa99ea3a1
--- /dev/null
+++ b/packages/py-nflows/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+class PyNflows(PythonPackage):
+    """It is a comprehensive collection of normalizing flows using PyTorch."""
+
+    homepage = "https://github.com/bayesiains/nflows"
+    pypi = "nflows/nflows-0.14.tar.gz"
+
+    version("0.14", "6299844a62f9999fcdf2d95cb2d01c091a50136bd17826e303aba646b2d11b55")
+
+    depends_on("py-setuptools", type="build")
+    depends_on("py-matplotlib", type=("build", "run"))
+    depends_on("py-numpy", type=("build", "run"))
+    depends_on("py-tensorboard", type=("build", "run"))
+    depends_on("py-torch", type=("build", "run"))
+    depends_on("py-tqdm", type=("build", "run"))
+    depends_on("py-umnn", type=("build", "run"))
diff --git a/packages/py-nmodl-glia/package.py b/packages/py-nmodl-glia/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..acd06eaf5934697ae1d40c52e664acd6401c8af5
--- /dev/null
+++ b/packages/py-nmodl-glia/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyNmodlGlia(PythonPackage):
+    """
+    A patch for using NMODL within the BSB framework.
+    """
+
+    homepage = "https://github.com/dbbs-lab/glia"
+    pypi = "nmodl-glia/nmodl_glia-4.0.1.tar.gz"
+
+    license("GPL-3.0-only")
+    maintainers = ["helveg","filimarc","drodarie"]
+
+    version("4.0.1", sha256="c3b3dad203eac1f394d6a4ca6e4f42d25d5eebc013970309f1453c7ca3e5c5a3")
+
+    depends_on("py-flit-core@3.2:4.0", type="build")
+    depends_on("py-numpy@1.21:")
+    depends_on("py-errr@1.2:")
+    depends_on("py-nmodl@0.5:")
diff --git a/packages/py-nmodl/fix-setup-requirements.patch b/packages/py-nmodl/fix-setup-requirements.patch
new file mode 100644
index 0000000000000000000000000000000000000000..52ec60c20f9a468047b10b505adf509f8946843d
--- /dev/null
+++ b/packages/py-nmodl/fix-setup-requirements.patch
@@ -0,0 +1,21 @@
+diff --git a/setup.py b/setup.py
+index c956004c9..671a6619e 100644
+--- a/setup.py
++++ b/setup.py
+@@ -131,16 +131,6 @@ setup(
+     zip_safe=False,
+     setup_requires=[
+         "jinja2>=2.9.3",
+-        "jupyter-client",
+-        "jupyter",
+-        "myst_parser",
+-        "mistune<3",  # prevents a version conflict with nbconvert
+-        "nbconvert",
+-        "nbsphinx>=0.3.2",
+-        "pytest>=3.7.2",
+-        "sphinxcontrib-applehelp<1.0.3",
+-        "sphinx<6",
+-        "sphinx-rtd-theme",
+     ]
+     + install_requirements,
+     install_requires=install_requirements,
diff --git a/packages/py-nmodl/package.py b/packages/py-nmodl/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd5fb0118803f1c4e32ac943e5de271f45ee9a16
--- /dev/null
+++ b/packages/py-nmodl/package.py
@@ -0,0 +1,38 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyNmodl(PythonPackage):
+    """The NMODL Framework is a code generation engine for NEURON MODeling Language (NMODL)."""
+
+    homepage = "https://github.com/BlueBrain/nmodl"
+
+    git = "https://github.com/BlueBrain/nmodl"
+
+    license("Apache-2.0")
+    maintainers = ["bbp.opensource"]
+
+    version("0.5", tag="0.5", commit="ac272785dc444c8444b085d121f08b7575bb6647", submodules=True)
+
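+    # drop docs/notebook/test-only entries from setup_requires so the Spack build does not pull them in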
+    patch("fix-setup-requirements.patch", when="@:0.6")
+
+    depends_on("flex@2.6:")
+    depends_on("bison@3.0:")
+    depends_on("cmake@3.15:", type="build")
+    depends_on("python@3.9:", type=("build","run"))
+    depends_on("py-setuptools", type="build")
+    depends_on("py-scikit-build", type="build")
+    depends_on("py-jinja2@2.9.3:", type="build")
+    depends_on("py-pyyaml@3.13:", type="build")
+    depends_on("py-pytest")
+    depends_on("py-sympy@1.3:", type=("build","run"))
+    depends_on("py-find-libpython", type=("build","run"))
+    depends_on("py-importlib-metadata", when="^python@:3.8", type=("build","run"))
+    depends_on("py-importlib-resources", when="^python@:3.8", type=("build","run"))
+
+    def setup_build_environment(self, env):
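+        # version the built wheel to match the Spack spec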
+        env.set("NMODL_WHEEL_VERSION", self.version)
diff --git a/packages/py-nrn-patch/package.py b/packages/py-nrn-patch/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..aaf40362a939c8ee13d31ead0ca5a9ea5f08acaa
--- /dev/null
+++ b/packages/py-nrn-patch/package.py
@@ -0,0 +1,29 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyNrnPatch(PythonPackage):
+    """A patch to make the BSB interface with NEURON software"""
+
+    homepage = "https://github.com/dbbs-lab/patch"
+    pypi = "nrn-patch/nrn_patch-4.0.0.tar.gz"
+
+    license("GPL-3.0-only")
+    maintainers = ["helveg","filimarc","drodarie"]
+
+    version("4.0.0", sha256="0f95243798c7363826d7835023f7c9215577edd8d6695cc7caeb65a7fe8a54c0")
+
+    depends_on("py-flit-core@3.2:4.0", type="build")
+    depends_on("py-numpy@1.21:")
+    depends_on("py-errr@1.2:")
+    depends_on("py-click@8.0:")
+    depends_on("py-appdirs@1.0:")
+    depends_on("py-cookiecutter@2.0:")
+    depends_on("py-black@0.24:")
+    depends_on("py-toml@0.1:")
+    depends_on("py-nmodl-glia@4.0:")
+    depends_on("neuron@8:10")
diff --git a/packages/py-numba/package.py b/packages/py-numba/package.py
index 522abe3192f8a9f5d2bdc39609836432ac83aeeb..98b99ab1365cee9a42104e71ff44590a2cf4312d 100644
--- a/packages/py-numba/package.py
+++ b/packages/py-numba/package.py
@@ -1,5 +1,4 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
@@ -17,6 +16,9 @@ class PyNumba(PythonPackage):
 
     license("BSD-2-Clause")
 
+    version("0.61.0", sha256="888d2e89b8160899e19591467e8fdd4970e07606e1fbc248f239c89818d5f925")
+    version("0.60.0", sha256="5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16")
+    version("0.59.1", sha256="76f69132b96028d2774ed20415e8c528a34e3299a40581bae178f0994a2f370b")
     version("0.58.1", sha256="487ded0633efccd9ca3a46364b40006dbdaca0f95e99b8b83e778d1195ebcbaa")
     version("0.57.0", sha256="2af6d81067a5bdc13960c6d2519dbabbf4d5d597cf75d640c5aeaefd48c6420a")
     version("0.56.4", sha256="32d9fef412c81483d7efe0ceb6cf4d3310fde8b624a9cecca00f790573ac96ee")
@@ -24,22 +26,42 @@ class PyNumba(PythonPackage):
     version("0.55.2", sha256="e428d9e11d9ba592849ccc9f7a009003eb7d30612007e365afe743ce7118c6f4")
     version("0.55.1", sha256="03e9069a2666d1c84f93b00dbd716fb8fedde8bb2c6efafa2f04842a46442ea3")
     version("0.54.0", sha256="bad6bd98ab2e41c34aa9c80b8d9737e07d92a53df4f74d3ada1458b0b516ccff")
-    version("0.51.1", sha256="1e765b1a41535684bf3b0465c1d0a24dcbbff6af325270c8f4dad924c0940160")
-    version("0.50.1", sha256="89e81b51b880f9b18c82b7095beaccc6856fcf84ba29c4f0ced42e4e5748a3a7")
-    version("0.48.0", sha256="9d21bc77e67006b5723052840c88cc59248e079a907cc68f1a1a264e1eaba017")
+    version(
+        "0.51.1",
+        sha256="1e765b1a41535684bf3b0465c1d0a24dcbbff6af325270c8f4dad924c0940160",
+        deprecated=True,
+    )
+    version(
+        "0.50.1",
+        sha256="89e81b51b880f9b18c82b7095beaccc6856fcf84ba29c4f0ced42e4e5748a3a7",
+        deprecated=True,
+    )
+    version(
+        "0.48.0",
+        sha256="9d21bc77e67006b5723052840c88cc59248e079a907cc68f1a1a264e1eaba017",
+        deprecated=True,
+    )
 
     depends_on("c", type="build")  # generated
     depends_on("cxx", type="build")  # generated
 
     variant("tbb", default=False, description="Build with Intel Threading Building Blocks")
 
-    depends_on("python@3.8:3.11", when="@0.57:", type=("build", "run"))
+    # Note that the version bounds given in setup.py are exclusive on the upper end,
+    # i.e., the ranges are [min, max)
+    depends_on("python@3.10:3.13", when="@0.61:", type=("build", "run"))
+    depends_on("python@3.9:3.12", when="@0.59:", type=("build", "run"))
+    depends_on("python@3.8:3.11", when="@0.57:0.58", type=("build", "run"))
     depends_on("python@3.7:3.10", when="@0.55:0.56", type=("build", "run"))
     depends_on("python@3.7:3.9", when="@0.54", type=("build", "run"))
     depends_on("python@3.6:3.9", when="@0.53", type=("build", "run"))
     depends_on("python@3.6:3.8", when="@0.52", type=("build", "run"))
     depends_on("python@3.6:3.8", when="@0.48:0.51", type=("build", "run"))
-    depends_on("py-numpy@1.22:1.26", when="@0.58.1:", type=("build", "run"))
+    # begin EBRAINS (added): fix numpy dependency version range
+    depends_on("py-numpy@1.24:1.26,2.0:2.1", when="@0.61:", type=("build", "run"))
+    depends_on("py-numpy@1.22:1.26,2.0", when="@0.60", type=("build", "run"))
+    # end EBRAINS
+    depends_on("py-numpy@1.22:1.26", when="@0.58.1:0.59", type=("build", "run"))
     depends_on("py-numpy@1.21:1.25", when="@0.58.0", type=("build", "run"))
     depends_on("py-numpy@1.21:1.24", when="@0.57", type=("build", "run"))
     depends_on("py-numpy@1.18:1.23", when="@0.56.1:0.56.4", type=("build", "run"))
@@ -48,6 +70,9 @@ class PyNumba(PythonPackage):
     depends_on("py-numpy@1.17:1.20", when="@0.54", type=("build", "run"))
     depends_on("py-numpy@1.15:1.20", when="@0.48:0.53", type=("build", "run"))
     depends_on("py-setuptools", type=("build", "run"))
+    depends_on("py-llvmlite@0.44", when="@0.61", type=("build", "run"))
+    depends_on("py-llvmlite@0.43", when="@0.60", type=("build", "run"))
+    depends_on("py-llvmlite@0.42", when="@0.59", type=("build", "run"))
     depends_on("py-llvmlite@0.41", when="@0.58", type=("build", "run"))
     depends_on("py-llvmlite@0.40", when="@0.57", type=("build", "run"))
     depends_on("py-llvmlite@0.39", when="@0.56", type=("build", "run"))
@@ -64,10 +89,6 @@ class PyNumba(PythonPackage):
     # See https://reviews.llvm.org/D44140
     conflicts("^llvm@6.0.0")
 
-    # begin EBRAINS (added): numba>=0.57 requires at least version 14.0.0 of LLVM
-    conflicts("llvm@:13", when="@0.57.0:")
-    # end EBRAINS
-
     def setup_build_environment(self, env):
         if self.spec.satisfies("~tbb"):
             env.set("NUMBA_DISABLE_TBB", "yes")
diff --git a/packages/py-parameterized/package.py b/packages/py-parameterized/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..1ab006228c864a3961d2bac0f6a6c25e1ef21080
--- /dev/null
+++ b/packages/py-parameterized/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyParameterized(PythonPackage):
+    """Parameterized testing with any Python test framework."""
+
+    homepage = "https://github.com/wolever/parameterized"
+    pypi = "parameterized/parameterized-0.7.1.tar.gz"
+
+    # begin EBRAINS (added): new version
+    version("0.9.0", sha256="7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1")
+    # end EBRAINS
+    version("0.7.1", sha256="6a94dbea30c6abde99fd4c2f2042c1bf7f980e48908bf92ead62394f93cf57ed")
+
+    depends_on("py-setuptools", type="build")
diff --git a/packages/py-pycatch22/package.py b/packages/py-pycatch22/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..68c4be093beccec50bf5b4f096db249371d598f7
--- /dev/null
+++ b/packages/py-pycatch22/package.py
@@ -0,0 +1,16 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyPycatch22(PythonPackage):
+    """pycatch22: CAnonical Time-series CHaracteristics (catch22) in Python."""
+
+    homepage = "https://github.com/DynamicsAndNeuralSystems/pycatch22"
+    pypi = "pycatch22/pycatch22-0.4.5.tar.gz"
+
+    version("0.4.5", sha256="7ec844c659f22bedc66847ac866ef2bd86ffbbd4d8114b5e97f699f20a6f9f81")
+
+    depends_on("py-setuptools", type="build")
diff --git a/packages/py-pyknos/package.py b/packages/py-pyknos/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..e9449a6da430ac64be9ca1f1fe3128aa32843576
--- /dev/null
+++ b/packages/py-pyknos/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyPyknos(PythonPackage):
+    """Python package for conditional density estimation. It either wraps or implements diverse conditional density
+    estimators."""
+
+    homepage = "https://github.com/sbi-dev/pyknos"
+    pypi = "pyknos/pyknos-0.16.0.tar.gz"
+
+    version("0.16.0", "4e1db834d8a5fd847882a081937732fea6798668b72293ae052765e7bfc371c3")
+
+    depends_on("python@3.8:", type=("build", "run"))
+    depends_on("py-setuptools", type="build")
+    depends_on("py-matplotlib", type=("build", "run"))
+    depends_on("py-nflows@0.14", type=("build", "run"))
+    depends_on("py-numpy", type=("build", "run"))
+    depends_on("py-tensorboard", type=("build", "run"))
+    depends_on("py-torch", type=("build", "run"))
+    depends_on("py-tqdm", type=("build", "run"))
diff --git a/packages/py-pymc/package.py b/packages/py-pymc/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..b7dc999f41c53366467e19aa4cf65113b0dea464
--- /dev/null
+++ b/packages/py-pymc/package.py
@@ -0,0 +1,31 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyPymc(PythonPackage):
+    """PyMC (formerly PyMC3) is a Python package for Bayesian statistical modeling focusing on advanced Markov chain Monte
+    Carlo (MCMC) and variational inference (VI) algorithms."""
+
+    homepage = "https://github.com/pymc-devs/pymc"
+    pypi = "pymc/pymc-5.20.1.tar.gz"
+
+    version("5.20.1", "fb5f20d196a1b34eb193a855c611887b2e7b98d3af37d8573a33d112e2278eac")
+
+    depends_on("python@3.10:", type=("build", "run"))
+    depends_on("py-setuptools", type="build")
+    depends_on("py-versioneer", type="build")
+
+    depends_on("py-arviz@0.13:", type=("build", "run"))
+    depends_on("py-cachetools@4.2.1:", type=("build", "run"))
+    depends_on("py-cloudpickle", type=("build", "run"))
+    depends_on("py-numpy@1.25.0:", type=("build", "run"))
+    depends_on("py-pandas@0.24.0:", type=("build", "run"))
+    depends_on("py-pytensor@2.26.1:2.27.999", type=("build", "run"))
+    depends_on("py-rich@13.7.1:", type=("build", "run"))
+    depends_on("py-scipy@1.4.1:", type=("build", "run"))
+    depends_on("py-threadpoolctl@3.1.0:3.99", type=("build", "run"))
+    depends_on("py-typing-extensions@3.7.4:", type=("build", "run"))
diff --git a/packages/py-pyspike/cython3.patch b/packages/py-pyspike/cython3.patch
new file mode 100644
index 0000000000000000000000000000000000000000..e3e62eaf93f854b4adfd2d784b6ce57cf2706324
--- /dev/null
+++ b/packages/py-pyspike/cython3.patch
@@ -0,0 +1,24 @@
+diff --git a/setup.py b/setup.py
+index 297746d..b52cf8b 100644
+--- a/setup.py
++++ b/setup.py
+@@ -21,11 +21,14 @@ else:
+     use_cython = True
+ 
+ 
+-class numpy_include(object):
+-    """Defers import of numpy until install_requires is through"""
+-    def __str__(self):
+-        import numpy
+-        return numpy.get_include()
++class numpy_include(os.PathLike):
++     """Defers import of numpy until install_requires is through"""
++     def __str__(self):
++         import numpy
++         return numpy.get_include()
++
++     def __fspath__(self):
++         return str(self)
+ 
+ 
+ if os.path.isfile("pyspike/cython/cython_add.c") and \
diff --git a/packages/py-pyspike/package.py b/packages/py-pyspike/package.py
index 9fbe4128e3df05c14f307085bfe6d3cb5e5b32c4..ddf17e7e28afbf190937694dbdbee31b1f486edb 100644
--- a/packages/py-pyspike/package.py
+++ b/packages/py-pyspike/package.py
@@ -12,15 +12,17 @@ class PyPyspike(PythonPackage):
     """
 
     homepage = "https://github.com/mariomulansky/PySpike"
-    pypi = 'pyspike/pyspike-0.7.0.tar.gz'
+    url = 'https://github.com/mariomulansky/PySpike/archive/refs/tags/0.8.0.tar.gz'
 
     maintainers = ['dionperd', 'paulapopa', "ldomide"]
 
-    version('0.8.0', '76137b861ed531608aaf55af1a5ebf8a586e98653dab2467b4c1da7b2d9aa4e5')
-    version('0.7.0', 'a5d1c1472d3e7c3ac85c8a4ce069d750cca02acf18f185677b29c0a757e78efe')
+    version('0.8.0', '199d41af097e0b6e6583e22d4a9c3cedab51ceba4da2d940682ffefe8120a414')
+    version('0.7.0', '47031ba10a5726845982b62dcae970449ca50c4be9985a1ed0d2a021456bf25a')
+
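+    # make the deferred numpy include-dir lookup os.PathLike so it still works with Cython 3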
+    patch("cython3.patch", when="^py-cython@3:")
 
     # python_requires
-    depends_on('python@3.8:3.10', type=('build', 'run'))
+    depends_on('python@3.8:', type=('build', 'run'))
 
     # setup_requires
     depends_on('py-pip', type='build')
@@ -31,10 +33,10 @@ class PyPyspike(PythonPackage):
     depends_on('py-scipy', type=('build', 'run'))
     depends_on('py-matplotlib', type=('build', 'run'))
     depends_on('py-pytest', type=('build', 'run'))
-    depends_on('py-cython@:2', type=('build', 'run'))
+    depends_on('py-cython', type=('build', 'run'))
 
     # Test dependency
-    depends_on('py-pytest@:7.1', type='test')
+    depends_on('py-pytest', type='test')
     
     @run_after('install')
     @on_package_attributes(run_tests=True)
diff --git a/packages/py-pytensor/package.py b/packages/py-pytensor/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..4c04539334675f9185af0f5a10aadae5b8c07cdd
--- /dev/null
+++ b/packages/py-pytensor/package.py
@@ -0,0 +1,27 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+class PyPytensor(PythonPackage):
+    """Python library that allows one to define, optimize, and efficiently evaluate mathematical expressions involving
+    multi-dimensional arrays. It provides the computational backend for PyMC."""
+
+    homepage = "https://github.com/pymc-devs/pytensor"
+    pypi = "pytensor/pytensor-2.27.1.tar.gz"
+
+    version("2.27.1", "ed5075e1504e0e4c2322340111289820c5e1718b70187922777d560a8ef26f75")
+
+    depends_on("python@3.10:3.13", type=("build", "run"))
+    depends_on("py-setuptools@59.0.0:", type="build")
+    depends_on("py-cython", type="build")
+    depends_on("py-versioneer+toml", type="build")
+    depends_on("py-scipy@1.0:1", type=("build", "run"))
+    depends_on("py-numpy@1.17.0:", type=("build", "run"))
+    depends_on("py-filelock", type=("build", "run")) # TODO: it needs filelock>=3.15, but on pypi the latest one is 3.12.4
+    depends_on("py-etuples", type=("build", "run"))
+    depends_on("py-logical-unification", type=("build", "run"))
+    depends_on("py-mini-kanren", type=("build", "run"))
+    depends_on("py-cons", type=("build", "run"))
diff --git a/packages/py-ray/package.py b/packages/py-ray/package.py
index 82ba34c49b9369fdb272d674ace93d11f7f88390..f1bf4b7207ed9fe7a5a35f7e1649ca0b1cdcfb1e 100644
--- a/packages/py-ray/package.py
+++ b/packages/py-ray/package.py
@@ -15,7 +15,10 @@ class PyRay(PythonPackage):
 
     license("Apache-2.0")
 
-    # begin EBRAINS (added): ECM new node-js -> new react whatever -> new py-ray
+    # begin EBRAINS (added): new versions
+    # new version to allow building with newer bazel
+    version("2.30.0", sha256="854d549a77f0b0e810d1e9a18e7becf984279e2a0bfad5bed508f500ff770e34")
+    # ECM: new node-js -> new react whatever -> new py-ray
     version("2.4.0", sha256="b0110a84630b2f6d10cd13e8ac955875c3658373eb6cabcc77cf316de3c28066")
     # end EBRAINS
     version("2.0.1", sha256="b8b2f0a99d2ac4c001ff11c78b4521b217e2a02df95fb6270fd621412143f28b")
@@ -30,18 +33,20 @@ class PyRay(PythonPackage):
     conflicts("node-js@17:", when="@:2.0.1")
     # end EBRAINS
 
-    # begin EBRAINS (added): ElM add missing dependencies/constraints for added version 2.4.0
+    # begin EBRAINS (added): ElM add missing dependencies/constraints for added versions
+    depends_on("python@3.9:3.12", when="@2.30.0", type=("build", "run"))
     depends_on("python@3.6:3.11", when="@2.4.0", type=("build", "run"))
+    depends_on("bazel@6.5", when="@2.30.0", type="build")
     depends_on("bazel@5", when="@2.4.0", type="build")
-    depends_on("py-cython@0.29.32:", when="@2.4.0", type="build")
+    depends_on("py-cython@0.29.32:", when="@2.4.0:", type="build")
     depends_on("py-attrs", when="@2.4.0", type=("build", "run"))
-    depends_on("py-click@7.0:", when="@2.4.0", type=("build", "run"))
-    depends_on("py-grpcio@1.32:1.51.3", when="@2.4.0 ^python@:3.9", type=("build", "run"))
-    depends_on("py-grpcio@1.42:1.51.3", when="@2.4.0 ^python@3.10:", type=("build", "run"))
-    depends_on("py-protobuf@3.15.3:", when="@2.4.0", type=("build", "run"))
+    depends_on("py-click@7.0:", when="@2.4.0:", type=("build", "run"))
+    depends_on("py-grpcio@1.32:1.51.3", when="@2.4.0: ^python@:3.9", type=("build", "run"))
+    depends_on("py-grpcio@1.42:1.51.3", when="@2.4.0: ^python@3.10:", type=("build", "run"))
+    depends_on("py-protobuf@3.15.3:", when="@2.4.0:", type=("build", "run"))
     depends_on("py-frozenlist", when="@2.4.0", type=("build", "run"))
     depends_on("py-typing-extensions", when="@2.4.0 ^python@:3.7", type=("build", "run"))
-    depends_on("py-virtualenv@20.0.24:", when="@2.4.0", type=("build", "run"))
+    depends_on("py-virtualenv@20.0.24:", when="@2.4.0:", type=("build", "run"))
     # end EBRAINS
 
     depends_on("python@3.6:3.10", when="@2.0.1", type=("build", "run"))
@@ -113,7 +118,7 @@ class PyRay(PythonPackage):
     build_directory = "python"
 
     # begin EBRAINS (added): fix boost download url
-    patch("fix-url-boost.patch", when="@2.4.0:")
+    patch("fix-url-boost.patch", when="@2.4.0")
     # end EBRAINS
 
     def patch(self):
diff --git a/packages/py-sbi/package.py b/packages/py-sbi/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..a560cd93431e33171296d3d6019fb1c4eade6851
--- /dev/null
+++ b/packages/py-sbi/package.py
@@ -0,0 +1,46 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PySbi(PythonPackage):
+    """Python package for simulating biological systems."""
+
+    homepage = "https://sbi-dev.github.io/sbi/v0.23.3/"
+    url = "https://github.com/sbi-dev/sbi/archive/refs/tags/v0.23.3.tar.gz"
+
+    version("0.23.3", "b1ef102e47c90088f2adfff5ea88b18421e84c4641ff4dd4f68c1116c296ba81")
+
+    depends_on("python@3.9:", type=("build", "run"))
+    depends_on("py-setuptools", type="build")
+    depends_on("py-wheel", type="build")
+
+    depends_on("py-arviz", type=("build", "run"))
+    depends_on("py-joblib@1.3.0:", type=("build", "run"))
+    depends_on("py-matplotlib", type=("build", "run"))
+    depends_on("py-notebook@:6.4.12", type=("build", "run"))
+    depends_on("py-numpy@:1", type=("build", "run"))
+    depends_on("py-pillow", type=("build", "run"))
+    depends_on("py-pyknos@0.16.0:", type=("build", "run"))
+    depends_on("py-pyro-ppl@1.3.1:", type=("build", "run"))
+    depends_on("py-scikit-learn", type=("build", "run"))
+    depends_on("py-scipy", type=("build", "run"))
+    depends_on("py-tensorboard", type=("build", "run"))
+    depends_on("py-torch@1.13.0:", type=("build", "run"))
+    depends_on("py-tqdm", type=("build", "run"))
+    depends_on("py-pymc@5.0.0:", type=("build", "run"))
+    depends_on("py-zuko@1.2.0:", type=("build", "run"))
+
+    depends_on("py-pytest", type="test")
+    depends_on("py-torchtestcase", type="test")
+
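+    # skip the post-install import check for submodules that appear to need optional extras not installed here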
+    skip_modules = ["sbi.inference.snle", "sbi.inference.snpe", "sbi.inference.snre", "sbi.samplers.score", "sbi.samplers.vi"]
+
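+    # when tests are requested, run the upstream pytest suite, excluding slow and GPU-only tests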
+    @run_after("install")
+    @on_package_attributes(run_tests=True)
+    def install_test(self):
+        pytest = which("pytest")
+        pytest("-m", "not slow and not gpu")
diff --git a/packages/py-tblib/package.py b/packages/py-tblib/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..f9f8511b75c4caac919ee4c3825c3b4a2b05e73a
--- /dev/null
+++ b/packages/py-tblib/package.py
@@ -0,0 +1,25 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class PyTblib(PythonPackage):
+    """Traceback fiddling library. Allows you to pickle tracebacks."""
+
+    homepage = "https://github.com/ionelmc/python-tblib"
+    pypi = "tblib/tblib-1.6.0.tar.gz"
+
+    license("BSD-2-Clause")
+
+    # begin EBRAINS (added): new version
+    version("2.0.0", sha256="a6df30f272c08bf8be66e0775fad862005d950a6b8449b94f7c788731d70ecd7")
+    # end EBRAINS
+    version("1.6.0", sha256="229bee3754cb5d98b4837dd5c4405e80cfab57cb9f93220410ad367f8b352344")
+    version("1.4.0", sha256="bd1ad564564a158ff62c290687f3db446038f9ac11a0bf6892712e3601af3bcd")
+
+    depends_on("python@2.7:2.8,3.5:", type=("build", "run"))
+    depends_on("py-setuptools", type="build")
diff --git a/packages/py-torch/detect_omp_of_fujitsu_compiler.patch b/packages/py-torch/detect_omp_of_fujitsu_compiler.patch
new file mode 100644
index 0000000000000000000000000000000000000000..519d66869d578ea4a59c4e7f626569baade6837a
--- /dev/null
+++ b/packages/py-torch/detect_omp_of_fujitsu_compiler.patch
@@ -0,0 +1,20 @@
+--- pytorch/cmake/Modules/FindOpenMP.cmake.org	2020-05-26 17:43:53.000000000 +0900
++++ pytorch/cmake/Modules/FindOpenMP.cmake	2020-05-26 17:46:37.000000000 +0900
+@@ -84,7 +84,7 @@
+     unset(OpenMP_FLAG_CANDIDATES)
+ 
+     set(OMP_FLAG_GNU "-fopenmp")
+-    set(OMP_FLAG_Clang "-fopenmp=libomp" "-fopenmp=libiomp5" "-fopenmp")
++    set(OMP_FLAG_Clang "-fopenmp" "-fopenmp=libomp" "-fopenmp=libiomp5")
+ 
+     # AppleClang may need a header file, search for omp.h with hints to brew
+     # default include dir
+@@ -245,7 +245,7 @@
+         set(OpenMP_libomp_LIBRARY "${MKL_OPENMP_LIBRARY}" CACHE STRING "libomp location for OpenMP")
+       else()
+         find_library(OpenMP_libomp_LIBRARY
+-          NAMES omp gomp iomp5
++          NAMES fjomp omp gomp iomp5
+           HINTS ${CMAKE_${LANG}_IMPLICIT_LINK_DIRECTORIES}
+           DOC "libomp location for OpenMP"
+         )
diff --git a/packages/py-torch/fj-ssl2_1.10.patch b/packages/py-torch/fj-ssl2_1.10.patch
new file mode 100644
index 0000000000000000000000000000000000000000..bcd2c37804c1ba1ee62d70907c8705a23607a5aa
--- /dev/null
+++ b/packages/py-torch/fj-ssl2_1.10.patch
@@ -0,0 +1,76 @@
+diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake
+index ca560288a4..f5a29ecf43 100644
+--- a/cmake/Dependencies.cmake
++++ b/cmake/Dependencies.cmake
+@@ -130,7 +130,7 @@ else()
+   set(AT_MKLDNN_ENABLED 0)
+   set(AT_MKL_ENABLED 0)
+ endif()
+-set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;vecLib")
++set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;SSL2;vecLib")
+ message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS})
+ 
+ if(BLAS STREQUAL "Eigen")
+@@ -185,6 +185,20 @@ elseif(BLAS STREQUAL "vecLib")
+   set(BLAS_INFO "veclib")
+   set(BLAS_FOUND 1)
+   set(BLAS_LIBRARIES ${vecLib_LINKER_LIBS})
++elseif(BLAS STREQUAL "SSL2")
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    message(STATUS "SSL2 Selected BLAS library")
++    list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so")
++    set(SSL2_FOUND ON)
++    message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran")
++    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    set(WITH_BLAS "ssl2")
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
+ elseif(BLAS STREQUAL "Generic")
+   # On Debian family, the CBLAS ABIs have been merged into libblas.so
+   find_library(BLAS_LIBRARIES blas)
+@@ -201,7 +215,7 @@ if(NOT INTERN_BUILD_MOBILE)
+   set(AT_MKL_ENABLED 0)
+   set(AT_MKL_MT 0)
+   set(USE_BLAS 1)
+-  if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR MKL_FOUND OR OpenBLAS_FOUND OR VECLIB_FOUND))
++  if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR MKL_FOUND OR OpenBLAS_FOUND OR SSL2_FOUND OR VECLIB_FOUND))
+     message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library")
+     find_package(BLAS)
+     if(NOT BLAS_FOUND)
+diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake
+index 47c80b45f6..efd4a87d06 100644
+--- a/cmake/Modules/FindBLAS.cmake
++++ b/cmake/Modules/FindBLAS.cmake
+@@ -276,6 +276,28 @@ if((NOT BLAS_LIBRARIES)
+   endif()
+ endif()
+ 
++# BLAS in SSL2 library?
++if((NOT BLAS_LIBRARIES)
++    AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2")))
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    check_fortran_libraries(
++    BLAS_LIBRARIES
++    BLAS
++    sgemm
++    "-SSL2;--linkfortran"
++    "fjlapackexsve")
++    if (BLAS_LIBRARIES)
++      set(BLAS_INFO "ssl2")
++      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    endif (BLAS_LIBRARIES)
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
++endif()
++
+ # Generic BLAS library?
+ if((NOT BLAS_LIBRARIES)
+     AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic")))
diff --git a/packages/py-torch/fj-ssl2_1.11.patch b/packages/py-torch/fj-ssl2_1.11.patch
new file mode 100644
index 0000000000000000000000000000000000000000..af41e5bb93931ca3c0f12a55733407a52fb4af31
--- /dev/null
+++ b/packages/py-torch/fj-ssl2_1.11.patch
@@ -0,0 +1,76 @@
+diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake
+index 557ab649a4..56d1699736 100644
+--- a/cmake/Dependencies.cmake
++++ b/cmake/Dependencies.cmake
+@@ -174,7 +174,7 @@ else()
+   set(AT_MKLDNN_ENABLED 0)
+   set(AT_MKL_ENABLED 0)
+ endif()
+-set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;vecLib")
++set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;SSL2;vecLib")
+ message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS})
+ 
+ if(BLAS STREQUAL "Eigen")
+@@ -229,6 +229,20 @@ elseif(BLAS STREQUAL "vecLib")
+   set(BLAS_INFO "veclib")
+   set(BLAS_FOUND 1)
+   set(BLAS_LIBRARIES ${vecLib_LINKER_LIBS})
++elseif(BLAS STREQUAL "SSL2")
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    message(STATUS "SSL2 Selected BLAS library")
++    list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so")
++    set(SSL2_FOUND ON)
++    message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran")
++    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    set(WITH_BLAS "ssl2")
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
+ elseif(BLAS STREQUAL "FlexiBLAS")
+   find_package(FlexiBLAS REQUIRED)
+   include_directories(SYSTEM ${FlexiBLAS_INCLUDE_DIR})
+@@ -250,7 +264,7 @@ if(NOT INTERN_BUILD_MOBILE)
+   set(AT_MKL_SEQUENTIAL 0)
+   set(AT_MKL_MT 0)
+   set(USE_BLAS 1)
+-  if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR MKL_FOUND OR OpenBLAS_FOUND OR VECLIB_FOUND OR FlexiBLAS_FOUND))
++  if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR MKL_FOUND OR OpenBLAS_FOUND OR SSL2_FOUND OR VECLIB_FOUND OR FlexiBLAS_FOUND))
+     message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library")
+     find_package(BLAS)
+     if(NOT BLAS_FOUND)
+diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake
+index 94942d520f..ae5b8db963 100644
+--- a/cmake/Modules/FindBLAS.cmake
++++ b/cmake/Modules/FindBLAS.cmake
+@@ -289,6 +289,28 @@ if((NOT BLAS_LIBRARIES)
+   endif()
+ endif()
+ 
++# BLAS in SSL2 library?
++if((NOT BLAS_LIBRARIES)
++    AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2")))
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    check_fortran_libraries(
++    BLAS_LIBRARIES
++    BLAS
++    sgemm
++    "-SSL2;--linkfortran"
++    "fjlapackexsve")
++    if (BLAS_LIBRARIES)
++      set(BLAS_INFO "ssl2")
++      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    endif (BLAS_LIBRARIES)
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
++endif()
++
+ # Generic BLAS library?
+ if((NOT BLAS_LIBRARIES)
+     AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic")))
diff --git a/packages/py-torch/fj-ssl2_1.3-1.5.patch b/packages/py-torch/fj-ssl2_1.3-1.5.patch
new file mode 100644
index 0000000000000000000000000000000000000000..0ea87500b0a60e35450c90315c5466d5f6488073
--- /dev/null
+++ b/packages/py-torch/fj-ssl2_1.3-1.5.patch
@@ -0,0 +1,76 @@
+diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake
+index a8e9769536..f0f91304c2 100644
+--- a/cmake/Dependencies.cmake
++++ b/cmake/Dependencies.cmake
+@@ -107,7 +107,7 @@ else()
+   set(AT_MKLDNN_ENABLED 0)
+   set(AT_MKL_ENABLED 0)
+ endif()
+-set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;FLAME")
++set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;SSL2;FLAME")
+ message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS})
+ 
+ if(BLAS STREQUAL "Eigen")
+@@ -147,6 +147,20 @@ elseif(BLAS STREQUAL "vecLib")
+   find_package(vecLib REQUIRED)
+   include_directories(SYSTEM ${vecLib_INCLUDE_DIR})
+   list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS ${vecLib_LINKER_LIBS})
++elseif(BLAS STREQUAL "SSL2")
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    message(STATUS "SSL2 Selected BLAS library")
++    list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so")
++    set(SSL2_FOUND ON)
++    message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran")
++    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    set(WITH_BLAS "ssl2")
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
+ else()
+   message(FATAL_ERROR "Unrecognized BLAS option: " ${BLAS})
+ endif()
+@@ -156,7 +170,7 @@ if (NOT INTERN_BUILD_MOBILE)
+   set(AT_MKL_ENABLED 0)
+   set(AT_MKL_MT 0)
+   set(USE_BLAS 1)
+-  if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR VECLIB_FOUND))
++  if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR VECLIB_FOUND OR SSL2_FOUND))
+     message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library")
+     find_package(BLAS)
+     if (NOT BLAS_FOUND)
+diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake
+index e93e98a609..d43a6c40bd 100644
+--- a/cmake/Modules/FindBLAS.cmake
++++ b/cmake/Modules/FindBLAS.cmake
+@@ -239,6 +239,28 @@ if((NOT BLAS_LIBRARIES)
+   endif (BLAS_LIBRARIES)
+ endif()
+ 
++# BLAS in SSL2 library?
++if((NOT BLAS_LIBRARIES)
++    AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2")))
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    check_fortran_libraries(
++    BLAS_LIBRARIES
++    BLAS
++    sgemm
++    "-SSL2;--linkfortran"
++    "fjlapackexsve")
++    if (BLAS_LIBRARIES)
++      set(BLAS_INFO "ssl2")
++      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    endif (BLAS_LIBRARIES)
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
++endif()
++
+ # Generic BLAS library?
+ if((NOT BLAS_LIBRARIES)
+     AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic")))
diff --git a/packages/py-torch/fj-ssl2_1.6-1.7.patch b/packages/py-torch/fj-ssl2_1.6-1.7.patch
new file mode 100644
index 0000000000000000000000000000000000000000..423af3f21299395696d27703e531c504c19b50f4
--- /dev/null
+++ b/packages/py-torch/fj-ssl2_1.6-1.7.patch
@@ -0,0 +1,76 @@
+diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake
+index 36e1ab7682..0f02f51c47 100644
+--- a/cmake/Dependencies.cmake
++++ b/cmake/Dependencies.cmake
+@@ -114,7 +114,7 @@ else()
+   set(AT_MKLDNN_ENABLED 0)
+   set(AT_MKL_ENABLED 0)
+ endif()
+-set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;FLAME;Generic")
++set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;SSL2;FLAME;Generic")
+ message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS})
+ 
+ if(BLAS STREQUAL "Eigen")
+@@ -154,6 +154,20 @@ elseif(BLAS STREQUAL "vecLib")
+   find_package(vecLib REQUIRED)
+   include_directories(SYSTEM ${vecLib_INCLUDE_DIR})
+   list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS ${vecLib_LINKER_LIBS})
++elseif(BLAS STREQUAL "SSL2")
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    message(STATUS "SSL2 Selected BLAS library")
++    list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so")
++    set(SSL2_FOUND ON)
++    message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran")
++    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    set(WITH_BLAS "ssl2")
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
+ elseif(BLAS STREQUAL "Generic")
+   # On Debian family, the CBLAS ABIs have been merged into libblas.so
+   find_library(BLAS_LIBRARIES blas)
+@@ -168,7 +182,7 @@ if(NOT INTERN_BUILD_MOBILE)
+   set(AT_MKL_ENABLED 0)
+   set(AT_MKL_MT 0)
+   set(USE_BLAS 1)
+-  if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR VECLIB_FOUND OR GENERIC_BLAS_FOUND))
++  if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR VECLIB_FOUND OR SSL2_FOUND OR GENERIC_BLAS_FOUND))
+     message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library")
+     find_package(BLAS)
+     if(NOT BLAS_FOUND)
+diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake
+index e93e98a609..d43a6c40bd 100644
+--- a/cmake/Modules/FindBLAS.cmake
++++ b/cmake/Modules/FindBLAS.cmake
+@@ -239,6 +239,28 @@ if((NOT BLAS_LIBRARIES)
+   endif (BLAS_LIBRARIES)
+ endif()
+ 
++# BLAS in SSL2 library?
++if((NOT BLAS_LIBRARIES)
++    AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2")))
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    check_fortran_libraries(
++    BLAS_LIBRARIES
++    BLAS
++    sgemm
++    "-SSL2;--linkfortran"
++    "fjlapackexsve")
++    if (BLAS_LIBRARIES)
++      set(BLAS_INFO "ssl2")
++      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    endif (BLAS_LIBRARIES)
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
++endif()
++
+ # Generic BLAS library?
+ if((NOT BLAS_LIBRARIES)
+     AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic")))
diff --git a/packages/py-torch/fj-ssl2_1.8.patch b/packages/py-torch/fj-ssl2_1.8.patch
new file mode 100644
index 0000000000000000000000000000000000000000..461c1a2976b7a118173182452075e809f7f52858
--- /dev/null
+++ b/packages/py-torch/fj-ssl2_1.8.patch
@@ -0,0 +1,76 @@
+diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake
+index 06464e799a..7f50bd8fa0 100644
+--- a/cmake/Dependencies.cmake
++++ b/cmake/Dependencies.cmake
+@@ -118,7 +118,7 @@ else()
+   set(AT_MKLDNN_ENABLED 0)
+   set(AT_MKL_ENABLED 0)
+ endif()
+-set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;FLAME;Generic")
++set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;SSL2;FLAME;Generic")
+ message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS})
+ 
+ if(BLAS STREQUAL "Eigen")
+@@ -157,6 +157,20 @@ elseif(BLAS STREQUAL "vecLib")
+   find_package(vecLib REQUIRED)
+   include_directories(SYSTEM ${vecLib_INCLUDE_DIR})
+   list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS ${vecLib_LINKER_LIBS})
++elseif(BLAS STREQUAL "SSL2")
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    message(STATUS "SSL2 Selected BLAS library")
++    list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so")
++    set(SSL2_FOUND ON)
++    message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran")
++    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    set(WITH_BLAS "ssl2")
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
+ elseif(BLAS STREQUAL "Generic")
+   # On Debian family, the CBLAS ABIs have been merged into libblas.so
+   find_library(BLAS_LIBRARIES blas)
+@@ -171,7 +185,7 @@ if(NOT INTERN_BUILD_MOBILE)
+   set(AT_MKL_ENABLED 0)
+   set(AT_MKL_MT 0)
+   set(USE_BLAS 1)
+-  if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR VECLIB_FOUND OR GENERIC_BLAS_FOUND))
++  if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR VECLIB_FOUND OR SSL2_FOUND OR GENERIC_BLAS_FOUND))
+     message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library")
+     find_package(BLAS)
+     if(NOT BLAS_FOUND)
+diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake
+index e8f5d7c950..29219e057f 100644
+--- a/cmake/Modules/FindBLAS.cmake
++++ b/cmake/Modules/FindBLAS.cmake
+@@ -257,6 +257,28 @@ if((NOT BLAS_LIBRARIES)
+   endif()
+ endif()
+ 
++# BLAS in SSL2 library?
++if((NOT BLAS_LIBRARIES)
++    AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2")))
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    check_fortran_libraries(
++    BLAS_LIBRARIES
++    BLAS
++    sgemm
++    "-SSL2;--linkfortran"
++    "fjlapackexsve")
++    if (BLAS_LIBRARIES)
++      set(BLAS_INFO "ssl2")
++      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    endif (BLAS_LIBRARIES)
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
++endif()
++
+ # Generic BLAS library?
+ if((NOT BLAS_LIBRARIES)
+     AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic")))
diff --git a/packages/py-torch/fj-ssl2_1.9.patch b/packages/py-torch/fj-ssl2_1.9.patch
new file mode 100644
index 0000000000000000000000000000000000000000..0febb57586fc1297ac21c1707e1c9cfc93da819d
--- /dev/null
+++ b/packages/py-torch/fj-ssl2_1.9.patch
@@ -0,0 +1,76 @@
+diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake
+index 5d57b9ca78..a74fe73b9f 100644
+--- a/cmake/Dependencies.cmake
++++ b/cmake/Dependencies.cmake
+@@ -118,7 +118,7 @@ else()
+   set(AT_MKLDNN_ENABLED 0)
+   set(AT_MKL_ENABLED 0)
+ endif()
+-set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;vecLib")
++set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;SSL2;vecLib")
+ message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS})
+ 
+ if(BLAS STREQUAL "Eigen")
+@@ -161,6 +161,20 @@ elseif(BLAS STREQUAL "vecLib")
+   find_package(vecLib REQUIRED)
+   include_directories(SYSTEM ${vecLib_INCLUDE_DIR})
+   list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS ${vecLib_LINKER_LIBS})
++elseif(BLAS STREQUAL "SSL2")
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    message(STATUS "SSL2 Selected BLAS library")
++    list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so")
++    set(SSL2_FOUND ON)
++    message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran")
++    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    set(WITH_BLAS "ssl2")
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
+ elseif(BLAS STREQUAL "Generic")
+   # On Debian family, the CBLAS ABIs have been merged into libblas.so
+   find_library(BLAS_LIBRARIES blas)
+@@ -175,7 +189,7 @@ if(NOT INTERN_BUILD_MOBILE)
+   set(AT_MKL_ENABLED 0)
+   set(AT_MKL_MT 0)
+   set(USE_BLAS 1)
+-  if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR MKL_FOUND OR OpenBLAS_FOUND OR VECLIB_FOUND))
++  if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR MKL_FOUND OR OpenBLAS_FOUND OR SSL2_FOUND OR VECLIB_FOUND))
+     message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library")
+     find_package(BLAS)
+     if(NOT BLAS_FOUND)
+diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake
+index eefd6d475a..92ad75d32e 100644
+--- a/cmake/Modules/FindBLAS.cmake
++++ b/cmake/Modules/FindBLAS.cmake
+@@ -276,6 +276,28 @@ if((NOT BLAS_LIBRARIES)
+   endif()
+ endif()
+ 
++# BLAS in SSL2 library?
++if((NOT BLAS_LIBRARIES)
++    AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2")))
++  if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$"
++      AND CMAKE_C_COMPILER MATCHES ".*/fcc$")
++    check_fortran_libraries(
++    BLAS_LIBRARIES
++    BLAS
++    sgemm
++    "-SSL2;--linkfortran"
++    "fjlapackexsve")
++    if (BLAS_LIBRARIES)
++      set(BLAS_INFO "ssl2")
++      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran")
++    endif (BLAS_LIBRARIES)
++  else()
++    message(STATUS "Not built using fcc and FCC.")
++    message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
++    message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}")
++  endif()
++endif()
++
+ # Generic BLAS library?
+ if((NOT BLAS_LIBRARIES)
+     AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic")))
diff --git a/packages/py-torch/package.py b/packages/py-torch/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..b274723b83414cbe1f3e13714d6441f16b33d64d
--- /dev/null
+++ b/packages/py-torch/package.py
@@ -0,0 +1,715 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import os
+import sys
+
+from spack.operating_systems.mac_os import macos_version
+from spack.package import *
+
+
+class PyTorch(PythonPackage, CudaPackage, ROCmPackage):
+    """Tensors and Dynamic neural networks in Python with strong GPU acceleration."""
+
+    homepage = "https://pytorch.org/"
+    git = "https://github.com/pytorch/pytorch.git"
+    submodules = True
+
+    # Exact set of modules is version- and variant-specific, just attempt to import the
+    # core libraries to ensure that the package was successfully installed.
+    import_modules = ["torch", "torch.autograd", "torch.nn", "torch.utils"]
+
+    license("BSD-3-Clause")
+    maintainers("adamjstewart")
+
+    version("main", branch="main")
+    version("2.5.1", tag="v2.5.1", commit="a8d6afb511a69687bbb2b7e88a3cf67917e1697e")
+    version("2.5.0", tag="v2.5.0", commit="32f585d9346e316e554c8d9bf7548af9f62141fc")
+    version("2.4.1", tag="v2.4.1", commit="ee1b6804381c57161c477caa380a840a84167676")
+    version("2.4.0", tag="v2.4.0", commit="d990dada86a8ad94882b5c23e859b88c0c255bda")
+    version("2.3.1", tag="v2.3.1", commit="63d5e9221bedd1546b7d364b5ce4171547db12a9")
+    version("2.3.0", tag="v2.3.0", commit="97ff6cfd9c86c5c09d7ce775ab64ec5c99230f5d")
+    version("2.2.2", tag="v2.2.2", commit="39901f229520a5256505ec24782f716ee7ddc843")
+    version("2.2.1", tag="v2.2.1", commit="6c8c5ad5eaf47a62fafbb4a2747198cbffbf1ff0")
+    version("2.2.0", tag="v2.2.0", commit="8ac9b20d4b090c213799e81acf48a55ea8d437d6")
+    version("2.1.2", tag="v2.1.2", commit="a8e7c98cb95ff97bb30a728c6b2a1ce6bff946eb")
+    version("2.1.1", tag="v2.1.1", commit="4c55dc50355d5e923642c59ad2a23d6ad54711e7")
+    version("2.1.0", tag="v2.1.0", commit="7bcf7da3a268b435777fe87c7794c382f444e86d")
+    version("2.0.1", tag="v2.0.1", commit="e9ebda29d87ce0916ab08c06ab26fd3766a870e5")
+    version("2.0.0", tag="v2.0.0", commit="c263bd43e8e8502d4726643bc6fd046f0130ac0e")
+    version("1.13.1", tag="v1.13.1", commit="49444c3e546bf240bed24a101e747422d1f8a0ee")
+    version("1.13.0", tag="v1.13.0", commit="7c98e70d44abc7a1aead68b6ea6c8adc8c554db5")
+    version("1.12.1", tag="v1.12.1", commit="664058fa83f1d8eede5d66418abff6e20bd76ca8")
+    version("1.12.0", tag="v1.12.0", commit="67ece03c8cd632cce9523cd96efde6f2d1cc8121")
+    version("1.11.0", tag="v1.11.0", commit="bc2c6edaf163b1a1330e37a6e34caf8c553e4755")
+    version("1.10.2", tag="v1.10.2", commit="71f889c7d265b9636b93ede9d651c0a9c4bee191")
+    version("1.10.1", tag="v1.10.1", commit="302ee7bfb604ebef384602c56e3853efed262030")
+    version("1.10.0", tag="v1.10.0", commit="36449ea93134574c2a22b87baad3de0bf8d64d42")
+    version("1.9.1", tag="v1.9.1", commit="dfbd030854359207cb3040b864614affeace11ce")
+    version("1.9.0", tag="v1.9.0", commit="d69c22dd61a2f006dcfe1e3ea8468a3ecaf931aa")
+    version("1.8.2", tag="v1.8.2", commit="e0495a7aa104471d95dc85a1b8f6473fbcc427a8")
+    version("1.8.1", tag="v1.8.1", commit="56b43f4fec1f76953f15a627694d4bba34588969")
+    version("1.8.0", tag="v1.8.0", commit="37c1f4a7fef115d719104e871d0cf39434aa9d56")
+    version("1.7.1", tag="v1.7.1", commit="57bffc3a8e4fee0cce31e1ff1f662ccf7b16db57")
+    version("1.7.0", tag="v1.7.0", commit="e85d494707b835c12165976b8442af54b9afcb26")
+    version("1.6.0", tag="v1.6.0", commit="b31f58de6fa8bbda5353b3c77d9be4914399724d")
+    version("1.5.1", tag="v1.5.1", commit="3c31d73c875d9a4a6ea8a843b9a0d1b19fbe36f3")
+    version("1.5.0", tag="v1.5.0", commit="4ff3872a2099993bf7e8c588f7182f3df777205b")
+    version("1.4.1", tag="v1.4.1", commit="74044638f755cd8667bedc73da4dbda4aa64c948")
+
+    depends_on("c", type="build")
+    depends_on("cxx", type="build")
+
+    is_darwin = sys.platform == "darwin"
+
+    # All options are defined in CMakeLists.txt.
+    # Some are listed in setup.py, but not all.
+    variant("debug", default=False, description="Build with debugging support")
+    variant("caffe2", default=False, description="Build Caffe2", when="@1.7:")
+    variant("test", default=False, description="Build C++ test binaries")
+    variant("cuda", default=not is_darwin, description="Use CUDA")
+    variant("rocm", default=False, description="Use ROCm")
+    variant("cudnn", default=not is_darwin, description="Use cuDNN", when="+cuda")
+    variant("fbgemm", default=True, description="Use FBGEMM (quantized 8-bit server operators)")
+    variant("kineto", default=True, description="Use Kineto profiling library", when="@1.8:")
+    variant("magma", default=not is_darwin, description="Use MAGMA", when="+cuda")
+    variant("metal", default=is_darwin, description="Use Metal for Caffe2 iOS build")
+    variant(
+        "mps",
+        default=is_darwin and macos_version() >= Version("12.3"),
+        description="Use MPS for macOS build (requires full Xcode suite)",
+        when="@1.12: platform=darwin",
+    )
+    variant("nccl", default=True, description="Use NCCL", when="+cuda platform=linux")
+    variant("nccl", default=True, description="Use NCCL", when="+rocm platform=linux")
+    # Requires AVX2: https://discuss.pytorch.org/t/107518
+    variant("nnpack", default=True, description="Use NNPACK", when="target=x86_64_v3:")
+    variant("numa", default=True, description="Use NUMA", when="platform=linux")
+    variant("numpy", default=True, description="Use NumPy")
+    variant("openmp", default=True, description="Use OpenMP for parallel code")
+    variant("qnnpack", default=True, description="Use QNNPACK (quantized 8-bit operators)")
+    variant("valgrind", default=True, description="Use Valgrind", when="@1.8: platform=linux")
+    variant("xnnpack", default=True, description="Use XNNPACK", when="@1.5:")
+    variant("mkldnn", default=True, description="Use MKLDNN")
+    variant("distributed", default=True, description="Use distributed")
+    variant("mpi", default=True, description="Use MPI for Caffe2", when="+distributed")
+    variant("ucc", default=False, description="Use UCC", when="@1.13: +distributed")
+    variant("gloo", default=True, description="Use Gloo", when="+distributed")
+    variant("tensorpipe", default=True, description="Use TensorPipe", when="@1.6: +distributed")
+    variant("onnx_ml", default=True, description="Enable traditional ONNX ML API", when="@1.5:")
+    variant(
+        "breakpad",
+        default=True,
+        description="Enable breakpad crash dump library",
+        when="@1.10:1.11",
+    )
+    # py-torch has strict dependencies on old protobuf/py-protobuf versions that
+    # cause problems with other packages that require newer versions of protobuf
+    # and py-protobuf --> provide an option to use the internal/vendored protobuf.
+    variant("custom-protobuf", default=False, description="Use vendored protobuf")
+
+    conflicts("+cuda+rocm")
+    conflicts("+tensorpipe", when="+rocm ^hip@:5.1", msg="TensorPipe not supported until ROCm 5.2")
+    conflicts("+breakpad", when="target=ppc64:")
+    conflicts("+breakpad", when="target=ppc64le:")
+
+    # https://github.com/pytorch/pytorch/issues/77811
+    conflicts("+qnnpack", when="platform=darwin target=aarch64:")
+
+    # https://github.com/pytorch/pytorch/issues/97397
+    conflicts(
+        "~tensorpipe",
+        when="@1.8: +distributed",
+        msg="TensorPipe must be enabled with +distributed",
+    )
+
+    # https://github.com/pytorch/pytorch/issues/100991
+    conflicts("%apple-clang@14:", when="@:1")
+
+    conflicts(
+        "cuda_arch=none",
+        when="+cuda",
+        msg="Must specify CUDA compute capabilities of your GPU, see "
+        "https://developer.nvidia.com/cuda-gpus",
+    )
+
+    # Required dependencies
+    # Based on PyPI wheel availability
+    with default_args(type=("build", "link", "run")):
+        depends_on("python@3.9:3.13", when="@2.5:")
+        depends_on("python@3.8:3.12", when="@2.2:2.4")
+        depends_on("python@3.8:3.11", when="@2.0:2.1")
+        depends_on("python@:3.10", when="@1.11:1")
+        depends_on("python@:3.9", when="@1.7.1:1.10")
+        depends_on("python@:3.8", when="@1.4:1.7.0")
+
+    # CMakelists.txt
+    with default_args(type="build"):
+        depends_on("cmake@3.18:", when="@2:")
+        depends_on("cmake@3.13:", when="@1.11:")
+        depends_on("cmake@3.10:", when="@1.10:")
+        depends_on("cmake@3.5:")
+        depends_on("ninja@1.5:")
+
+    with default_args(type=("build", "run")):
+        # setup.py
+        depends_on("py-filelock", when="@2:")
+        depends_on("py-typing-extensions@4.8:", when="@2.2:")
+        depends_on("py-typing-extensions@3.6.2.1:", when="@1.7:")
+        depends_on("py-sympy", when="@2:")
+        # begin EBRAINS (added): fix sympy version for py-torch@2.5: (from setup.py)
+        depends_on("py-sympy@1.13.1", when="@2.5:^python@3.9:")
+        # end EBRAINS
+        depends_on("py-networkx", when="@2:")
+        depends_on("py-jinja2", when="@2:")
+        depends_on("py-fsspec", when="@2.1:")
+
+        # pyproject.toml
+        depends_on("py-setuptools")
+        depends_on("py-astunparse", when="@1.13:")
+        depends_on("py-numpy@1.16.6:")
+        # https://github.com/pytorch/pytorch/issues/107302
+        depends_on("py-numpy@:1", when="@:2.2")
+        depends_on("py-pyyaml")
+        depends_on("py-requests", when="@1.13:")
+
+    # Undocumented dependencies
+    depends_on("py-tqdm", type="run")
+    depends_on("blas")
+    depends_on("lapack")
+
+    # Third party dependencies
+    depends_on("fp16@2020-05-14", when="@1.6:")
+    depends_on("fxdiv@2020-04-17", when="@1.6:")
+    # https://github.com/pytorch/pytorch/issues/60332
+    # depends_on("xnnpack@2024-02-29", when="@2.3:+xnnpack")
+    # depends_on("xnnpack@2022-12-21", when="@2.0:2.2+xnnpack")
+    # depends_on("xnnpack@2022-02-16", when="@1.12:1+xnnpack")
+    # depends_on("xnnpack@2021-06-21", when="@1.10:1.11+xnnpack")
+    # depends_on("xnnpack@2021-02-22", when="@1.8:1.9+xnnpack")
+    # depends_on("xnnpack@2020-03-23", when="@1.6:1.7+xnnpack")
+    depends_on("benchmark", when="@1.6:+test")
+    depends_on("cpuinfo@2024-09-06", when="@2.5.1:")
+    depends_on("cpuinfo@2024-08-30", when="@2.5.0")
+    depends_on("cpuinfo@2023-11-04", when="@2.3:2.4")
+    depends_on("cpuinfo@2023-01-13", when="@2.1:2.2")
+    depends_on("cpuinfo@2022-08-19", when="@1.13:2.0")
+    depends_on("cpuinfo@2020-12-17", when="@1.8:1.12")
+    depends_on("cpuinfo@2020-06-11", when="@1.6:1.7")
+    depends_on("eigen")
+    depends_on("gloo@2023-12-03", when="@2.3:+gloo")
+    depends_on("gloo@2023-05-19", when="@2.1:2.2+gloo")
+    depends_on("gloo@2023-01-17", when="@2.0+gloo")
+    depends_on("gloo@2022-05-18", when="@1.13:1+gloo")
+    depends_on("gloo@2021-05-21", when="@1.10:1.12+gloo")
+    depends_on("gloo@2021-05-04", when="@1.9+gloo")
+    depends_on("gloo@2020-09-18", when="@1.7:1.8+gloo")
+    depends_on("gloo@2020-03-17", when="@1.6+gloo")
+    depends_on("gloo+cuda", when="@1.6:+gloo+cuda")
+    depends_on("gloo+libuv", when="@1.6: platform=darwin")
+    depends_on("nccl", when="+nccl+cuda")
+    # https://github.com/pytorch/pytorch/issues/60331
+    # depends_on("onnx@1.16.0", when="@2.3:+onnx_ml")
+    # depends_on("onnx@1.15.0", when="@2.2+onnx_ml")
+    # depends_on("onnx@1.14.1", when="@2.1+onnx_ml")
+    # depends_on("onnx@1.13.1", when="@2.0+onnx_ml")
+    # depends_on("onnx@1.12.0", when="@1.13:1+onnx_ml")
+    # depends_on("onnx@1.11.0", when="@1.12+onnx_ml")
+    # depends_on("onnx@1.10.1_2021-10-08", when="@1.11+onnx_ml")
+    # depends_on("onnx@1.10.1", when="@1.10+onnx_ml")
+    # depends_on("onnx@1.8.0_2020-11-03", when="@1.8:1.9+onnx_ml")
+    # depends_on("onnx@1.7.0_2020-05-31", when="@1.6:1.7+onnx_ml")
+    with when("~custom-protobuf"):
+        depends_on("protobuf@3.13.0", when="@1.10:")
+        depends_on("protobuf@3.11.4", when="@1.6:1.9")
+        depends_on("protobuf@3.6.1", when="@1.1:1.5")
+        depends_on("protobuf@3.5.0", when="@1.0")
+        with default_args(type=("build", "run")):
+            depends_on("py-protobuf@3.13", when="@1.10:")
+            depends_on("py-protobuf@3.11", when="@1.6:1.9")
+            depends_on("py-protobuf@3.6", when="@1.1:1.5")
+            depends_on("py-protobuf@3.5", when="@1.0")
+    depends_on("psimd@2020-05-17", when="@1.6:")
+    depends_on("pthreadpool@2023-08-29", when="@2.2:")
+    depends_on("pthreadpool@2021-04-13", when="@1.9:2.1")
+    depends_on("pthreadpool@2020-10-05", when="@1.8")
+    depends_on("pthreadpool@2020-06-15", when="@1.6:1.7")
+    with default_args(type=("build", "link", "run")):
+        depends_on("py-pybind11@2.13.5:", when="@2.5:")
+        depends_on("py-pybind11@2.12.0:", when="@2.3:2.4")
+        depends_on("py-pybind11@2.11.0:", when="@2.1:2.2")
+        depends_on("py-pybind11@2.10.1:", when="@2.0")
+        depends_on("py-pybind11@2.10.0:", when="@1.13:1")
+        depends_on("py-pybind11@2.6.2:", when="@1.8:1.12")
+        depends_on("py-pybind11@2.3.0:", when="@:1.7")
+    depends_on("sleef@3.6.0_2024-03-20", when="@2.4:")
+    depends_on("sleef@3.5.1_2020-12-22", when="@1.8:2.3")
+    depends_on("sleef@3.4.0_2019-07-30", when="@1.6:1.7")
+
+    # Optional dependencies
+    with default_args(type=("build", "link", "run")):
+        # cmake/public/cuda.cmake
+        depends_on("cuda@11:", when="@2.4:+cuda")
+        # https://github.com/pytorch/pytorch/issues/122169
+        depends_on("cuda@11:12.3", when="@2.0:2.3+cuda")
+        depends_on("cuda@10.2:12.3", when="@1.11:1+cuda")
+        # https://discuss.pytorch.org/t/compiling-1-10-1-from-source-with-gcc-11-and-cuda-11-5/140971
+        depends_on("cuda@10.2:11.4", when="@1.10+cuda")
+        depends_on("cuda@9.2:11.4", when="@1.6:1.9+cuda")
+        depends_on("cuda@9:11.4", when="@:1.5+cuda")
+    # https://github.com/pytorch/pytorch#prerequisites
+    # https://github.com/pytorch/pytorch/issues/119400
+    depends_on("cudnn@8.5:9.0", when="@2.3:+cudnn")
+    depends_on("cudnn@7:8", when="@1.6:2.2+cudnn")
+    depends_on("cudnn@7", when="@:1.5+cudnn")
+    depends_on("magma+cuda", when="+magma+cuda")
+    depends_on("magma+rocm", when="+magma+rocm")
+    depends_on("numactl", when="+numa")
+    depends_on("llvm-openmp", when="%apple-clang +openmp")
+    depends_on("valgrind", when="+valgrind")
+    with when("+rocm"):
+        depends_on("hsa-rocr-dev")
+        depends_on("hip")
+        depends_on("rccl", when="+nccl")
+        depends_on("rocprim")
+        depends_on("hipcub")
+        depends_on("rocthrust")
+        depends_on("roctracer-dev")
+        depends_on("rocrand")
+        depends_on("hipsparse")
+        depends_on("hipfft")
+        depends_on("rocfft")
+        depends_on("rocblas")
+        depends_on("miopen-hip")
+        depends_on("rocminfo")
+    depends_on("mpi", when="+mpi")
+    depends_on("ucc", when="+ucc")
+    depends_on("ucx", when="+ucc")
+    depends_on("mkl", when="+mkldnn")
+
+    # Test dependencies
+    with default_args(type="test"):
+        depends_on("py-hypothesis")
+        depends_on("py-six")
+        depends_on("py-psutil")
+
+    # Historical dependencies
+    with default_args(type=("build", "run")):
+        depends_on("mkl@2021.1.1:2021.4.0", when="@2.3 platform=windows")
+        depends_on("py-cffi", when="@:1")
+        depends_on("py-future", when="@1.5:1")
+        depends_on("py-six", when="@1.13:1")
+
+    conflicts("%gcc@:9.3", when="@2.2:", msg="C++17 support required")
+
+    # https://github.com/pytorch/pytorch/issues/90448
+    patch(
+        "https://github.com/pytorch/pytorch/pull/97270.patch?full_index=1",
+        sha256="beb3fb57746cf8443f5caa6e08b2f8f4d4822c1e11e0c912134bd166c6a0ade7",
+        when="@1.10:2.0",
+    )
+
+    # Fix BLAS being overridden by MKL
+    # https://github.com/pytorch/pytorch/issues/60328
+    patch(
+        "https://github.com/pytorch/pytorch/pull/59220.patch?full_index=1",
+        sha256="6d5717267f901e8ee493dfacd08734d9bcc48ad29a76ca9ef702368e96bee675",
+        when="@:1.11",
+    )
+
+    # Fixes build on older systems with glibc <2.12
+    patch(
+        "https://github.com/pytorch/pytorch/pull/55063.patch?full_index=1",
+        sha256="2229bcbf20fbe88aa9f7318f89c126ec7f527875ffe689a763c78abfa127a65c",
+        when="@:1.8.1",
+    )
+
+    # https://github.com/pytorch/pytorch/issues/70297
+    patch(
+        "https://github.com/google/breakpad/commit/605c51ed96ad44b34c457bbca320e74e194c317e.patch?full_index=1",
+        sha256="694d83db3a2147d543357f22ba5c8d5683d0ed43e693d42bca8f24ec50080f98",
+        when="+breakpad",
+        working_dir="third_party/breakpad",
+    )
+
+    # Fixes CMake configuration error when XNNPACK is disabled
+    # https://github.com/pytorch/pytorch/pull/35607
+    # https://github.com/pytorch/pytorch/pull/37865
+    patch("xnnpack.patch", when="@1.5")
+
+    # Fixes build error when ROCm is enabled for pytorch-1.5 release
+    patch("rocm.patch", when="@1.5+rocm")
+
+    # Fixes compilation with Clang 9.0.0 and Apple Clang 11.0.3
+    # https://github.com/pytorch/pytorch/pull/37086
+    patch(
+        "https://github.com/pytorch/pytorch/commit/e921cd222a8fbeabf5a3e74e83e0d8dfb01aa8b5.patch?full_index=1",
+        sha256="0f3ad037a95af9d34b1d085050c1e7771fd00f0b89e5b3a276097b7c9f4fabf8",
+        when="@:1.5",
+    )
+
+    # Fixes 'FindOpenMP.cmake'
+    # to detect openmp settings used by Fujitsu compiler.
+    patch("detect_omp_of_fujitsu_compiler.patch", when="%fj")
+
+    # Fixes to build with fujitsu-ssl2
+    patch("fj-ssl2_1.11.patch", when="@1.11:^fujitsu-ssl2")
+    patch("fj-ssl2_1.10.patch", when="@1.10^fujitsu-ssl2")
+    patch("fj-ssl2_1.9.patch", when="@1.9^fujitsu-ssl2")
+    patch("fj-ssl2_1.8.patch", when="@1.8^fujitsu-ssl2")
+    patch("fj-ssl2_1.6-1.7.patch", when="@1.6:1.7^fujitsu-ssl2")
+    patch("fj-ssl2_1.3-1.5.patch", when="@:1.5^fujitsu-ssl2")
+
+    # Fix compilation of +distributed~tensorpipe
+    # https://github.com/pytorch/pytorch/issues/68002
+    patch(
+        "https://github.com/pytorch/pytorch/commit/c075f0f633fa0136e68f0a455b5b74d7b500865c.patch?full_index=1",
+        sha256="41271e494a3a60a65a8dd45ac053d1a6e4e4d5b42c2dac589ac67524f61ac41e",
+        when="@1.10.0+distributed~tensorpipe",
+    )
+
+    # Use patches from IBM's Open CE to enable building on Power systems
+    # 01xx patches are specific to open-ce; we only include the 03xx patches used in meta.yaml
+    # https://github.com/open-ce/pytorch-feedstock
+    patch(
+        "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.10/recipe/0302-cpp-extension.patch",
+        sha256="ecb3973fa7d0f4c8f8ae40433f3ca5622d730a7b16f6cb63325d1e95baff8aa2",
+        when="@1.10:1.11 arch=ppc64le:",
+    )
+    patch(
+        "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.10/recipe/0311-PR66085-Remove-unused-dump-method-from-VSX-vec256-methods.patch",
+        sha256="f05db59f3def4c4215db7142d81029c73fe330c660492159b66d65ca5001f4d1",
+        when="@1.10 arch=ppc64le:",
+    )
+    patch(
+        "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.10/recipe/0312-PR67331-Dummpy-VSX-bfloat16-implementation.patch",
+        sha256="860b64afa85f5e6647ebc3c91d5a0bb258784770900c9302c3599c98d5cff1ee",
+        when="@1.10 arch=ppc64le:",
+    )
+    patch(
+        "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.10/recipe/0313-add-missing-vsx-dispatch.patch",
+        sha256="7393c2bc0b6d41ecc813c829a1e517bee864686652e91f174cb7bcdfb10ba451",
+        when="@1.10 arch=ppc64le:",
+    )
+    patch(
+        "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.10/recipe/0314-fix-nullpointer-error.patch",
+        sha256="b9cff8966f316f58514c66a403b7a6786be3cdb252f1380a6b91c722686a4097",
+        when="@1.10 arch=ppc64le:",
+    )
+    patch(
+        "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.12/recipe/0302-cpp-extension.patch",
+        sha256="2fac519cca8997f074c263505657ff867e7ba2d6637fc8bda99c70a99be0442a",
+        when="@1.12 arch=ppc64le:",
+    )
+    patch(
+        "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.8.0/pytorch-1.13/recipe/0302-cpp-extension.patch",
+        sha256="a54db63640b90e5833cc1099c0935572f5297d2d8625f62f01ac1fda79ed4569",
+        when="@1.13 arch=ppc64le:",
+    )
+    patch(
+        "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.9.0/pytorch-2.0/recipe/0309-fallback-to-cpu_kernel-with-VSX.patch",
+        sha256="27f41c8d6cb61e69e761be62f03dc1ce023cbca34926e3ba559996821a7ce726",
+        when="@2.0 arch=ppc64le:",
+    )
+    patch(
+        "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.9.0/pytorch-2.0/recipe/0310-PR100149.patch",
+        sha256="1adbd38a9cc1611f1caaa325614695f4349d9ffd236332e0d8f0de5a3880f4dd",
+        when="@2.0 arch=ppc64le:",
+    )
+    patch(
+        "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.10.0/pytorch-2.0/recipe/0311-PR104956.patch",
+        sha256="be27c906924a21be198a3ea6c459739a1daa8b8b89045af339dafa4cd6f90d6c",
+        when="@2.0 arch=ppc64le:",
+    )
+    conflicts("arch=ppc64le:", when="@:1.9")
+
+    # Cherry-pick a patch to allow earlier versions of PyTorch to work with CUDA 11.4
+    patch(
+        "https://github.com/pytorch/pytorch/commit/c74c0c571880df886474be297c556562e95c00e0.patch?full_index=1",
+        sha256="8ff7d285e52e4718bad1ca01ceb3bb6471d7828329036bb94222717fcaa237da",
+        when="@:1.9.1 ^cuda@11.4.100:",
+    )
+
+    # PyTorch does not build with GCC 12 (fixed in 2.0)
+    # See: https://github.com/pytorch/pytorch/issues/77614
+    patch(
+        "https://github.com/facebookincubator/gloo/commit/4a5e339b764261d20fc409071dc7a8b8989aa195.patch?full_index=1",
+        sha256="dc8b3a9bea4693f32d6850ea2ce6ce75e1778538bfba464b50efca92bac425e3",
+        when="@:1 %gcc@12:",
+        working_dir="third_party/gloo",
+    )
+
+    # PyTorch does not build on Linux >=6.0.3 (fixed in master)
+    # See: https://github.com/facebookincubator/gloo/issues/345
+    patch(
+        "https://github.com/facebookincubator/gloo/commit/10909297fedab0a680799211a299203e53515032.patch?full_index=1",
+        sha256="8e6e9a44e0533ba4303a95a651b1934e5d73632cab08cc7d5a9435e1e64aa424",
+        when="@:1",
+        working_dir="third_party/gloo",
+    )
+
+    # Some missing includes
+    # See: https://github.com/pytorch/pytorch/pull/100036
+    patch(
+        "https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/100036.patch?full_index=1",
+        sha256="65060b54c31196b26dcff29bbb178fd17d5677e8481a2a06002c0ca4dd37b3d0",
+        when="@2.0.0:2.0.1",
+    )
+    # See: https://github.com/pytorch/pytorch/pull/100049
+    patch(
+        "https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/100049.patch?full_index=1",
+        sha256="673056141c0ea6ff4411f65a26f1a9d7a7c49ad8fe034a01ef0d56ba8a7a9386",
+        when="@2.0.0:2.0.1",
+    )
+
+    # Use correct OpenBLAS include path under prefix
+    patch(
+        "https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/110063.patch?full_index=1",
+        sha256="23fb4009f7337051fc5303927ff977186a5af960245e7212895406477d8b2f66",
+        when="@:2.1",
+    )
+
+    patch(
+        "https://github.com/pytorch/FBGEMM/commit/da01a59556fec9776733bf20aea8fe8fb29cdd3d.patch?full_index=1",
+        sha256="97d8bd43f8cd8bb203dab3480d609c08499224acaca9915f2bdeb23c62350fb1",
+        when="@2.0.1 +fbgemm",
+        working_dir="third_party/fbgemm",
+    )
+
+    # begin EBRAINS (added): see https://github.com/pytorch/pytorch/issues/129304
+    patch(
+        "https://github.com/pytorch/pytorch/commit/9174d14551c4c6f594bd1532ab00fb7158b1bbfa.patch?full_index=1",
+        sha256="25204236888f25ea74c081787a01deae1a6fa66ecd77ecbe280e1ce8af6b8116",
+        when="@2.4",
+    )
+    # end EBRAINS
+
+    @when("@1.5.0:")
+    def patch(self):
+        # https://github.com/pytorch/pytorch/issues/52208
+        filter_file(
+            "torch_global_deps PROPERTIES LINKER_LANGUAGE C",
+            "torch_global_deps PROPERTIES LINKER_LANGUAGE CXX",
+            "caffe2/CMakeLists.txt",
+        )
+
+    def torch_cuda_arch_list(self, env):
+        if "+cuda" in self.spec:
+            torch_cuda_arch = CudaPackage.compute_capabilities(
+                self.spec.variants["cuda_arch"].value
+            )
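+            # e.g. cuda_arch=70,80 is rendered as TORCH_CUDA_ARCH_LIST="7.0;8.0"
+            # (illustrative values)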
+            env.set("TORCH_CUDA_ARCH_LIST", ";".join(torch_cuda_arch))
+
+    def setup_build_environment(self, env):
+        """Set environment variables used to control the build.
+
+        PyTorch's ``setup.py`` is a thin wrapper around ``cmake``.
+        In ``tools/setup_helpers/cmake.py``, you can see that all
+        environment variables that start with ``BUILD_``, ``USE_``,
+        or ``CMAKE_``, plus a few more explicitly specified variable
+        names, are passed directly to the ``cmake`` call. Therefore,
+        most flags defined in ``CMakeLists.txt`` can be specified as
+        environment variables.
+        """
+
+        def enable_or_disable(variant, keyword="USE", var=None):
+            """Set environment variable to enable or disable support for a
+            particular variant.
+
+            Parameters:
+                variant (str): the variant to check
+                keyword (str): the prefix to use for enabling/disabling
+                var (str): CMake variable to set. Defaults to variant.upper()
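+
+            Example (illustrative): ``enable_or_disable("cuda")`` sets
+            ``USE_CUDA=ON`` for a ``+cuda`` spec and ``USE_CUDA=OFF`` for ``~cuda``.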
+            """
+            if var is None:
+                var = variant.upper()
+
+            if "+" + variant in self.spec:
+                env.set(keyword + "_" + var, "ON")
+            elif "~" + variant in self.spec:
+                env.set(keyword + "_" + var, "OFF")
+
+        # Build in parallel to speed up build times
+        env.set("MAX_JOBS", make_jobs)
+
+        # Spack logs have trouble handling colored output
+        env.set("COLORIZE_OUTPUT", "OFF")
+
+        enable_or_disable("test", keyword="BUILD")
+        enable_or_disable("caffe2", keyword="BUILD")
+
+        enable_or_disable("cuda")
+        if "+cuda" in self.spec:
+            env.set("CUDA_TOOLKIT_ROOT_DIR", self.spec["cuda"].prefix)  # Linux/macOS
+            env.set("CUDA_HOME", self.spec["cuda"].prefix)  # Linux/macOS
+            env.set("CUDA_PATH", self.spec["cuda"].prefix)  # Windows
+            self.torch_cuda_arch_list(env)
+
+            if self.spec.satisfies("%clang"):
+                for flag in self.spec.compiler_flags["cxxflags"]:
+                    if "gcc-toolchain" in flag:
+                        env.set("CMAKE_CUDA_FLAGS", "=-Xcompiler={0}".format(flag))
+
+        enable_or_disable("rocm")
+        if "+rocm" in self.spec:
+            env.set("PYTORCH_ROCM_ARCH", ";".join(self.spec.variants["amdgpu_target"].value))
+            env.set("HSA_PATH", self.spec["hsa-rocr-dev"].prefix)
+            env.set("ROCBLAS_PATH", self.spec["rocblas"].prefix)
+            env.set("ROCFFT_PATH", self.spec["rocfft"].prefix)
+            env.set("HIPFFT_PATH", self.spec["hipfft"].prefix)
+            env.set("HIPSPARSE_PATH", self.spec["hipsparse"].prefix)
+            env.set("HIP_PATH", self.spec["hip"].prefix)
+            env.set("HIPRAND_PATH", self.spec["rocrand"].prefix)
+            env.set("ROCRAND_PATH", self.spec["rocrand"].prefix)
+            env.set("MIOPEN_PATH", self.spec["miopen-hip"].prefix)
+            if "+nccl" in self.spec:
+                env.set("RCCL_PATH", self.spec["rccl"].prefix)
+            env.set("ROCPRIM_PATH", self.spec["rocprim"].prefix)
+            env.set("HIPCUB_PATH", self.spec["hipcub"].prefix)
+            env.set("ROCTHRUST_PATH", self.spec["rocthrust"].prefix)
+            env.set("ROCTRACER_PATH", self.spec["roctracer-dev"].prefix)
+            if self.spec.satisfies("^hip@5.2.0:"):
+                env.set("CMAKE_MODULE_PATH", self.spec["hip"].prefix.lib.cmake.hip)
+
+        enable_or_disable("cudnn")
+        if "+cudnn" in self.spec:
+            # cmake/Modules_CUDA_fix/FindCUDNN.cmake
+            env.set("CUDNN_INCLUDE_DIR", self.spec["cudnn"].prefix.include)
+            env.set("CUDNN_LIBRARY", self.spec["cudnn"].libs[0])
+
+        # Flash attention has very high memory requirements that may cause the build to fail
+        # https://github.com/pytorch/pytorch/issues/111526
+        # https://github.com/pytorch/pytorch/issues/124018
+        env.set("USE_FLASH_ATTENTION", "OFF")
+
+        enable_or_disable("fbgemm")
+        enable_or_disable("kineto")
+        enable_or_disable("magma")
+        enable_or_disable("metal")
+        enable_or_disable("mps")
+        enable_or_disable("breakpad")
+
+        enable_or_disable("nccl")
+        if "+cuda+nccl" in self.spec:
+            env.set("NCCL_LIB_DIR", self.spec["nccl"].libs.directories[0])
+            env.set("NCCL_INCLUDE_DIR", self.spec["nccl"].prefix.include)
+
+        # cmake/External/nnpack.cmake
+        enable_or_disable("nnpack")
+
+        enable_or_disable("numa")
+        if "+numa" in self.spec:
+            # cmake/Modules/FindNuma.cmake
+            env.set("NUMA_ROOT_DIR", self.spec["numactl"].prefix)
+
+        # cmake/Modules/FindNumPy.cmake
+        enable_or_disable("numpy")
+        # cmake/Modules/FindOpenMP.cmake
+        enable_or_disable("openmp")
+        enable_or_disable("qnnpack")
+        enable_or_disable("qnnpack", var="PYTORCH_QNNPACK")
+        enable_or_disable("valgrind")
+        enable_or_disable("xnnpack")
+        enable_or_disable("mkldnn")
+        enable_or_disable("distributed")
+        enable_or_disable("mpi")
+        enable_or_disable("ucc")
+        # cmake/Modules/FindGloo.cmake
+        enable_or_disable("gloo")
+        enable_or_disable("tensorpipe")
+
+        if "+debug" in self.spec:
+            env.set("DEBUG", "ON")
+        else:
+            env.set("DEBUG", "OFF")
+
+        if "+onnx_ml" in self.spec:
+            env.set("ONNX_ML", "ON")
+        elif "~onnx_ml" in self.spec:
+            env.set("ONNX_ML", "OFF")
+
+        if not self.spec.satisfies("@main"):
+            env.set("PYTORCH_BUILD_VERSION", self.version)
+            env.set("PYTORCH_BUILD_NUMBER", 0)
+
+        # BLAS to be used by Caffe2
+        # Options defined in cmake/Dependencies.cmake and cmake/Modules/FindBLAS.cmake
+        if self.spec["blas"].name == "atlas":
+            env.set("BLAS", "ATLAS")
+            env.set("WITH_BLAS", "atlas")
+            env.set("Atlas_ROOT_DIR", self.spec["atlas"].prefix)
+        elif self.spec["blas"].name in ["blis", "amdblis"]:
+            env.set("BLAS", "BLIS")
+            env.set("WITH_BLAS", "blis")
+            env.set("BLIS_HOME", self.spec["blas"].prefix)
+        elif self.spec["blas"].name == "eigen":
+            env.set("BLAS", "Eigen")
+        elif self.spec["lapack"].name in ["libflame", "amdlibflame"]:
+            env.set("BLAS", "FLAME")
+            env.set("WITH_BLAS", "FLAME")
+        elif self.spec["blas"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]:
+            env.set("BLAS", "MKL")
+            env.set("WITH_BLAS", "mkl")
+            # help find MKL
+            if self.spec["mkl"].name == "intel-oneapi-mkl":
+                env.set("INTEL_MKL_DIR", self.spec["mkl"].prefix.mkl.latest)
+            else:
+                env.set("INTEL_MKL_DIR", self.spec["mkl"].prefix.mkl)
+        elif self.spec["blas"].name == "openblas":
+            env.set("BLAS", "OpenBLAS")
+            env.set("WITH_BLAS", "open")
+            env.set("OpenBLAS_HOME", self.spec["openblas"].prefix)
+        elif self.spec["blas"].name == "veclibfort":
+            env.set("BLAS", "vecLib")
+            env.set("WITH_BLAS", "veclib")
+        elif self.spec["blas"].name == "fujitsu-ssl2":
+            env.set("BLAS", "SSL2")
+            env.set("WITH_BLAS", "ssl2")
+        else:
+            env.set("BLAS", "Generic")
+            env.set("WITH_BLAS", "generic")
+
+        # Don't use vendored third-party libraries when possible
+        # env.set("USE_SYSTEM_LIBS", "ON")
+        env.set("USE_SYSTEM_BENCHMARK", "ON")
+        env.set("USE_SYSTEM_CPUINFO", "ON")
+        env.set("USE_SYSTEM_EIGEN_INSTALL", "ON")
+        env.set("USE_SYSTEM_FP16", "ON")
+        env.set("USE_SYSTEM_FXDIV", "ON")
+        env.set("USE_SYSTEM_GLOO", "ON")
+        env.set("USE_SYSTEM_NCCL", "ON")
+        # https://github.com/pytorch/pytorch/issues/60331
+        # env.set("USE_SYSTEM_ONNX", "ON")
+        env.set("USE_SYSTEM_PSIMD", "ON")
+        env.set("USE_SYSTEM_PTHREADPOOL", "ON")
+        env.set("USE_SYSTEM_PYBIND11", "ON")
+        env.set("USE_SYSTEM_SLEEF", "ON")
+        env.set("USE_SYSTEM_UCC", "ON")
+        # https://github.com/pytorch/pytorch/issues/60332
+        # env.set("USE_SYSTEM_XNNPACK", "ON")
+
+        if self.spec.satisfies("+custom-protobuf"):
+            env.set("BUILD_CUSTOM_PROTOBUF", "ON")
+        else:
+            env.set("BUILD_CUSTOM_PROTOBUF", "OFF")
+
+    def setup_run_environment(self, env):
+        self.torch_cuda_arch_list(env)
+
+    @run_before("install")
+    def build_amd(self):
+        if "+rocm" in self.spec:
+            python(os.path.join("tools", "amd_build", "build_amd.py"))
+
+    @run_after("install")
+    @on_package_attributes(run_tests=True)
+    def install_test(self):
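+        # Runs PyTorch's own test driver from the source tree; only executed when
+        # package tests are requested (e.g. `spack install --test=root py-torch`).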
+        with working_dir("test"):
+            python("run_test.py")
+
+    @property
+    def cmake_prefix_paths(self):
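+        # Expose <python platlib>/torch/share/cmake so that dependent packages
+        # can locate the installed CMake config files (e.g. Torch/TorchConfig.cmake).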
+        cmake_prefix_paths = [join_path(python_platlib, "torch", "share", "cmake")]
+        return cmake_prefix_paths
diff --git a/packages/py-torch/rocm.patch b/packages/py-torch/rocm.patch
new file mode 100644
index 0000000000000000000000000000000000000000..b50cc7e1598a23f41e1e1a73e6672e6a4d132b6a
--- /dev/null
+++ b/packages/py-torch/rocm.patch
@@ -0,0 +1,98 @@
+diff --git a/aten/src/ATen/cuda/nvrtc_stub/ATenNVRTC.h b/aten/src/ATen/cuda/nvrtc_stub/ATenNVRTC.h
+index 9cd678dfb4cc7..4630465115c7c 100644
+--- a/aten/src/ATen/cuda/nvrtc_stub/ATenNVRTC.h
++++ b/aten/src/ATen/cuda/nvrtc_stub/ATenNVRTC.h
+@@ -67,6 +67,14 @@ namespace at { namespace cuda {
+ //
+ // HIP doesn't have
+ //   cuGetErrorString  (maps to non-functional hipGetErrorString___)
++//
++// HIP from ROCm 3.5 on renamed hipOccupancyMaxActiveBlocksPerMultiprocessor
++// to hipModuleOccupancyMaxActiveBlocksPerMultiprocessor.
++#if HIP_VERSION < 305
++#define HIPOCCUPANCYMAXACTIVEBLOCKSPERMULTIPROCESSOR hipOccupancyMaxActiveBlocksPerMultiprocessor
++#else
++#define HIPOCCUPANCYMAXACTIVEBLOCKSPERMULTIPROCESSOR cuOccupancyMaxActiveBlocksPerMultiprocessor
++#endif
+ 
+ #define AT_FORALL_NVRTC(_)                       \
+   _(nvrtcVersion)                                \
+@@ -76,7 +84,7 @@ namespace at { namespace cuda {
+   _(nvrtcGetPTX)                                 \
+   _(cuModuleLoadData)                            \
+   _(cuModuleGetFunction)                         \
+-  _(cuOccupancyMaxActiveBlocksPerMultiprocessor) \
++  _(HIPOCCUPANCYMAXACTIVEBLOCKSPERMULTIPROCESSOR)\
+   _(nvrtcGetErrorString)                         \
+   _(nvrtcGetProgramLogSize)                      \
+   _(nvrtcGetProgramLog)                          \
+diff --git a/aten/src/ATen/native/cuda/SoftMax.cu b/aten/src/ATen/native/cuda/SoftMax.cu
+index da1995123ecfc..f935eb4ef3d0e 100644
+--- a/aten/src/ATen/native/cuda/SoftMax.cu
++++ b/aten/src/ATen/native/cuda/SoftMax.cu
+@@ -127,8 +127,8 @@ void SpatialSoftMax_getLaunchSizes(
+   uint32_t block_threads = block.x * block.y;
+   smem_size = block.x == 1 ? 0 : block_threads * sizeof(accscalar_t);
+   int max_active_blocks;
+-#ifdef __HIP_PLATFORM_HCC__
+-  // XXX HIP function signature is not compatible yet.
++#if defined(__HIP_PLATFORM_HCC__) && HIP_VERSION < 305
++  // HIP function signature is not compatible yet.
+   uint32_t max_blocks;
+   cudaOccupancyMaxActiveBlocksPerMultiprocessor(&max_blocks,
+                                                 k, block_threads, smem_size);
+diff --git a/torch/csrc/jit/codegen/fuser/cuda/fused_kernel.cpp b/torch/csrc/jit/codegen/fuser/cuda/fused_kernel.cpp
+index 5586e49919727..27315ee475277 100644
+--- a/torch/csrc/jit/codegen/fuser/cuda/fused_kernel.cpp
++++ b/torch/csrc/jit/codegen/fuser/cuda/fused_kernel.cpp
+@@ -140,10 +140,10 @@ FusedKernelCUDA::FusedKernelCUDA(
+       nvrtc().cuModuleGetFunction(&function_, module_, name_.c_str()));
+ 
+   // Computes max blocks
+-#ifdef __HIP_PLATFORM_HCC__
+-  // XXX HIP function signature is not compatible yet
++#if defined(__HIP_PLATFORM_HCC__) && HIP_VERSION < 305
++  // HIP function signature is not compatible yet
+   uint32_t max_blocks;
+-  AT_CUDA_DRIVER_CHECK(nvrtc().cuOccupancyMaxActiveBlocksPerMultiprocessor(
++  AT_CUDA_DRIVER_CHECK(nvrtc().hipOccupancyMaxActiveBlocksPerMultiprocessor(
+       &max_blocks, function_, 128, 0));
+   maxBlocks_ = max_blocks;
+ #else
+diff --git a/torch/utils/hipify/cuda_to_hip_mappings.py b/torch/utils/hipify/cuda_to_hip_mappings.py
+index 7e21363cbe6af..26f269d92ae38 100644
+--- a/torch/utils/hipify/cuda_to_hip_mappings.py
++++ b/torch/utils/hipify/cuda_to_hip_mappings.py
+@@ -2890,7 +2890,7 @@
+         (
+             "cuOccupancyMaxActiveBlocksPerMultiprocessor",
+             (
+-                "hipOccupancyMaxActiveBlocksPerMultiprocessor",
++                "hipModuleOccupancyMaxActiveBlocksPerMultiprocessor",
+                 CONV_OCCUPANCY,
+                 API_DRIVER,
+             ),
+@@ -2898,7 +2898,7 @@
+         (
+             "cuOccupancyMaxActiveBlocksPerMultiprocessorWithFlags",
+             (
+-                "hipOccupancyMaxActiveBlocksPerMultiprocessorWithFlags",
++                "hipModuleOccupancyMaxActiveBlocksPerMultiprocessorWithFlags",
+                 CONV_OCCUPANCY,
+                 API_DRIVER,
+                 HIP_UNSUPPORTED,
+@@ -2906,12 +2906,12 @@
+         ),
+         (
+             "cuOccupancyMaxPotentialBlockSize",
+-            ("hipOccupancyMaxPotentialBlockSize", CONV_OCCUPANCY, API_DRIVER),
++            ("hipModuleOccupancyMaxPotentialBlockSize", CONV_OCCUPANCY, API_DRIVER),
+         ),
+         (
+             "cuOccupancyMaxPotentialBlockSizeWithFlags",
+             (
+-                "hipOccupancyMaxPotentialBlockSizeWithFlags",
++                "hipModuleOccupancyMaxPotentialBlockSizeWithFlags",
+                 CONV_OCCUPANCY,
+                 API_DRIVER,
+                 HIP_UNSUPPORTED,
diff --git a/packages/py-torch/xnnpack.patch b/packages/py-torch/xnnpack.patch
new file mode 100644
index 0000000000000000000000000000000000000000..154033081e7ff91867e9a043a93c46b888bfe8cb
--- /dev/null
+++ b/packages/py-torch/xnnpack.patch
@@ -0,0 +1,47 @@
+diff --git a/caffe2/CMakeLists.txt b/caffe2/CMakeLists.txt
+index 8025a7de3c..0da37079d6 100644
+--- a/caffe2/CMakeLists.txt
++++ b/caffe2/CMakeLists.txt
+@@ -46,12 +46,19 @@ if (INTERN_BUILD_ATEN_OPS)
+   list(APPEND Caffe2_DEPENDENCY_INCLUDE ${ATen_THIRD_PARTY_INCLUDE})
+ endif()
+ 
++# {Q/X,etc} NPACK support is enabled by default, if none of these options
++# are selected, turn this flag ON to indicate the support is disabled
++set(NNPACK_AND_FAMILY_DISABLED OFF)
++if(NOT (USE_NNPACK OR USE_QNNPACK OR USE_PYTORCH_QNNPACK OR USE_XNNPACK))
++  set(NNPACK_AND_FAMILY_DISABLED ON)
++endif()
++
+ # ---[ Caffe2 build
+ # Note: the folders that are being commented out have not been properly
+ # addressed yet.
+ 
+ # For pthreadpool_new_if_impl. TODO: Remove when threadpools are unitied.
+-if (NOT MSVC)
++if (NOT MSVC AND NOT NNPACK_AND_FAMILY_DISABLED)
+   IF(NOT TARGET fxdiv)
+     SET(FXDIV_BUILD_TESTS OFF CACHE BOOL "")
+     SET(FXDIV_BUILD_BENCHMARKS OFF CACHE BOOL "")
+@@ -710,7 +717,7 @@ ELSEIF(USE_CUDA)
+ ENDIF()
+ 
+ 
+-if (NOT MSVC)
++if (NOT MSVC AND NOT NNPACK_AND_FAMILY_DISABLED)
+   TARGET_LINK_LIBRARIES(torch_cpu PRIVATE fxdiv)
+ endif()
+ 
+diff --git a/caffe2/utils/CMakeLists.txt b/caffe2/utils/CMakeLists.txt
+index 27aabb1315..3c7845c67d 100644
+--- a/caffe2/utils/CMakeLists.txt
++++ b/caffe2/utils/CMakeLists.txt
+@@ -36,7 +36,7 @@ list(APPEND Caffe2_CPU_SRCS
+ # ---[ threadpool/pthreadpool* is a local modification of the NNPACK
+ # pthreadpool with a very similar interface. Neither NNPACK, nor this
+ # thread pool supports Windows.
+-if (NOT MSVC)
++if (NOT MSVC AND NOT NNPACK_AND_FAMILY_DISABLED)
+   add_definitions(-DUSE_INTERNAL_THREADPOOL_IMPL)
+   set(Caffe2_CPU_SRCS ${Caffe2_CPU_SRCS}
+           utils/threadpool/pthreadpool.cc
diff --git a/packages/py-torchtestcase/package.py b/packages/py-torchtestcase/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..29db01bd64dc306e6d7e096f62ee17bbb1b77aaa
--- /dev/null
+++ b/packages/py-torchtestcase/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyTorchtestcase(PythonPackage):
+    """Extends unittest.TestCase such that assertions support PyTorch tensors and parameters."""
+
+    homepage = "https://github.com/phohenecker/torch-test-case"
+    pypi = "torchtestcase/torchtestcase-2018.2.tar.gz"
+
+    version("2018.2", sha256="0061cde2eb79f09c9501fae675c52c799371606d52afcff8753c44e1a6254a00")
+    version("2018.1", sha256="691b053b0466aed40201e1b41f5a903b4df889a64272a18bcab4b1c8e9091cb4")
+    version("2017.1", sha256="f8bb0c4e3216087130f80c4237bb5e4c1e6de629d553f25fd7b85f6e33bf9b34")
+
+    depends_on("py-numpy@1.13.1:", type=("build", "run"))
+    depends_on("py-torch@0.4.0:", type=("build", "run"))
diff --git a/packages/py-tvb-multiscale/package.py b/packages/py-tvb-multiscale/package.py
index 7b78790a82f40291ab64dc1439b0b55a1bd353c0..b57b8f03b4376d6a3d5de7ec91522913ebd5e0c6 100644
--- a/packages/py-tvb-multiscale/package.py
+++ b/packages/py-tvb-multiscale/package.py
@@ -21,7 +21,7 @@ class PyTvbMultiscale(PythonPackage):
     patch('tvb-multiscale-2.1.0-version-pep440.patch', when='@2.1.0.ebrains')
 
     # python_requires
-    depends_on('python@3.8:3.10', type=('build', 'run'))
+    depends_on('python@3.8:3.11', type=('build', 'run'))
 
     # setup_requires
     depends_on('py-pip', type='build')
@@ -39,7 +39,7 @@ class PyTvbMultiscale(PythonPackage):
     depends_on('py-ray', type=('build', 'run'))
 
     # Test dependency
-    depends_on('py-pytest@:7.1', type='test')
+    depends_on('py-pytest', type='test')
     
     @run_after('install')
     @on_package_attributes(run_tests=True)
diff --git a/packages/py-umnn/package.py b/packages/py-umnn/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..545b1ec0d4470791d52a4c3e330eb0cfaeefc7b4
--- /dev/null
+++ b/packages/py-umnn/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyUmnn(PythonPackage):
+    """Official implementation of Unconstrained Monotonic Neural Networks (UMNN)."""
+
+    homepage = "https://github.com/AWehenkel/UMNN"
+    pypi = "umnn/umnn-1.71.tar.gz"
+
+    version("1.71", "bdd41d941a5d904e2217a960a9584922afad8068304976dc6fb0245e4f834996")
+
+    depends_on("python@3.6:", type=("build", "run"))
+    depends_on("py-hatchling", type="build")
+    depends_on("py-numpy", type=("build", "run"))
+    depends_on("py-torch@1.1:", type=("build", "run"))
diff --git a/packages/py-vbi/package.py b/packages/py-vbi/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..5466325b2d2aa05013658e8959c38eda906815e8
--- /dev/null
+++ b/packages/py-vbi/package.py
@@ -0,0 +1,47 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyVbi(PythonPackage, CudaPackage):
+    """Virtual Brain Inference (VBI): simulation-based inference for virtual brain models."""
+
+    homepage = "https://vbi.readthedocs.io/latest/"
+    git = "https://github.com/ins-amu/vbi"
+    url = "https://github.com/ins-amu/vbi/archive/refs/tags/v0.1.3.tar.gz"
+
+    version("0.1.3.2", sha256="6de0367b15834195ad1c14c61e4286875b5fe32d86975185977065765564576d")
+    version("0.1.3", sha256="8ccccf2bf0def2bf97f4706b8597c4cb3ac5f0cf2ac5f08566e22cd6273c1163")
+    version("0.1.2", sha256="6ccfeeec718be62a480002a8370130a3e3344955186f99ecbb15b646b68210d6")
+
+    depends_on("python@3.8:", type=("build", "run"))
+    depends_on("py-setuptools", type="build")
+    depends_on("py-setuptools-scm", type="build")
+    depends_on("py-wheel", type="build")
+    depends_on("swig@4:", type="build")
+    depends_on("py-numpy", type=("build", "run"))
+    depends_on("py-scipy", type=("build", "run"))
+    depends_on("py-numba", type=("build", "run"))
+    depends_on("py-h5py", type=("build", "run"))
+    depends_on("py-pandas", type=("build", "run"))
+    depends_on("py-networkx", type=("build", "run"))
+    depends_on("py-nbconvert", type=("build", "run"))
+    depends_on("py-matplotlib", type=("build", "run"))
+    depends_on("py-tqdm", type=("build", "run"))
+    depends_on("py-sbi", type=("build", "run"))
+    depends_on("py-torch", type=("build", "run"))
+    depends_on("py-parameterized", type=("build", "run"))
+    depends_on("py-scikit-learn", type=("build", "run"))
+    depends_on("py-pycatch22", type=("build", "run"))
+    depends_on("py-pytest", type="test")
+    depends_on("py-cupy", type=("build", "run"), when="+cuda")
+
+    @run_after("install")
+    @on_package_attributes(run_tests=True)
+    def install_test(self):
+        pytest = which("pytest")
+        pytest()
diff --git a/packages/py-viziphant/package.py b/packages/py-viziphant/package.py
index 439d905141bb127d1b85a60f16ae3f28d5df9fd9..22281cb4192f1dee28d7a1a731607241a1202868 100644
--- a/packages/py-viziphant/package.py
+++ b/packages/py-viziphant/package.py
@@ -20,7 +20,7 @@ class PyViziphant(PythonPackage):
     version('0.1.0', sha256='8fd56ec8633f799396dc33fbace95d2553bedb17f680a8c0e97f43b3a629bf6c')
 
     depends_on('py-setuptools', type='build')
-    depends_on('python@3.7:3.10', type=('build', 'run'))
+    depends_on('python@3.7:3.11', type=('build', 'run'))
     depends_on('py-neo@0.9.0:', type=('build', 'run'))
     depends_on('py-elephant@0.9.0:', type=('build', 'run'))
     depends_on('py-numpy@1.18.1:', type=('build', 'run'))
diff --git a/packages/py-xarray-einstats/package.py b/packages/py-xarray-einstats/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..520eb406167ef5622f5cd8d713830b4a4cd2f72a
--- /dev/null
+++ b/packages/py-xarray-einstats/package.py
@@ -0,0 +1,23 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyXarrayEinstats(PythonPackage):
+    """Stats, linear algebra and einops for xarray"""
+
+    homepage = "https://github.com/arviz-devs/xarray-einstats"
+    pypi = "xarray_einstats/xarray_einstats-0.8.0.tar.gz"
+
+    version("0.8.0", sha256="7f1573f9bd4d60d6e7ed9fd27c4db39da51ec49bf8ba654d4602a139a6309d7f")
+    version("0.7.0", sha256="2d7b571b3bbad3cf2fd10c6c75fd949d247d14c29574184c8489d9d607278d38")
+    version("0.6.0", sha256="ace90601505cfbe2d374762e674557ed14e1725b024823372f7ef9fd237effad")
+
+    depends_on("python@3.10:", type=("build", "run"))
+    depends_on("py-flit-core@3.4:4", type="build")
+    depends_on("py-numpy@1.23:", type=("build", "run"))
+    depends_on("py-scipy@1.9:", type=("build", "run"))
+    depends_on("py-xarray@2022.09:", type=("build", "run"))
diff --git a/packages/py-zuko/package.py b/packages/py-zuko/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c2222731d6865b11507c988baa21701e72704a1
--- /dev/null
+++ b/packages/py-zuko/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyZuko(PythonPackage):
+    """Python package that implements normalizing flows in PyTorch."""
+
+    homepage = "https://github.com/probabilists/zuko"
+    pypi = "zuko/zuko-1.3.1.tar.gz"
+
+    version("1.3.1", "00f246802d3f486183185529ba22e0b2bf691397e03b28150a5cf713fa0da758")
+
+    depends_on("python@3.9:", type=("build", "run"))
+    depends_on("py-setuptools", type="build")
+    depends_on("py-numpy@1.20.0:", type=("build", "run"))
+    depends_on("py-torch@1.12.0:", type=("build", "run"))
diff --git a/packages/python/cpython-windows-externals.patch b/packages/python/cpython-windows-externals.patch
new file mode 100644
index 0000000000000000000000000000000000000000..c3bcce983f0ea58112ca6efca38c6e4677d78de2
--- /dev/null
+++ b/packages/python/cpython-windows-externals.patch
@@ -0,0 +1,28 @@
+diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat
+index b5a44e3..52941c7 100644
+--- a/PCbuild/get_externals.bat
++++ b/PCbuild/get_externals.bat
+@@ -76,7 +76,7 @@ for %%e in (%libraries%) do (
+ echo.Fetching external binaries...
+ 
+ set binaries=
+-if NOT "%IncludeLibffi%"=="false"  set binaries=%binaries% libffi
++if NOT "%IncludeLibffi%"=="false"  set binaries=%binaries% libffi-3.3.0
+ if NOT "%IncludeSSL%"=="false"     set binaries=%binaries% openssl-bin-1.1.1k-1
+ if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.9.0
+ if NOT "%IncludeSSLSrc%"=="false"  set binaries=%binaries% nasm-2.11.06
+diff --git a/PCbuild/python.props b/PCbuild/python.props
+index 419d5eb..c66fb07 100644
+--- a/PCbuild/python.props
++++ b/PCbuild/python.props
+@@ -59,8 +59,8 @@
+     <sqlite3Dir>$(ExternalsDir)sqlite-3.35.5.0\</sqlite3Dir>
+     <bz2Dir>$(ExternalsDir)bzip2-1.0.6\</bz2Dir>
+     <lzmaDir>$(ExternalsDir)xz-5.2.2\</lzmaDir>
+-    <libffiDir>$(ExternalsDir)libffi\</libffiDir>
+-    <libffiOutDir>$(ExternalsDir)libffi\$(ArchName)\</libffiOutDir>
++    <libffiDir>$(ExternalsDir)libffi-3.3.0\</libffiDir>
++    <libffiOutDir>$(ExternalsDir)libffi-3.3.0\$(ArchName)\</libffiOutDir>
+     <libffiIncludeDir>$(libffiOutDir)include</libffiIncludeDir>
+     <opensslDir>$(ExternalsDir)openssl-1.1.1k\</opensslDir>
+     <opensslOutDir>$(ExternalsDir)openssl-bin-1.1.1k-1\$(ArchName)\</opensslOutDir>
diff --git a/packages/python/curses.patch b/packages/python/curses.patch
new file mode 100644
index 0000000000000000000000000000000000000000..b83ec60e027280cefe4e9913b6c8dfada4f8a901
--- /dev/null
+++ b/packages/python/curses.patch
@@ -0,0 +1,13 @@
+diff --git a/setup.py b/setup.py
+index 85a2b26357..8c83b9f175 100644
+--- a/setup.py
++++ b/setup.py
+@@ -1088,7 +1088,7 @@ def detect_readline_curses(self):
+             if ret == 0:
+                 with open(tmpfile) as fp:
+                     for ln in fp:
+-                        if 'curses' in ln:
++                        if 'libcurses' in ln or 'libncurses' in ln:
+                             readline_termcap_library = re.sub(
+                                 r'.*lib(n?cursesw?)\.so.*', r'\1', ln
+                             ).rstrip()
diff --git a/packages/python/fj-rpath-3.1.patch b/packages/python/fj-rpath-3.1.patch
new file mode 100644
index 0000000000000000000000000000000000000000..d25b58da77f6e2fda67d3fd5402d326382d9ab43
--- /dev/null
+++ b/packages/python/fj-rpath-3.1.patch
@@ -0,0 +1,13 @@
+--- a/Lib/distutils/unixccompiler.py	2009-05-09 21:55:12.000000000 +1000
++++ b/Lib/distutils/unixccompiler.py	2017-05-13 14:30:18.077518999 +1000
+@@ -215,7 +211,8 @@
+         return "-L" + dir
+
+     def _is_gcc(self, compiler_name):
+-        return "gcc" in compiler_name or "g++" in compiler_name
++        return "gcc" in compiler_name or "g++" in compiler_name \
++        or "fcc" in compiler_name or "FCC" in compiler_name
+
+     def runtime_library_dir_option(self, dir):
+         # XXX Hackish, at the very least.  See Python bug #445902:
+
diff --git a/packages/python/fj-rpath-3.9.patch b/packages/python/fj-rpath-3.9.patch
new file mode 100644
index 0000000000000000000000000000000000000000..1542b367e94257d46a39a59c903fdb46095e34ab
--- /dev/null
+++ b/packages/python/fj-rpath-3.9.patch
@@ -0,0 +1,11 @@
+--- spack-src/Lib/distutils/unixccompiler.py.org	2022-01-31 14:42:34.000000000 +0900
++++ spack-src/Lib/distutils/unixccompiler.py	2022-01-31 14:43:19.000000000 +0900
+@@ -212,7 +212,7 @@
+ 
+     def _is_gcc(self, compiler_name):
+         # clang uses same syntax for rpath as gcc
+-        return any(name in compiler_name for name in ("gcc", "g++", "clang"))
++        return any(name in compiler_name for name in ("gcc", "g++", "clang", "fcc", "FCC"))
+ 
+     def runtime_library_dir_option(self, dir):
+         # XXX Hackish, at the very least.  See Python bug #445902:
diff --git a/packages/python/intel-3.7.patch b/packages/python/intel-3.7.patch
new file mode 100644
index 0000000000000000000000000000000000000000..f2277624af27a7567fc73bed8211809e283ee88d
--- /dev/null
+++ b/packages/python/intel-3.7.patch
@@ -0,0 +1,38 @@
+From 87ed388f41d761ddddc8447e5104569f2436c005 Mon Sep 17 00:00:00 2001
+From: Victor Stinner <vstinner@python.org>
+Date: Fri, 11 Oct 2019 15:13:51 +0200
+Subject: [PATCH] bpo-37415: Fix stdatomic.h header check for ICC compiler
+
+Fix stdatomic.h header check for ICC compiler: the ICC implementation
+lacks atomic_uintptr_t type which is needed by Python.
+
+Test:
+
+* atomic_int and atomic_uintptr_t types
+* atomic_load_explicit() and atomic_store_explicit()
+* memory_order_relaxed and memory_order_seq_cst constants
+
+But don't test ATOMIC_VAR_INIT(): it's not used in Python.
+---
+ configure                                     | 7 +++++--
+ 1 file changed, 5 insertions(+), 2 deletions(-)
+
+diff --git a/configure b/configure
+index f1979c1b8124c..1b30a848a77e7 100755
+--- a/configure
++++ b/configure
+@@ -16734,9 +16722,12 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ 
+ 
+     #include <stdatomic.h>
+-    atomic_int value = ATOMIC_VAR_INIT(1);
++    atomic_int int_var;
++    atomic_uintptr_t uintptr_var;
+     int main() {
+-      int loaded_value = atomic_load(&value);
++      atomic_store_explicit(&int_var, 5, memory_order_relaxed);
++      atomic_store_explicit(&uintptr_var, 0, memory_order_relaxed);
++      int loaded_value = atomic_load_explicit(&int_var, memory_order_seq_cst);
+       return 0;
+     }
+ 
diff --git a/packages/python/package.py b/packages/python/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b9add7eb58c0785fdbef14b4d74bb5c4c253272
--- /dev/null
+++ b/packages/python/package.py
@@ -0,0 +1,1327 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import glob
+import json
+import os
+import platform
+import re
+import subprocess
+import sys
+from shutil import copy
+from typing import Dict, List
+
+import llnl.util.tty as tty
+from llnl.util.lang import dedupe
+
+from spack.build_environment import dso_suffix, stat_suffix
+from spack.package import *
+from spack.util.prefix import Prefix
+
+
+def make_pyvenv_cfg(python_spec: "spack.spec.Spec", venv_prefix: str) -> str:
+    """Make a pyvenv_cfg file for a given (real) python command and venv prefix."""
+    python_cmd = python_spec.command.path
+    lines = [
+        # directory containing python command
+        f"home = {os.path.dirname(python_cmd)}",
+        # venv should not allow site packages from the real python to be loaded
+        "include-system-site-packages = false",
+        # version of the python command
+        f"version = {python_spec.version}",
+        # the path to the python command
+        f"executable = {python_cmd}",
+        # command "used" to create the pyvenv.cfg
+        f"command = {python_cmd} -m venv --without-pip {venv_prefix}",
+    ]
+
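+    # Rendered as plain "key = value" lines, for example (illustrative values):
+    #   home = /path/to/python/bin
+    #   include-system-site-packages = false
+    #   version = 3.11.9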
+    return "\n".join(lines) + "\n"
+
+
+class Python(Package):
+    """The Python programming language."""
+
+    homepage = "https://www.python.org/"
+    url = "https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tgz"
+    list_url = "https://www.python.org/ftp/python/"
+    list_depth = 1
+    tags = ["windows"]
+
+    maintainers("skosukhin", "scheibelp")
+
+    phases = ["configure", "build", "install"]
+
+    #: phase
+    install_targets = ["install"]
+    build_targets: List[str] = []
+
+    license("0BSD")
+
+    version("3.13.0", sha256="12445c7b3db3126c41190bfdc1c8239c39c719404e844babbd015a1bc3fafcd4")
+    version("3.12.5", sha256="38dc4e2c261d49c661196066edbfb70fdb16be4a79cc8220c224dfeb5636d405")
+    version("3.12.4", sha256="01b3c1c082196f3b33168d344a9c85fb07bfe0e7ecfe77fee4443420d1ce2ad9")
+    version("3.12.3", sha256="a6b9459f45a6ebbbc1af44f5762623fa355a0c87208ed417628b379d762dddb0")
+    version("3.12.2", sha256="a7c4f6a9dc423d8c328003254ab0c9338b83037bd787d680826a5bf84308116e")
+    version("3.12.1", sha256="d01ec6a33bc10009b09c17da95cc2759af5a580a7316b3a446eb4190e13f97b2")
+    version("3.12.0", sha256="51412956d24a1ef7c97f1cb5f70e185c13e3de1f50d131c0aac6338080687afb")
+    # begin EBRAINS (added): add version
+    version("3.11.10", sha256="92f2faf242681bfa406d53a51e17d42c5373affe23a130cd9697e132ef574706")
+    # end EBRAINS
+    version("3.11.9", sha256="e7de3240a8bc2b1e1ba5c81bf943f06861ff494b69fda990ce2722a504c6153d")
+    version("3.11.8", sha256="d3019a613b9e8761d260d9ebe3bd4df63976de30464e5c0189566e1ae3f61889")
+    version("3.11.7", sha256="068c05f82262e57641bd93458dfa883128858f5f4997aad7a36fd25b13b29209")
+    version("3.11.6", sha256="c049bf317e877cbf9fce8c3af902436774ecef5249a29d10984ca3a37f7f4736")
+    version("3.11.5", sha256="a12a0a013a30b846c786c010f2c19dd36b7298d888f7c4bd1581d90ce18b5e58")
+    version("3.11.4", sha256="85c37a265e5c9dd9f75b35f954e31fbfc10383162417285e30ad25cc073a0d63")
+    version("3.11.3", sha256="1a79f3df32265d9e6625f1a0b31c28eb1594df911403d11f3320ee1da1b3e048")
+    version("3.11.2", sha256="2411c74bda5bbcfcddaf4531f66d1adc73f247f529aee981b029513aefdbf849")
+    version("3.11.1", sha256="baed518e26b337d4d8105679caf68c5c32630d702614fc174e98cb95c46bdfa4")
+    version("3.11.0", sha256="64424e96e2457abbac899b90f9530985b51eef2905951febd935f0e73414caeb")
+    version("3.10.14", sha256="cefea32d3be89c02436711c95a45c7f8e880105514b78680c14fe76f5709a0f6")
+    version("3.10.13", sha256="698ec55234c1363bd813b460ed53b0f108877c7a133d48bde9a50a1eb57b7e65")
+    version("3.10.12", sha256="a43cd383f3999a6f4a7db2062b2fc9594fefa73e175b3aedafa295a51a7bb65c")
+    version("3.10.11", sha256="f3db31b668efa983508bd67b5712898aa4247899a346f2eb745734699ccd3859")
+    version("3.10.10", sha256="fba64559dde21ebdc953e4565e731573bb61159de8e4d4cedee70fb1196f610d")
+    version("3.10.9", sha256="4ccd7e46c8898f4c7862910a1703aa0e63525913a519abb2f55e26220a914d88")
+    version("3.10.8", sha256="f400c3fb394b8bef1292f6dc1292c5fadc3533039a5bc0c3e885f3e16738029a")
+    version("3.10.7", sha256="1b2e4e2df697c52d36731666979e648beeda5941d0f95740aafbf4163e5cc126")
+    version("3.10.6", sha256="848cb06a5caa85da5c45bd7a9221bb821e33fc2bdcba088c127c58fad44e6343")
+    version("3.10.5", sha256="18f57182a2de3b0be76dfc39fdcfd28156bb6dd23e5f08696f7492e9e3d0bf2d")
+    version("3.10.4", sha256="f3bcc65b1d5f1dc78675c746c98fcee823c038168fc629c5935b044d0911ad28")
+    version("3.10.3", sha256="5a3b029bad70ba2a019ebff08a65060a8b9b542ffc1a83c697f1449ecca9813b")
+    version("3.10.2", sha256="3c0ede893011319f9b0a56b44953a3d52c7abf9657c23fb4bc9ced93b86e9c97")
+    version("3.10.1", sha256="b76117670e7c5064344b9c138e141a377e686b9063f3a8a620ff674fa8ec90d3")
+    version("3.10.0", sha256="c4e0cbad57c90690cb813fb4663ef670b4d0f587d8171e2c42bd4c9245bd2758")
+    version("3.9.19", sha256="f5f9ec8088abca9e399c3b62fd8ef31dbd2e1472c0ccb35070d4d136821aaf71")
+    version("3.9.18", sha256="504ce8cfd59addc04c22f590377c6be454ae7406cb1ebf6f5a350149225a9354")
+    version("3.9.17", sha256="8ead58f669f7e19d777c3556b62fae29a81d7f06a7122ff9bc57f7dd82d7e014")
+    version("3.9.16", sha256="1ad539e9dbd2b42df714b69726e0693bc6b9d2d2c8e91c2e43204026605140c5")
+    version("3.9.15", sha256="48d1ccb29d5fbaf1fb8f912271d09f7450e426d4dfe95978ef6aaada70ece4d8")
+    version("3.9.14", sha256="9201836e2c16361b2b7408680502393737d44f227333fe2e5729c7d5f6041675")
+    version("3.9.13", sha256="829b0d26072a44689a6b0810f5b4a3933ee2a0b8a4bfc99d7c5893ffd4f97c44")
+    version("3.9.12", sha256="70e08462ebf265012bd2be88a63d2149d880c73e53f1712b7bbbe93750560ae8")
+    version("3.9.11", sha256="3442400072f582ac2f0df30895558f08883b416c8c7877ea55d40d00d8a93112")
+    version("3.9.10", sha256="1aa9c0702edbae8f6a2c95f70a49da8420aaa76b7889d3419c186bfc8c0e571e")
+    version("3.9.9", sha256="2cc7b67c1f3f66c571acc42479cdf691d8ed6b47bee12c9b68430413a17a44ea")
+    version("3.9.8", sha256="7447fb8bb270942d620dd24faa7814b1383b61fa99029a240025fd81c1db8283")
+    version("3.9.7", sha256="a838d3f9360d157040142b715db34f0218e535333696a5569dc6f854604eb9d1")
+    version("3.9.6", sha256="d0a35182e19e416fc8eae25a3dcd4d02d4997333e4ad1f2eee6010aadc3fe866")
+    version("3.9.5", sha256="e0fbd5b6e1ee242524430dee3c91baf4cbbaba4a72dd1674b90fda87b713c7ab")
+    version("3.9.4", sha256="66c4de16daa74a825cf9da9ddae1fe020b72c3854b73b1762011cc33f9e4592f")
+    version("3.9.3", sha256="3afeb61a45b5a2e6f1c0f621bd8cf925a4ff406099fdb3d8c97b993a5f43d048")
+    version("3.9.2", sha256="7899e8a6f7946748830d66739f2d8f2b30214dad956e56b9ba216b3de5581519")
+    version("3.9.1", sha256="29cb91ba038346da0bd9ab84a0a55a845d872c341a4da6879f462e94c741f117")
+    version("3.9.0", sha256="df796b2dc8ef085edae2597a41c1c0a63625ebd92487adaef2fed22b567873e8")
+    version("3.8.19", sha256="c7fa55a36e5c7a19ec37d8f90f60a2197548908c9ac8b31e7c0dbffdd470eeac")
+    version("3.8.18", sha256="7c5df68bab1be81a52dea0cc2e2705ea00553b67107a301188383d7b57320b16")
+    version("3.8.17", sha256="def428fa6cf61b66bcde72e3d9f7d07d33b2e4226f04f9d6fce8384c055113ae")
+    version("3.8.16", sha256="71ca9d935637ed2feb59e90a368361dc91eca472a90acb1d344a2e8178ccaf10")
+    version("3.8.15", sha256="924d46999df82aa2eaa1de5ca51d6800ffb56b4bf52486a28f40634e3362abc4")
+    version("3.8.14", sha256="41f959c480c59211feb55d5a28851a56c7e22d02ef91035606ebb21011723c31")
+    version("3.8.13", sha256="903b92d76354366b1d9c4434d0c81643345cef87c1600adfa36095d7b00eede4")
+    version("3.8.12", sha256="316aa33f3b7707d041e73f246efedb297a70898c4b91f127f66dc8d80c596f1a")
+    version("3.8.11", sha256="b77464ea80cec14581b86aeb7fb2ff02830e0abc7bcdc752b7b4bdfcd8f3e393")
+    version("3.8.10", sha256="b37ac74d2cbad2590e7cd0dd2b3826c29afe89a734090a87bf8c03c45066cb65")
+    version("3.8.9", sha256="9779ec1df000bf86914cdd40860b88da56c1e61db59d37784beca14a259ac9e9")
+    version("3.8.8", sha256="76c0763f048e4f9b861d24da76b7dd5c7a3ba7ec086f40caedeea359263276f7")
+    version("3.8.7", sha256="20e5a04262f0af2eb9c19240d7ec368f385788bba2d8dfba7e74b20bab4d2bac")
+    version("3.8.6", sha256="313562ee9986dc369cd678011bdfd9800ef62fbf7b1496228a18f86b36428c21")
+    version("3.8.5", sha256="015115023c382eb6ab83d512762fe3c5502fa0c6c52ffebc4831c4e1a06ffc49")
+    version("3.8.4", sha256="32c4d9817ef11793da4d0d95b3191c4db81d2e45544614e8449255ca9ae3cc18")
+    version("3.8.3", sha256="6af6d4d2e010f9655518d0fc6738c7ff7069f10a4d2fbd55509e467f092a8b90")
+    version("3.8.2", sha256="e634a7a74776c2b89516b2e013dda1728c89c8149b9863b8cea21946daf9d561")
+    version("3.8.1", sha256="c7cfa39a43b994621b245e029769e9126caa2a93571cee2e743b213cceac35fb")
+    version("3.8.0", sha256="f1069ad3cae8e7ec467aa98a6565a62a48ef196cb8f1455a245a08db5e1792df")
+    version(
+        "3.7.17",
+        sha256="fd50161bc2a04f4c22a0971ff0f3856d98b4bf294f89740a9f06b520aae63b49",
+        deprecated=True,
+    )
+    version(
+        "3.7.16",
+        sha256="0cf2da07fa464636755215415909e22eb1d058817af4824bc15af8390d05fb38",
+        deprecated=True,
+    )
+    version(
+        "3.7.15",
+        sha256="cf2993798ae8430f3af3a00d96d9fdf320719f4042f039380dca79967c25e436",
+        deprecated=True,
+    )
+    version(
+        "3.7.14",
+        sha256="82b2abf8978caa61a9011d166eede831b32de9cbebc0db8162900fa23437b709",
+        deprecated=True,
+    )
+    version(
+        "3.7.13",
+        sha256="e405417f50984bc5870c7e7a9f9aeb93e9d270f5ac67f667a0cd3a09439682b5",
+        deprecated=True,
+    )
+    version(
+        "3.7.12",
+        sha256="33b4daaf831be19219659466d12645f87ecec6eb21d4d9f9711018a7b66cce46",
+        deprecated=True,
+    )
+    version(
+        "3.7.11",
+        sha256="b4fba32182e16485d0a6022ba83c9251e6a1c14676ec243a9a07d3722cd4661a",
+        deprecated=True,
+    )
+    version(
+        "3.7.10",
+        sha256="c9649ad84dc3a434c8637df6963100b2e5608697f9ba56d82e3809e4148e0975",
+        deprecated=True,
+    )
+    version(
+        "3.7.9",
+        sha256="39b018bc7d8a165e59aa827d9ae45c45901739b0bbb13721e4f973f3521c166a",
+        deprecated=True,
+    )
+    version(
+        "3.7.8",
+        sha256="0e25835614dc221e3ecea5831b38fa90788b5389b99b675a751414c858789ab0",
+        deprecated=True,
+    )
+    version(
+        "3.7.7",
+        sha256="8c8be91cd2648a1a0c251f04ea0bb4c2a5570feb9c45eaaa2241c785585b475a",
+        deprecated=True,
+    )
+    version(
+        "3.7.6",
+        sha256="aeee681c235ad336af116f08ab6563361a0c81c537072c1b309d6e4050aa2114",
+        deprecated=True,
+    )
+    version(
+        "3.7.5",
+        sha256="8ecc681ea0600bbfb366f2b173f727b205bb825d93d2f0b286bc4e58d37693da",
+        deprecated=True,
+    )
+    version(
+        "3.7.4",
+        sha256="d63e63e14e6d29e17490abbe6f7d17afb3db182dbd801229f14e55f4157c4ba3",
+        deprecated=True,
+    )
+    version(
+        "3.7.3",
+        sha256="d62e3015f2f89c970ac52343976b406694931742fbde2fed8d1ce8ebb4e1f8ff",
+        deprecated=True,
+    )
+    version(
+        "3.7.2",
+        sha256="f09d83c773b9cc72421abba2c317e4e6e05d919f9bcf34468e192b6a6c8e328d",
+        deprecated=True,
+    )
+    version(
+        "3.7.1",
+        sha256="36c1b81ac29d0f8341f727ef40864d99d8206897be96be73dc34d4739c9c9f06",
+        deprecated=True,
+    )
+    version(
+        "3.7.0",
+        sha256="85bb9feb6863e04fb1700b018d9d42d1caac178559ffa453d7e6a436e259fd0d",
+        deprecated=True,
+    )
+
+    depends_on("c", type="build")  # generated
+    depends_on("cxx", type="build")  # generated
+
+    extendable = True
+
+    # Variants to avoid cyclical dependencies for concretizer
+    variant("libxml2", default=True, description="Use a gettext library build with libxml2")
+
+    variant(
+        "debug", default=False, description="debug build with extra checks (this is high overhead)"
+    )
+
+    variant("shared", default=True, description="Enable shared libraries")
+    variant("pic", default=True, description="Produce position-independent code (for shared libs)")
+    variant(
+        "optimizations",
+        default=False,
+        description="Enable expensive build-time optimizations, if available",
+    )
+    # See https://legacy.python.org/dev/peps/pep-0394/
+    variant(
+        "pythoncmd",
+        default=sys.platform != "win32",
+        description="Symlink 'python3' executable to 'python' (not PEP 394 compliant)",
+    )
+
+    # Optional Python modules
+    variant("readline", default=sys.platform != "win32", description="Build readline module")
+    variant("ssl", default=True, description="Build ssl module")
+    variant("sqlite3", default=True, description="Build sqlite3 module")
+    variant("dbm", default=True, description="Build dbm module")
+    variant("nis", default=False, description="Build nis module")
+    variant("zlib", default=True, description="Build zlib module")
+    variant("bz2", default=True, description="Build bz2 module")
+    variant("lzma", default=True, description="Build lzma module")
+    variant("pyexpat", default=True, description="Build pyexpat module")
+    variant("ctypes", default=True, description="Build ctypes module")
+    variant("tkinter", default=False, description="Build tkinter module")
+    variant("uuid", default=True, description="Build uuid module")
+    variant("tix", default=False, description="Build Tix module", when="+tkinter")
+    variant("crypt", default=True, description="Build crypt module", when="@:3.12 platform=linux")
+    variant("crypt", default=True, description="Build crypt module", when="@:3.12 platform=darwin")
+
+    if sys.platform != "win32":
+        depends_on("gmake", type="build")
+        depends_on("pkgconfig@0.9.0:", type="build")
+        depends_on("gettext +libxml2", when="+libxml2")
+        depends_on("gettext ~libxml2", when="~libxml2")
+
+        # Optional dependencies
+        # See detect_modules() in setup.py for details
+        depends_on("readline", when="+readline")
+        depends_on("ncurses", when="+readline")
+        depends_on("openssl", when="+ssl")
+        # https://docs.python.org/3/whatsnew/3.7.html#build-changes
+        depends_on("openssl@1.0.2:", when="+ssl")
+        # https://docs.python.org/3.10/whatsnew/3.10.html#build-changes
+        depends_on("openssl@1.1.1:", when="@3.10:+ssl")
+        depends_on("sqlite@3.0.8:", when="@:3.9+sqlite3")
+        # https://docs.python.org/3.10/whatsnew/3.10.html#build-changes
+        depends_on("sqlite@3.7.15:", when="@3.10:+sqlite3")
+        depends_on("gdbm", when="+dbm")  # alternatively ndbm or berkeley-db
+        depends_on("libnsl", when="+nis")
+        depends_on("zlib-api", when="+zlib")
+        depends_on("bzip2", when="+bz2")
+        depends_on("xz libs=shared", when="+lzma")
+        depends_on("expat", when="+pyexpat")
+        depends_on("libffi", when="+ctypes")
+        # https://docs.python.org/3/whatsnew/3.11.html#build-changes
+        depends_on("tk@8.5.12:", when="@3.11: +tkinter")
+        depends_on("tk", when="+tkinter")
+        depends_on("tcl@8.5.12:", when="@3.11: +tkinter")
+        depends_on("tcl", when="+tkinter")
+        depends_on("uuid", when="+uuid")
+        depends_on("tix", when="+tix")
+        depends_on("libxcrypt", when="+crypt")
+
+    # Python needs to be patched to build extensions w/ mixed C/C++ code:
+    # https://github.com/NixOS/nixpkgs/pull/19585/files
+    # https://bugs.python.org/issue1222585
+    #
+    # NOTE: This patch puts Spack's default Python installation out of
+    # sync with standard Python installs. If you're using such an
+    # installation as an external and encountering build issues with mixed
+    # C/C++ modules, consider installing a Spack-managed Python with
+    # this patch instead. For more information, see:
+    # https://github.com/spack/spack/pull/16856
+    patch("python-3.7.2-distutils-C++.patch", when="@3.7.2")
+    patch("python-3.7.3-distutils-C++.patch", when="@3.7.3")
+    patch("python-3.7.4+-distutils-C++.patch", when="@3.7.4:3.10")
+    patch("python-3.7.4+-distutils-C++-testsuite.patch", when="@3.7.4:3.11")
+    patch("python-3.11-distutils-C++.patch", when="@3.11.0:3.11")
+    patch("cpython-windows-externals.patch", when="@:3.9.6 platform=windows")
+    patch("tkinter-3.7.patch", when="@3.7 platform=darwin")
+    # Patch the setup script to deny that tcl/x11 exists rather than allowing
+    # autodetection of (possibly broken) system components
+    patch("tkinter-3.8.patch", when="@3.8:3.9 ~tkinter")
+    patch("tkinter-3.10.patch", when="@3.10.0:3.10 ~tkinter")
+    patch("tkinter-3.11.patch", when="@3.11.0:3.11 ~tkinter")
+
+    # Ensure that distutils chooses correct compiler option for RPATH:
+    patch("rpath-non-gcc.patch", when="@:3.11")
+
+    # Ensure that distutils chooses correct compiler option for RPATH on fj:
+    patch("fj-rpath-3.1.patch", when="@:3.9.7,3.10.0 %fj")
+    patch("fj-rpath-3.9.patch", when="@3.9.8:3.9,3.10.1:3.11 %fj")
+
+    # Fixes build with the Intel compilers
+    # https://github.com/python/cpython/pull/16717
+    patch("intel-3.7.patch", when="@3.7.1:3.7.5 %intel")
+
+    # begin EBRAINS (added)
+    # Fix curses/readline detection logic to not be triggered by path name
+    # https://github.com/spack/spack/issues/34872
+    patch("curses.patch", when="@:3.11")
+    # end EBRAINS
+
+    # CPython tries to build an Objective-C file with GCC's C frontend
+    # https://github.com/spack/spack/pull/16222
+    # https://github.com/python/cpython/pull/13306
+    conflicts(
+        "%gcc platform=darwin",
+        msg="CPython does not compile with GCC on macOS yet, use clang. "
+        "See: https://github.com/python/cpython/pull/13306",
+    )
+    conflicts("%nvhpc")
+
+    # https://bugs.python.org/issue45405
+    conflicts("@:3.7.12,3.8.0:3.8.12,3.9.0:3.9.7,3.10.0", when="%apple-clang@13:")
+
+    # See https://github.com/python/cpython/issues/106424
+    # datetime.now(timezone.utc) segfaults
+    conflicts("@3.9:", when="%oneapi@2022.2.1:2023")
+
+    # Used to cache various attributes that are expensive to compute
+    _config_vars: Dict[str, Dict[str, str]] = {}
+
+    # An in-source build with --enable-optimizations fails for python@3.X
+    build_directory = "spack-build"
+
+    executables = [r"^python\d?$"]
+
+    @classmethod
+    def determine_version(cls, exe):
+        # Newer versions of Python support `--version`,
+        # but older versions only support `-V`
+        # Output looks like:
+        #   Python 3.7.7
+        # On pre-production Ubuntu, this is also possible:
+        #   Python 3.10.2+
+        output = Executable(exe)("-V", output=str, error=str)
+        match = re.search(r"Python\s+([A-Za-z0-9_.-]+)", output)
+        return match.group(1) if match else None
+
+    @classmethod
+    def determine_variants(cls, exes, version_str):
+        python = Executable(exes[0])
+
+        variants = ""
+        for exe in exes:
+            if os.path.basename(exe) == "python":
+                variants += "+pythoncmd"
+                break
+        else:
+            variants += "~pythoncmd"
+
+        for module in [
+            "readline",
+            "sqlite3",
+            "dbm",
+            "nis",
+            "zlib",
+            "bz2",
+            "lzma",
+            "ctypes",
+            "tkinter",
+            "uuid",
+        ]:
+            try:
+                python("-c", "import " + module, error=os.devnull)
+                variants += "+" + module
+            except ProcessError:
+                variants += "~" + module
+
+        # Some variants enable multiple modules
+        try:
+            python("-c", "import ssl", error=os.devnull)
+            python("-c", "import hashlib", error=os.devnull)
+            variants += "+ssl"
+        except ProcessError:
+            variants += "~ssl"
+
+        try:
+            python("-c", "import xml.parsers.expat", error=os.devnull)
+            python("-c", "import xml.etree.ElementTree", error=os.devnull)
+            variants += "+pyexpat"
+        except ProcessError:
+            variants += "~pyexpat"
+
+        # Some variant names do not match module names
+        if "+tkinter" in variants:
+            try:
+                python("-c", "import tkinter.tix", error=os.devnull)
+                variants += "+tix"
+            except ProcessError:
+                variants += "~tix"
+
+        # Some modules are platform-dependent
+        if sys.platform != "win32":
+            try:
+                python("-c", "import crypt", error=os.devnull)
+                variants += "+crypt"
+            except ProcessError:
+                variants += "~crypt"
+
+        return variants
+
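+    # Example URL mapping (illustrative): version "3.11.4" resolves to
+    # https://www.python.org/ftp/python/3.11.4/Python-3.11.4.tgz, while a
+    # pre-release such as "3.12.0rc1" drops the alphabetic suffix from the
+    # directory component:
+    # https://www.python.org/ftp/python/3.12.0/Python-3.12.0rc1.tgz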
+    def url_for_version(self, version):
+        url = "https://www.python.org/ftp/python/{0}/Python-{1}.tgz"
+        return url.format(re.split("[a-z]", str(version))[0], version)
+
+    def patch(self):
+        # NOTE: Python's default installation procedure makes it possible for a
+        # user's local configurations to change the Spack installation.  In
+        # order to prevent this behavior for a full installation, we must
+        # modify the installation script so that it ignores user files.
+        ff = FileFilter("Makefile.pre.in")
+        ff.filter(
+            r"^(.*)setup\.py(.*)((build)|(install))(.*)$", r"\1setup.py\2 --no-user-cfg \3\6"
+        )
+
+    def setup_build_environment(self, env):
+        spec = self.spec
+
+        # TODO: Python has incomplete support for Python modules with mixed
+        # C/C++ source, and patches are required to enable building for these
+        # modules. All Python versions without a viable patch are installed
+        # with a warning message about this potentially erroneous behavior.
+        if not spec.satisfies("@3.7.2:"):
+            tty.warn(
+                (
+                    'Python v{0} does not have the C++ "distutils" patch; '
+                    "errors may occur when installing Python modules w/ "
+                    "mixed C/C++ source files."
+                ).format(self.version)
+            )
+
+        env.unset("PYTHONPATH")
+        env.unset("PYTHONHOME")
+
+        # avoid build error on fugaku
+        if spec.satisfies("@3.10.0 arch=linux-rhel8-a64fx"):
+            if spec.satisfies("%gcc") or spec.satisfies("%fj"):
+                env.unset("LC_ALL")
+
+        # https://github.com/python/cpython/issues/87275
+        if spec.satisfies("@:3.9.5 +optimizations %apple-clang"):
+            xcrun = Executable("/usr/bin/xcrun")
+            env.set("LLVM_AR", xcrun("-find", "ar", output=str).strip())
+
+    def flag_handler(self, name, flags):
+        # python 3.8 requires -fwrapv when compiled with intel
+        if self.spec.satisfies("@3.8: %intel"):
+            if name == "cflags":
+                flags.append("-fwrapv")
+
+        # Fix for following issues for python with aocc%3.2.0:
+        # https://github.com/spack/spack/issues/29115
+        # https://github.com/spack/spack/pull/28708
+        if self.spec.satisfies("%aocc@3.2.0"):
+            if name == "cflags":
+                flags.extend(["-mllvm", "-disable-indvar-simplify=true"])
+
+        # allow flags to be passed through compiler wrapper
+        return (flags, None, None)
+
+    @property
+    def plat_arch(self):
+        """
+        String referencing platform architecture
+        filtered through Python's Windows build file
+        architecture support map
+
+        Note: this property only really makes sense on Windows,
+        but it could be overridden to support cross-compilation.
+        """
+
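+        # For example, platform.machine() returns "AMD64" on 64-bit Windows,
+        # which is mapped to the MSBuild platform name "x64" below.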
+        arch_map = {"AMD64": "x64", "x86": "Win32", "IA64": "Win32", "EM64T": "Win32"}
+        arch = platform.machine()
+        if arch in arch_map:
+            arch = arch_map[arch]
+        return arch
+
+    @property
+    def win_build_params(self):
+        """
+        Arguments that must be passed to the Python build batch script
+        to configure it for the spec and the host system.
+        A number of these toggle optional MSBuild projects that
+        correspond directly to the Python feature of the same name.
+        """
+        args = []
+        args.append("-p %s" % self.plat_arch)
+        if self.spec.satisfies("+debug"):
+            args.append("-d")
+        if self.spec.satisfies("~ctypes"):
+            args.append("--no-ctypes")
+        if self.spec.satisfies("~ssl"):
+            args.append("--no-ssl")
+        if self.spec.satisfies("~tkinter"):
+            args.append("--no-tkinter")
+        return args
+
+    def win_installer(self, prefix):
+        """
+        Python on Windows does not export an install target
+        so we must handcraft one here. This structure
+        directly mimics the install tree of the Python
+        Installer on Windows.
+
+        Parameters:
+            prefix (str): Install prefix for package
+        """
+        proj_root = self.stage.source_path
+        pcbuild_root = os.path.join(proj_root, "PCbuild")
+        build_root = os.path.join(pcbuild_root, platform.machine().lower())
+        include_dir = os.path.join(proj_root, "Include")
+        copy_tree(include_dir, prefix.include)
+        doc_dir = os.path.join(proj_root, "Doc")
+        copy_tree(doc_dir, prefix.Doc)
+        tools_dir = os.path.join(proj_root, "Tools")
+        copy_tree(tools_dir, prefix.Tools)
+        lib_dir = os.path.join(proj_root, "Lib")
+        copy_tree(lib_dir, prefix.Lib)
+        pyconfig = os.path.join(proj_root, "PC", "pyconfig.h")
+        copy(pyconfig, prefix.include)
+        shared_libraries = []
+        shared_libraries.extend(glob.glob("%s\\*.exe" % build_root))
+        shared_libraries.extend(glob.glob("%s\\*.dll" % build_root))
+        shared_libraries.extend(glob.glob("%s\\*.pyd" % build_root))
+        os.makedirs(prefix.DLLs)
+        for lib in shared_libraries:
+            file_name = os.path.basename(lib)
+            if (
+                file_name.endswith(".exe")
+                or (file_name.endswith(".dll") and "python" in file_name)
+                or "vcruntime" in file_name
+            ):
+                copy(lib, prefix)
+            else:
+                copy(lib, prefix.DLLs)
+        static_libraries = glob.glob("%s\\*.lib" % build_root)
+        os.makedirs(prefix.libs, exist_ok=True)
+        for lib in static_libraries:
+            copy(lib, prefix.libs)
+
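+    # Sketch of the arguments produced below for a hypothetical spec like
+    # "python@3.11 +optimizations +shared +ssl": the returned list would
+    # contain entries such as "--enable-optimizations", "--with-lto" (or
+    # "--with-lto=thin" with recent clang/apple-clang), "--with-computed-gotos",
+    # "--enable-shared", "--without-ensurepip" and
+    # "--with-openssl=<openssl prefix>", plus CPPFLAGS/LDFLAGS pointing at the
+    # link-time dependencies.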
+    def configure_args(self):
+        spec = self.spec
+        config_args = []
+        cflags = []
+
+        # setup.py needs to be able to read the CPPFLAGS and LDFLAGS
+        # as it scans for the library and headers to build
+        link_deps = spec.dependencies(deptype="link")
+
+        if link_deps:
+            # Header files are often included assuming they reside in a
+            # subdirectory of prefix.include, e.g. #include <openssl/ssl.h>,
+            # which is why we don't use HeaderList here. The header files of
+            # libffi reside in prefix.lib but the configure script of Python
+            # finds them using pkg-config.
+            cppflags = " ".join("-I" + spec[dep.name].prefix.include for dep in link_deps)
+
+            # Currently, the only way to get SpecBuildInterface wrappers of the
+            # dependencies (which we need to get their 'libs') is to get them
+            # using spec.__getitem__.
+            ldflags = " ".join(spec[dep.name].libs.search_flags for dep in link_deps)
+
+            config_args.extend(["CPPFLAGS=" + cppflags, "LDFLAGS=" + ldflags])
+
+        if "+optimizations" in spec:
+            config_args.append("--enable-optimizations")
+            # Prefer thin LTO for faster compilation times.
+            if "@3.11.0: %clang@3.9:" in spec or "@3.11.0: %apple-clang@8:" in spec:
+                config_args.append("--with-lto=thin")
+            else:
+                config_args.append("--with-lto")
+            config_args.append("--with-computed-gotos")
+
+        if spec.satisfies("@3.7 %intel"):
+            config_args.append("--with-icc={0}".format(spack_cc))
+
+        if "+debug" in spec:
+            config_args.append("--with-pydebug")
+        else:
+            config_args.append("--without-pydebug")
+
+        if "+shared" in spec:
+            config_args.append("--enable-shared")
+        else:
+            config_args.append("--disable-shared")
+
+        config_args.append("--without-ensurepip")
+
+        if "+pic" in spec:
+            cflags.append(self.compiler.cc_pic_flag)
+
+        if "+ssl" in spec:
+            config_args.append("--with-openssl={0}".format(spec["openssl"].prefix))
+
+        if "+dbm" in spec:
+            # Default order is ndbm:gdbm:bdb
+            config_args.append("--with-dbmliborder=gdbm")
+        else:
+            config_args.append("--with-dbmliborder=")
+
+        if "+pyexpat" in spec:
+            config_args.append("--with-system-expat")
+        else:
+            config_args.append("--without-system-expat")
+
+        if self.version < Version("3.12.0"):
+            if "+ctypes" in spec:
+                config_args.append("--with-system-ffi")
+            else:
+                config_args.append("--without-system-ffi")
+
+        if "+tkinter" in spec:
+            config_args.extend(
+                [
+                    "--with-tcltk-includes=-I{0} -I{1}".format(
+                        spec["tcl"].prefix.include, spec["tk"].prefix.include
+                    ),
+                    "--with-tcltk-libs={0} {1}".format(
+                        spec["tcl"].libs.ld_flags, spec["tk"].libs.ld_flags
+                    ),
+                ]
+            )
+
+        # https://docs.python.org/3.8/library/sqlite3.html#f1
+        if spec.satisfies("+sqlite3 ^sqlite+dynamic_extensions"):
+            config_args.append("--enable-loadable-sqlite-extensions")
+
+        if spec.satisfies("%oneapi"):
+            cflags.append("-fp-model=strict")
+
+        if cflags:
+            config_args.append("CFLAGS={0}".format(" ".join(cflags)))
+
+        if self.version >= Version("3.12.0") and sys.platform == "darwin":
+            config_args.append("CURSES_LIBS={0}".format(spec["ncurses"].libs.link_flags))
+
+        return config_args
+
+    def configure(self, spec, prefix):
+        """Runs configure with the arguments specified in
+        :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`
+        and an appropriately set prefix.
+        """
+        with working_dir(self.stage.source_path, create=True):
+            if sys.platform == "win32":
+                pass
+            else:
+                options = getattr(self, "configure_flag_args", [])
+                options += ["--prefix={0}".format(prefix)]
+                options += self.configure_args()
+                configure(*options)
+
+    def build(self, spec, prefix):
+        """Makes the build targets specified by
+        :py:attr:`~.AutotoolsPackage.build_targets`
+        """
+        # Windows builds use a batch script to drive
+        # configure and build in one step
+        with working_dir(self.stage.source_path):
+            if sys.platform == "win32":
+                pcbuild_root = os.path.join(self.stage.source_path, "PCbuild")
+                builder_cmd = os.path.join(pcbuild_root, "build.bat")
+                try:
+                    subprocess.check_output(  # novermin
+                        " ".join([builder_cmd] + self.win_build_params), stderr=subprocess.STDOUT
+                    )
+                except subprocess.CalledProcessError as e:
+                    raise ProcessError(
+                        "Process exited with status %d" % e.returncode,
+                        long_message=e.output.decode("utf-8"),
+                    )
+            else:
+                # See https://autotools.io/automake/silent.html
+                params = ["V=1"]
+                params += self.build_targets
+                make(*params)
+
+    def install(self, spec, prefix):
+        """Makes the install targets specified by
+        :py:attr:`~.AutotoolsPackage.install_targets`
+        """
+        with working_dir(self.stage.source_path):
+            if sys.platform == "win32":
+                self.win_installer(prefix)
+            else:
+                # See https://github.com/python/cpython/issues/102007
+                make(*self.install_targets, parallel=False)
+
+    @run_after("install")
+    def filter_compilers(self):
+        """Run after install to tell the configuration files and Makefiles
+        to use the compilers that Spack built the package with.
+
+        If this isn't done, they'll have CC and CXX set to Spack's generic
+        cc and c++. We want them to be bound to whatever compiler
+        they were built with."""
+        if sys.platform == "win32":
+            return
+        kwargs = {"ignore_absent": True, "backup": False, "string": True}
+
+        filenames = [self.get_sysconfigdata_name(), self.config_vars["makefile_filename"]]
+
+        filter_file(spack_cc, self.compiler.cc, *filenames, **kwargs)
+        if spack_cxx and self.compiler.cxx:
+            filter_file(spack_cxx, self.compiler.cxx, *filenames, **kwargs)
+
+    @run_after("install")
+    def symlink(self):
+        if sys.platform == "win32":
+            return
+        spec = self.spec
+        prefix = self.prefix
+
+        if spec.satisfies("+pythoncmd"):
+            os.symlink(os.path.join(prefix.bin, "python3"), os.path.join(prefix.bin, "python"))
+            os.symlink(
+                os.path.join(prefix.bin, "python3-config"),
+                os.path.join(prefix.bin, "python-config"),
+            )
+
+    @run_after("install")
+    def install_python_gdb(self):
+        # https://devguide.python.org/gdb/
+        src = os.path.join("Tools", "gdb", "libpython.py")
+        if os.path.exists(src):
+            install(src, self.command.path + "-gdb.py")
+
+    @run_after("install")
+    @on_package_attributes(run_tests=True)
+    def import_tests(self):
+        """Test that basic Python functionality works."""
+
+        spec = self.spec
+
+        with working_dir("spack-test", create=True):
+            # Ensure that readline module works
+            if "+readline" in spec:
+                self.command("-c", "import readline")
+
+            # Ensure that ssl module works
+            if "+ssl" in spec:
+                self.command("-c", "import ssl")
+                self.command("-c", "import hashlib")
+
+            # Ensure that sqlite3 module works
+            if "+sqlite3" in spec:
+                self.command("-c", "import sqlite3")
+
+            # Ensure that dbm module works
+            if "+dbm" in spec:
+                self.command("-c", "import dbm")
+
+            # Ensure that nis module works
+            if "+nis" in spec:
+                self.command("-c", "import nis")
+
+            # Ensure that zlib module works
+            if "+zlib" in spec:
+                self.command("-c", "import zlib")
+
+            # Ensure that bz2 module works
+            if "+bz2" in spec:
+                self.command("-c", "import bz2")
+
+            # Ensure that lzma module works
+            if "+lzma" in spec:
+                self.command("-c", "import lzma")
+
+            # Ensure that pyexpat module works
+            if "+pyexpat" in spec:
+                self.command("-c", "import xml.parsers.expat")
+                self.command("-c", "import xml.etree.ElementTree")
+
+            # Ensure that ctypes module works
+            if "+ctypes" in spec:
+                self.command("-c", "import ctypes")
+
+            # Ensure that tkinter module works
+            # https://wiki.python.org/moin/TkInter
+            if "+tkinter" in spec:
+                # Only works if ForwardX11Trusted is enabled, i.e. `ssh -Y`
+                if "DISPLAY" in env:
+                    self.command("-c", "import tkinter; tkinter._test()")
+                else:
+                    self.command("-c", "import tkinter")
+
+            # Ensure that uuid module works
+            if "+uuid" in spec:
+                self.command("-c", "import uuid")
+
+            # Ensure that tix module works
+            if "+tix" in spec:
+                self.command("-c", "import tkinter.tix")
+
+            # Ensure that crypt module works
+            if "+crypt" in spec:
+                self.command("-c", "import crypt")
+
+    # ========================================================================
+    # Set up environment to make install easy for python extensions.
+    # ========================================================================
+
+    @property
+    def command(self):
+        """Returns the Python command, which may vary depending
+        on the version of Python and how it was installed.
+
+        In general, Python 3 only comes with a ``python3`` command. However, some
+        package managers will symlink ``python`` to ``python3``, while others
+        may contain ``python3.11``, ``python3.10``, and ``python3.9`` in the
+        same directory.
+
+        Returns:
+            Executable: the Python command
+        """
+        # We need to be careful here. If the user is using an externally
+        # installed python, several different commands could be located
+        # in the same directory. Be as specific as possible. Search for:
+        #
+        # * python3.11
+        # * python3
+        # * python
+        #
+        # in that order if using python@3.11.0, for example.
+        suffixes = [self.spec.version.up_to(2), self.spec.version.up_to(1), ""]
+        file_extension = "" if sys.platform != "win32" else ".exe"
+        patterns = [f"python{ver}{file_extension}" for ver in suffixes]
+        root = self.prefix.bin if sys.platform != "win32" else self.prefix
+        path = find_first(root, files=patterns)
+
+        if path is not None:
+            return Executable(path)
+
+        else:
+            # Give a last try at rhel8 platform python
+            if self.spec.external and self.prefix == "/usr" and self.spec.satisfies("os=rhel8"):
+                path = os.path.join(self.prefix, "libexec", "platform-python")
+                if os.path.exists(path):
+                    return Executable(path)
+
+        raise RuntimeError(
+            f"cannot to locate the '{self.name}' command in {root} or its subdirectories"
+        )
+
+    @property
+    def config_vars(self):
+        """Return a set of variable definitions associated with a Python installation.
+
+        Wrapper around various ``sysconfig`` functions. To see these variables on the
+        command line, run:
+
+        .. code-block:: console
+
+           $ python -m sysconfig
+
+        Returns:
+            dict: variable definitions
+        """
+        cmd = """
+import json
+from sysconfig import (
+    get_config_vars,
+    get_config_h_filename,
+    get_makefile_filename,
+    get_paths,
+)
+
+config = get_config_vars()
+config['config_h_filename'] = get_config_h_filename()
+config['makefile_filename'] = get_makefile_filename()
+config.update(get_paths())
+
+print(json.dumps(config))
+"""
+
+        dag_hash = self.spec.dag_hash()
+        lib_prefix = "lib" if sys.platform != "win32" else ""
+        if dag_hash not in self._config_vars:
+            # Default config vars
+            version = self.version.up_to(2)
+            if sys.platform == "win32":
+                version = str(version).split(".")[0]
+            config = {
+                # get_config_vars
+                "BINDIR": self.prefix.bin,
+                "CC": "cc",
+                "CONFINCLUDEPY": self.prefix.include.join("python{}").format(version),
+                "CXX": "c++",
+                "INCLUDEPY": self.prefix.include.join("python{}").format(version),
+                "LIBDEST": self.prefix.lib.join("python{}").format(version),
+                "LIBDIR": self.prefix.lib,
+                "LIBPL": self.prefix.lib.join("python{0}")
+                .join("config-{0}-{1}")
+                .format(version, sys.platform),
+                "LDLIBRARY": "{}python{}.{}".format(lib_prefix, version, dso_suffix),
+                "LIBRARY": "{}python{}.{}".format(lib_prefix, version, stat_suffix),
+                "LDSHARED": "cc",
+                "LDCXXSHARED": "c++",
+                "PYTHONFRAMEWORKPREFIX": "/System/Library/Frameworks",
+                "base": self.prefix,
+                "installed_base": self.prefix,
+                "installed_platbase": self.prefix,
+                "platbase": self.prefix,
+                "prefix": self.prefix,
+                # get_config_h_filename
+                "config_h_filename": self.prefix.include.join("python{}")
+                .join("pyconfig.h")
+                .format(version),
+                # get_makefile_filename
+                "makefile_filename": self.prefix.lib.join("python{0}")
+                .join("config-{0}-{1}")
+                .Makefile.format(version, sys.platform),
+                # get_paths
+                "data": self.prefix,
+                "include": self.prefix.include.join("python{}".format(version)),
+                "platinclude": self.prefix.include64.join("python{}".format(version)),
+                "platlib": self.prefix.lib64.join("python{}".format(version)).join(
+                    "site-packages"
+                ),
+                "platstdlib": self.prefix.lib64.join("python{}".format(version)),
+                "purelib": self.prefix.lib.join("python{}".format(version)).join("site-packages"),
+                "scripts": self.prefix.bin,
+                "stdlib": self.prefix.lib.join("python{}".format(version)),
+            }
+
+            try:
+                config.update(json.loads(self.command("-c", cmd, output=str)))
+            except (ProcessError, RuntimeError):
+                pass
+            self._config_vars[dag_hash] = config
+        return self._config_vars[dag_hash]
+
+    def get_sysconfigdata_name(self):
+        """Return the full path name of the sysconfigdata file."""
+
+        libdest = self.config_vars["LIBDEST"]
+
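+        # The interpreter reports a module name of the form
+        # "_sysconfigdata_<abiflags>_<platform>_<multiarch>", e.g.
+        # "_sysconfigdata__linux_x86_64-linux-gnu" on a typical Linux build
+        # (illustrative value; the actual name is queried below).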
+        cmd = "from sysconfig import _get_sysconfigdata_name; "
+        cmd += "print(_get_sysconfigdata_name())"
+        filename = self.command("-c", cmd, output=str).strip()
+        filename += ".py"
+
+        return join_path(libdest, filename)
+
+    @property
+    def home(self):
+        """Most of the time, ``PYTHONHOME`` is simply
+        ``spec['python'].prefix``. However, if the user is using an
+        externally installed python, it may be symlinked. For example,
+        Homebrew installs python in ``/usr/local/Cellar/python/2.7.12_2``
+        and symlinks it to ``/usr/local``. Users may not know the actual
+        installation directory and add ``/usr/local`` to their
+        ``packages.yaml`` unknowingly. Query the python executable to
+        determine exactly where it is installed.
+        """
+        return Prefix(self.config_vars["base"])
+
+    def find_library(self, library):
+        # Spack installs libraries into lib, except on openSUSE where it installs them
+        # into lib64. If the user is using an externally installed package, it may be
+        # in either lib or lib64, so we need to ask Python where its LIBDIR is.
+        libdir = self.config_vars["LIBDIR"]
+
+        # Debian and derivatives use a triplet subdir under /usr/lib, LIBPL can be used
+        # to get the Python library directory
+        libpldir = self.config_vars["LIBPL"]
+
+        # The system Python installation on macOS and Homebrew installations
+        # install libraries into a Frameworks directory
+        frameworkprefix = self.config_vars["PYTHONFRAMEWORKPREFIX"]
+
+        # Get the active Xcode environment's Framework location.
+        macos_developerdir = os.environ.get("DEVELOPER_DIR")
+        if macos_developerdir and os.path.exists(macos_developerdir):
+            macos_developerdir = os.path.join(macos_developerdir, "Library", "Frameworks")
+        else:
+            macos_developerdir = ""
+
+        # Windows libraries are installed directly to BINDIR
+        win_bin_dir = self.config_vars["BINDIR"]
+        win_root_dir = self.config_vars["prefix"]
+
+        directories = [
+            libdir,
+            libpldir,
+            frameworkprefix,
+            macos_developerdir,
+            win_bin_dir,
+            win_root_dir,
+        ]
+
+        if self.spec.satisfies("platform=windows"):
+            lib_dirs = ["libs"]
+        else:
+            # The Python shipped with Xcode command line tools isn't in any of these locations
+            lib_dirs = ["lib", "lib64"]
+
+        for subdir in lib_dirs:
+            directories.append(os.path.join(self.config_vars["base"], subdir))
+
+        directories = dedupe(directories)
+        for directory in directories:
+            path = os.path.join(directory, library)
+            if os.path.exists(path):
+                return LibraryList(path)
+
+    @property
+    def libs(self):
+        py_version = self.version.up_to(2)
+        if sys.platform == "win32":
+            py_version = str(py_version).replace(".", "")
+        lib_prefix = "lib" if sys.platform != "win32" else ""
+        # The values of LDLIBRARY and LIBRARY aren't reliable. Intel Python uses a
+        # static binary but installs shared libraries, so sysconfig reports
+        # libpythonX.Y.a but only libpythonX.Y.so exists. So we add our own paths, too.
+
+        # With framework python on macOS, self.config_vars["LDLIBRARY"] can point
+        # to a library that is not linkable because it does not have the required
+        # suffix of a shared library (it is called "Python" without extension).
+        # The linker then falls back to libPython.tbd in the default macOS
+        # software tree, which security settings prohibit to link against
+        # (your binary is not an allowed client of /path/to/libPython.tbd).
+        # To avoid this, we replace the entry in config_vars with a default value.
+        file_extension_shared = os.path.splitext(self.config_vars["LDLIBRARY"])[-1]
+        if file_extension_shared == "":
+            shared_libs = []
+        else:
+            shared_libs = [self.config_vars["LDLIBRARY"]]
+        shared_libs += ["{}python{}.{}".format(lib_prefix, py_version, dso_suffix)]
+        # Like LDLIBRARY for Python on Mac OS, LIBRARY may refer to an un-linkable object
+        file_extension_static = os.path.splitext(self.config_vars["LIBRARY"])[-1]
+        if file_extension_static == "":
+            static_libs = []
+        else:
+            static_libs = [self.config_vars["LIBRARY"]]
+        static_libs += ["{}python{}.{}".format(lib_prefix, py_version, stat_suffix)]
+
+        # The +shared variant isn't reliable, as `spack external find` currently can't
+        # detect it. If +shared, prefer the shared libraries, but check for static if
+        # those aren't found. Vice versa for ~shared.
+        if self.spec.satisfies("platform=windows"):
+            # Since we are searching for link libraries, on Windows search only for
+            # ".Lib" extensions by default as those represent import libraries for implict links.
+            candidates = static_libs
+        elif self.spec.satisfies("+shared"):
+            candidates = shared_libs + static_libs
+        else:
+            candidates = static_libs + shared_libs
+
+        for candidate in dedupe(candidates):
+            lib = self.find_library(candidate)
+            if lib:
+                return lib
+
+        raise spack.error.NoLibrariesError(
+            "Unable to find {} libraries with the following names:\n\n* ".format(self.name)
+            + "\n* ".join(candidates)
+        )
+
+    @property
+    def headers(self):
+        # Locations where pyconfig.h could be
+        # This varies by system, especially on macOS where the command line tools are
+        # installed in a very different directory from the system python interpreter.
+        py_version = str(self.version.up_to(2))
+        candidates = [
+            os.path.dirname(self.config_vars["config_h_filename"]),
+            self.config_vars["INCLUDEPY"],
+            self.config_vars["CONFINCLUDEPY"],
+            os.path.join(self.config_vars["base"], "include", py_version),
+            os.path.join(self.config_vars["base"], "Headers"),
+        ]
+        candidates = list(dedupe(candidates))
+
+        for directory in candidates:
+            headers = find_headers("pyconfig", directory)
+            if headers:
+                config_h = headers[0]
+                break
+        else:
+            raise spack.error.NoHeadersError(
+                "Unable to locate {} headers in any of these locations:\n\n* ".format(self.name)
+                + "\n* ".join(candidates)
+            )
+
+        headers.directories = [os.path.dirname(config_h)]
+        return headers
+
+    # https://docs.python.org/3/library/sysconfig.html#installation-paths
+    # https://discuss.python.org/t/understanding-site-packages-directories/12959
+    # https://github.com/pypa/pip/blob/22.1/src/pip/_internal/locations/__init__.py
+    # https://github.com/pypa/installer/pull/103
+
+    # NOTE: Xcode Python's sysconfig module was incorrectly patched, and hard-codes
+    # everything to be installed in /Library/Python. Therefore, we need to use a
+    # fallback in the following methods. For more information, see:
+    # https://github.com/pypa/pip/blob/22.1/src/pip/_internal/locations/__init__.py#L486
+
+    @property
+    def platlib(self):
+        """Directory for site-specific, platform-specific files.
+
+        Exact directory depends on platform/OS/Python version. Examples include:
+
+        * ``lib/pythonX.Y/site-packages`` on most POSIX systems
+        * ``lib64/pythonX.Y/site-packages`` on RHEL/CentOS/Fedora with system Python
+        * ``lib/pythonX/dist-packages`` on Debian/Ubuntu with system Python
+        * ``lib/python/site-packages`` on macOS with framework Python
+        * ``Lib/site-packages`` on Windows
+
+        Returns:
+            str: platform-specific site-packages directory
+        """
+        prefix = self.config_vars["platbase"] + os.sep
+        path = self.config_vars["platlib"]
+        if path.startswith(prefix):
+            return path.replace(prefix, "")
+        return os.path.join("lib64", f"python{self.version.up_to(2)}", "site-packages")
+
+    @property
+    def purelib(self):
+        """Directory for site-specific, non-platform-specific files.
+
+        Exact directory depends on platform/OS/Python version. Examples include:
+
+        * ``lib/pythonX.Y/site-packages`` on most POSIX systems
+        * ``lib/pythonX/dist-packages`` on Debian/Ubuntu with system Python
+        * ``lib/python/site-packages`` on macOS with framework Python
+        * ``Lib/site-packages`` on Windows
+
+        Returns:
+            str: platform-independent site-packages directory
+        """
+        prefix = self.config_vars["base"] + os.sep
+        path = self.config_vars["purelib"]
+        if path.startswith(prefix):
+            return path.replace(prefix, "")
+        return os.path.join("lib", f"python{self.version.up_to(2)}", "site-packages")
+
+    @property
+    def include(self):
+        """Directory for non-platform-specific header files.
+
+        Exact directory depends on platform/Python version/ABI flags. Examples include:
+
+        * ``include/pythonX.Y`` on most POSIX systems
+        * ``include/pythonX.Yd`` for debug builds
+        * ``include/pythonX.Ym`` for pymalloc builds
+        * ``include/pythonX.Yu`` for wide unicode builds
+        * ``include`` on macOS with framework Python
+        * ``Include`` on Windows
+
+        Returns:
+            str: platform-independent header file directory
+        """
+        prefix = self.config_vars["installed_base"] + os.sep
+        path = self.config_vars["include"]
+        if path.startswith(prefix):
+            return path.replace(prefix, "")
+        return os.path.join("include", "python{}".format(self.version.up_to(2)))
+
+    def setup_dependent_build_environment(self, env, dependent_spec):
+        """Set PYTHONPATH to include the site-packages directory for the
+        extension and any other python extensions it depends on.
+        """
+        # We need to make sure that the extensions are compiled and linked with
+        # the Spack wrapper. Paths to the executables that are used for these
+        # operations are normally taken from the sysconfigdata file, which we
+        # modify after the installation (see the filter_compilers method). The
+        # modified file contains paths to the real compilers, not the wrappers.
+        # The values in the file, however, can be overridden with environment
+        # variables. The first variable, CC (CXX), which is used for
+        # compilation, is set by Spack for the dependent package by default.
+        # That is not 100% correct because the value for CC (CXX) in the
+        # sysconfigdata file often contains additional compiler flags (e.g.
+        # -pthread), which we lose by simply setting CC (CXX) to the path to the
+        # Spack wrapper. Moreover, the user might try to build an extension with
+        # a compiler that is different from the one that was used to build
+        # Python itself, which might have unexpected side effects. However, the
+        # experience shows that none of the above is a real issue and we will
+        # not try to change the default behaviour. Given that, we will simply
+        # try to modify LDSHARED (LDCXXSHARED), the second variable, which is
+        # used for linking, in a consistent manner.
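+        # Worked example (assumed sysconfigdata values, for illustration): if
+        # CC is "gcc -pthread" and LDSHARED is "gcc -pthread -shared", the
+        # prefix match below rewrites LDSHARED to "<spack cc wrapper> -shared",
+        # i.e. the whole compile command (flags included) is replaced by the
+        # wrapper path and only the trailing link-specific flags are kept.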
+
+        for compile_var, link_var in [("CC", "LDSHARED"), ("CXX", "LDCXXSHARED")]:
+            # First, we get the values from the sysconfigdata:
+            config_compile = self.config_vars[compile_var]
+            config_link = self.config_vars[link_var]
+
+            # The dependent environment will have the compilation command set to
+            # the following:
+            new_compile = join_path(
+                spack.paths.build_env_path,
+                dependent_spec.package.compiler.link_paths[compile_var.lower()],
+            )
+
+            # Normally, the link command starts with the compilation command:
+            if config_link.startswith(config_compile):
+                new_link = new_compile + config_link[len(config_compile) :]
+            else:
+                # Otherwise, we try to replace the compiler command if it
+                # appears "in the middle" of the link command; to avoid
+                # mistaking some substring of a path for the compiler (e.g. to
+                # avoid replacing "gcc" in "-L/path/to/gcc/"), we require that
+                # the compiler command be surrounded by spaces. Note this may
+                # leave "config_link" unchanged if the compilation command does
+                # not appear in the link command at all, for example if "ld" is
+                # invoked directly (no change would be required in that case
+                # because Spack arranges for the Spack ld wrapper to be the
+                # first instance of "ld" in PATH).
+                new_link = config_link.replace(f" {config_compile} ", f" {new_compile} ")
+
+            # There is logic in the sysconfig module that is sensitive to the
+            # fact that LDSHARED is set in the environment, therefore we export
+            # the variable only if the new value is different from what we got
+            # from the sysconfigdata file:
+            if config_link != new_link and sys.platform != "win32":
+                env.set(link_var, new_link)
+
+    def setup_dependent_run_environment(self, env, dependent_spec):
+        """Set PYTHONPATH to include the site-packages directory for the
+        extension and any other python extensions it depends on.
+        """
+        if not dependent_spec.package.extends(self.spec) or dependent_spec.dependencies(
+            "python-venv"
+        ):
+            return
+
+        # Packages may be installed in platform-specific or platform-independent site-packages
+        # directories
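+        # (For example, a dependent installed under prefix P would typically
+        # get P/lib/pythonX.Y/site-packages and, where distinct,
+        # P/lib64/pythonX.Y/site-packages prepended to PYTHONPATH.)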
+        for directory in {self.platlib, self.purelib}:
+            env.prepend_path("PYTHONPATH", os.path.join(dependent_spec.prefix, directory))
+
+    def setup_dependent_package(self, module, dependent_spec):
+        """Called before python modules' install() methods."""
+        module.python = self.command
+        module.python_include = join_path(dependent_spec.prefix, self.include)
+        module.python_platlib = join_path(dependent_spec.prefix, self.platlib)
+        module.python_purelib = join_path(dependent_spec.prefix, self.purelib)
+
+    def add_files_to_view(self, view, merge_map, skip_if_exists=True):
+        """Make the view a virtual environment if it isn't one already.
+
+        If `python-venv` is linked into the view, it will already be a virtual
+        environment. If not, then this is an older python that doesn't use the
+        python-venv support, or we may be using python packages that
+        use ``depends_on("python")`` but not ``extends("python")``.
+
+        We used to copy the python interpreter in, but we can get the same effect in a
+        simpler way by adding a ``pyvenv.cfg`` to the environment.
+
+        """
+        super().add_files_to_view(view, merge_map, skip_if_exists=skip_if_exists)
+
+        # location of python inside the view, where we will put the venv config
+        projection = view.get_projection_for_spec(self.spec)
+        pyvenv_cfg = os.path.join(projection, "pyvenv.cfg")
+        if os.path.lexists(pyvenv_cfg):
+            return
+
+        # don't put a pyvenv.cfg in a copy view
+        if view.link_type == "copy":
+            return
+
+        with open(pyvenv_cfg, "w") as cfg_file:
+            cfg_file.write(make_pyvenv_cfg(self.spec["python"], projection))
+
+    def test_hello_world(self):
+        """run simple hello world program"""
+        # do not use self.command because we are also testing the run env
+        python = self.spec["python"].command
+
+        msg = "hello world!"
+        out = python("-c", f'print("{msg}")', output=str.split, error=str.split)
+        assert msg in out
+
+    def test_import_executable(self):
+        """ensure import of installed executable works"""
+        python = self.spec["python"].command
+
+        out = python("-c", "import sys; print(sys.executable)", output=str.split, error=str.split)
+        assert self.spec.prefix in out
diff --git a/packages/python/python-3.11-distutils-C++.patch b/packages/python/python-3.11-distutils-C++.patch
new file mode 100644
index 0000000000000000000000000000000000000000..335e06b93c3974d53be5551a0b0ac22255a25324
--- /dev/null
+++ b/packages/python/python-3.11-distutils-C++.patch
@@ -0,0 +1,257 @@
+diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py
+index aa66c8b9f4..71e6556bac 100644
+--- a/Lib/_osx_support.py
++++ b/Lib/_osx_support.py
+@@ -14,13 +14,13 @@
+ # configuration variables that may contain universal build flags,
+ # like "-arch" or "-isdkroot", that may need customization for
+ # the user environment
+-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
+-                            'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
+-                            'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
+-                            'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS')
++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS',
++                          'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED',
++                          'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS',
++                          'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS')
+ 
+ # configuration variables that may contain compiler calls
+-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX')
+ 
+ # prefix added to original configuration variable names
+ _INITPRE = '_OSX_SUPPORT_INITIAL_'
+diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py
+index 66c12dd358..dddb9fd2d4 100644
+--- a/Lib/distutils/cygwinccompiler.py
++++ b/Lib/distutils/cygwinccompiler.py
+@@ -123,8 +123,10 @@ def __init__(self, verbose=0, dry_run=0, force=0):
+         # dllwrap 2.10.90 is buggy
+         if self.ld_version >= "2.10.90":
+             self.linker_dll = "gcc"
++            self.linker_dll_cxx = "g++"
+         else:
+             self.linker_dll = "dllwrap"
++            self.linker_dll_cxx = "dllwrap"
+ 
+         # ld_version >= "2.13" support -shared so use it instead of
+         # -mdll -static
+@@ -138,9 +140,13 @@ def __init__(self, verbose=0, dry_run=0, force=0):
+         self.set_executables(compiler='gcc -mcygwin -O -Wall',
+                              compiler_so='gcc -mcygwin -mdll -O -Wall',
+                              compiler_cxx='g++ -mcygwin -O -Wall',
++                             compiler_so_cxx='g++ -mcygwin -mdll -O -Wall',
+                              linker_exe='gcc -mcygwin',
+                              linker_so=('%s -mcygwin %s' %
+-                                        (self.linker_dll, shared_option)))
++                                        (self.linker_dll, shared_option)),
++                             linker_exe_cxx='g++ -mcygwin',
++                             linker_so_cxx=('%s -mcygwin %s' %
++                                            (self.linker_dll_cxx, shared_option)))
+ 
+         # cygwin and mingw32 need different sets of libraries
+         if self.gcc_version == "2.91.57":
+@@ -164,8 +170,12 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+                 raise CompileError(msg)
+         else: # for other files use the C-compiler
+             try:
+-                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+-                           extra_postargs)
++                if self.detect_language(src) == 'c++':
++                    self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] +
++                               extra_postargs)
++                else:
++                    self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
++                               extra_postargs)
+             except DistutilsExecError as msg:
+                 raise CompileError(msg)
+ 
+@@ -300,9 +310,14 @@ def __init__(self, verbose=0, dry_run=0, force=0):
+         self.set_executables(compiler='gcc -O -Wall',
+                              compiler_so='gcc -mdll -O -Wall',
+                              compiler_cxx='g++ -O -Wall',
++                             compiler_so_cxx='g++ -mdll -O -Wall',
+                              linker_exe='gcc',
+                              linker_so='%s %s %s'
+                                         % (self.linker_dll, shared_option,
++                                           entry_point),
++                             linker_exe_cxx='g++',
++                             linker_so_cxx='%s %s %s'
++                                        % (self.linker_dll_cxx, shared_option,
+                                            entry_point))
+         # Maybe we should also append -mthreads, but then the finished
+         # dlls need another dll (mingwm10.dll see Mingw32 docs)
+diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
+index 3414a761e7..f1af560cc1 100644
+--- a/Lib/distutils/sysconfig.py
++++ b/Lib/distutils/sysconfig.py
+@@ -216,9 +216,11 @@ def customize_compiler(compiler):
+                 _osx_support.customize_compiler(_config_vars)
+                 _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
+ 
+-        (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
+-            get_config_vars('CC', 'CXX', 'CFLAGS',
+-                            'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
++        (cc, cxx, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \
++            get_config_vars('CC', 'CXX', 'CFLAGS', 'CCSHARED', 'LDSHARED', 'LDCXXSHARED',
++                            'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
++
++        cxxflags = cflags
+ 
+         if 'CC' in os.environ:
+             newcc = os.environ['CC']
+@@ -233,19 +235,27 @@ def customize_compiler(compiler):
+             cxx = os.environ['CXX']
+         if 'LDSHARED' in os.environ:
+             ldshared = os.environ['LDSHARED']
++        if 'LDCXXSHARED' in os.environ:
++            ldcxxshared = os.environ['LDCXXSHARED']
+         if 'CPP' in os.environ:
+             cpp = os.environ['CPP']
+         else:
+             cpp = cc + " -E"           # not always
+         if 'LDFLAGS' in os.environ:
+             ldshared = ldshared + ' ' + os.environ['LDFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS']
+         if 'CFLAGS' in os.environ:
+-            cflags = cflags + ' ' + os.environ['CFLAGS']
++            cflags = os.environ['CFLAGS']
+             ldshared = ldshared + ' ' + os.environ['CFLAGS']
++        if 'CXXFLAGS' in os.environ:
++            cxxflags = os.environ['CXXFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS']
+         if 'CPPFLAGS' in os.environ:
+             cpp = cpp + ' ' + os.environ['CPPFLAGS']
+             cflags = cflags + ' ' + os.environ['CPPFLAGS']
++            cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS']
+             ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS']
+         if 'AR' in os.environ:
+             ar = os.environ['AR']
+         if 'ARFLAGS' in os.environ:
+@@ -254,13 +264,17 @@ def customize_compiler(compiler):
+             archiver = ar + ' ' + ar_flags
+ 
+         cc_cmd = cc + ' ' + cflags
++        cxx_cmd = cxx + ' ' + cxxflags
+         compiler.set_executables(
+             preprocessor=cpp,
+             compiler=cc_cmd,
+             compiler_so=cc_cmd + ' ' + ccshared,
+-            compiler_cxx=cxx,
++            compiler_cxx=cxx_cmd,
++            compiler_so_cxx=cxx_cmd + ' ' + ccshared,
+             linker_so=ldshared,
+             linker_exe=cc,
++            linker_so_cxx=ldcxxshared,
++            linker_exe_cxx=cxx,
+             archiver=archiver)
+ 
+         compiler.shared_lib_extension = shlib_suffix
+diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py
+index d00c48981e..4a3d271fee 100644
+--- a/Lib/distutils/unixccompiler.py
++++ b/Lib/distutils/unixccompiler.py
+@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler):
+     # are pretty generic; they will probably have to be set by an outsider
+     # (eg. using information discovered by the sysconfig about building
+     # Python extensions).
+-    executables = {'preprocessor' : None,
+-                   'compiler'     : ["cc"],
+-                   'compiler_so'  : ["cc"],
+-                   'compiler_cxx' : ["cc"],
+-                   'linker_so'    : ["cc", "-shared"],
+-                   'linker_exe'   : ["cc"],
+-                   'archiver'     : ["ar", "-cr"],
+-                   'ranlib'       : None,
++    executables = {'preprocessor'    : None,
++                   'compiler'        : ["cc"],
++                   'compiler_so'     : ["cc"],
++                   'compiler_cxx'    : ["c++"],
++                   'compiler_so_cxx' : ["c++"],
++                   'linker_so'       : ["cc", "-shared"],
++                   'linker_exe'      : ["cc"],
++                   'linker_so_cxx'   : ["c++", "-shared"],
++                   'linker_exe_cxx'  : ["c++"],
++                   'archiver'        : ["ar", "-cr"],
++                   'ranlib'          : None,
+                   }
+ 
+     if sys.platform[:6] == "darwin":
+@@ -110,12 +113,19 @@ def preprocess(self, source, output_file=None, macros=None,
+ 
+     def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+         compiler_so = self.compiler_so
++        compiler_so_cxx = self.compiler_so_cxx
+         if sys.platform == 'darwin':
+             compiler_so = _osx_support.compiler_fixup(compiler_so,
+                                                     cc_args + extra_postargs)
++            compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx,
++                                                    cc_args + extra_postargs)
+         try:
+-            self.spawn(compiler_so + cc_args + [src, '-o', obj] +
+-                       extra_postargs)
++            if self.detect_language(src) == 'c++':
++                self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] +
++                           extra_postargs)
++            else:
++                self.spawn(compiler_so + cc_args + [src, '-o', obj] +
++                           extra_postargs)
+         except DistutilsExecError as msg:
+             raise CompileError(msg)
+ 
+@@ -173,30 +183,16 @@ def link(self, target_desc, objects,
+                 ld_args.extend(extra_postargs)
+             self.mkpath(os.path.dirname(output_filename))
+             try:
+-                if target_desc == CCompiler.EXECUTABLE:
+-                    linker = self.linker_exe[:]
++                if target_lang == "c++":
++                    if target_desc == CCompiler.EXECUTABLE:
++                        linker = self.linker_exe_cxx[:]
++                    else:
++                        linker = self.linker_so_cxx[:]
+                 else:
+-                    linker = self.linker_so[:]
+-                if target_lang == "c++" and self.compiler_cxx:
+-                    # skip over environment variable settings if /usr/bin/env
+-                    # is used to set up the linker's environment.
+-                    # This is needed on OSX. Note: this assumes that the
+-                    # normal and C++ compiler have the same environment
+-                    # settings.
+-                    i = 0
+-                    if os.path.basename(linker[0]) == "env":
+-                        i = 1
+-                        while '=' in linker[i]:
+-                            i += 1
+-
+-                    if os.path.basename(linker[i]) == 'ld_so_aix':
+-                        # AIX platforms prefix the compiler with the ld_so_aix
+-                        # script, so we need to adjust our linker index
+-                        offset = 1
++                    if target_desc == CCompiler.EXECUTABLE:
++                        linker = self.linker_exe[:]
+                     else:
+-                        offset = 0
+-
+-                    linker[i+offset] = self.compiler_cxx[i]
++                        linker = self.linker_so[:]
+ 
+                 if sys.platform == 'darwin':
+                     linker = _osx_support.compiler_fixup(linker, ld_args)
+diff --git a/Makefile.pre.in b/Makefile.pre.in
+index f803391346..090f14c46c 100644
+--- a/Makefile.pre.in
++++ b/Makefile.pre.in
+@@ -732,9 +732,9 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt @LIBMPDEC_INTERNAL@ @LIBEXPAT_INTERNAL
+ 	    *\ -s*|s*) quiet="-q";; \
+ 	    *) quiet="";; \
+ 	esac; \
+-	echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \
++	echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \
+ 		$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \
+-	$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \
++	$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \
+ 		$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
+ 
+ 
diff --git a/packages/python/python-3.7.2-distutils-C++.patch b/packages/python/python-3.7.2-distutils-C++.patch
new file mode 100644
index 0000000000000000000000000000000000000000..5728fad6f77f331d8f8fee1ad97c742f27928980
--- /dev/null
+++ b/packages/python/python-3.7.2-distutils-C++.patch
@@ -0,0 +1,241 @@
+--- a/Lib/_osx_support.py
++++ b/Lib/_osx_support.py
+@@ -14,13 +14,13 @@ __all__ = [
+ # configuration variables that may contain universal build flags,
+ # like "-arch" or "-isdkroot", that may need customization for
+ # the user environment
+-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
+-                            'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
+-                            'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
+-                            'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS')
++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS',
++                          'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED',
++                          'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS',
++                          'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS')
+ 
+ # configuration variables that may contain compiler calls
+-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX')
+ 
+ # prefix added to original configuration variable names
+ _INITPRE = '_OSX_SUPPORT_INITIAL_'
+--- a/Lib/distutils/cygwinccompiler.py
++++ b/Lib/distutils/cygwinccompiler.py
+@@ -125,8 +125,10 @@ class CygwinCCompiler(UnixCCompiler):
+         # dllwrap 2.10.90 is buggy
+         if self.ld_version >= "2.10.90":
+             self.linker_dll = "gcc"
++            self.linker_dll_cxx = "g++"
+         else:
+             self.linker_dll = "dllwrap"
++            self.linker_dll_cxx = "dllwrap"
+ 
+         # ld_version >= "2.13" support -shared so use it instead of
+         # -mdll -static
+@@ -140,9 +142,13 @@ class CygwinCCompiler(UnixCCompiler):
+         self.set_executables(compiler='gcc -mcygwin -O -Wall',
+                              compiler_so='gcc -mcygwin -mdll -O -Wall',
+                              compiler_cxx='g++ -mcygwin -O -Wall',
++                             compiler_so_cxx='g++ -mcygwin -mdll -O -Wall',
+                              linker_exe='gcc -mcygwin',
+                              linker_so=('%s -mcygwin %s' %
+-                                        (self.linker_dll, shared_option)))
++                                        (self.linker_dll, shared_option)),
++                             linker_exe_cxx='g++ -mcygwin',
++                             linker_so_cxx=('%s -mcygwin %s' %
++                                            (self.linker_dll_cxx, shared_option)))
+ 
+         # cygwin and mingw32 need different sets of libraries
+         if self.gcc_version == "2.91.57":
+@@ -166,8 +172,12 @@ class CygwinCCompiler(UnixCCompiler):
+                 raise CompileError(msg)
+         else: # for other files use the C-compiler
+             try:
+-                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+-                           extra_postargs)
++                if self.detect_language(src) == 'c++':
++                    self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] +
++                               extra_postargs)
++                else:
++                    self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
++                               extra_postargs)
+             except DistutilsExecError as msg:
+                 raise CompileError(msg)
+ 
+@@ -302,9 +312,14 @@ class Mingw32CCompiler(CygwinCCompiler):
+         self.set_executables(compiler='gcc -O -Wall',
+                              compiler_so='gcc -mdll -O -Wall',
+                              compiler_cxx='g++ -O -Wall',
++                             compiler_so_cxx='g++ -mdll -O -Wall',
+                              linker_exe='gcc',
+                              linker_so='%s %s %s'
+                                         % (self.linker_dll, shared_option,
++                                           entry_point),
++                             linker_exe_cxx='g++',
++                             linker_so_cxx='%s %s %s'
++                                        % (self.linker_dll_cxx, shared_option,
+                                            entry_point))
+         # Maybe we should also append -mthreads, but then the finished
+         # dlls need another dll (mingwm10.dll see Mingw32 docs)
+--- a/Lib/distutils/sysconfig.py
++++ b/Lib/distutils/sysconfig.py
+@@ -170,9 +170,11 @@ def customize_compiler(compiler):
+                 _osx_support.customize_compiler(_config_vars)
+                 _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
+ 
+-        (cc, cxx, opt, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
+-            get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
+-                            'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
++        (cc, cxx, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \
++            get_config_vars('CC', 'CXX', 'CFLAGS', 'CCSHARED', 'LDSHARED', 'LDCXXSHARED',
++                            'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
++
++        cxxflags = cflags
+ 
+         if 'CC' in os.environ:
+             newcc = os.environ['CC']
+@@ -187,19 +189,27 @@ def customize_compiler(compiler):
+             cxx = os.environ['CXX']
+         if 'LDSHARED' in os.environ:
+             ldshared = os.environ['LDSHARED']
++        if 'LDCXXSHARED' in os.environ:
++            ldcxxshared = os.environ['LDCXXSHARED']
+         if 'CPP' in os.environ:
+             cpp = os.environ['CPP']
+         else:
+             cpp = cc + " -E"           # not always
+         if 'LDFLAGS' in os.environ:
+             ldshared = ldshared + ' ' + os.environ['LDFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS']
+         if 'CFLAGS' in os.environ:
+-            cflags = opt + ' ' + os.environ['CFLAGS']
++            cflags = os.environ['CFLAGS']
+             ldshared = ldshared + ' ' + os.environ['CFLAGS']
++        if 'CXXFLAGS' in os.environ:
++            cxxflags = os.environ['CXXFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS']
+         if 'CPPFLAGS' in os.environ:
+             cpp = cpp + ' ' + os.environ['CPPFLAGS']
+             cflags = cflags + ' ' + os.environ['CPPFLAGS']
++            cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS']
+             ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS']
+         if 'AR' in os.environ:
+             ar = os.environ['AR']
+         if 'ARFLAGS' in os.environ:
+@@ -208,13 +218,17 @@ def customize_compiler(compiler):
+             archiver = ar + ' ' + ar_flags
+ 
+         cc_cmd = cc + ' ' + cflags
++        cxx_cmd = cxx + ' ' + cxxflags
+         compiler.set_executables(
+             preprocessor=cpp,
+             compiler=cc_cmd,
+             compiler_so=cc_cmd + ' ' + ccshared,
+-            compiler_cxx=cxx,
++            compiler_cxx=cxx_cmd,
++            compiler_so_cxx=cxx_cmd + ' ' + ccshared,
+             linker_so=ldshared,
+             linker_exe=cc,
++            linker_so_cxx=ldcxxshared,
++            linker_exe_cxx=cxx,
+             archiver=archiver)
+ 
+         compiler.shared_lib_extension = shlib_suffix
+--- a/Lib/distutils/unixccompiler.py
++++ b/Lib/distutils/unixccompiler.py
+@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler):
+     # are pretty generic; they will probably have to be set by an outsider
+     # (eg. using information discovered by the sysconfig about building
+     # Python extensions).
+-    executables = {'preprocessor' : None,
+-                   'compiler'     : ["cc"],
+-                   'compiler_so'  : ["cc"],
+-                   'compiler_cxx' : ["cc"],
+-                   'linker_so'    : ["cc", "-shared"],
+-                   'linker_exe'   : ["cc"],
+-                   'archiver'     : ["ar", "-cr"],
+-                   'ranlib'       : None,
++    executables = {'preprocessor'    : None,
++                   'compiler'        : ["cc"],
++                   'compiler_so'     : ["cc"],
++                   'compiler_cxx'    : ["c++"],
++                   'compiler_so_cxx' : ["c++"],
++                   'linker_so'       : ["cc", "-shared"],
++                   'linker_exe'      : ["cc"],
++                   'linker_so_cxx'   : ["c++", "-shared"],
++                   'linker_exe_cxx'  : ["c++"],
++                   'archiver'        : ["ar", "-cr"],
++                   'ranlib'          : None,
+                   }
+ 
+     if sys.platform[:6] == "darwin":
+@@ -110,12 +113,19 @@ class UnixCCompiler(CCompiler):
+ 
+     def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+         compiler_so = self.compiler_so
++        compiler_so_cxx = self.compiler_so_cxx
+         if sys.platform == 'darwin':
+             compiler_so = _osx_support.compiler_fixup(compiler_so,
+                                                     cc_args + extra_postargs)
++            compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx,
++                                                    cc_args + extra_postargs)
+         try:
+-            self.spawn(compiler_so + cc_args + [src, '-o', obj] +
+-                       extra_postargs)
++            if self.detect_language(src) == 'c++':
++                self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] +
++                           extra_postargs)
++            else:
++                self.spawn(compiler_so + cc_args + [src, '-o', obj] +
++                           extra_postargs)
+         except DistutilsExecError as msg:
+             raise CompileError(msg)
+ 
+@@ -173,22 +183,16 @@ class UnixCCompiler(CCompiler):
+                 ld_args.extend(extra_postargs)
+             self.mkpath(os.path.dirname(output_filename))
+             try:
+-                if target_desc == CCompiler.EXECUTABLE:
+-                    linker = self.linker_exe[:]
++                if target_lang == "c++":
++                    if target_desc == CCompiler.EXECUTABLE:
++                        linker = self.linker_exe_cxx[:]
++                    else:
++                        linker = self.linker_so_cxx[:]
+                 else:
+-                    linker = self.linker_so[:]
+-                if target_lang == "c++" and self.compiler_cxx:
+-                    # skip over environment variable settings if /usr/bin/env
+-                    # is used to set up the linker's environment.
+-                    # This is needed on OSX. Note: this assumes that the
+-                    # normal and C++ compiler have the same environment
+-                    # settings.
+-                    i = 0
+-                    if os.path.basename(linker[0]) == "env":
+-                        i = 1
+-                        while '=' in linker[i]:
+-                            i += 1
+-                    linker[i] = self.compiler_cxx[i]
++                    if target_desc == CCompiler.EXECUTABLE:
++                        linker = self.linker_exe[:]
++                    else:
++                        linker = self.linker_so[:]
+ 
+                 if sys.platform == 'darwin':
+                     linker = _osx_support.compiler_fixup(linker, ld_args)
+--- a/Makefile.pre.in
++++ b/Makefile.pre.in
+@@ -584,10 +584,10 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
+ 	    *\ -s*|s*) quiet="-q";; \
+ 	    *) quiet="";; \
+ 	esac; \
+-	echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \
++	echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \
+ 		_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
+ 		$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \
+-	$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \
++	$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \
+ 		_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
+ 		$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
+ 
diff --git a/packages/python/python-3.7.3-distutils-C++.patch b/packages/python/python-3.7.3-distutils-C++.patch
new file mode 100644
index 0000000000000000000000000000000000000000..e29323bf0b51b3c02a2816adfdd67cffbf66a8e7
--- /dev/null
+++ b/packages/python/python-3.7.3-distutils-C++.patch
@@ -0,0 +1,256 @@
+diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py
+index db6674e..ccbe09a 100644
+--- a/Lib/_osx_support.py
++++ b/Lib/_osx_support.py
+@@ -14,13 +14,13 @@ __all__ = [
+ # configuration variables that may contain universal build flags,
+ # like "-arch" or "-isdkroot", that may need customization for
+ # the user environment
+-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
+-                            'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
+-                            'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
+-                            'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS')
++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS',
++                          'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED',
++                          'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS',
++                          'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS')
+ 
+ # configuration variables that may contain compiler calls
+-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX')
+ 
+ # prefix added to original configuration variable names
+ _INITPRE = '_OSX_SUPPORT_INITIAL_'
+diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py
+index 6c5d777..640fa2d 100644
+--- a/Lib/distutils/cygwinccompiler.py
++++ b/Lib/distutils/cygwinccompiler.py
+@@ -125,8 +125,10 @@ class CygwinCCompiler(UnixCCompiler):
+         # dllwrap 2.10.90 is buggy
+         if self.ld_version >= "2.10.90":
+             self.linker_dll = "gcc"
++            self.linker_dll_cxx = "g++"
+         else:
+             self.linker_dll = "dllwrap"
++            self.linker_dll_cxx = "dllwrap"
+ 
+         # ld_version >= "2.13" support -shared so use it instead of
+         # -mdll -static
+@@ -140,9 +142,13 @@ class CygwinCCompiler(UnixCCompiler):
+         self.set_executables(compiler='gcc -mcygwin -O -Wall',
+                              compiler_so='gcc -mcygwin -mdll -O -Wall',
+                              compiler_cxx='g++ -mcygwin -O -Wall',
++                             compiler_so_cxx='g++ -mcygwin -mdll -O -Wall',
+                              linker_exe='gcc -mcygwin',
+                              linker_so=('%s -mcygwin %s' %
+-                                        (self.linker_dll, shared_option)))
++                                        (self.linker_dll, shared_option)),
++                             linker_exe_cxx='g++ -mcygwin',
++                             linker_so_cxx=('%s -mcygwin %s' %
++                                            (self.linker_dll_cxx, shared_option)))
+ 
+         # cygwin and mingw32 need different sets of libraries
+         if self.gcc_version == "2.91.57":
+@@ -166,8 +172,12 @@ class CygwinCCompiler(UnixCCompiler):
+                 raise CompileError(msg)
+         else: # for other files use the C-compiler
+             try:
+-                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+-                           extra_postargs)
++                if self.detect_language(src) == 'c++':
++                    self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] +
++                               extra_postargs)
++                else:
++                    self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
++                               extra_postargs)
+             except DistutilsExecError as msg:
+                 raise CompileError(msg)
+ 
+@@ -302,9 +312,14 @@ class Mingw32CCompiler(CygwinCCompiler):
+         self.set_executables(compiler='gcc -O -Wall',
+                              compiler_so='gcc -mdll -O -Wall',
+                              compiler_cxx='g++ -O -Wall',
++                             compiler_so_cxx='g++ -mdll -O -Wall',
+                              linker_exe='gcc',
+                              linker_so='%s %s %s'
+                                         % (self.linker_dll, shared_option,
++                                           entry_point),
++                             linker_exe_cxx='g++',
++                             linker_so_cxx='%s %s %s'
++                                        % (self.linker_dll_cxx, shared_option,
+                                            entry_point))
+         # Maybe we should also append -mthreads, but then the finished
+         # dlls need another dll (mingwm10.dll see Mingw32 docs)
+diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
+index 83160f8..b735369 100644
+--- a/Lib/distutils/sysconfig.py
++++ b/Lib/distutils/sysconfig.py
+@@ -183,9 +183,11 @@ def customize_compiler(compiler):
+                 _osx_support.customize_compiler(_config_vars)
+                 _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
+ 
+-        (cc, cxx, opt, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
++        (cc, cxx, opt, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \
+             get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
+-                            'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
++                            'CCSHARED', 'LDSHARED', 'LDCXXSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
++
++        cxxflags = cflags
+ 
+         if 'CC' in os.environ:
+             newcc = os.environ['CC']
+@@ -200,19 +202,27 @@ def customize_compiler(compiler):
+             cxx = os.environ['CXX']
+         if 'LDSHARED' in os.environ:
+             ldshared = os.environ['LDSHARED']
++        if 'LDCXXSHARED' in os.environ:
++            ldcxxshared = os.environ['LDCXXSHARED']
+         if 'CPP' in os.environ:
+             cpp = os.environ['CPP']
+         else:
+             cpp = cc + " -E"           # not always
+         if 'LDFLAGS' in os.environ:
+             ldshared = ldshared + ' ' + os.environ['LDFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS']
+         if 'CFLAGS' in os.environ:
+             cflags = opt + ' ' + os.environ['CFLAGS']
+             ldshared = ldshared + ' ' + os.environ['CFLAGS']
++        if 'CXXFLAGS' in os.environ:
++            cxxflags = opt + ' ' + os.environ['CXXFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS']
+         if 'CPPFLAGS' in os.environ:
+             cpp = cpp + ' ' + os.environ['CPPFLAGS']
++            cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS']
+             cflags = cflags + ' ' + os.environ['CPPFLAGS']
+             ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS']
+         if 'AR' in os.environ:
+             ar = os.environ['AR']
+         if 'ARFLAGS' in os.environ:
+@@ -221,13 +231,17 @@ def customize_compiler(compiler):
+             archiver = ar + ' ' + ar_flags
+ 
+         cc_cmd = cc + ' ' + cflags
++        cxx_cmd = cxx + ' ' + cxxflags
+         compiler.set_executables(
+             preprocessor=cpp,
+             compiler=cc_cmd,
+             compiler_so=cc_cmd + ' ' + ccshared,
+-            compiler_cxx=cxx,
++            compiler_cxx=cxx_cmd,
++            compiler_so_cxx=cxx_cmd + ' ' + ccshared,
+             linker_so=ldshared,
+             linker_exe=cc,
++            linker_so_cxx=ldcxxshared,
++            linker_exe_cxx=cxx,
+             archiver=archiver)
+ 
+         compiler.shared_lib_extension = shlib_suffix
+diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py
+index d10a78d..7e88781 100644
+--- a/Lib/distutils/unixccompiler.py
++++ b/Lib/distutils/unixccompiler.py
+@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler):
+     # are pretty generic; they will probably have to be set by an outsider
+     # (eg. using information discovered by the sysconfig about building
+     # Python extensions).
+-    executables = {'preprocessor' : None,
+-                   'compiler'     : ["cc"],
+-                   'compiler_so'  : ["cc"],
+-                   'compiler_cxx' : ["cc"],
+-                   'linker_so'    : ["cc", "-shared"],
+-                   'linker_exe'   : ["cc"],
+-                   'archiver'     : ["ar", "-cr"],
+-                   'ranlib'       : None,
++    executables = {'preprocessor'    : None,
++                   'compiler'        : ["cc"],
++                   'compiler_so'     : ["cc"],
++                   'compiler_cxx'    : ["c++"],
++                   'compiler_so_cxx' : ["c++"],
++                   'linker_so'       : ["cc", "-shared"],
++                   'linker_exe'      : ["cc"],
++                   'linker_so_cxx'   : ["c++", "-shared"],
++                   'linker_exe_cxx'  : ["c++"],
++                   'archiver'        : ["ar", "-cr"],
++                   'ranlib'          : None,
+                   }
+ 
+     if sys.platform[:6] == "darwin":
+@@ -110,12 +113,19 @@ class UnixCCompiler(CCompiler):
+ 
+     def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+         compiler_so = self.compiler_so
++        compiler_so_cxx = self.compiler_so_cxx
+         if sys.platform == 'darwin':
+             compiler_so = _osx_support.compiler_fixup(compiler_so,
+                                                     cc_args + extra_postargs)
++            compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx,
++                                                    cc_args + extra_postargs)
+         try:
+-            self.spawn(compiler_so + cc_args + [src, '-o', obj] +
+-                       extra_postargs)
++            if self.detect_language(src) == 'c++':
++                self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] +
++                           extra_postargs)
++            else:
++                self.spawn(compiler_so + cc_args + [src, '-o', obj] +
++                           extra_postargs)
+         except DistutilsExecError as msg:
+             raise CompileError(msg)
+ 
+@@ -173,30 +183,16 @@ class UnixCCompiler(CCompiler):
+                 ld_args.extend(extra_postargs)
+             self.mkpath(os.path.dirname(output_filename))
+             try:
+-                if target_desc == CCompiler.EXECUTABLE:
+-                    linker = self.linker_exe[:]
++                if target_lang == "c++":
++                    if target_desc == CCompiler.EXECUTABLE:
++                        linker = self.linker_exe_cxx[:]
++                    else:
++                        linker = self.linker_so_cxx[:]
+                 else:
+-                    linker = self.linker_so[:]
+-                if target_lang == "c++" and self.compiler_cxx:
+-                    # skip over environment variable settings if /usr/bin/env
+-                    # is used to set up the linker's environment.
+-                    # This is needed on OSX. Note: this assumes that the
+-                    # normal and C++ compiler have the same environment
+-                    # settings.
+-                    i = 0
+-                    if os.path.basename(linker[0]) == "env":
+-                        i = 1
+-                        while '=' in linker[i]:
+-                            i += 1
+-
+-                    if os.path.basename(linker[i]) == 'ld_so_aix':
+-                        # AIX platforms prefix the compiler with the ld_so_aix
+-                        # script, so we need to adjust our linker index
+-                        offset = 1
++                    if target_desc == CCompiler.EXECUTABLE:
++                        linker = self.linker_exe[:]
+                     else:
+-                        offset = 0
+-
+-                    linker[i+offset] = self.compiler_cxx[i]
++                        linker = self.linker_so[:]
+ 
+                 if sys.platform == 'darwin':
+                     linker = _osx_support.compiler_fixup(linker, ld_args)
+diff --git a/Makefile.pre.in b/Makefile.pre.in
+index 2d2e11f..8456e3f 100644
+--- a/Makefile.pre.in
++++ b/Makefile.pre.in
+@@ -615,10 +615,10 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
+ 	    *\ -s*|s*) quiet="-q";; \
+ 	    *) quiet="";; \
+ 	esac; \
+-	echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \
++	echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \
+ 		_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
+ 		$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \
+-	$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \
++	$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \
+ 		_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
+ 		$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
+ 
diff --git a/packages/python/python-3.7.4+-distutils-C++-testsuite.patch b/packages/python/python-3.7.4+-distutils-C++-testsuite.patch
new file mode 100644
index 0000000000000000000000000000000000000000..99361087024d26c6145ebad6f6c49032910a51f7
--- /dev/null
+++ b/packages/python/python-3.7.4+-distutils-C++-testsuite.patch
@@ -0,0 +1,138 @@
+This patch updates the distutils test suite for:
+var/spack/repos/builtin/packages/python/python-3.7.4+-distutils-C++.patch
+
+That patch fixes several shortcomings in the distutils C++ support,
+most prominently missing support for passing CXXFLAGS from the environment.
+
+Since it does not update the distutils test suite, the test suite fails;
+this patch updates the test suite so that it passes.
+
+-----------------------------------------------------------------------------
+Spack changelog
+- Added patch header to aid understanding and maintenance of the patch
+- Updated the distutils test suite in Lib/distutils/tests/test_sysconfig.py
+
+-----------------------------------------------------------------------------
+Upstream status
+
+Upstream bug: https://bugs.python.org/issue1222585
+
+Status: Closed, won't fix; comment by Eric Araujo, Python Core Dev:
+"setuptools and other active build tools are the better target for this feature."
+https://bugs.python.org/issue1222585#msg379348
+
+But according to the last-but-one comment, setuptools seems to be lacking there too.
+https://bugs.python.org/issue1222585#msg371840
+
+-----------------------------------------------------------------------------
+Description
+
+distutils has no support for CXXFLAGS; this patch adds it.
+
+Upstream distutils instead requires all CXXFLAGS (C++-specific CFLAGS) to be
+passed as part of the CXX environment variable.
+
+This patch:
+- adds CXXFLAGS support
+- adds LDCXXSHARED like LDSHARED
+- passes cflags to CXX like it passes them to CC.
+
+The distutils test suite is updated accordingly so that the tests pass.
+Since it passes, it is the authoritative description of the current effects of this patch.
+
+See the update of the distutils test suite in Lib/distutils/tests/test_sysconfig.py
+below for details on the effect of this patch.
+
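+For illustration only (a minimal sketch, assuming a POSIX Python built with
+this patch applied; it is not part of the patch hunks themselves), the effect
+can be observed through distutils' public helpers:
+
+    import os
+    from distutils.ccompiler import new_compiler
+    from distutils.sysconfig import customize_compiler
+
+    os.environ['CXXFLAGS'] = '-O2'
+    os.environ['LDCXXSHARED'] = 'c++ -shared'
+    comp = new_compiler()              # UnixCCompiler on POSIX platforms
+    customize_compiler(comp)
+    # The *_cxx executables below only exist with this patch applied;
+    # they now carry the CXXFLAGS/LDCXXSHARED values set above.
+    print(comp.compiler_so_cxx)        # e.g. ['c++', <cflags...>, '-O2', <ccshared flags>]
+    print(comp.linker_so_cxx)          # e.g. ['c++', '-shared']
+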
+diff --git a/Lib/distutils/tests/test_sysconfig.py b/Lib/distutils/tests/test_sysconfig.py
+index db6674e..ccbe09a 100644
+--- a/Lib/distutils/tests/test_sysconfig.py
++++ b/Lib/distutils/tests/test_sysconfig.py
+@@ -89,8 +89,10 @@
+             'CXX': 'sc_cxx',
+             'ARFLAGS': '--sc-arflags',
+             'CFLAGS': '--sc-cflags',
++            'CXXFLAGS': '--sc-cxxflags',
+             'CCSHARED': '--sc-ccshared',
+             'LDSHARED': 'sc_ldshared',
++            'LDCXXSHARED': 'sc_ldshared_cxx',
+             'SHLIB_SUFFIX': 'sc_shutil_suffix',
+ 
+             # On macOS, disable _osx_support.customize_compiler()
+@@ -114,11 +116,13 @@
+         os.environ['AR'] = 'env_ar'
+         os.environ['CC'] = 'env_cc'
+         os.environ['CPP'] = 'env_cpp'
+-        os.environ['CXX'] = 'env_cxx --env-cxx-flags'
++        os.environ['CXX'] = 'env_cxx'
+         os.environ['LDSHARED'] = 'env_ldshared'
++        os.environ['LDCXXSHARED'] = 'env_ldshared_cxx'
+         os.environ['LDFLAGS'] = '--env-ldflags'
+         os.environ['ARFLAGS'] = '--env-arflags'
+         os.environ['CFLAGS'] = '--env-cflags'
++        os.environ['CXXFLAGS'] = '--env-cxxflags'
+         os.environ['CPPFLAGS'] = '--env-cppflags'
+ 
+         comp = self.customize_compiler()
+@@ -128,16 +132,24 @@
+                          'env_cpp --env-cppflags')
+         self.assertEqual(comp.exes['compiler'],
+                          'env_cc --sc-cflags --env-cflags --env-cppflags')
++        self.assertEqual(comp.exes['compiler_cxx'],
++                         'env_cxx --sc-cflags --env-cxxflags --env-cppflags')
+         self.assertEqual(comp.exes['compiler_so'],
+                          ('env_cc --sc-cflags '
+                           '--env-cflags ''--env-cppflags --sc-ccshared'))
+-        self.assertEqual(comp.exes['compiler_cxx'],
+-                         'env_cxx --env-cxx-flags')
++        self.assertEqual(comp.exes['compiler_so_cxx'],
++                         ('env_cxx --sc-cflags '
++                          '--env-cxxflags ''--env-cppflags --sc-ccshared'))
+         self.assertEqual(comp.exes['linker_exe'],
+                          'env_cc')
++        self.assertEqual(comp.exes['linker_exe_cxx'],
++                         'env_cxx')
+         self.assertEqual(comp.exes['linker_so'],
+                          ('env_ldshared --env-ldflags --env-cflags'
+                           ' --env-cppflags'))
++        self.assertEqual(comp.exes['linker_so_cxx'],
++                         ('env_ldshared_cxx --env-ldflags --env-cxxflags'
++                          ' --env-cppflags'))
+         self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix')
+ 
+         del os.environ['AR']
+@@ -145,9 +157,11 @@
+         del os.environ['CPP']
+         del os.environ['CXX']
+         del os.environ['LDSHARED']
++        del os.environ['LDCXXSHARED']
+         del os.environ['LDFLAGS']
+         del os.environ['ARFLAGS']
+         del os.environ['CFLAGS']
++        del os.environ['CXXFLAGS']
+         del os.environ['CPPFLAGS']
+ 
+         comp = self.customize_compiler()
+@@ -157,14 +171,21 @@
+                          'sc_cc -E')
+         self.assertEqual(comp.exes['compiler'],
+                          'sc_cc --sc-cflags')
++        # TODO: Likely this should get --sc-cxxflags instead:
++        self.assertEqual(comp.exes['compiler_cxx'],
++                         'sc_cxx --sc-cflags')
+         self.assertEqual(comp.exes['compiler_so'],
+                          'sc_cc --sc-cflags --sc-ccshared')
+-        self.assertEqual(comp.exes['compiler_cxx'],
+-                         'sc_cxx')
++        self.assertEqual(comp.exes['compiler_so_cxx'],
++                         'sc_cxx --sc-cflags --sc-ccshared')
+         self.assertEqual(comp.exes['linker_exe'],
+                          'sc_cc')
++        self.assertEqual(comp.exes['linker_exe_cxx'],
++                         'sc_cxx')
+         self.assertEqual(comp.exes['linker_so'],
+                          'sc_ldshared')
++        self.assertEqual(comp.exes['linker_so_cxx'],
++                         'sc_ldshared_cxx')
+         self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix')
+ 
+     def test_parse_makefile_base(self):
diff --git a/packages/python/python-3.7.4+-distutils-C++.patch b/packages/python/python-3.7.4+-distutils-C++.patch
new file mode 100644
index 0000000000000000000000000000000000000000..02daf0a11bfbb0b004812992f0bcb64db6a59380
--- /dev/null
+++ b/packages/python/python-3.7.4+-distutils-C++.patch
@@ -0,0 +1,257 @@
+diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py
+index db6674e..ccbe09a 100644
+--- a/Lib/_osx_support.py
++++ b/Lib/_osx_support.py
+@@ -14,13 +14,13 @@ __all__ = [
+ # configuration variables that may contain universal build flags,
+ # like "-arch" or "-isdkroot", that may need customization for
+ # the user environment
+-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
+-                            'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
+-                            'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
+-                            'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS')
++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS',
++                          'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED',
++                          'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS',
++                          'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS')
+ 
+ # configuration variables that may contain compiler calls
+-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')
++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX')
+ 
+ # prefix added to original configuration variable names
+ _INITPRE = '_OSX_SUPPORT_INITIAL_'
+diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py
+index 6c5d777..640fa2d 100644
+--- a/Lib/distutils/cygwinccompiler.py
++++ b/Lib/distutils/cygwinccompiler.py
+@@ -125,8 +125,10 @@ class CygwinCCompiler(UnixCCompiler):
+         # dllwrap 2.10.90 is buggy
+         if self.ld_version >= "2.10.90":
+             self.linker_dll = "gcc"
++            self.linker_dll_cxx = "g++"
+         else:
+             self.linker_dll = "dllwrap"
++            self.linker_dll_cxx = "dllwrap"
+ 
+         # ld_version >= "2.13" support -shared so use it instead of
+         # -mdll -static
+@@ -140,9 +142,13 @@ class CygwinCCompiler(UnixCCompiler):
+         self.set_executables(compiler='gcc -mcygwin -O -Wall',
+                              compiler_so='gcc -mcygwin -mdll -O -Wall',
+                              compiler_cxx='g++ -mcygwin -O -Wall',
++                             compiler_so_cxx='g++ -mcygwin -mdll -O -Wall',
+                              linker_exe='gcc -mcygwin',
+                              linker_so=('%s -mcygwin %s' %
+-                                        (self.linker_dll, shared_option)))
++                                        (self.linker_dll, shared_option)),
++                             linker_exe_cxx='g++ -mcygwin',
++                             linker_so_cxx=('%s -mcygwin %s' %
++                                            (self.linker_dll_cxx, shared_option)))
+ 
+         # cygwin and mingw32 need different sets of libraries
+         if self.gcc_version == "2.91.57":
+@@ -166,8 +172,12 @@ class CygwinCCompiler(UnixCCompiler):
+                 raise CompileError(msg)
+         else: # for other files use the C-compiler
+             try:
+-                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+-                           extra_postargs)
++                if self.detect_language(src) == 'c++':
++                    self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] +
++                               extra_postargs)
++                else:
++                    self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
++                               extra_postargs)
+             except DistutilsExecError as msg:
+                 raise CompileError(msg)
+ 
+@@ -302,9 +312,14 @@ class Mingw32CCompiler(CygwinCCompiler):
+         self.set_executables(compiler='gcc -O -Wall',
+                              compiler_so='gcc -mdll -O -Wall',
+                              compiler_cxx='g++ -O -Wall',
++                             compiler_so_cxx='g++ -mdll -O -Wall',
+                              linker_exe='gcc',
+                              linker_so='%s %s %s'
+                                         % (self.linker_dll, shared_option,
++                                           entry_point),
++                             linker_exe_cxx='g++',
++                             linker_so_cxx='%s %s %s'
++                                        % (self.linker_dll_cxx, shared_option,
+                                            entry_point))
+         # Maybe we should also append -mthreads, but then the finished
+         # dlls need another dll (mingwm10.dll see Mingw32 docs)
+diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
+index 0a034ee..ecf4759 100644
+--- a/Lib/distutils/sysconfig.py
++++ b/Lib/distutils/sysconfig.py
+@@ -188,9 +188,11 @@ def customize_compiler(compiler):
+                 _osx_support.customize_compiler(_config_vars)
+                 _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
+ 
+-        (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
+-            get_config_vars('CC', 'CXX', 'CFLAGS',
+-                            'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
++        (cc, cxx, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \
++            get_config_vars('CC', 'CXX', 'CFLAGS', 'CCSHARED', 'LDSHARED', 'LDCXXSHARED',
++                            'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
++
++        cxxflags = cflags
+ 
+         if 'CC' in os.environ:
+             newcc = os.environ['CC']
+@@ -205,19 +207,27 @@ def customize_compiler(compiler):
+             cxx = os.environ['CXX']
+         if 'LDSHARED' in os.environ:
+             ldshared = os.environ['LDSHARED']
++        if 'LDCXXSHARED' in os.environ:
++            ldcxxshared = os.environ['LDCXXSHARED']
+         if 'CPP' in os.environ:
+             cpp = os.environ['CPP']
+         else:
+             cpp = cc + " -E"           # not always
+         if 'LDFLAGS' in os.environ:
+             ldshared = ldshared + ' ' + os.environ['LDFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS']
+         if 'CFLAGS' in os.environ:
+             cflags = cflags + ' ' + os.environ['CFLAGS']
+             ldshared = ldshared + ' ' + os.environ['CFLAGS']
++        if 'CXXFLAGS' in os.environ:
++            cxxflags = cxxflags + ' ' + os.environ['CXXFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS']
+         if 'CPPFLAGS' in os.environ:
+             cpp = cpp + ' ' + os.environ['CPPFLAGS']
+             cflags = cflags + ' ' + os.environ['CPPFLAGS']
++            cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS']
+             ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
++            ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS']
+         if 'AR' in os.environ:
+             ar = os.environ['AR']
+         if 'ARFLAGS' in os.environ:
+@@ -226,13 +236,17 @@ def customize_compiler(compiler):
+             archiver = ar + ' ' + ar_flags
+ 
+         cc_cmd = cc + ' ' + cflags
++        cxx_cmd = cxx + ' ' + cxxflags
+         compiler.set_executables(
+             preprocessor=cpp,
+             compiler=cc_cmd,
+             compiler_so=cc_cmd + ' ' + ccshared,
+-            compiler_cxx=cxx,
++            compiler_cxx=cxx_cmd,
++            compiler_so_cxx=cxx_cmd + ' ' + ccshared,
+             linker_so=ldshared,
+             linker_exe=cc,
++            linker_so_cxx=ldcxxshared,
++            linker_exe_cxx=cxx,
+             archiver=archiver)
+ 
+         compiler.shared_lib_extension = shlib_suffix
+diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py
+index d10a78d..7e88781 100644
+--- a/Lib/distutils/unixccompiler.py
++++ b/Lib/distutils/unixccompiler.py
+@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler):
+     # are pretty generic; they will probably have to be set by an outsider
+     # (eg. using information discovered by the sysconfig about building
+     # Python extensions).
+-    executables = {'preprocessor' : None,
+-                   'compiler'     : ["cc"],
+-                   'compiler_so'  : ["cc"],
+-                   'compiler_cxx' : ["cc"],
+-                   'linker_so'    : ["cc", "-shared"],
+-                   'linker_exe'   : ["cc"],
+-                   'archiver'     : ["ar", "-cr"],
+-                   'ranlib'       : None,
++    executables = {'preprocessor'    : None,
++                   'compiler'        : ["cc"],
++                   'compiler_so'     : ["cc"],
++                   'compiler_cxx'    : ["c++"],
++                   'compiler_so_cxx' : ["c++"],
++                   'linker_so'       : ["cc", "-shared"],
++                   'linker_exe'      : ["cc"],
++                   'linker_so_cxx'   : ["c++", "-shared"],
++                   'linker_exe_cxx'  : ["c++"],
++                   'archiver'        : ["ar", "-cr"],
++                   'ranlib'          : None,
+                   }
+ 
+     if sys.platform[:6] == "darwin":
+@@ -110,12 +113,19 @@ class UnixCCompiler(CCompiler):
+ 
+     def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+         compiler_so = self.compiler_so
++        compiler_so_cxx = self.compiler_so_cxx
+         if sys.platform == 'darwin':
+             compiler_so = _osx_support.compiler_fixup(compiler_so,
+                                                     cc_args + extra_postargs)
++            compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx,
++                                                    cc_args + extra_postargs)
+         try:
+-            self.spawn(compiler_so + cc_args + [src, '-o', obj] +
+-                       extra_postargs)
++            if self.detect_language(src) == 'c++':
++                self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] +
++                           extra_postargs)
++            else:
++                self.spawn(compiler_so + cc_args + [src, '-o', obj] +
++                           extra_postargs)
+         except DistutilsExecError as msg:
+             raise CompileError(msg)
+ 
+@@ -173,30 +183,16 @@ class UnixCCompiler(CCompiler):
+                 ld_args.extend(extra_postargs)
+             self.mkpath(os.path.dirname(output_filename))
+             try:
+-                if target_desc == CCompiler.EXECUTABLE:
+-                    linker = self.linker_exe[:]
++                if target_lang == "c++":
++                    if target_desc == CCompiler.EXECUTABLE:
++                        linker = self.linker_exe_cxx[:]
++                    else:
++                        linker = self.linker_so_cxx[:]
+                 else:
+-                    linker = self.linker_so[:]
+-                if target_lang == "c++" and self.compiler_cxx:
+-                    # skip over environment variable settings if /usr/bin/env
+-                    # is used to set up the linker's environment.
+-                    # This is needed on OSX. Note: this assumes that the
+-                    # normal and C++ compiler have the same environment
+-                    # settings.
+-                    i = 0
+-                    if os.path.basename(linker[0]) == "env":
+-                        i = 1
+-                        while '=' in linker[i]:
+-                            i += 1
+-
+-                    if os.path.basename(linker[i]) == 'ld_so_aix':
+-                        # AIX platforms prefix the compiler with the ld_so_aix
+-                        # script, so we need to adjust our linker index
+-                        offset = 1
++                    if target_desc == CCompiler.EXECUTABLE:
++                        linker = self.linker_exe[:]
+                     else:
+-                        offset = 0
+-
+-                    linker[i+offset] = self.compiler_cxx[i]
++                        linker = self.linker_so[:]
+ 
+                 if sys.platform == 'darwin':
+                     linker = _osx_support.compiler_fixup(linker, ld_args)
+diff --git a/Makefile.pre.in b/Makefile.pre.in
+index 35ca1a8..cfa79df 100644
+--- a/Makefile.pre.in
++++ b/Makefile.pre.in
+@@ -618,10 +618,10 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
+ 	    *\ -s*|s*) quiet="-q";; \
+ 	    *) quiet="";; \
+ 	esac; \
+-	echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \
++	echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \
+ 		_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
+ 		$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \
+-	$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \
++	$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \
+ 		_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
+ 		$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
+ 
diff --git a/packages/python/rpath-non-gcc.patch b/packages/python/rpath-non-gcc.patch
new file mode 100644
index 0000000000000000000000000000000000000000..f203bbbaa5f2866561b78ec996d82f63e750227a
--- /dev/null
+++ b/packages/python/rpath-non-gcc.patch
@@ -0,0 +1,15 @@
+--- a/Lib/distutils/unixccompiler.py	2009-05-09 21:55:12.000000000 +1000
++++ b/Lib/distutils/unixccompiler.py	2017-05-13 14:30:18.077518999 +1000
+@@ -299,10 +299,8 @@
+                 else:
+                     return "-Wl,-R" + dir
+             else:
+-                # No idea how --enable-new-dtags would be passed on to
+-                # ld if this system was using GNU ld.  Don't know if a
+-                # system like this even exists.
+-                return "-R" + dir
++                # Patched by spack to use gcc syntax by default:
++                return "-Wl,-R" + dir
+ 
+     def library_option(self, lib):
+         return "-l" + lib
diff --git a/packages/python/tkinter-3.10.patch b/packages/python/tkinter-3.10.patch
new file mode 100644
index 0000000000000000000000000000000000000000..e06be826b2ee203db1d33410799d4eff09030a5a
--- /dev/null
+++ b/packages/python/tkinter-3.10.patch
@@ -0,0 +1,11 @@
+--- a/setup.py	2021-12-06 12:23:39.000000000 -0600
++++ b/setup.py	2021-12-14 10:30:33.000000000 -0600
+@@ -2099,6 +2099,8 @@
+         #
+         # Detection stops at the first successful method.
+ 
++        return False
++
+         # Check for Tcl and Tk at the locations indicated by _TCLTK_INCLUDES
+         # and _TCLTK_LIBS environment variables.
+         if self.detect_tkinter_fromenv():
diff --git a/packages/python/tkinter-3.11.patch b/packages/python/tkinter-3.11.patch
new file mode 100644
index 0000000000000000000000000000000000000000..fe2d54bd43cc8020c582d0b9f2a61a1b67208794
--- /dev/null
+++ b/packages/python/tkinter-3.11.patch
@@ -0,0 +1,25 @@
+From a49e95e44961a0b6703ef9cb577d2ae5334c4a62 Mon Sep 17 00:00:00 2001
+From: Harmen Stoppels <harmenstoppels@gmail.com>
+Date: Thu, 3 Nov 2022 13:54:00 +0100
+Subject: [PATCH] disable tkinter explicitly
+
+---
+ setup.py | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/setup.py b/setup.py
+index 15d0d45..642adb3 100644
+--- a/setup.py
++++ b/setup.py
+@@ -1358,7 +1358,7 @@ class PyBuildExt(build_ext):
+         self.detect_decimal()
+         self.detect_ctypes()
+         self.detect_multiprocessing()
+-        self.detect_tkinter()
++        # self.detect_tkinter()
+         self.detect_uuid()
+ 
+         # Uncomment the next line if you want to play with xxmodule.c
+-- 
+2.38.1
+
diff --git a/packages/python/tkinter-3.7.patch b/packages/python/tkinter-3.7.patch
new file mode 100644
index 0000000000000000000000000000000000000000..87e19018077b8739eab81306617d06826a236ce5
--- /dev/null
+++ b/packages/python/tkinter-3.7.patch
@@ -0,0 +1,17 @@
+diff -Naur a/setup.py b/setup.py
+--- a/setup.py	2019-01-13 18:59:14.000000000 -0600
++++ b/setup.py	2019-01-13 19:00:31.000000000 -0600
+@@ -1787,13 +1787,6 @@
+         if self.detect_tkinter_explicitly():
+             return
+ 
+-        # Rather than complicate the code below, detecting and building
+-        # AquaTk is a separate method. Only one Tkinter will be built on
+-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
+-        if (host_platform == 'darwin' and
+-            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
+-            return
+-
+         # Assume we haven't found any of the libraries or include files
+         # The versions with dots are used on Unix, and the versions without
+         # dots on Windows, for detection by cygwin.
diff --git a/packages/python/tkinter-3.8.patch b/packages/python/tkinter-3.8.patch
new file mode 100644
index 0000000000000000000000000000000000000000..a1fc5729aaa3327b794d3223f6311cc8befcd02d
--- /dev/null
+++ b/packages/python/tkinter-3.8.patch
@@ -0,0 +1,12 @@
+diff -Naur a/setup.py b/setup.py
+--- a/setup.py.orig	2021-09-29 21:28:23.000000000 -0400
++++ a/setup.py	2021-09-29 21:28:44.000000000 -0400
+@@ -1826,6 +1826,8 @@
+     def detect_tkinter(self):
+         # The _tkinter module.
+
++        return False
++
+         # Check whether --with-tcltk-includes and --with-tcltk-libs were
+         # configured or passed into the make target.  If so, use these values
+         # to build tkinter and bypass the searches for Tcl and TK in standard
diff --git a/packages/wf-bsb/package.py b/packages/wf-bsb/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e843818fe7a321b2c9d699c07a44524e2bf0e4b
--- /dev/null
+++ b/packages/wf-bsb/package.py
@@ -0,0 +1,29 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class WfBsb(BundlePackage):
+    """Meta-package to collect all dependencies for the BSB."""
+
+    homepage = "https://github.com/dbbs-lab/bsb"
+
+    maintainers = ["helveg", "filimarc", "drodarie"]
+
+    version("4.4")
+
+    variant('nest', default=False,
+            description='Build with NEST interface')
+    variant('neuron', default=False,
+            description='Build with NEURON interface')
+
+    depends_on("py-bsb-core@5.0.2:")
+    depends_on("py-bsb-hdf5@5.0.2:")
+    depends_on("py-bsb-json@4.2.2:")
+    depends_on("py-bsb-yaml@4.2.2:")
+
+    depends_on("py-bsb-nest",when="+nest")
+    depends_on("py-bsb-neuron",when="+neuron")
diff --git a/packages/wf-human-multi-area-model/package.py b/packages/wf-human-multi-area-model/package.py
index 7cafb2c150b15adbba1473d2b4c2af9058879952..5bcefe0d68f89a317a81109d36b88b26caa5cdb1 100644
--- a/packages/wf-human-multi-area-model/package.py
+++ b/packages/wf-human-multi-area-model/package.py
@@ -28,7 +28,7 @@ class WfHumanMultiAreaModel(Package):
     depends_on("py-nnmt@1.3.0:", type=("run", "test"))
     depends_on("py-dicthash@0.0.1:", type=("run", "test"))
     depends_on("py-networkx@3.1:", type=("run", "test"))
-    depends_on("py-notebook@6.5.4:", type=("run", "test"))
+    depends_on("py-notebook@6.4:", type=("run", "test"))
     depends_on("py-future@0.18.2:", type=("run", "test"))
 
     def install(self, spec, prefix):
diff --git a/site-config/ebrainslab/packages.yaml b/site-config/ebrainslab/packages.yaml
index 44e6777d5248befff4998eb3ffa1acb4bcd38878..ceda3371cbb490c56237061337066ccc0259f7fd 100644
--- a/site-config/ebrainslab/packages.yaml
+++ b/site-config/ebrainslab/packages.yaml
@@ -2,10 +2,10 @@ packages:
     all:
         # collab-specific settings
         target: [x86_64]
-        compiler: [gcc@13]
+        compiler: [gcc@13.3.0]
     python:
         # collab-specific settings
-        require: "@3.11"
+        require: "@3.11.10+tkinter"
     r:
         # EM: +X fixes build for collab
         require: "@4.3.3+X"
diff --git a/spack.yaml b/spack.yaml
index 99226cda88406ea35e90564cf8b964eca17bad63..e278c469b6a9fd72346c42ce00c6332f55c0f070 100644
--- a/spack.yaml
+++ b/spack.yaml
@@ -3,9 +3,9 @@ spack:
     - site-config/$SYSTEMNAME
   specs:
     # EBRAINS tools
-    - arbor@0.9.0 +python +mpi
-    - py-biobb-analysis@4.0.1
-    - py-biobb-chemistry@4.0.0
+    - arbor@0.10.0 +python +mpi
+#    - py-biobb-analysis@4.0.1
+#    - py-biobb-chemistry@4.0.0
     - py-biobb-common@4.1.0
     - py-biobb-gromacs@4.1.1
     - py-biobb-io@4.1.0
@@ -19,12 +19,17 @@ spack:
     - py-bluepyefe@2.3.6
     - py-bluepymm@0.8.7
     - py-bluepyopt@1.14.11
-    - py-bsb@4.0.0a57
+    - py-bsb-core@5.0.2
+    - py-bsb-hdf5@5.0.2
+    - py-bsb-yaml@4.2.2
+    - py-bsb-json@4.2.2
+    - py-bsb-nest@4.3.2
+    - py-bsb-neuron@4.2.2
     - py-ebrains-drive@0.6.0
     - py-ebrains-kg-core@0.9.15
     - py-ebrains-validation-client@0.9.1
     - py-efel@5.2.0
-    - py-elephant@1.1.0
+    - py-elephant@1.1.1
     - py-fairgraph@0.12.1
     - py-frites@0.4.4
     - py-hbp-archive@1.1.1
@@ -56,15 +61,17 @@ spack:
     - py-tvb-ext-unicore
     - py-tvb-ext-xircuits@1.1.0
     - py-viziphant@0.4.0
+    - py-vbi
     - pynn-brainscales@9.0-a8
     - r-rgsl@0.1.1
     - r-sbtabvfgen@0.1
     - r-uqsa@2.2
-    - sda@7.3.3d
+#    - sda@7.3.3d
     # Workflows (meta-packages)
     - wf-biobb
     - wf-brainscales2-demos@9.0-a8
-    - wf-protein-association-rates@0.1
+    - wf-bsb@4.4 +nest +neuron
+#    - wf-protein-association-rates@0.1
     - wf-multi-area-model@1.2.0
     - wf-human-multi-area-model@2.0.1
     - wf-uq-akar4@0.1
diff --git a/vendor/spack b/vendor/spack
index 65abf4d14071280c6d4a183e20c0f6991ed49986..712b36d5963a179615bf72c48e90acff3cf4f6b9 160000
--- a/vendor/spack
+++ b/vendor/spack
@@ -1 +1 @@
-Subproject commit 65abf4d14071280c6d4a183e20c0f6991ed49986
+Subproject commit 712b36d5963a179615bf72c48e90acff3cf4f6b9