Compare revisions

Changes are shown as if the source revision was being merged into the target revision.


Commits on Source: 182 (82 additional commits have been omitted to prevent performance issues)

154 files changed: +8657 −452

Files

+46 −6
@@ -4,10 +4,12 @@ stages:

variables:
  BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:devel
  RUN_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/clb-jupyter-image/ebrains:dev-fc31f010
  SPACK_PATH_GITLAB: /mnt/spack_v0.23.1
  SYSTEMNAME: ebrainslab
  GIT_SUBMODULE_STRATEGY: recursive
  GIT_CLEAN_FLAGS: -ffdxq
  RUNNER_AFTER_SCRIPT_TIMEOUT: 20m

# ===================================================================
# LAB DEPLOYMENTS
@@ -47,6 +49,15 @@ variables:
    - if [ $(kubectl get pods -l job-name=simplejob${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.containerStatuses[0].state.terminated.exitCode}') -ne 0 ]; then exit 1; fi;
    # delete the job, as we have the logs here
    - kubectl delete job simplejob${CI_PIPELINE_ID} || true
  after_script:
    - kubectl config use-context $KUBE_CONTEXT
    - sh create_job_widget_script.sh $CI_PIPELINE_ID $RUN_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_ENV $RELEASE_NAME $LAB_KERNEL_ROOT
    - cat widget-script.yml
    - kubectl create -f widget-script.yml
    - while true; do sleep 300; x=$(kubectl get pods -l job-name=widget-script${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.phase}'); if [ $x != "Running" ]; then break; fi; done
    - kubectl logs jobs/widget-script${CI_PIPELINE_ID} | tee log.txt
    - if [ $(kubectl get pods -l job-name=widget-script${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.containerStatuses[0].state.terminated.exitCode}') -ne 0 ]; then exit 1; fi;
    - kubectl delete job widget-script${CI_PIPELINE_ID} || true
  # artifacts:
  #   paths:
  #     - spack_logs
@@ -97,12 +108,24 @@ variables:
# -------------------------------------------------------------------

# deploy int release (latest changes) to dev env to be tested before release to production
# (the master branch and any branch starting with "lab-" are deployed to a dedicated kernel)
.deploy-int-release:
  variables:
    SPACK_ENV: test
    RELEASE_NAME: EBRAINS-test
    SPACK_ENV: $CI_COMMIT_BRANCH
    RELEASE_NAME: $CI_COMMIT_BRANCH
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE == "push"
    - if: '($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH =~ /^lab-/) && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE == "push"'

# deploy a pre-production environment first, to avoid directly modifying the experimental or official release environments
.deploy-ppd-release:
  variables:
    SPACK_ENV: ppd
    RELEASE_NAME: EBRAINS-ppd
  allow_failure: false
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"  && $DEPLOYMENT == "prod"
    - if: $CI_COMMIT_BRANCH =~ /^ebrains/
      when: manual

# deploy the experimental release of tools once a week from latest working version of int release 
.deploy-exp-release:
@@ -125,8 +148,8 @@ variables:
# deploy the production release of tools
.deploy-prod-release:
  variables:
    SPACK_ENV: ebrains-24-04
    RELEASE_NAME: EBRAINS-24.04
    SPACK_ENV: ebrains-25-02
    RELEASE_NAME: EBRAINS-25.02
  rules:
    - if: $CI_COMMIT_BRANCH =~ /^ebrains/
      when: manual
@@ -147,26 +170,42 @@ deploy-exp-release-dev-cineca:
    - .deploy-exp-dev-release
    - .deploy-dev-server-cineca

# deploy ppd release to prod environment at JSC
deploy-ppd-release-prod-jsc:
  extends:
    - .deploy-ppd-release
    - .deploy-prod-server-jsc

# deploy ppd release to prod environment at CINECA
deploy-ppd-release-prod-cineca:
  extends:
    - .deploy-ppd-release
    - .deploy-prod-server-cineca

# deploy exp release to prod environment at JSC
deploy-exp-release-prod-jsc:
  needs: [deploy-ppd-release-prod-jsc]
  extends:
    - .deploy-exp-prod-release
    - .deploy-prod-server-jsc

# deploy exp release to prod environment at CINECA
deploy-exp-release-prod-cineca:
  needs: [deploy-ppd-release-prod-cineca]
  extends:
    - .deploy-exp-prod-release
    - .deploy-prod-server-cineca

# deploy prod release to prod environment at JSC
deploy-prod-release-prod-jsc:
  needs: [deploy-ppd-release-prod-jsc]
  extends:
    - .deploy-prod-release
    - .deploy-prod-server-jsc

# deploy prod release to prod environment at CINECA
deploy-prod-release-prod-cineca:
  needs: [deploy-ppd-release-prod-cineca]
  extends:
    - .deploy-prod-release
    - .deploy-prod-server-cineca
@@ -234,7 +273,8 @@ sync-esd-image:
    # run installation script inside future container environment
    #   => DAG concretization, subsequent cache access + fetching and actual build should be separate steps
    - mkdir --mode=0777 -p ${SANDBOX_ROOT}/${INSTALLATION_ROOT}
    - apptainer exec --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} bash ./install_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
    - export APPTAINERENV_SYSTEMNAME=$SYSTEMNAME
    - apptainer exec --containall --bind /tmp:/tmp --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} bash ./install_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
    - echo "export SYSTEMNAME=${SYSTEMNAME}" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
    - echo ". ${INSTALLATION_ROOT}/vendor/spack/var/spack/environments/${CI_SPACK_ENV}/load_env.sh" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
    # preparing to assemble the image: move in the CI project contents...
@@ -9,8 +9,9 @@

### Checks

<!-- please download the EBRAINS Software Quality checklist, fill it and replace the file below -->
* [ ] Software Quality Checklist: [SQ-Checklist.pdf](https://drive.ebrains.eu/d/6061531326d048308823/files/?p=%2FSQ-Checklist.pdf) (level: <!-- passing, silver, gold -->)
<!-- please read the EBRAINS Software Quality Guidelines and fill out the checklist, and attach/replace the file below -->
* [ ] Software Follows [EBRAINS Software Quality Guidelines](https://drive.ebrains.eu/d/6061531326d048308823/files/?p=%2FSQ-Guideline.pdf) and reaches level: <!-- passing, silver, gold -->
   > *fill the [SQ-Checklist.pdf](https://drive.ebrains.eu/d/6061531326d048308823/files/?p=%2FSQ-Checklist.pdf) and attach below*
* [ ] Current maintainer is listed first in `package.py`
* [ ] No pinned dependency versions
* [ ] Post-installation tests are defined
 No newline at end of file
@@ -10,8 +10,9 @@

### Checks

<!-- please download the EBRAINS Software Quality checklist, fill it and replace the file below -->
* [ ] Software Quality Checklist: [SQ-Checklist.pdf](https://drive.ebrains.eu/d/6061531326d048308823/files/?p=%2FSQ-Checklist.pdf) (level: <!-- passing, silver, gold -->)
<!-- please read the EBRAINS Software Quality Guidelines and fill out the checklist, and attach/replace the file below -->
* [ ] Software Follows [EBRAINS Software Quality Guidelines](https://drive.ebrains.eu/d/6061531326d048308823/files/?p=%2FSQ-Guideline.pdf) and reaches level: <!-- passing, silver, gold -->
   > *fill the [SQ-Checklist.pdf](https://drive.ebrains.eu/d/6061531326d048308823/files/?p=%2FSQ-Checklist.pdf) and attach below*
* [ ] Current maintainer is listed first in `package.py`
* [ ] No pinned dependency versions
* [ ] Post-installation tests are defined
 No newline at end of file
@@ -37,7 +37,7 @@ cp $INSTALLATION_ROOT/spack/var/spack/environments/$EBRAINS_SPACK_ENV/load_env.s
# and the location of python modules installed in the base docker Collab image
cat <<EOF >> $KERNEL_PATH/bin/env.sh
export PATH=\$PATH:/opt/app-root/src/.local/bin
export PYTHONPATH=\$PYTHONPATH:/opt/app-root/src/.local/lib/python3.8/site-packages:/usr/local/lib/python3.8/dist-packages
export PYTHONPATH=\$PYTHONPATH:/opt/conda/lib/python3.11/site-packages
export R_LIBS_USER=/opt/app-root/src/.local/lib/R/site-library
mkdir -p \$R_LIBS_USER
export R_LIBS=\$R_LIBS_USER:\$R_LIBS
+4 −9
@@ -38,9 +38,11 @@ spec:
          limits:
            cpu: '8'
            memory: '32Gi'
            ephemeral-storage: '20Gi'
          requests:
            cpu: '4'
            cpu: '3'
            memory: '20Gi'
            ephemeral-storage: '256Mi'
        volumeMounts:
          - name: sharedbin
            mountPath: /srv
@@ -105,13 +107,6 @@ spec:
          persistentVolumeClaim:
            claimName: shared-binaries
        - name: tmp
          ephemeral:
            volumeClaimTemplate:
              spec:
                accessModes: [ "ReadWriteMany" ]
                storageClassName: "longhorn-0"
                resources:
                  requests:
                    storage: 50Gi
          emptyDir: {}
      restartPolicy: Never
EOT
+68 −0
#!/bin/bash

# ===========================================================================================================
# title         : create_job_widget_script.sh
# usage         : ./create_job_widget_script.sh $OC_JOB_ID $RUN_ENV_DOCKER_IMAGE $INSTALLATION_ROOT
#                 $SPACK_ENV $RELEASE_NAME $LAB_KERNEL_ROOT
# description   : creates a job file that generates a script to load the jupyter extensions for a given env
# ===========================================================================================================

OC_JOB_ID=$1
RUN_ENV_DOCKER_IMAGE=$2
INSTALLATION_ROOT=$3
EBRAINS_SPACK_ENV=$4
RELEASE_NAME=$5
LAB_KERNEL_ROOT=$6

cat <<EOT >> widget-script.yml
apiVersion: batch/v1
kind: Job
metadata:
  name: widget-script${OC_JOB_ID}
spec:
  parallelism: 1
  completions: 1
  backoffLimit: 0
  template:
    spec:
      containers:
      - name: widget-script
        image: ${RUN_ENV_DOCKER_IMAGE}
        imagePullPolicy: Always
        resources:
          limits:
            cpu: '1'
            memory: '1Gi'
          requests:
            cpu: '0.5'
            memory: '500Mi'
        volumeMounts:
          - name: sharedbin
            mountPath: /srv
        command:
        - /bin/bash
        - -c
        - |
          . \$INSTALLATION_ROOT/spack/share/spack/setup-env.sh
          spack env activate --without-view \$EBRAINS_SPACK_ENV
          KERNEL_PATH=\$LAB_KERNEL_ROOT/\$(echo "\$RELEASE_NAME" | tr '[:upper:]' '[:lower:]')
          spack load --sh --first clb-nb-utils py-pip py-tvb-ext-bucket py-tvb-ext-unicore py-tvb-ext-xircuits > \$KERNEL_PATH/bin/widget_activation.sh
        env:
          - name: SYSTEMNAME
            value: ebrainslab
          - name: SPACK_DISABLE_LOCAL_CONFIG
            value: "true"
          - name: INSTALLATION_ROOT
            value: "$INSTALLATION_ROOT"
          - name: EBRAINS_SPACK_ENV
            value: "$EBRAINS_SPACK_ENV"
          - name: RELEASE_NAME
            value: "$RELEASE_NAME"
          - name: LAB_KERNEL_ROOT
            value: "$LAB_KERNEL_ROOT"
      volumes:
        - name: sharedbin
          persistentVolumeClaim:
            claimName: shared-binaries
      restartPolicy: Never
EOT
@@ -27,6 +27,10 @@ export SPACK_USER_CACHE_PATH=/tmp/spack

# define SYSTEMNAME variable in sites where it's not already defined
export SYSTEMNAME=${SYSTEMNAME:-${HPC_SYSTEM:-$BSC_MACHINE}}
if [ -z "${SYSTEMNAME}" ]; then
    echo "Could not derive a SYSTEMNAME (none provided)"
    exit 1
fi

# cache related variables
export CACHE_SPECFILE=${CACHE_SPECFILE:-"env_specfile.yaml"}
@@ -96,9 +100,6 @@ cp /tmp/spack.yaml ${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV/
# activate environment
spack env activate --without-view $EBRAINS_SPACK_ENV

# deactivate view during concretization and installation
spack env view disable

spack concretize --force --fresh --test root

# dump dag to file
@@ -133,9 +134,9 @@ spack-python -c "exit(not len(spack.environment.active_environment().uninstalled
    else
        echo "Updating of the source cache disabled."
    fi
    if [ "$ret" -ne 0 ]; then
        (exit $ret)
    fi
    # if [ "$ret" -ne 0 ]; then
    #     (exit $ret)
    # fi
)

if [ -n "${OCI_CACHE_PREFIX}" ]; then
@@ -147,10 +148,6 @@ fi
# delay exit code until we have updated the cache below
spack install --no-check-signature -y -j$SPACK_JOBS --fresh --test root && spack_install_ret=$? || spack_install_ret=$?

# re-enable view
spack env view enable
spack env view regenerate

# no need to update the local cache nor the remote cache if we don't want to update
if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then
    # push previously missing (but now installed) packages to the local cache
@@ -59,6 +59,9 @@ class Apbs(CMakePackage):
        # add suite-sparse libs to path because tests can't find them
        env.prepend_path('LD_LIBRARY_PATH', self.spec['suite-sparse'].prefix.lib)
        env.prepend_path('LD_LIBRARY_PATH', self.spec['blas'].prefix.lib)
        if "%gcc@14:" in self.spec:
            env.append_flags("CFLAGS", "-Wno-implicit-int")
            env.append_flags("CFLAGS", "-Wno-incompatible-pointer-types")

    def setup_dependent_build_environment(self, env, dependent_spec):
        self.setup_build_environment(env)
@@ -84,7 +84,7 @@ class Bazel(Package):
    # end EBRAINS

    # https://bazel.build/install/compile-source#bootstrap-unix-prereq
    depends_on("java@11", when="@5.3:", type=("build", "run"))
    depends_on("java@11:17", when="@5.3:", type=("build", "run"))
    depends_on("java@8,11", when="@3.3:5.2", type=("build", "run"))
    depends_on("java@8", when="@0.6:3.2", type=("build", "run"))
    depends_on("python+pythoncmd", type=("build", "run"))
@@ -132,6 +132,15 @@ class Bazel(Package):
        sha256="85dde31d129bbd31e004c5c87f23cdda9295fbb22946dc6d362f23d83bae1fd8",
        when="@6.0:6.4",
    )

    # begin EBRAINS (added): fix https://github.com/bazelbuild/bazel/issues/18961
    patch(
        "https://github.com/bazelbuild/bazel/commit/af50ad37eda9173f6c1ffe50c8522b67f70baf79.patch?full_index=1",
        sha256="b5e7e0122975bdd3daed478e561e5694620b52dee32b94d097692e990692776f",
        when="@5.3:6^java@17",
    )
    # end EBRAINS

    conflicts("%gcc@13:", when="@:5")

    # Patches for compiling various older bazels which had ICWYU violations revealed by
@@ -15,6 +15,48 @@ import spack.build_environment
class BuildBrainscales(WafPackage):
    """Common stuff for BrainScaleS packages..."""

    version(
        "11.0-a6",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a6",
        commit="b9cd2c0f5362167f056aebf8f7f1ff04f262517a",
        submodules=True,
    )
    version(
        "11.0-a5",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a5",
        commit="1e84308e0abbda45ab2d3a6ad5b8e27846df1290",
        submodules=True,
    )
    version(
        "11.0-a4",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a4",
        commit="fd3137c6bea71f1820c48999796545e59f9f14b7",
        submodules=True,
    )
    version(
        "11.0-a3",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a3",
        commit="4f27672655f57f41d13b44520e315398887d29ff",
        submodules=True,
    )
    version(
        "11.0-a2",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a2",
        commit="65abdc96d737415af853a524e8c5177381a7845c",
        submodules=True,
    )
    version(
        "11.0-a1",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a1",
        commit="1bf0f6d3c7da681d3db9401bfe0681f95d0baed1",
        submodules=True,
    )
    version(
        "10.0-a1",
        git="https://github.com/electronicvisions/releases-ebrains",
@@ -66,7 +108,8 @@ class BuildBrainscales(WafPackage):
    )

    # common dependencies of BuildBrainscales-derived packages
    depends_on('oppulance@9.0-a9', when='@10.0-a1', type=('build', 'link', 'run', 'test')) # keep the old one for now
    depends_on('oppulance@11.0-a5:', when='@11.0-a5:', type=('build', 'link', 'run', 'test'))
    depends_on('oppulance@9.0-a9:', when='@10.0-a1:', type=('build', 'link', 'run', 'test'))
    depends_on('oppulance@9.0-a9', when='@9.0-a9', type=('build', 'link', 'run', 'test'))
    depends_on('oppulance@9.0-a8', when='@9.0-a8', type=('build', 'link', 'run', 'test'))
    depends_on('oppulance@9.0-a7', when='@9.0-a7', type=('build', 'link', 'run', 'test'))
from spack.package import *
import os

class BuildJupyterNotebook(Package):

    depends_on('py-notebook', type='test')

    def execute_notebook(self, input_nb, output_nb, timeout=None):
        """
        Execute a Jupyter notebook and save the executed copy.

        Args:
            input_nb (str): Path to the input notebook.
            output_nb (str): Path to the executed notebook.
            timeout (int | None): Cell execution timeout in seconds. If None, no timeout.
        """
        jupyter = Executable("jupyter")
        cmd = [
            "nbconvert",
            "--ExecutePreprocessor.kernel_name=python3",
            "--execute",
            "--to", "notebook",
            input_nb,
            "--output", output_nb,
        ]
        if timeout:
            cmd.append(f"--ExecutePreprocessor.timeout={timeout}")

        try:
            # Run notebook and save output
            jupyter(*cmd, output=str.split, error=str.split)
        except Exception as err:
            # If execution fails, re-run with --allow-errors (except on timeout)
            if "CellTimeoutError" not in str(err):
                jupyter(*cmd, "--allow-errors")
            raise

    def batch_execute_notebooks(self, notebooks, output_dir, timeout=None):
        """
        Execute a batch of Jupyter notebooks, saving outputs in the given directory.

        Args:
            notebooks (list[str]): List of notebook file paths to execute.
            output_dir (str): Directory where executed notebooks are saved.
            timeout (int | None): Execution timeout for each notebook.
        """
        mkdirp(output_dir)
        failures: list[Exception] = []

        for notebook in notebooks:
            output_path = join_path(output_dir, os.path.basename(notebook))
            try:
                self.execute_notebook(notebook, output_path, timeout=timeout)
            except Exception as err:
                failures.append(err)

        if failures:
            raise Exception(f"Notebook execution failed for {len(failures)} file(s).")
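
For orientation, a minimal sketch (not part of this merge request) of how a package derived from BuildJupyterNotebook could drive these helpers from a post-installation check. The package name, the import path of the base class, the notebook location under the install prefix, and the timeout are assumptions for illustration only.

from spack.package import *
# assumed import path; the base class would be pulled in the same way other shared
# base packages (e.g. build_brainscales) are imported by their dependents
from spack.pkg.builtin.build_jupyter_notebook import BuildJupyterNotebook
import glob

class PyExampleNotebooks(BuildJupyterNotebook):
    """Hypothetical package whose examples ship as Jupyter notebooks."""

    @run_after("install")
    def check_notebooks(self):
        # assumed location of the installed example notebooks
        notebooks = glob.glob(join_path(self.prefix.share, "notebooks", "*.ipynb"))
        # execute all of them, collecting the executed copies in the stage directory
        self.batch_execute_notebooks(
            notebooks, join_path(self.stage.path, "executed"), timeout=600
        )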
+92 −0
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class Cmdstan(MakefilePackage):
    """CmdStan is the command line interface to Stan."""

    homepage = "https://mc-stan.org/users/interfaces/cmdstan"
    url = "https://github.com/stan-dev/cmdstan/releases/download/v2.30.1/cmdstan-2.30.1.tar.gz"

    license("BSD-3-Clause")

    # begin EBRAINS (added)
    version("2.37.0", sha256="635e2e1cf9c4774c9019001325354d3610bae2adc4ed5af2fed87872dfc671fc")
    # end EBRAINS
    version("2.30.1", sha256="bab76dcefa7f4c955595c0bf0496770507fc6ab0df5896e8cf8c2db0a17eedb9")

    depends_on("c", type="build")  # generated
    depends_on("cxx", type="build")  # generated
    depends_on("fortran", type="build")  # generated

    variant("threads", default=True, description="enable thread support")
    variant("opencl", default=False, description="enable OpenCl support")
    variant("mpi", default=False, description="enable MPI support")

    depends_on("opencl", when="+opencl")
    depends_on("mpi", when="+mpi")

    build_targets = ["build"]

    filter_compiler_wrappers("local", relative_root="make")

    def edit(self, spec, prefix):
        if spec.satisfies("%intel"):
            cxx_type = "icc"
        else:
            cxx_type = spec.compiler.name

        if spec.satisfies("+mpi"):
            cxx = spec["mpi"].mpicxx
        else:
            cxx = spack_cxx

        make_options = [
            "CXX={0}\n".format(cxx),
            "CXXFLAGS+= -O2 -funroll-loops\n",
            "LDFLAGS+={0}{1}\n".format(
                self.compiler.cc_rpath_arg,
                join_path(prefix, "stan", "lib", "stan_math", "lib", "tbb"),
            ),
            "STANCFLAGS+= --warn-pedantic\n",
            "TBB_CXX_TYPE={0}\n".format(cxx_type),
        ]

        if spec.satisfies("+threads"):
            make_options.append("STAN_THREADS=true\n")

        if spec.satisfies("+opencl"):
            make_options.append("STAN_OPENCL=true\n")

        if spec.satisfies("+mpi"):
            make_options.append("STAN_MPI=true\n")

        filepath = join_path(self.stage.source_path, "make", "local")
        with open(filepath, "w") as make_file:
            make_file.writelines(make_options)

    def install(self, spec, prefix):
        make(join_path("examples", "bernoulli", "bernoulli"))

        mkdir(prefix.bin)

        with working_dir(self.build_directory):
            copy("makefile", prefix)
            copy_tree("make", prefix.make)
            copy_tree("examples", prefix.examples)
            copy_tree("lib", prefix.lib)
            copy_tree("src", prefix.src)
            copy_tree("stan", prefix.stan)

        with working_dir(join_path(self.build_directory, "bin")):
            install("diagnose", prefix.bin)
            install("print", prefix.bin)
            install("stanc", prefix.bin)
            install("stansummary", prefix.bin)

    def setup_run_environment(self, env):
        env.set("CMDSTAN", self.prefix)
+409 −0
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import platform

from spack.package import *

_versions = {
    # cuDNN 9.8.0
    "9.8.0.87-12": {
        "Linux-x86_64": "321b9b33bb1287404d93d5672d352f16feabc4b220ac6ae0b86e4b27f257dcf4",
        "Linux-aarch64": "f03ece3ff07d1719f06218973a8797cec1be387cc317baab5bb118dc988199e7",
    },
    "9.8.0.87-11": {
        "Linux-x86_64": "cf4dfaef8311d987d640a322f668cd5240ac3e5302abe9617dd991b5b2532758"
    },
    # cuDNN 9.2.0
    "9.2.0.82-12": {
        "Linux-x86_64": "1362b4d437e37e92c9814c3b4065db5106c2e03268e22275a5869e968cee7aa8",
        "Linux-aarch64": "24cc2a0308dfe412c02c7d41d4b07ec12dacb021ebf8c719de38eb77d22f68c1",
    },
    "9.2.0.82-11": {
        "Linux-x86_64": "99dcb3fa2bf7eed7f35b0f8e58e7d1f04d9a52e01e382efc1de16fed230d3b26"
    },
    # cuDNN 8.9.7
    "8.9.7.29-12": {
        "Linux-x86_64": "475333625c7e42a7af3ca0b2f7506a106e30c93b1aa0081cd9c13efb6e21e3bb",
        "Linux-ppc64le": "8574d291b299f9cc0134304473c9933bd098cc717e8d0876f4aba9f9eebe1b76",
    },
    "8.9.7.29-11": {
        "Linux-x86_64": "a3e2509028cecda0117ce5a0f42106346e82e86d390f4bb9475afc976c77402e",
        "Linux-ppc64le": "f23fd7d59f9d4f743fa926f317dab0d37f6ea21edb2726ceb607bea45b0f9f36",
    },
    # cuDNN 8.9.5
    "8.9.5.30-12": {
        "Linux-x86_64": "2a2eb89a2ab51071151c6082f1e816c702167a711a9372f9f73a7b5c4b06e01a",
        "Linux-ppc64le": "38388ec3c99c6646aaf5c707985cd35e25c67f653d780c4081c2df5557ab665f",
        "Linux-aarch64": "0491f7b02f55c22077eb678bf314c1f917524bd507cf5b658239bf98a47233a1",
    },
    "8.9.5.30-11": {
        "Linux-x86_64": "bbe10e3c08cd7e4aea1012213781e4fe270e1c908263444f567cafefb2cc6525",
        "Linux-ppc64le": "d678f8b2903b95de7eeaef38890c5674705864ea049b2b63e90565f2c0ea682f",
    },
    # cuDNN 8.9.0
    "8.9.0.131-12": {
        "Linux-x86_64": "477631002be61022b60961cba0a501271507a93f81d6b08384bc320cb8706c98",
        "Linux-ppc64le": "ff239e4cbbf21fa18104b62a887686e2197f820ad58817d62e509c735a331829",
        "Linux-aarch64": "fab70f4fb3b933ff502200a1d954d2c6fc205ff9c9b1d271ea4c41e980a66596",
    },
    "8.9.0.131-11": {
        "Linux-x86_64": "3cb82c50723f14b41d43523f222cd52cc9d50b3ad67c380f4be51bd1133daa2d",
        "Linux-ppc64le": "18778de490550c5b584e96560208e5e37678397037946e10a1c2824174c69725",
    },
    # cuDNN 8.8.1
    "8.8.1.3-12": {
        "Linux-x86_64": "79d77a769c7e7175abc7b5c2ed5c494148c0618a864138722c887f95c623777c",
        "Linux-ppc64le": "b0e89021a846952cad8cfc674edce2883f6e344ebd47a2394f706b1136715bc7",
    },
    "8.8.1.3-11": {
        "Linux-x86_64": "af7584cae0cc5524b5913ef08c29ba6154113c60eb0a37a0590a91b515a8a8f9",
        "Linux-ppc64le": "d086003d09d5388aa42142f07483a773aa74b602478b0933e24fc63f56f1658f",
    },
    # cuDNN 8.7.0
    "8.7.0.84-11.8": {
        "Linux-x86_64": "976c4cba7233c97ae74006afab5172976300ba40f5b250a21f8cf71f59c9f76d",
        "Linux-ppc64le": "0433d6d8b6841298e049e8a542750aa330a6e046a52ad95fae0c2f75dabe5575",
        "Linux-aarch64": "cf967f78dbf6c075243cc83aa18759e370db3754aa15b12a0a14e8bf67a3a9d4",
    },
    # cuDNN 8.6.0
    "8.6.0.163-11.8": {
        "Linux-x86_64": "bbc396df47294c657edc09c600674d608cb1bfc80b82dcf4547060c21711159e",
        "Linux-ppc64le": "c8a25e7e3df1bb9c4e18a4f24dd5f25cfd4bbe8b7054e34008e53b2be4f58a80",
        "Linux-aarch64": "a0202278d3cbd4f3adc3f7816bff6071621cb042b0903698b477acac8928ac06",
    },
    # cuDNN 8.5.0
    "8.5.0.96-11.7": {
        "Linux-x86_64": "5454a6fd94f008728caae9adad993c4e85ef36302e26bce43bea7d458a5e7b6d",
        "Linux-ppc64le": "00373c3d5e0b536a5557d0d0eb50706777f213a222b4030e1b71b1bec43d205f",
        "Linux-aarch64": "86780abbecd4634e7363fad1d000ae23b7905a5f8383bddbf7332c6934791dde",
    },
    # cuDNN 8.4.0
    "8.4.0.27-11.6": {
        "Linux-x86_64": "d19bdafd9800c79d29e6f6fffa9f9e2c10d1132d6c2ff10b1593e057e74dd050",
        "Linux-ppc64le": "7ef72353331cf42b357f53cb4a4971fb07e2f0b2ae66e03d54933df52de411c8",
        "Linux-aarch64": "3972ab37b6f0271274931f69c5675c3b61d16f8f5a2dedd422a5efd7b0f358e5",
    },
    "8.4.0.27-10.2": {
        "Linux-x86_64": "14c5e3ca4258271996d1fd959c42d17c582ce4d9aff451f84524469e784fd154"
    },
    # cuDNN 8.3.3
    "8.3.3.40-11.5": {
        "Linux-x86_64": "eabe96c75cf03ea4f5379894d914f1f8ae14ceab121989e84b0836d927fb7731",
        "Linux-ppc64le": "eaedc8dea675767f9445c11d96e6b472110d2fed728db4179153ca7da6503083",
        "Linux-aarch64": "83b1d21b0f6495dfdc2316e6d53489db8ab1b752e4e4d21caca0a08fb2136cdc",
    },
    "8.3.3.40-10.2": {
        "Linux-x86_64": "d8554f2b32e6295d5fc8f3ac25e68f94058b018c801dab9c143e36812f8926ab"
    },
    # cuDNN 8.3.2
    "8.3.2.44-11.5": {
        "Linux-x86_64": "5500953c08c5e5d1dddcfda234f9efbddcdbe43a53b26dc0a82c723fa170c457",
        "Linux-ppc64le": "0581bce48023a3ee71c3a819aaefcabe693eca18b61e2521dc5f8e6e71567b1b",
        "Linux-aarch64": "7eb8c96bfeec98e8aa7cea1e95633d2a9481fc99040eb0311d31bf137a7aa6ea",
    },
    # cuDNN 8.3.1
    "8.3.1.22-11.5": {
        "Linux-x86_64": "f5ff3c69b6a8a9454289b42eca1dd41c3527f70fcf49428eb80502bcf6b02f6e",
        "Linux-ppc64le": "1d2419a20ee193dc6a3a0ba87e79f408286d3d317c9831cbc1f0b7a268c100b0",
        "Linux-aarch64": "ff23a881366c0ee79b973a8921c6dd400628a321557550ad4e0a26a21caad263",
    },
    # cuDNN 8.2.4
    "8.2.4.15-11.4": {
        "Linux-x86_64": "0e5d2df890b9967efa6619da421310d97323565a79f05a1a8cb9b7165baad0d7",
        "Linux-ppc64le": "af8749ca83fd6bba117c8bee31b787b7f204946e864294030ee0091eb7d3577e",
        "Linux-aarch64": "48b11f19e9cd3414ec3c6c357ad228aebbd43282aae372d42cab2af67c32a08b",
    },
    # cuDNN 8.2.0
    "8.2.0.53-11.3": {
        "Linux-x86_64": "7a195dc93a7cda2bdd4d9b73958d259c784be422cd941a9a625aab75309f19dc",
        "Linux-ppc64le": "cfe06735671a41a5e25fc7542d740177ac8eab1ab146bd30f19e0fa836895611",
        "Linux-aarch64": "0f44af94eef7826dc7b41f92aade3d5210891cdb10858bc0a28ba7167909ab7c",
    },
    "8.2.0.53-10.2": {
        "Linux-x86_64": "6ecbc98b3795e940ce0831ffb7cd2c0781830fdd6b1911f950bcaf6d569f807c"
    },
    # cuDNN 8.1.1
    "8.1.1.33-11.2": {
        "Linux-x86_64": "98a8784e92862f20018d20c281b30d4a0cd951f93694f6433ccf4ae9c502ba6a",
        "Linux-ppc64le": "c3e535a5d633ad8f4d50be0b6f8efd084c6c6ed3525c07cbd89fc508b1d76c7a",
        "Linux-aarch64": "4f7e4f5698539659d51f28dff0da11e5445a5ae58439af1d8a8e9f2d93535245",
    },
    "8.1.1.33-10.2": {
        "Linux-x86_64": "2a4a7b99a6e9bfa690eb19bb41e49553f2a7a491a5b3abfcae900e166c5b6ebd"
    },
    # cuDNN 8.1.0
    "8.1.0.77-11.2": {
        "Linux-x86_64": "dbe82faf071d91ba9bcf00480146ad33f462482dfee56caf4479c1b8dabe3ecb",
        "Linux-ppc64le": "0d3f8fa21959e9f94889841cc8445aecf41d2f3c557091b447313afb43034037",
        "Linux-aarch64": "ba16ff486b68a8b50b69b32702612634954de529f39cfff68c12b8bfc1958499",
    },
    "8.1.0.77-10.2": {
        "Linux-x86_64": "c5bc617d89198b0fbe485156446be15a08aee37f7aff41c797b120912f2b14b4"
    },
    # cuDNN 8.0.5
    "8.0.5.39-11.1": {
        "Linux-x86_64": "1d046bfa79399dabcc6f6cb1507918754439442ea0ca9e0fbecdd446f9b00cce",
        "Linux-aarch64": "0c3542c51b42131247cd9f839d0ebefe4e02bb46d1716be1682cb2919278085a",
    },
    "8.0.5.39-11.0": {
        "Linux-x86_64": "4e16ee7895deb4a8b1c194b812ba49586ef7d26902051401d3717511898a9b73",
        "Linux-ppc64le": "05207a02c0b4f22464dbb0ee646693df4a70ae557640ba576ba8678c26393004",
    },
    "8.0.5.39-10.2": {
        "Linux-x86_64": "21f84c05c67bf1ec859e77c38ccd5bf154964fa1c308f449959be4c356e382f3",
        "Linux-ppc64le": "ce128ea090b05e36d00ffe921e45982ca10e8207e40cfc2e0067d0f62d9b36f9",
    },
    "8.0.5.39-10.1": {
        "Linux-x86_64": "90908495298896b33aa95063a3471f93c36627d7ac01c17dc36d75c65eea4a00",
        "Linux-ppc64le": "e43b10bb3932d5e7a598dcc726d16dc9938dd99dd319cd74b3420f3ed65fe5e0",
    },
    # cuDNN 8.0.4
    "8.0.4.30-11.1": {
        "Linux-x86_64": "8f4c662343afce5998ce963500fe3bb167e9a508c1a1a949d821a4b80fa9beab",
        "Linux-ppc64le": "b4ddb51610cbae806017616698635a9914c3e1eb14259f3a39ee5c84e7106712",
    },
    "8.0.4.30-11.0": {
        "Linux-x86_64": "38a81a28952e314e21577432b0bab68357ef9de7f6c8858f721f78df9ee60c35",
        "Linux-ppc64le": "8da8ed689b1a348182ddd3f59b6758a502e11dc6708c33f96e3b4a40e033d2e1",
    },
    "8.0.4.30-10.2": {
        "Linux-x86_64": "c12c69eb16698eacac40aa46b9ce399d4cd86efb6ff0c105142f8a28fcfb980e",
        "Linux-ppc64le": "32a5b92f9e1ef2be90e10f220c4ab144ca59d215eb6a386e93597f447aa6507e",
    },
    "8.0.4.30-10.1": {
        "Linux-x86_64": "eb4b888e61715168f57a0a0a21c281ada6856b728e5112618ed15f8637487715",
        "Linux-ppc64le": "690811bbf04adef635f4a6f480575fc2a558c4a2c98c85c7090a3a8c60dacea9",
    },
    # cuDNN 8.0.3
    "8.0.3.33-11.0": {
        "Linux-x86_64": "8924bcc4f833734bdd0009050d110ad0c8419d3796010cf7bc515df654f6065a",
        "Linux-ppc64le": "c2d0519831137b43d0eebe07522edb4ef5d62320e65e5d5fa840a9856f25923d",
    },
    "8.0.3.33-10.2": {
        "Linux-x86_64": "b3d487c621e24b5711983b89bb8ad34f0378bdbf8a1a4b86eefaa23b19956dcc",
        "Linux-ppc64le": "ff22c9c37af191c9104989d784427cde744cdde879bfebf3e4e55ca6a9634a11",
    },
    "8.0.3.33-10.1": {
        "Linux-x86_64": "4752ac6aea4e4d2226061610d6843da6338ef75a93518aa9ce50d0f58df5fb07",
        "Linux-ppc64le": "c546175f6ec86a11ee8fb9ab5526fa8d854322545769a87d35b1a505992f89c3",
    },
    # cuDNN 8.0.2
    "8.0.2.39-11.0": {
        "Linux-x86_64": "672f46288b8edd98f8d156a4f1ff518201ca6de0cff67915ceaa37f6d6d86345",
        "Linux-ppc64le": "b7c1ce5b1191eb007ba3455ea5f497fdce293a646545d8a6ed93e9bb06d7f057",
    },
    "8.0.2.39-10.2": {
        "Linux-x86_64": "c9cbe5c211360f3cfbc0fb104f0e9096b37e53f89392525679f049276b2f701f",
        "Linux-ppc64le": "c32325ff84a8123491f2e58b3694885a9a672005bc21764b38874688c0e43262",
    },
    "8.0.2.39-10.1": {
        "Linux-x86_64": "82148a68bd6bdaab93af5e05bb1842b8ccb3ab7de7bed41f609a7616c102213d",
        "Linux-ppc64le": "8196ec4f031356317baeccefbc4f61c8fccb2cf0bdef0a6431438918ddf68fb9",
    },
    # cuDNN 8.0
    "8.0.0.180-11.0": {
        "Linux-x86_64": "9e75ea70280a77de815e0bdc85d08b67e081bc99a708b574092142344d2ba07e",
        "Linux-ppc64le": "1229e94731bbca63ee7f5a239f4e1838a51a301d896f3097fbf7377d74704060",
    },
    "8.0.0.180-10.2": {
        "Linux-x86_64": "0c87c12358ee2b99d57c2a8c7560e3bb93e54bb929f5f8bec4964a72a2bb261d",
        "Linux-ppc64le": "59e4ad6db15fcc374976e8052fe39e3f30f34079710fb3c7751a64c853d9243f",
    },
    # cuDNN 7.6.5
    "7.6.5.32-10.2": {
        "Linux-x86_64": "600267f2caaed2fd58eb214ba669d8ea35f396a7d19b94822e6b36f9f7088c20",
        "Linux-ppc64le": "7dc08b6ab9331bfd12207d4802c61db1ad7cace7395b67a6e7b16efa0335668b",
    },
    "7.6.5.32-10.1": {
        "Linux-x86_64": "7eaec8039a2c30ab0bc758d303588767693def6bf49b22485a2c00bf2e136cb3",
        "Darwin-x86_64": "8ecce28a5ed388a2b9b2d239e08d7c550f53b79288e6d9e5eb4c152bfc711aff",
        "Linux-ppc64le": "97b2faf73eedfc128f2f5762784d21467a95b2d5ba719825419c058f427cbf56",
    },
    "7.6.5.32-10.0": {
        "Linux-x86_64": "28355e395f0b2b93ac2c83b61360b35ba6cd0377e44e78be197b6b61b4b492ba",
        "Darwin-x86_64": "6fa0b819374da49102e285ecf7fcb8879df4d0b3cc430cc8b781cdeb41009b47",
        "Linux-ppc64le": "b1717f4570083bbfc6b8b59f280bae4e4197cc1cb50e9d873c05adf670084c5b",
    },
    "7.6.5.32-9.2": {
        "Linux-x86_64": "a2a2c7a8ba7b16d323b651766ee37dcfdbc2b50d920f73f8fde85005424960e4",
        "Linux-ppc64le": "a11f44f9a827b7e69f527a9d260f1637694ff7c1674a3e46bd9ec054a08f9a76",
    },
    "7.6.5.32-9.0": {
        "Linux-x86_64": "bd0a4c0090d5b02feec3f195738968690cc2470b9bc6026e6fe8ff245cd261c8"
    },
    # cuDNN 7.6.4
    "7.6.4.38-10.1": {
        "Linux-x86_64": "32091d115c0373027418620a09ebec3658a6bc467d011de7cdd0eb07d644b099",
        "Darwin-x86_64": "bfced062c3689ced2c1fb49c7d5052e6bc3da6974c1eb707e4dcf8cd209d4236",
        "Linux-ppc64le": "f3615fea50986a4dfd05d7a0cf83396dfdceefa9c209e8bf9691e20a48e420ce",
    },
    "7.6.4.38-10.0": {
        "Linux-x86_64": "417bb5daf51377037eb2f5c87649000ca1b9cec0acb16cfe07cb1d3e9a961dbf",
        "Darwin-x86_64": "af01ab841caec25087776a6b8fc7782883da12e590e24825ad1031f9ae0ed4b1",
        "Linux-ppc64le": "c1725ad6bd7d7741e080a1e6da4b62eac027a94ac55c606cce261e3f829400bb",
    },
    "7.6.4.38-9.2": {
        "Linux-x86_64": "c79156531e641289b6a6952888b9637059ef30defd43c3cf82acf38d67f60a27",
        "Linux-ppc64le": "98d8aae2dcd851558397a9a30b73242f257e1556be17c83650e63a0685969884",
    },
    "7.6.4.38-9.0": {
        "Linux-x86_64": "8db78c3623c192d4f03f3087b41c32cb0baac95e13408b5d9dabe626cb4aab5d"
    },
    # cuDNN 7.6.3
    "7.6.3.30-10.1": {
        "Linux-x86_64": "352557346d8111e2f954c494be1a90207103d316b8777c33e62b3a7f7b708961",
        "Linux-ppc64le": "f274735a8fc31923d3623b1c3d2b1d0d35bb176687077c6a4d4353c6b900d8ee",
    },
    # cuDNN 7.5.1
    "7.5.1.10-10.1": {
        "Linux-x86_64": "2c833f43c9147d9a25a20947a4c5a5f5c33b2443240fd767f63b330c482e68e0",
        "Linux-ppc64le": "a9e23bc83c970daec20874ccd1d8d80b648adf15440ecd0164818b330b1e2663",
    },
    "7.5.1.10-10.0": {
        "Linux-x86_64": "c0a4ec438920aa581dd567117b9c316745b4a451ac739b1e04939a3d8b229985",
        "Linux-ppc64le": "d9205718da5fbab85433476f9ff61fcf4b889d216d6eea26753bbc24d115dd70",
    },
    # cuDNN 7.5.0
    "7.5.0.56-10.1": {
        "Linux-x86_64": "c31697d6b71afe62838ad2e57da3c3c9419c4e9f5635d14b683ebe63f904fbc8",
        "Linux-ppc64le": "15415eb714ab86ab6c7531f2cac6474b5dafd989479b062776c670b190e43638",
    },
    "7.5.0.56-10.0": {
        "Linux-x86_64": "701097882cb745d4683bb7ff6c33b8a35c7c81be31bac78f05bad130e7e0b781",
        "Linux-ppc64le": "f0c1cbd9de553c8e2a3893915bd5fff57b30e368ef4c964d783b6a877869e93a",
    },
    # cuDNN 7.3.0
    "7.3.0.29-9.0": {
        "Linux-x86_64": "403f9043ff2c7b2c5967454872275d07bca11fd41dfc7b21995eadcad6dbe49b"
    },
    # cuDNN 7.2.1
    "7.2.1.38-9.0": {
        "Linux-x86_64": "cf007437b9ac6250ec63b89c25f248d2597fdd01369c80146567f78e75ce4e37"
    },
    # cuDNN 7.1.3
    "7.1.3-9.1": {
        "Linux-x86_64": "dd616d3794167ceb923d706bf73e8d6acdda770751492b921ee6827cdf190228",
        "Linux-ppc64le": "e3b4837f711b98a52faacc872a68b332c833917ef3cf87c0108f1d01af9b2931",
    },
    # cuDNN 6.0
    "6.0-8.0": {
        "Linux-x86_64": "9b09110af48c9a4d7b6344eb4b3e344daa84987ed6177d5c44319732f3bb7f9c"
    },
    # cuDNN 5.1
    "5.1-8.0": {
        "Linux-x86_64": "c10719b36f2dd6e9ddc63e3189affaa1a94d7d027e63b71c3f64d449ab0645ce"
    },
}


class Cudnn(Package):
    """NVIDIA cuDNN is a GPU-accelerated library of primitives for deep
    neural networks"""

    homepage = "https://developer.nvidia.com/cudnn"

    # Latest versions available at:
    #     https://developer.nvidia.com/rdp/cudnn-download
    # Archived versions available at:
    #     https://developer.nvidia.com/rdp/cudnn-archive
    # Note that download links don't work from command line,
    # need to use modified URLs like in url_for_version.
    maintainers("adamjstewart", "bvanessen")

    skip_version_audit = ["platform=darwin", "platform=windows"]

    license("MIT")

    for ver, packages in _versions.items():
        key = "{0}-{1}".format(platform.system(), platform.machine())
        pkg = packages.get(key)
        cudnn_ver, cuda_ver = ver.split("-")
        long_ver = "{0}-{1}".format(cudnn_ver, cuda_ver)
        if pkg:
            version(long_ver, sha256=pkg)
            # Add constraints matching CUDA version to cuDNN version
            # cuDNN builds for CUDA 11.x are compatible with all CUDA 11.x:
            # https://docs.nvidia.com/deeplearning/cudnn/support-matrix/index.html#fntarg_2
            if Version(cuda_ver) >= Version("11"):
                cuda_ver = Version(cuda_ver).up_to(1)
            depends_on("cuda@{}".format(cuda_ver), when="@{}".format(long_ver))

    def url_for_version(self, version):
        # Get the system and machine arch for building the file path
        sys = "{0}-{1}".format(platform.system(), platform.machine())
        # Munge it to match Nvidia's naming scheme
        sys_key = sys.lower()
        if version < Version("8.3.1"):
            sys_key = (
                sys_key.replace("x86_64", "x64")
                .replace("darwin", "osx")
                .replace("aarch64", "aarch64sbsa")
            )
        elif version < Version("8.8.0"):
            sys_key = sys_key.replace("aarch64", "sbsa")

        if version >= Version("8.3.1"):
            # NOTE: upload layout changed for 8.3.1, they include a 10.2
            # artifact for cuda@10.2 x86_64, but the runtime is only supported
            # for cuda@11.  See
            # https://docs.nvidia.com/deeplearning/cudnn/release-notes/rel_8.html
            # As such, hacking the `directory` to include the extra
            # local_installers/11.5 is included as this may not happen again.
            directory = version[:3]
            ver = version[:4]
            cuda = version[4:]
            directory = "{0}/local_installers/{1}".format(directory, cuda)
        elif version >= Version("7.2"):
            directory = version[:3]
            ver = version[:4]
            cuda = version[4:]
        elif version >= Version("7.1"):
            directory = version[:3]
            ver = version[:2]
            cuda = version[3:]
        elif version >= Version("7.0"):
            directory = version[:3]
            ver = version[0]
            cuda = version[3:]
        else:
            directory = version[:2]
            ver = version[:2]
            cuda = version[2:]

        # 8.8.0 changed the base url again
        if version >= Version("8.8.0"):
            url = "https://developer.download.nvidia.com/compute/cudnn/redist/cudnn/{0}/cudnn-{0}-{1}_cuda{2}-archive.tar.xz"
            return url.format(sys_key, ver, cuda.up_to(1))
        # 8.5.0 removed minor from cuda version
        elif version >= Version("8.5.0"):
            url = "https://developer.download.nvidia.com/compute/redist/cudnn/v{0}/cudnn-{1}-{2}_cuda{3}-archive.tar.xz"
            return url.format(directory, sys_key, ver, cuda.up_to(1))
        # 8.3.1 switched to xzip tarballs and reordered url parts.
        elif version >= Version("8.3.1"):
            url = "https://developer.download.nvidia.com/compute/redist/cudnn/v{0}/cudnn-{1}-{2}_cuda{3}-archive.tar.xz"
            return url.format(directory, sys_key, ver, cuda)
        else:
            url = "https://developer.download.nvidia.com/compute/redist/cudnn/v{0}/cudnn-{1}-{2}-v{3}.tgz"
            return url.format(directory, cuda, sys_key, ver)

    def setup_run_environment(self, env):
        # Package is not compiled, and does not work unless LD_LIBRARY_PATH is set
        env.prepend_path("LD_LIBRARY_PATH", self.prefix.lib)

        if self.spec.satisfies("target=ppc64le: platform=linux"):
            env.set("cuDNN_ROOT", os.path.join(self.prefix, "targets", "ppc64le-linux"))

    def install(self, spec, prefix):
        install_tree(".", prefix)

        if spec.satisfies("target=ppc64le: platform=linux"):
            target_lib = os.path.join(prefix, "targets", "ppc64le-linux", "lib")
            if os.path.isdir(target_lib) and not os.path.isdir(prefix.lib):
                symlink(target_lib, prefix.lib)
            target_include = os.path.join(prefix, "targets", "ppc64le-linux", "include")
            if os.path.isdir(target_include) and not os.path.isdir(prefix.include):
                symlink(target_include, prefix.include)
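
As a quick sanity check of the URL logic above, a hedged, standalone approximation (not part of the package) of what url_for_version would yield for the newly added 9.8.0.87-12 entry on Linux-x86_64; Spack's Version slicing is imitated here with a plain tuple.

# illustrative only: mimics the >=8.8.0 branch of url_for_version with plain strings
parts = ("9", "8", "0", "87", "12")   # components of Version("9.8.0.87-12")
ver = ".".join(parts[:4])             # "9.8.0.87" -> cuDNN version (version[:4])
cuda = parts[4]                       # "12"       -> CUDA major (version[4:].up_to(1))
sys_key = "linux-x86_64"              # platform.system()-platform.machine(), lowercased
url = ("https://developer.download.nvidia.com/compute/cudnn/redist/cudnn/"
       f"{sys_key}/cudnn-{sys_key}-{ver}_cuda{cuda}-archive.tar.xz")
print(url)  # ends in cudnn-linux-x86_64-9.8.0.87_cuda12-archive.tar.xz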
Fix Tcl private header detection on macOS

https://sourceforge.net/p/expect/patches/17/

diff -Naur expect5.45.orig/tclconfig/tcl.m4 expect5.45/tclconfig/tcl.m4
--- expect5.45.orig/tclconfig/tcl.m4	2010-11-09 11:42:10.000000000 -0800
+++ expect5.45/tclconfig/tcl.m4	2013-09-23 00:10:00.000000000 -0700
@@ -3389,9 +3389,12 @@
             # the framework's Headers and PrivateHeaders directories
             case ${TCL_DEFS} in
 	    	*TCL_FRAMEWORK*)
-		    if test -d "${TCL_BIN_DIR}/Headers" -a \
-			    -d "${TCL_BIN_DIR}/PrivateHeaders"; then
-			TCL_INCLUDES="-I\"${TCL_BIN_DIR}/Headers\" -I\"${TCL_BIN_DIR}/PrivateHeaders\" ${TCL_INCLUDES}"
+		    if test -d "${TCL_BIN_DIR}/Headers"; then
+			if test -d "${TCL_BIN_DIR}/PrivateHeaders"; then
+			    TCL_INCLUDES="-I\"${TCL_BIN_DIR}/Headers\" -I\"${TCL_BIN_DIR}/PrivateHeaders\" ${TCL_INCLUDES}"
+			elif test -d "${TCL_BIN_DIR}/Headers/tcl-private"; then
+			    TCL_INCLUDES="-I\"${TCL_BIN_DIR}/Headers\" -I\"${TCL_BIN_DIR}/Headers/tcl-private\" ${TCL_INCLUDES}"
+			fi
 		    else
 			TCL_INCLUDES="${TCL_INCLUDES} ${TCL_INCLUDE_SPEC} `echo "${TCL_INCLUDE_SPEC}" | sed -e 's/Headers/PrivateHeaders/'`"
 		    fi
+90 −0
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import glob
import os

from spack.package import *


# EBRAINS: based on spack/0.23.x
class Expect(AutotoolsPackage):
    """Expect is a tool for automating interactive applications such as
    telnet, ftp, passwd, fsck, rlogin, tip, etc."""

    homepage = "https://expect.sourceforge.net/"
    url = (
        "https://sourceforge.net/projects/expect/files/Expect/5.45.4/expect5.45.4.tar.gz/download"
    )

    license("NIST-PD")

    version("5.45.4", sha256="49a7da83b0bdd9f46d04a04deec19c7767bb9a323e40c4781f89caf760b92c34")
    version("5.45.3", sha256="c520717b7195944a69ce1492ec82ca0ac3f3baf060804e6c5ee6d505ea512be9")
    version("5.45", sha256="b28dca90428a3b30e650525cdc16255d76bb6ccd65d448be53e620d95d5cc040")

    depends_on("c", type="build")  # generated

    depends_on("tcl")

    depends_on("automake", type="build")
    depends_on("autoconf", type="build")
    depends_on("libtool", type="build")
    depends_on("m4", type="build")

    force_autoreconf = True

    patch("xcode_12.patch", when="%apple-clang@12:")
    patch("expect_detect_tcl_private_header_os_x_mountain_lion.patch", when="@5.45:5.45.0")

    def configure_args(self):
        spec = self.spec

        args = [
            # Without this, expect binary and library are not installed
            "--exec-prefix={0}".format(self.prefix),
            "--enable-threads",
            "--enable-shared",
            "--enable-64bit",
            "--with-tcl={0}".format(spec["tcl"].libs.directories[0]),
            "--with-tclinclude={0}".format(spec["tcl"].headers.directories[0]),
        ]

        return args

    # begin EBRAINS (modified):
    def setup_build_environment(self, env):
        # gcc@14: fails with "configure: error: could not find source file 'pty_.c'" at configure
        # https://core.tcl-lang.org/expect/tktview/ab6c7a502777ba361f21b87d33cb4892c584f96b
        if "%gcc@14:" in self.spec:
            env.append_flags("CFLAGS", "-fpermissive")
    # end EBRAINS

    @run_after("install")
    def symlink_library(self):
        """Expect installs libraries into:

        lib/expect5.45/libexpect5.45.so

        Create a symlink so that the library can be found in lib."""

        target = join_path(self.prefix.lib, "expect*", "libexpect*")
        target = glob.glob(target)[0]

        link_name = os.path.basename(target)
        link_name = join_path(self.prefix.lib, link_name)

        symlink(target, link_name)

    @run_after("install")
    def darwin_fix(self):
        # The shared library is not installed correctly on Darwin; fix this
        if self.spec.satisfies("platform=darwin"):
            fix_darwin_install_name(join_path(self.prefix.lib, "expect{0}".format(self.version)))

            old = "libexpect{0}.dylib".format(self.version)
            new = glob.glob(join_path(self.prefix.lib, "expect*", "libexpect*"))[0]
            install_name_tool = Executable("install_name_tool")
            install_name_tool("-change", old, new, self.prefix.bin.expect)
+282 −0
https://core.tcl-lang.org/expect/tktview/0d5b33c00e5b4bbedb835498b0360d7115e832a0
--- expect5.45.4/configure.in.ORIG	2020-12-05 17:26:55.000000000 +0000
+++ expect5.45.4/configure.in	2020-12-05 18:39:00.000000000 +0000
@@ -452,7 +452,11 @@
 # because Unixware 2.0 handles it specially and refuses to compile
 # autoconf's automatic test that is a call with no arguments
 AC_MSG_CHECKING([for memcpy])
-AC_TRY_LINK(,[
+AC_TRY_LINK([
+#ifdef HAVE_STRING_H
+#include <string.h>
+#endif
+],[
 char *s1, *s2;
 memcpy(s1,s2,0);
 ],
@@ -469,7 +473,7 @@
 AC_MSG_CHECKING([if WNOHANG requires _POSIX_SOURCE])
 AC_TRY_RUN([
 #include <sys/wait.h>
-main() {
+int main() {
 #ifndef WNOHANG
 	return 0;
 #else
@@ -489,7 +493,7 @@
 AC_TRY_RUN([
 #include <stdio.h>
 #include <sys/wait.h>
-main() {
+int main() {
 #ifdef WNOHANG
 	FILE *fp = fopen("wnohang","w");
 	fprintf(fp,"%d",WNOHANG);
@@ -536,6 +540,13 @@
 AC_MSG_CHECKING([if signals need to be re-armed])
 AC_TRY_RUN([
 #include <signal.h>
+#ifdef HAVE_UNISTD_H
+#include <unistd.h>
+#endif
+#ifndef NO_SYS_WAIT_H
+#include <sys/wait.h>
+#endif
+
 #define RETSIGTYPE $retsigtype
 
 int signal_rearms = 0;
@@ -553,7 +564,7 @@
 signal_rearms++;
 }
 
-main()
+int main()
 {
 	signal(SIGINT,parent_sigint_handler);
 
@@ -567,8 +578,9 @@
 
 		wait(&status);
 		unlink("core");
-		exit(signal_rearms);
+		return signal_rearms;
 	}
+	return -1;
 }],
 	AC_MSG_RESULT(yes)
 	AC_DEFINE(REARM_SIG)
@@ -714,10 +726,10 @@
 AC_MSG_CHECKING([for struct sgttyb])
 AC_TRY_RUN([
 #include <sgtty.h>
-main()
+int main()
 {
   struct sgttyb tmp;
-  exit(0);
+  return 0;
 }],
         AC_MSG_RESULT(yes)
         AC_DEFINE(HAVE_SGTTYB)
@@ -738,10 +750,10 @@
   # pty_termios.c is set up to handle pty_termio.
   AC_MSG_CHECKING([for struct termio])
   AC_TRY_RUN([#include <termio.h>
-  main()
+  int main()
   {
     struct termio tmp;
-    exit(0);
+    return 0;
   }],
         AC_DEFINE(HAVE_TERMIO)
         PTY_TYPE=termios
@@ -760,10 +772,10 @@
 #  include <inttypes.h>
 #  endif
 #  include <termios.h>
-  main()
+  int main()
   {
     struct termios tmp;
-    exit(0);
+    return 0;
   }],
         AC_DEFINE(HAVE_TERMIOS)
         PTY_TYPE=termios
@@ -782,7 +794,7 @@
 #include <inttypes.h>
 #endif
 #include <termios.h>
-main() {
+int main() {
 #if defined(TCGETS) || defined(TCGETA)
 	return 0;
 #else
@@ -797,21 +809,18 @@
 	AC_MSG_ERROR([Expect can't be cross compiled])
 )
 
-AC_MSG_CHECKING([if TIOCGWINSZ in termios.h])
+AC_MSG_CHECKING([if termios.h and sys/ioctl.h may both be included])
 AC_TRY_RUN([
 /* including termios.h on Solaris 5.6 fails unless inttypes.h included */
 #ifdef HAVE_INTTYPES_H
 #include <inttypes.h>
 #endif
 #include <termios.h>
-main() {
-#ifdef TIOCGWINSZ
+#include <sys/ioctl.h>
+int main() {
 	return 0;
-#else
-	return 1;
-#endif
 }],
-	AC_DEFINE(HAVE_TIOCGWINSZ_IN_TERMIOS_H)
+	AC_DEFINE(HAVE_TERMIOS_AND_IOCTL_H_TOGETHER)
 	AC_MSG_RESULT(yes)
 ,
 	AC_MSG_RESULT(no)
@@ -823,7 +832,7 @@
 AC_MSG_CHECKING([for Cray-style ptys])
 SETUID=":"
 AC_TRY_RUN([
-main(){
+int main(){
 #ifdef CRAY
 	return 0;
 #else
@@ -878,12 +887,12 @@
 AC_TRY_RUN([
 extern char *tzname[2];
 extern int daylight;
-main()
+int main()
 {
   int *x = &daylight;
   char **y = tzname;
 
-  exit(0);
+  return 0;
 }],
 	AC_DEFINE(HAVE_SV_TIMEZONE)
 	AC_MSG_RESULT(yes),
--- expect5.45.4/tclconfig/tcl.m4.ORIG	2020-12-05 17:31:41.000000000 +0000
+++ expect5.45.4/tclconfig/tcl.m4	2020-12-05 17:32:39.000000000 +0000
@@ -2400,7 +2400,7 @@
 	AC_TRY_COMPILE([#include <time.h>],
 	    [extern long timezone;
 	    timezone += 1;
-	    exit (0);],
+	    return 0;],
 	    tcl_cv_timezone_long=yes, tcl_cv_timezone_long=no)])
     if test $tcl_cv_timezone_long = yes ; then
 	AC_DEFINE(HAVE_TIMEZONE_VAR, 1, [Should we use the global timezone variable?])
@@ -2412,7 +2412,7 @@
 	    AC_TRY_COMPILE([#include <time.h>],
 		[extern time_t timezone;
 		timezone += 1;
-		exit (0);],
+		return 0;],
 		tcl_cv_timezone_time=yes, tcl_cv_timezone_time=no)])
 	if test $tcl_cv_timezone_time = yes ; then
 	    AC_DEFINE(HAVE_TIMEZONE_VAR, 1, [Should we use the global timezone variable?])
@@ -2452,17 +2452,17 @@
 		    double value;
 		    value = strtod(infString, &term);
 		    if ((term != infString) && (term[-1] == 0)) {
-			exit(1);
+			return 1;
 		    }
 		    value = strtod(nanString, &term);
 		    if ((term != nanString) && (term[-1] == 0)) {
-			exit(1);
+			return 1;
 		    }
 		    value = strtod(spaceString, &term);
 		    if (term == (spaceString+1)) {
-			exit(1);
+			return 1;
 		    }
-		    exit(0);
+		    return 0;
 		}], tcl_cv_strtod_buggy=ok, tcl_cv_strtod_buggy=buggy,
 		    tcl_cv_strtod_buggy=buggy)])
 	if test "$tcl_cv_strtod_buggy" = buggy; then
--- expect5.45.4/exp_tty.h.ORIG	2020-12-05 18:25:06.000000000 +0000
+++ expect5.45.4/exp_tty.h	2020-12-05 18:24:14.000000000 +0000
@@ -19,6 +19,7 @@
 void exp_tty_echo(int set);
 void exp_tty_break(Tcl_Interp *interp, int fd);
 int exp_tty_raw_noecho(Tcl_Interp *interp, exp_tty *tty_old, int *was_raw, int *was_echo);
+int exp_tty_cooked_echo(Tcl_Interp *interp, exp_tty *tty_old, int *was_raw, int *was_echo);
 int exp_israw(void);
 int exp_isecho(void);
 
--- expect5.45.4/exp_chan.c.ORIG	2020-12-05 18:42:14.000000000 +0000
+++ expect5.45.4/exp_chan.c	2020-12-05 18:42:36.000000000 +0000
@@ -35,6 +35,7 @@
 #include "exp_prog.h"
 #include "exp_command.h"
 #include "exp_log.h"
+#include "exp_event.h"
 #include "tcldbg.h" /* Dbg_StdinMode */
 
 extern int		expSetBlockModeProc _ANSI_ARGS_((int fd, int mode));
--- expect5.45.4/exp_clib.c.ORIG	2020-12-05 18:40:52.000000000 +0000
+++ expect5.45.4/exp_clib.c	2020-12-05 18:41:18.000000000 +0000
@@ -8,6 +8,7 @@
 */
 
 #include "expect_cf.h"
+#include "exp_command.h"
 #include <stdio.h>
 #include <setjmp.h>
 #ifdef HAVE_INTTYPES_H
--- expect5.45.4/exp_win.c.ORIG	2020-12-05 18:27:20.000000000 +0000
+++ expect5.45.4/exp_win.c	2020-12-05 18:33:24.000000000 +0000
@@ -32,17 +32,13 @@
 
 #ifdef HAVE_TERMIOS
 #  include <termios.h>
+#  ifdef HAVE_TERMIOS_AND_IOCTL_H_TOGETHER
+#    include <sys/ioctl.h>
+#  endif
 #else
 #  include <sys/ioctl.h>
 #endif
 
-/* Sigh.  On AIX 2.3, termios.h exists but does not define TIOCGWINSZ */
-/* Instead, it has to come from ioctl.h.  However, As I said above, this */
-/* can't be cavalierly included on all machines, even when it exists. */
-#if defined(HAVE_TERMIOS) && !defined(HAVE_TIOCGWINSZ_IN_TERMIOS_H)
-#  include <sys/ioctl.h>
-#endif
-
 /* SCO defines window size structure in PTEM and TIOCGWINSZ in termio.h */
 /* Sigh... */
 #if defined(HAVE_SYS_PTEM_H)
--- expect5.45.4/pty_termios.c.ORIG	2020-12-05 18:43:05.000000000 +0000
+++ expect5.45.4/pty_termios.c	2020-12-05 18:45:20.000000000 +0000
@@ -77,6 +77,10 @@
 #include <sys/sysmacros.h>
 #endif
 
+#ifdef HAVE_OPENPTY
+#include <util.h>
+#endif
+
 #ifdef HAVE_PTYTRAP
 #include <sys/ptyio.h>
 #endif
@@ -102,6 +106,7 @@
 #include "exp_tty_in.h"
 #include "exp_rename.h"
 #include "exp_pty.h"
+#include "exp_int.h"
 
 void expDiagLog();
 void expDiagLogPtr();
diff --git a/halco/src/halco/hicann/v2/l1.cpp b/halco/src/halco/hicann/v2/l1.cpp
index 6f41d74..3dcdf12 100644
--- a/halco/src/halco/hicann/v2/l1.cpp
+++ b/halco/src/halco/hicann/v2/l1.cpp
@@ -5,6 +5,8 @@
 
 #include "halco/common/iter_all.h"
 
+#include <algorithm>
+
 using namespace halco::common;
 
 namespace halco {
+29 −0
diff --git a/grenade/include/grenade/common/vertex_on_graph.h b/grenade/include/grenade/common/vertex_on_graph.h
index 34e5af1..b4ee345 100644
--- a/grenade/include/grenade/common/vertex_on_graph.h
+++ b/grenade/include/grenade/common/vertex_on_graph.h
@@ -52,9 +52,6 @@ private:
 
 namespace std {
 
-template <typename T>
-struct hash;
-
 template <typename Derived, typename Backend>
 struct hash<grenade::common::VertexOnGraph<Derived, Backend>>
 {
diff --git a/grenade/include/grenade/common/edge_on_graph.h b/grenade/include/grenade/common/edge_on_graph.h
index aa51e98..0c3f136 100644
--- a/grenade/include/grenade/common/edge_on_graph.h
+++ b/grenade/include/grenade/common/edge_on_graph.h
@@ -41,9 +41,6 @@ private:
 
 namespace std {
 
-template <typename T>
-struct hash;
-
 template <typename Derived, typename Backend>
 struct hash<grenade::common::EdgeOnGraph<Derived, Backend>>
 {
diff --git a/src/hxtorch/wscript b/src/hxtorch/wscript
index aaf670f..39322c8 100644
--- a/hxtorch/wscript
+++ b/hxtorch/wscript

@@ -45,13 +45,19 @@
     )
 
     site_packages = site.getsitepackages()
-    assert isinstance(site_packages, list) and len(site_packages) == 1
-    includes_torch = [os.path.join(x, 'torch/include') for x in site_packages]
-    includes_torch_csrc_api = [os.path.join(x, 'torch/include/torch/csrc/api/include') for x in site_packages]
+    includes_torch = []
+    includes_torch_csrc_api = []
+    for x in site_packages:
+        torch_inc = os.path.join(x, 'torch/include')
+        if os.path.exists(torch_inc):
+            includes_torch.append(torch_inc)
+        torch_csrc_inc = os.path.join(x, 'torch/include/torch/csrc/api/include')
+        if os.path.exists(torch_csrc_inc):
+            includes_torch_csrc_api.append(torch_csrc_inc)
     libpath_torch = [os.path.join(x, 'torch/lib') for x in site_packages]
     libnames = []
     # if torch isn't available via site-packages, try sys.path/PYTHONPATH
-    if not os.path.exists(libpath_torch[0]):
+    if not any([os.path.exists(x) for x in libpath_torch]):
         # find other possible paths
         libpath_torch = [os.path.join(x, 'torch/lib') for x in sys.path if 'torch' in x]
         # filter on existance of path
@@ -68,7 +68,6 @@ class Hxtorch(build_brainscales.BuildBrainscales):
        ('py-h5py', { "type": ('build', 'link', 'run') } ), # PyNN tests need it
        ('py-matplotlib', { "type": ('build', 'link', 'run') } ),
        ('py-networkx', { "type": ('build', 'link', 'run') } ),
        ('py-nose', { "type": ('build', 'link', 'run') } ),
        ('py-numpy', { "type": ('build', 'link', 'run') } ),
        ('py-pybind11', { "type": ('build', 'link', 'run') } ),
        ('py-pybind11-stubgen', { "type": ('build', 'link', 'run') } ),
@@ -96,6 +95,12 @@ class Hxtorch(build_brainscales.BuildBrainscales):

    patch("include-SparseTensorUtils.patch", when="@:8.0-a5")

    patch("fix-multi-site-packages.patch", when="@:11")

    patch("fix-11_halco_gcc14.patch", when="@11.0-a5:11")
    patch("fix-grenade-hash.patch", when="@11.0-a5:11")
    patch("fix-lib-rcf.patch", when="@11.0-a5:11")

    def install_test(self):
        with working_dir('spack-test', create=True):
            old_pythonpath = os.environ.get('PYTHONPATH', '')
diff --git a/halco/src/halco/hicann/v2/l1.cpp b/halco/src/halco/hicann/v2/l1.cpp
index 6f41d74..3dcdf12 100644
--- a/halco/src/halco/hicann/v2/l1.cpp
+++ b/halco/src/halco/hicann/v2/l1.cpp
@@ -5,6 +5,8 @@
 
 #include "halco/common/iter_all.h"
 
+#include <algorithm>
+
 using namespace halco::common;
 
 namespace halco {
diff --git a/grenade/include/grenade/common/vertex_on_graph.h b/grenade/include/grenade/common/vertex_on_graph.h
index 34e5af1..b4ee345 100644
--- a/grenade/include/grenade/common/vertex_on_graph.h
+++ b/grenade/include/grenade/common/vertex_on_graph.h
@@ -52,9 +52,6 @@ private:
 
 namespace std {
 
-template <typename T>
-struct hash;
-
 template <typename Derived, typename Backend>
 struct hash<grenade::common::VertexOnGraph<Derived, Backend>>
 {
diff --git a/grenade/include/grenade/common/edge_on_graph.h b/grenade/include/grenade/common/edge_on_graph.h
index aa51e98..0c3f136 100644
--- a/grenade/include/grenade/common/edge_on_graph.h
+++ b/grenade/include/grenade/common/edge_on_graph.h
@@ -41,9 +41,6 @@ private:
 
 namespace std {
 
-template <typename T>
-struct hash;
-
 template <typename Derived, typename Backend>
 struct hash<grenade::common::EdgeOnGraph<Derived, Backend>>
 {
diff --git a/src/hxtorch/wscript b/src/hxtorch/wscript
index aaf670f..39322c8 100644
--- a/hxtorch/wscript
+++ b/hxtorch/wscript

@@ -45,13 +45,19 @@
     )
 
     site_packages = site.getsitepackages()
-    assert isinstance(site_packages, list) and len(site_packages) == 1
-    includes_torch = [os.path.join(x, 'torch/include') for x in site_packages]
-    includes_torch_csrc_api = [os.path.join(x, 'torch/include/torch/csrc/api/include') for x in site_packages]
+    includes_torch = []
+    includes_torch_csrc_api = []
+    for x in site_packages:
+        torch_inc = os.path.join(x, 'torch/include')
+        if os.path.exists(torch_inc):
+            includes_torch.append(torch_inc)
+        torch_csrc_inc = os.path.join(x, 'torch/include/torch/csrc/api/include')
+        if os.path.exists(torch_csrc_inc):
+            includes_torch_csrc_api.append(torch_csrc_inc)
     libpath_torch = [os.path.join(x, 'torch/lib') for x in site_packages]
     libnames = []
     # if torch isn't available via site-packages, try sys.path/PYTHONPATH
-    if not os.path.exists(libpath_torch[0]):
+    if not any([os.path.exists(x) for x in libpath_torch]):
         # find other possible paths
         libpath_torch = [os.path.join(x, 'torch/lib') for x in sys.path if 'torch' in x]
         # filter on existance of path
From d12ff24ccb39f861067661b01973862e83552baf Mon Sep 17 00:00:00 2001
From: Elias Arnold <elias.arnold@kip.uni-heidelberg.de>
Date: Mon, 31 Mar 2025 15:13:03 +0200
Subject: [PATCH] fix: tests for new jax

Change-Id: I278454c7a51c0c15071a7ab8496a9655c52ff495
---

diff --git a/tests/sw/event/hardware/utils_test.py b/tests/sw/event/hardware/utils_test.py
index ab73452..0753947 100644
--- a/jaxsnn/tests/sw/event/hardware/utils_test.py
+++ b/jaxsnn/tests/sw/event/hardware/utils_test.py
@@ -15,12 +15,12 @@
         rng = random.PRNGKey(42)
         with_noise = add_noise_batch(spikes, rng, std=1)
         assert_array_equal(
-            with_noise.idx, np.array([[0, 1, 2, 5, 3, 4, 6, 7, 8, 9]])
+            with_noise.idx, np.array([[0, 1, 2, 3, 4, 6, 5, 7, 8, 9]])
         )
 
         with_noise = add_noise_batch(spikes, rng, std=3)
         assert_array_equal(
-            with_noise.idx, np.array([[2, 1, 0, 5, 6, 7, 3, 4, 8, 9]])
+            with_noise.idx, np.array([[0, 6, 1, 2, 3, 4, 5, 7, 8, 9]])
         )
 
     def test_sort_batch(self):
diff --git a/tests/sw/event/tasks/constant_test.py b/tests/sw/event/tasks/constant_test.py
index be82deb..a7906af 100644
--- a/jaxsnn/tests/sw/event/tasks/constant_test.py
+++ b/jaxsnn/tests/sw/event/tasks/constant_test.py
@@ -52,7 +52,7 @@
         )
 
         # init weights
-        rng = random.PRNGKey(42)
+        rng = random.PRNGKey(45)
         weights = init_fn(rng, input_shape)
 
         loss_fn = partial(
From d12ff24ccb39f861067661b01973862e83552baf Mon Sep 17 00:00:00 2001
From: Elias Arnold <elias.arnold@kip.uni-heidelberg.de>
Date: Mon, 31 Mar 2025 15:13:03 +0200
Subject: [PATCH] fix: tests for new jax

Change-Id: I278454c7a51c0c15071a7ab8496a9655c52ff495
---

diff --git a/tests/sw/event/hardware/utils_test.py b/tests/sw/event/hardware/utils_test.py
index ab73452..0753947 100644
--- a/jaxsnn/tests/sw/event/hardware/utils_test.py
+++ b/jaxsnn/tests/sw/event/hardware/utils_test.py
@@ -15,12 +15,12 @@
         rng = random.PRNGKey(42)
         with_noise = add_noise_batch(spikes, rng, std=1)
         assert_array_equal(
-            with_noise.idx, np.array([[0, 1, 2, 5, 3, 4, 6, 7, 8, 9]])
+            with_noise.idx, np.array([[0, 1, 2, 3, 4, 6, 5, 7, 8, 9]])
         )
 
         with_noise = add_noise_batch(spikes, rng, std=3)
         assert_array_equal(
-            with_noise.idx, np.array([[2, 1, 0, 5, 6, 7, 3, 4, 8, 9]])
+            with_noise.idx, np.array([[0, 6, 1, 2, 3, 4, 5, 7, 8, 9]])
         )
 
     def test_sort_batch(self):
diff --git a/tests/sw/event/hardware/test_utils.py b/tests/sw/event/hardware/test_utils.py
index fcccf08..a5749ab 100644
--- a/jaxsnn/tests/sw/event/hardware/test_utils.py
+++ b/jaxsnn/tests/sw/event/hardware/test_utils.py
@@ -16,12 +16,12 @@ class TestEventHwUtils(unittest.TestCase):
         with_noise = add_noise_batch(spikes, rng, std=1)
         self.assertIsNone(
             assert_array_equal(
-                with_noise.idx, jnp.array([[0, 1, 2, 5, 3, 4, 6, 7, 8, 9]])))
+                with_noise.idx, jnp.array([[0, 1, 2, 3, 4, 6, 5, 7, 8, 9]])))
 
         with_noise = add_noise_batch(spikes, rng, std=3)
         self.assertIsNone(
             assert_array_equal(
-                with_noise.idx, jnp.array([[2, 1, 0, 5, 6, 7, 3, 4, 8, 9]])))
+                with_noise.idx, jnp.array([[0, 6, 1, 2, 3, 4, 5, 7, 8, 9]])))
 
     def test_sort_batch(self):
         spikes = Spike(
@@ -43,9 +43,19 @@ class Jaxsnn(build_brainscales.BuildBrainscales):
    depends_on('py-matplotlib', type=('build', 'link', 'run'))
    depends_on('py-optax', type=('build', 'link', 'run'))
    depends_on('py-tree-math', type=('build', 'link', 'run'))
    depends_on('py-nir', type=('build', 'run'), when="@11.0-a3:")
    extends('python')

    patch("include-SparseTensorUtils.patch", when="@:8.0-a5")
    patch("newjax.patch", when="@:10.0-a1 ^py-jax@0.5:")
    patch("newjax2.patch", when="@11.0-a1:11.0-a2 ^py-jax@0.5:")
    patch("newjax3.patch", when="@11.0-a3:11 ^py-jax@0.5:")

    patch("fix-multi-site-packages.patch", when="@:11")

    patch("fix-11_halco_gcc14.patch", when="@11.0-a5:11")
    patch("fix-grenade-hash.patch", when="@11.0-a5:11")
    patch("fix-lib-rcf.patch", when="@11.0-a5:11")

    def install_test(self):
        with working_dir('spack-test', create=True):
@@ -252,7 +252,7 @@ diff -pur spack-src/clang/tools/libclang/CIndex.cpp spack-src-new/clang/tools/li
+        case TSK_ImplicitInstantiation:
+        case TSK_ExplicitInstantiationDeclaration:
+        case TSK_ExplicitInstantiationDefinition: {
+          const Optional<bool> V = handleDeclForVisitation(RD);
+          const std::optional<bool> V = handleDeclForVisitation(RD);
+          if (!V.hasValue())
+            continue;
+          return V.getValue();
@@ -288,7 +288,7 @@ diff -pur spack-src/clang/tools/libclang/CIndex.cpp spack-src-new/clang/tools/li
+          // Visit the implicit instantiations with the requested pattern.
+        case TSK_Undeclared:
+        case TSK_ImplicitInstantiation: {
+          const Optional<bool> V = handleDeclForVisitation(RD);
+          const std::optional<bool> V = handleDeclForVisitation(RD);
+          if (!V.hasValue())
+            continue;
+          return V.getValue();
@@ -185,9 +185,9 @@ diff -pur spack-src-newer/clang/include/clang-c/Index.h spack-src-newerst/clang/
 /**
  * Describe the linkage of the entity referred to by a cursor.
  */
@@ -3827,6 +3832,32 @@ enum CXChildVisitResult {
   CXChildVisit_Recurse
 };
@@ -3843,6 +3848,32 @@ typedef enum CXChildVisitResult (*CXCurs
                                                    CXCursor parent,
                                                    CXClientData client_data);

+typedef enum {
+  /**
@@ -216,7 +216,7 @@ diff -pur spack-src-newer/clang/include/clang-c/Index.h spack-src-newerst/clang/
+                                                       unsigned options);
+
 /**
  * Visitor invoked for each cursor found by a traversal.
  * Visit the children of a particular cursor.
  *
diff -pur spack-src-newer/clang/tools/libclang/CIndex.cpp spack-src-newerst/clang/tools/libclang/CIndex.cpp
--- spack-src-newer/clang/tools/libclang/CIndex.cpp	2025-02-03 19:34:29.861852808 +0100
@@ -275,7 +275,7 @@ diff -pur spack-src-newer/clang/tools/libclang/CIndex.cpp spack-src-newerst/clan
+        case TSK_ImplicitInstantiation:
+        case TSK_ExplicitInstantiationDeclaration:
+        case TSK_ExplicitInstantiationDefinition: {
+          const Optional<bool> V = handleDeclForVisitation(RD);
+          const std::optional<bool> V = handleDeclForVisitation(RD);
+          if (!V.hasValue())
+            continue;
+          return V.getValue();
@@ -311,7 +311,7 @@ diff -pur spack-src-newer/clang/tools/libclang/CIndex.cpp spack-src-newerst/clan
+          // Visit the implicit instantiations with the requested pattern.
+        case TSK_Undeclared:
+        case TSK_ImplicitInstantiation: {
+          const Optional<bool> V = handleDeclForVisitation(RD);
+          const std::optional<bool> V = handleDeclForVisitation(RD);
+          if (!V.hasValue())
+            continue;
+          return V.getValue();
@@ -29,6 +29,7 @@ class Nest(CMakePackage):
    maintainers = ['terhorst']

    version('master', branch='master')
    version('3.9',    sha256='8e67b9dcb72b029f24f3d70ff6d3dd64776dc21bf3e458c822c862677d67d076')
    version('3.8',    sha256='eb255f8828be001abea0cddad2f14d78b70857fc82bece724551f27c698318c8')
    version('3.7',    sha256='b313e03aa05a0d8053b895a1d14ea42e75805393c6daa0cbc62f9398d0dacd8b')
    version('3.6',    sha256='68d6b11791e1284dc94fef35d84c08dd7a11322c0f1e1fc9b39c5e6882284922')
@@ -75,6 +76,8 @@ class Nest(CMakePackage):
            description="Enable direct reading of connectivity from SONATA files")
    variant('boost',    default=True,
            description="Enable optimizations provided via Boost library algorithms and containers")
    variant('jemalloc', default=True,
            description="Pull in jemalloc as run-time dependency.")
    # TODO add variants for neurosim and music when these are in spack

    conflicts('~gsl', when='@:2.10.99',
@@ -99,6 +102,7 @@ class Nest(CMakePackage):
    depends_on('hdf5+cxx+mpi',      when='@3.4.99:+sonata+mpi', type=('build', 'run'))
    depends_on('hdf5+cxx~mpi',      when='@3.4.99:+sonata~mpi', type=('build', 'run'))
    depends_on('py-pandas',         when='@3.4.99:+sonata', type=('build', 'run'))
    depends_on('jemalloc',          when='@3:+jemalloc', type=('run', 'test'))

    depends_on('py-nose',           when='@:2.99.99+python+testsuite', type='test')
    depends_on('py-pytest',         when='@3.0:+testsuite', type='test')
@@ -16,12 +16,13 @@ class Neuron(CMakePackage):
    """

    homepage = "https://www.neuron.yale.edu/"
    url = "https://github.com/neuronsimulator/nrn/releases/download/8.2.3/full-src-package-8.2.3.tar.gz"
    url = "https://github.com/neuronsimulator/nrn/releases/download/9.0.0/full-src-package-9.0.0.tar.gz"
    git = "https://github.com/neuronsimulator/nrn"

    maintainers = ["kumbhar"]
    maintainers = ["JCGoran"]

    version("develop", branch="master", submodules="True")
    version("9.0.0", tag="9.0.0", submodules="True")
    version("8.2.3", tag="8.2.3", submodules="True")
    version("8.2.2", tag="8.2.2", submodules="True")
    version("8.1.0", tag="8.1.0", submodules="True")
@@ -42,6 +43,8 @@ class Neuron(CMakePackage):
    depends_on("bison", type="build")
    depends_on("flex", type="build")
    depends_on("py-cython", when="+rx3d", type="build")
    depends_on("py-jinja2", when="+coreneuron", type="build")
    depends_on("py-pyyaml", when="+coreneuron", type="build")

    depends_on("gettext")
    depends_on("mpi", when="+mpi")
@@ -57,7 +60,8 @@ class Neuron(CMakePackage):

    patch("patch-v782-git-cmake-avx512.patch", when="@7.8.2")
    patch("nrn_find_ncurses.patch", when="@8.2.2")
    patch("install-python-prefix.patch", when="@8:9+python")
    patch("install-python-prefix.patch", when="@8+python")
    patch("python_path.patch", when="@9:+python")

    def cmake_args(self):
        spec = self.spec
@@ -81,6 +85,9 @@ class Neuron(CMakePackage):
        ]
        args.append("-DNRN_ENABLE_BINARY_SPECIAL=ON")

        if self.spec.satisfies("@9:+python"):
            args.append(f"-DNRN_INSTALL_PYTHON_PREFIX={env['NRN_INSTALL_PYTHON_PREFIX']}")

        if "~mpi" in spec and "+coreneuron" in spec:
            args.append("-DCORENRN_ENABLE_MPI=OFF")

@@ -143,6 +150,9 @@ class Neuron(CMakePackage):
            corenrn_makefile = join_path(self.prefix, "share/coreneuron/nrnivmodl_core_makefile")
            filter_file(env["CXX"], cxx_compiler, corenrn_makefile, **kwargs)

    def setup_build_environment(self, env):
        env.set("NRN_INSTALL_PYTHON_PREFIX", f"{python_platlib}/neuron/")

    @run_after('install')
    @on_package_attributes(run_tests=True)
    def install_test(self):
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 2b8bf9e02..195dca5c4 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -39,6 +39,10 @@ if(POLICY CMP0177)
   cmake_policy(SET CMP0177 NEW)
 endif()
 
+set(NRN_INSTALL_PYTHON_PREFIX
+    ""
+    CACHE STRING "Path to Python installation")
+
 # if we're building a wheel, we have a different layout of files
 if(SKBUILD)
   set(NRN_INSTALL_PYTHON_PREFIX "neuron/")
@@ -53,7 +57,9 @@ if(SKBUILD)
   set(IV_HEADERS_INSTALL_DIR "${NRN_INSTALL_DATA_PREFIX}/include")
   set(IV_BIN_INSTALL_DIR "${NRN_INSTALL_DATA_PREFIX}/bin")
 else()
-  set(NRN_INSTALL_PYTHON_PREFIX "lib/python/neuron/")
+  if(NOT NRN_INSTALL_PYTHON_PREFIX)
+    set(NRN_INSTALL_PYTHON_PREFIX "lib/python/neuron/")
+  endif()
   set(NRN_INSTALL_DATA_PREFIX)
 endif()
 
diff --git a/share/lib/python/neuron/rxd/geometry3d/CMakeLists.txt b/share/lib/python/neuron/rxd/geometry3d/CMakeLists.txt
index 68c539b45..4cb4e0966 100644
--- a/share/lib/python/neuron/rxd/geometry3d/CMakeLists.txt
+++ b/share/lib/python/neuron/rxd/geometry3d/CMakeLists.txt
@@ -21,7 +21,9 @@ set(surfaces_sources
     ${PROJECT_SOURCE_DIR}/src/nrnpython/rxd_llgramarea.cpp)
 
 if(NOT SKBUILD)
-  set(rel_rpath "/../../")
+  file(RELATIVE_PATH rel_path "${CMAKE_INSTALL_PREFIX}/${NRN_INSTALL_PYTHON_PREFIX}/rxd/geometry3d"
+       "${CMAKE_INSTALL_PREFIX}/${NRN_INSTALL_DATA_PREFIX}/lib")
+  set(rel_rpath "/${rel_path}")
 else()
   set(rel_rpath "/../../.data/lib")
 endif()
diff --git a/src/nrnpython/CMakeLists.txt b/src/nrnpython/CMakeLists.txt
index 30ae7087b..fb7b1e171 100644
--- a/src/nrnpython/CMakeLists.txt
+++ b/src/nrnpython/CMakeLists.txt
@@ -9,7 +9,9 @@ include(${PROJECT_SOURCE_DIR}/cmake/PythonCompileHelper.cmake)
 # Some modules should be placed in the `neuron` Python directory (as they are directly importable),
 # while others should be placed in `neuron/.data/lib` (as they are not directly importable).
 if(NOT SKBUILD)
-  set(libraries_rpath "/../../")
+  file(RELATIVE_PATH rel_path "${CMAKE_INSTALL_PREFIX}/${NRN_INSTALL_PYTHON_PREFIX}"
+       "${CMAKE_INSTALL_PREFIX}/${NRN_INSTALL_DATA_PREFIX}/lib")
+  set(libraries_rpath "/${rel_path}")
 else()
   set(libraries_rpath "/.data/lib/")
 endif()
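
For illustration only, and not part of the patch above: the file(RELATIVE_PATH ...) calls derive the RPATH suffix from wherever the Python package is installed, replacing the old hard-coded "/../../". A small Python sketch can reproduce the computation; the prefix values below are hypothetical stand-ins for CMAKE_INSTALL_PREFIX, NRN_INSTALL_PYTHON_PREFIX and NRN_INSTALL_DATA_PREFIX.

import os.path

# Hypothetical install layout, for illustration only.
install_prefix = "/opt/view"                            # CMAKE_INSTALL_PREFIX
python_prefix = "lib/python3.11/site-packages/neuron"   # NRN_INSTALL_PYTHON_PREFIX
data_prefix = ""                                        # NRN_INSTALL_DATA_PREFIX (empty)

# Equivalent of: file(RELATIVE_PATH rel_path <installed python dir> <data lib dir>)
rel_path = os.path.relpath(
    os.path.join(install_prefix, data_prefix, "lib"),   # directory holding the shared libraries
    os.path.join(install_prefix, python_prefix),        # installed neuron Python package
)
print("/" + rel_path)  # "/../../..", the computed libraries_rpath suffix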
@@ -22,6 +22,55 @@ class Oppulance(Package):
    depends_on('wget')
    depends_on('gmp')

    version(
        "11.0-a6",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a6",
        commit="b9cd2c0f5362167f056aebf8f7f1ff04f262517a",
        submodules=True,
    )
    version(
        "11.0-a5",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a5",
        commit="1e84308e0abbda45ab2d3a6ad5b8e27846df1290",
        submodules=True,
    )
    version(
        "11.0-a4",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a4",
        commit="fd3137c6bea71f1820c48999796545e59f9f14b7",
        submodules=True,
    )
    version(
        "11.0-a3",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a3",
        commit="4f27672655f57f41d13b44520e315398887d29ff",
        submodules=True,
    )
    version(
        "11.0-a2",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a2",
        commit="65abdc96d737415af853a524e8c5177381a7845c",
        submodules=True,
    )
    version(
        "11.0-a1",
        git="https://github.com/electronicvisions/releases-ebrains",
        tag="ebrains-11.0-a1",
        commit="1bf0f6d3c7da681d3db9401bfe0681f95d0baed1",
        submodules=True,
    )
    version(
        "10.0-a1",
        git="https://github.com/electronicvisions/releases-ebrains",
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyAntlr4Python3Runtime(PythonPackage):
    """This package provides runtime libraries required to use
    parsers generated for the Python3 language by version 4 of
    ANTLR (ANother Tool for Language Recognition).
    """

    homepage = "https://www.antlr.org"
    
    # begin EBRAINS(added): new version
    pypi = "antlr4-python3-runtime/antlr4_python3_runtime-4.7.2.tar.gz"

    version("4.13.2", sha256="909b647e1d2fc2b70180ac586df3933e38919c85f98ccc656a96cd3f25ef3916")
    # end EBRAINS
    version("4.10", sha256="061a49bc72ae05a35d9b61c0ba0ac36c0397708819f02fbfb20a80e47d287a1b")
    version("4.9.3", sha256="f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b")
    version("4.9.2", sha256="31f5abdc7faf16a1a6e9bf2eb31565d004359b821b09944436a34361929ae85a")
    version("4.8", sha256="15793f5d0512a372b4e7d2284058ad32ce7dd27126b105fb0b2245130445db33")
    version("4.7.2", sha256="168cdcec8fb9152e84a87ca6fd261b3d54c8f6358f42ab3b813b14a7193bb50b")

    depends_on("python@3:", type=("build", "run"))
    depends_on("py-setuptools", type="build")
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyAnywidget(PythonPackage):
    """Custom jupyter widgets made easy"""

    homepage = "https://github.com/manzt/anywidget"
    pypi = "anywidget/anywidget-0.9.16.tar.gz"

    version("0.9.13", sha256="c655455bf51f82182eb23c5947d37cc41f0b1ffacaf7e2b763147a2332cb3f07")
    version("0.9.12", sha256="25855b2496dc6541356b6d26ca0c9f9525203bb7cf2817f84b93bfe3f53cf81e")
    version("0.9.11", sha256="58dded8e86ec27ce28760c9457317663113d9af608947ee6a8d0ecb528b22cdc")
    version("0.9.10", sha256="390a628240981c09813c7523279ddcab82ff4fd3287add5433b78b1369089068")
    version("0.8.1", sha256="a7627770be455f89afd7d58faff926f45ef52b53257ae3c595177d5ff9b12fd1")
    version("0.8.0", sha256="847a0fece58ea7a32d20dccd54a026dd3dd28f76daa6b4f0a74c253e5c7cfeec")

    depends_on("py-hatchling", type="build")
    depends_on("py-hatch-jupyter-builder", type="build")
    depends_on("py-ipywidgets@7.6:", type=("build", "run"))
    depends_on("py-typing-extensions@4.2:", type=("build", "run"))
    depends_on("py-psygnal@0.8.1:", type=("build", "run"))
Original line number Diff line number Diff line
@@ -16,11 +16,14 @@ class PyArborize(PythonPackage):
    pypi = "arborize/arborize-4.1.0.tar.gz"

    license("GPL-3.0-only")
    maintainers = ["helveg","filimarc","drodarie"]
    maintainers = ["filimarc","drodarie","helveg"]

    version("6.0.7", sha256="1403e5c48d20922b0c6e6059f573e23f1c2bda6ece34242e189c013740482e0b")
    version("6.0.0", sha256="6868176ff71ee20d0791099f69146bb3a95b7b663b6838ce8810126329e76b67")
    version("4.1.0", sha256="2cb88b890fa69de42a49b38ea4defd0caa4ee91b34f9b75216e1536d9058f57f")

    depends_on("py-flit-core@3.2:4.0", type="build")
    depends_on("py-numpy@1.21:")
    depends_on("py-errr@1.2:")
    depends_on("py-morphio@3.3.6:4")
    depends_on("py-flit-core@3.2:3", type="build")
    depends_on("py-numpy@1.21:", type=("build","run"))
    depends_on("py-errr@1.2:", type=("build","run"))
    depends_on("py-morphio@3.3.6:3", type=("build","run"))
    depends_on("py-bsb-core@6:", type=("build","run"), when="@6:")
Original line number Diff line number Diff line
@@ -8,6 +8,7 @@ import os
from spack.package import *


# EBRAINS: based on spack/0.23.x
class PyAstropy(PythonPackage):
    """The Astropy Project is a community effort to develop a single core
    package for Astronomy in Python and foster interoperability between
@@ -126,6 +127,12 @@ class PyAstropy(PythonPackage):
        if os.path.exists("astropy/cython_version.py"):
            os.remove("astropy/cython_version.py")

    # begin EBRAINS (modified):
    def setup_build_environment(self, env):
        if "%gcc@14:" in self.spec:
            env.append_flags("CFLAGS", "-Wno-incompatible-pointer-types")
    # end EBRAINS

    def install_options(self, spec, prefix):
        args = [
            "--use-system-libraries",
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyBatchtk(PythonPackage):
    """Batch submission toolkit for Python"""

    homepage = "https://github.com/suny-downstate-medical-center/batchtk"
    url = "https://github.com/suny-downstate-medical-center/batchtk/archive/refs/tags/v0.1.0.tar.gz"
    git = "https://github.com/suny-downstate-medical-center/batchtk.git"

    # list of GitHub accounts to notify when the package is updated.
    maintainers = ["salvadord", "vvbragin"]

    version("0.1.0", sha256="0a10cac9aac46d3fcd894c729831ad6a41509747aacca9e2957de659b9d9800f")

    depends_on("py-setuptools", type="build")

    depends_on("py-importlib-metadata", type=("build", "run"))
    depends_on("py-numpy", type=("build", "run"))
    depends_on("py-pandas", type=("build", "run"))
    depends_on("py-filelock", type=("build", "run"))
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyBiobbGromacs(PythonPackage):
    """Biobb_gromacs is the Biobb module collection to perform
    molecular dynamics simulations using the GROMACS MD suite"""

    pypi = "biobb_gromacs/biobb_gromacs-4.1.1.tar.gz"

    maintainers("d-beltran")

    # Versions
    version("4.1.1", sha256="270cce747fc214471527438c8319bda0613be5b76da9f4684e6f138d1927d2f7")

    # Dependencies
    depends_on("py-setuptools", type="build")
    depends_on("python@3.8:", type=("build", "run"))
    depends_on("py-biobb-common@4.1.0", type=("build", "run"))
    # begin EBRAINS(modified): biobb-gromacs checks for a `gmx` binary, with no suffix
    depends_on("gromacs+nosuffix", type=("run"))
    # end EBRAINS
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *


class PyBluepyopt(PythonPackage):
    """Bluebrain Python Optimisation Library"""

    homepage = "https://github.com/BlueBrain/BluePyOpt"
    pypi = "bluepyopt/bluepyopt-1.9.27.tar.gz"

    license("LGPL-3.0-only")

    # NOTE : while adding new release check pmi_rank.patch compatibility
    version("1.14.18", sha256="23d6239294d944c8f9d4ea298091bcf243d236735844e1bcba60535a0f520ca8")
    version("1.14.11", sha256="fe2830c36699a93d2ef9ddef316da42f9c57ca6654c92356eab973ee2298ebf7")
    version("1.14.4", sha256="7567fd736053250ca06030f67ad93c607b100c2b98df8dc588c26b64cb3e171c")

    # patch required to avoid hpe-mpi linked mechanism library
    patch("pmi_rank.patch")

    variant("scoop", default=False, description="Use BluePyOpt together with py-scoop")

    depends_on("py-setuptools", type="build")
    depends_on("py-numpy@1.6:", type=("build", "run"))
    depends_on("py-pandas@0.18:", type=("build", "run"))
    depends_on("py-deap@1.3.3:", type=("build", "run"))
    depends_on("py-efel@2.13:", type=("build", "run"))
    depends_on("py-ipyparallel", type=("build", "run"))
    depends_on("py-pickleshare@0.7.3:", type=("build", "run"))
    depends_on("py-jinja2@2.8:", type=("build", "run"))
    depends_on("py-future", type=("build", "run"))
    depends_on("py-pebble@4.6:", type=("build", "run"))
    depends_on("py-scoop@0.7:", type=("build", "run"), when="+scoop")
    depends_on("neuron@7.4:", type=("build", "run"), when="@:1.14.4")
    depends_on("neuron@7.8:", type=("build", "run"), when="@1.14.11:")

    def setup_run_environment(self, env):
        env.unset("PMI_RANK")
        env.set("NEURON_INIT_MPI", "0")
diff --git a/bluepyopt/ephys/simulators.py b/bluepyopt/ephys/simulators.py
index e71ad8b..3c93237 100644
--- a/bluepyopt/ephys/simulators.py
+++ b/bluepyopt/ephys/simulators.py
@@ -89,6 +89,12 @@ class NrnSimulator(object):
             NrnSimulator._nrn_disable_banner()
             self.banner_disabled = True
 
+        # certain MPI libraries (hpe-mpt) use the PMI_RANK env variable to initialize
+        # MPI before calling MPI_Init (which is undesirable). Unset this variable
+        # if it exists, to avoid issues with loading neuron and the mechanism library.
+        if 'PMI_RANK' in os.environ:
+            os.environ.pop("PMI_RANK")
+
         import neuron  # NOQA
 
         return neuron
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyBsbArbor(PythonPackage):
    """
    The BSB-ARBOR is a component framework for neural modeling, used to simulate SNNs with the Arbor software.
    """

    homepage = "https://github.com/dbbs-lab/bsb/tree/main/packages/bsb-arbor"
    url = "https://github.com/dbbs-lab/bsb/archive/refs/tags/v6.0.7.tar.gz"
    #pypi = "bsb-arbor/bsb_arbor-6.0.7.tar.gz"

    license("GPL-3.0-only")
    maintainers = ["filimarc","drodarie","helveg"]

    version("6.0.7", sha256="1f8391f5a658e97758d42d9b06a098ef0f2a6887992469c80215ba3acc09f01c")

    depends_on("py-flit-core@3.2:3", type="build")
    depends_on("py-numpy@1.21:", type=("build", "run"))
    depends_on("py-bsb-core@6:", type=("build", "run"))
    depends_on("py-arborize@6:", type=("build", "run"))
    depends_on("arbor@0.10:", type=("build", "run"))
    depends_on("py-bsb-test@6:", type="test", when="@6:")
    depends_on("py-bsb-hdf5@6:", type="test", when="@6:")

    build_directory = "packages/bsb-arbor"

    @run_after("install")
    @on_package_attributes(run_tests=True)
    def install_test(self):
        with working_dir(self.build_directory):
           python("-m", "unittest", "discover", "-v", "-s", "./tests/")
@@ -13,11 +13,14 @@ class PyBsbCore(PythonPackage):
    """

    homepage = "https://bsb.readthedocs.io"
    pypi = "bsb-core/bsb_core-5.0.0.tar.gz"
    url = "https://github.com/dbbs-lab/bsb/archive/refs/tags/v6.0.7.tar.gz"
    #pypi = "bsb-core/bsb_core-5.0.0.tar.gz"

    license("GPL-3.0-only")
    maintainers = ["helveg","filimarc","drodarie"]
    maintainers = ["filimarc","drodarie","helveg"]

    version("6.0.7", sha256="1f8391f5a658e97758d42d9b06a098ef0f2a6887992469c80215ba3acc09f01c")
    version("6.0.0", sha256="ee833bb4d074f7a05410bcd777377a9eba9d32995fbe6cff9b9534618b5b3cd0")
    version("5.0.2", sha256="414be0f3ba72b2f656b89f8e4636e4a1d19b1f4dc9ba9360cc984020cb1859dc")
    version("5.0.1", sha256="7cb905ee38419709b4ead2ffb40e1005d813d2c6780706b3f5eb2696aabeb983")
    version("5.0.0", sha256="08e1776d351a8bb5c056ffbd8108d0bd941f71518b475aecbad9f22050b7cc91")
@@ -26,34 +29,42 @@ class PyBsbCore(PythonPackage):
            description='Build with MPI bindings')

    depends_on("python@3.9:3.12", type=("build", "run"))
    depends_on("py-flit-core@3.2:4.0", type="build")
    depends_on("py-numpy@1.19:")
    depends_on("py-scipy@1.5:")
    depends_on("py-scikit-learn@1.0:")
    depends_on("py-rtree@1.0:")
    depends_on("py-psutil@5.8:")
    depends_on("py-pynrrd@1.0:")
    depends_on("py-toml@0.10:")
    depends_on("py-requests")
    depends_on("py-urllib3@2:")
    depends_on("py-appdirs@1.4:")
    depends_on("py-neo")
    depends_on("py-tqdm@4.50:")
    depends_on("py-shortuuid")
    depends_on("py-quantities@0.15.0:")
    depends_on("py-morphio@3.3:")
    depends_on("py-errr@1.2.0:")
    depends_on("py-dashing@0.1.0:")
    depends_on("py-exceptiongroup")
    
    depends_on('mpi', when='+parallel')
    depends_on('py-mpi4py', when='+parallel')
    depends_on('py-mpipool@2.2.1:3', when='+parallel')
    depends_on('py-mpilock@1.1:', when='+parallel')
    depends_on("py-flit-core@3.2:3", type="build")
    depends_on("py-numpy@1.19:", type=("build", "run"))
    depends_on("py-scipy@1.5:", type=("build", "run"))
    depends_on("py-scikit-learn@1.0:", type=("build", "run"))
    depends_on("py-rtree@1.0:", type=("build", "run"))
    depends_on("py-psutil@5.8:", type=("build", "run"))
    depends_on("py-pynrrd@1.0:", type=("build", "run"))
    depends_on("py-toml@0.10:", type=("build", "run"))
    depends_on("py-requests", type=("build", "run"))
    depends_on("py-urllib3@2:", type=("build", "run"))
    depends_on("py-appdirs@1.4:", type=("build", "run"))
    depends_on("py-neo", type=("build", "run"))
    depends_on("py-tqdm@4.50:", type=("build", "run"))
    depends_on("py-shortuuid", type=("build", "run"))
    depends_on("py-quantities@0.15.0:", type=("build", "run"))
    depends_on("py-morphio@3.3:", type=("build", "run"))
    depends_on("py-errr@1.2.0:", type=("build", "run"))
    depends_on("py-dashing@0.1.0:", type=("build", "run"))
    depends_on("py-exceptiongroup", type=("build", "run"))
    
    depends_on("mpi", type=("build", "run"), when="+parallel")
    depends_on("py-mpi4py", type=("build", "run"), when="+parallel")
    depends_on("py-mpipool@2.2.1:3", type=("build", "run"), when="+parallel")
    depends_on("py-mpilock@1.1:", type=("build", "run"), when="+parallel")

    build_directory = "packages/bsb-core"

    def setup_build_environment(self, env):
        env.set("SPATIALINDEX_C_LIBRARY", self.spec["libspatialindex"].libs[0])

    def setup_run_environment(self, env):
        self.setup_build_environment(env)

    # can't run because of circular test dependencies (tests need packages that depend on py-bsb-core itself)
    #@run_after("install")
    #@on_package_attributes(run_tests=True)
    #def install_test(self):
    #    with working_dir(self.build_directory):
    #        python("-m", "unittest", "discover", "-v", "-s", "./tests/")
@@ -10,13 +10,26 @@ class PyBsbHdf5(PythonPackage):
    """An HDF-5 based storage engine for the BSB framework."""

    homepage = "https://github.com/dbbs-lab/bsb-hdf5"
    pypi = "bsb-hdf5/bsb_hdf5-5.0.4.tar.gz"
    url = "https://github.com/dbbs-lab/bsb/archive/refs/tags/v6.0.7.tar.gz"
    #pypi = "bsb-hdf5/bsb_hdf5-5.0.4.tar.gz"

    license("GPL-3.0-only")
    maintainers = ["helveg","filimarc","drodarie"]
    maintainers = ["filimarc","drodarie","helveg"]

    version('5.0.2', sha256='ed11177887848a3f177982201e1adb5770131bd541055a96935af38b39439fac')
    version("6.0.7", sha256="1f8391f5a658e97758d42d9b06a098ef0f2a6887992469c80215ba3acc09f01c")
    version("6.0.0", sha256="533fd2f3d9e4eb852d3fb29c21c3010834dfcc652ac885da8dc4ca94e376a741")
    version("5.0.2", sha256="ed11177887848a3f177982201e1adb5770131bd541055a96935af38b39439fac")

    depends_on("py-flit-core@3.2:4.0", type="build")
    depends_on("py-bsb-core@5.0.0:",when='@5.0.2')
    depends_on("py-shortuuid")
    depends_on("py-flit-core@3.2:3", type="build")
    depends_on("py-bsb-core@5", type=("build", "run"), when="@5.0.2")
    depends_on("py-bsb-core@6:", type=("build", "run"), when="@6:")
    depends_on("py-shortuuid", type=("build", "run"))
    depends_on("py-bsb-test@6:", type="test", when="@6:")

    build_directory = "packages/bsb-hdf5"

    @run_after("install")
    @on_package_attributes(run_tests=True)
    def install_test(self):
        with working_dir(self.build_directory):
            python("-m", "unittest", "discover", "-v", "-s", "./tests/")
@@ -10,13 +10,26 @@ class PyBsbJson(PythonPackage):
    """A plugin that allows the user to write their models' configuration in the json format, for the BSB framework."""

    homepage = "https://github.com/dbbs-lab/bsb-json"
    pypi = "bsb-json/bsb_json-4.2.2.tar.gz"
    url = "https://github.com/dbbs-lab/bsb/archive/refs/tags/v6.0.7.tar.gz"
    #pypi = "bsb-json/bsb_json-4.2.2.tar.gz"

    license("GPL-3.0-only")
    maintainers = ["helveg","filimarc","drodarie"]
    maintainers = ["filimarc","drodarie","helveg"]

    version("6.0.7", sha256="1f8391f5a658e97758d42d9b06a098ef0f2a6887992469c80215ba3acc09f01c")
    version("6.0.0", sha256="38dd515f23941cd7de942fd415df3200804e4b90183eaa027085add3f244731d")
    version("4.2.2", sha256="0c9e0af2a50f8ebbce353ba19bd11bafaf2536d74f0a79af3b0b6d8241fa6937")

    depends_on("py-flit-core@3.2:4.0", type="build")
    depends_on("py-bsb-core@5.0.0:")
    depends_on("py-shortuuid")
    depends_on("py-flit-core@3.2:3", type="build")
    depends_on("py-bsb-core@5", type=("build", "run"), when="@4.2.2")
    depends_on("py-bsb-core@6:", type=("build", "run"), when="@6:")
    depends_on("py-shortuuid", type=("build", "run"))
    depends_on("py-bsb-test@6:", type="test", when="@6:")

    build_directory = "packages/bsb-json"

    @run_after("install")
    @on_package_attributes(run_tests=True)
    def install_test(self):
        with working_dir(self.build_directory):
            python("-m", "unittest", "discover", "-v", "-s", "./tests/")
@@ -11,14 +11,29 @@ class PyBsbNest(PythonPackage):
    The BSB-NEST is a component framework for neural modeling, used to simulate SNNs with the NEST software.
    """

    homepage = "https://github.com/dbbs-lab/bsb-nest"
    pypi = "bsb-nest/bsb_nest-4.3.2.tar.gz"
    homepage = "https://github.com/dbbs-lab/bsb/tree/main/packages/bsb-nest"
    #pypi = "bsb-nest/bsb_nest-4.3.2.tar.gz"
    url = "https://github.com/dbbs-lab/bsb/archive/refs/tags/v6.0.7.tar.gz"

    license("GPL-3.0-only")
    maintainers = ["helveg","filimarc","drodarie"]
    maintainers = ["filimarc","drodarie","helveg"]

    version("6.0.7", sha256="1f8391f5a658e97758d42d9b06a098ef0f2a6887992469c80215ba3acc09f01c")
    version("6.0.0", sha256="e1198418464abc522444eaf5d099e21d695065fc7620d429b1ff72bb805f03db")
    version("4.3.2", sha256="478aa2937ca554ff291ce726cc69e1c1b283d7353a56e3b6878b585ed0684041")

    depends_on("py-flit-core@3.2:4.0", type="build")
    depends_on("py-bsb-core@5.0.2:")
    depends_on("nest")
    depends_on("py-flit-core@3.2:3", type="build")
    depends_on("py-bsb-core@5", type=("build", "run"), when="@4.3.2")
    depends_on("py-bsb-core@6:", type=("build", "run"), when="@6:")
    depends_on("nest", type=("build", "run"))
    depends_on("py-bsb-test@6:", type="test", when="@6:")
    depends_on("py-bsb-hdf5@6:", type="test", when="@6:")
    depends_on("py-bsb-arbor@6:", type="test", when="@6:")

    build_directory = "packages/bsb-nest"

    @run_after("install")
    @on_package_attributes(run_tests=True)
    def install_test(self):
        with working_dir(self.build_directory):
            python("-m", "unittest", "discover", "-v", "-s", "./tests/")
@@ -11,16 +11,36 @@ class PyBsbNeuron(PythonPackage):
    The BSB-NEURON is a component framework for neural modeling, used to run simulations with the NEURON software.
    """

    homepage = "https://github.com/dbbs-lab/bsb-neuron"
    pypi = "bsb-neuron/bsb_neuron-4.2.2.tar.gz"
    homepage = "https://github.com/dbbs-lab/bsb/tree/main/packages/bsb-neuron"
    url = "https://github.com/dbbs-lab/bsb/archive/refs/tags/v6.0.7.tar.gz"
    #pypi = "bsb-neuron/bsb_neuron-4.2.2.tar.gz"

    license("GPL-3.0-only")
    maintainers = ["helveg","filimarc","drodarie"]
    maintainers = ["filimarc","drodarie","helveg"]

    version("6.0.7", sha256="1f8391f5a658e97758d42d9b06a098ef0f2a6887992469c80215ba3acc09f01c")
    version("6.0.0", sha256="2b2286508d7bb58c3c8ed56a60909721589bf6a8c31fd07d4151d88ded392e7b")
    version("4.2.2", sha256="e7570c0cb17d31349eb8e88487e8ba48653f0fad0d7c232df8815cadde34a941")

    depends_on("py-flit-core@3.2:4.0", type="build")
    depends_on("py-bsb-core@5.0.2:")
    depends_on("neuron")
    depends_on("py-arborize@4.1:")
    depends_on("py-nrn-patch@4:")
    depends_on("py-flit-core@3.2:3", type="build")
    with when("@4.2.2"):
        depends_on("py-bsb-core@5", type=("build", "run"))
        depends_on("neuron@8.2.3:8", type=("build", "run"))
        depends_on("py-arborize@4.1:4", type=("build", "run"))
        depends_on("py-nrn-patch@4", type=("build", "run"))

    with when("@6:"):
        depends_on("py-bsb-core@6:", type=("build", "run"))
        depends_on("neuron@9:", type=("build", "run"))
        depends_on("py-arborize@6:", type=("build", "run"))
        depends_on("py-nrn-patch@6:", type=("build", "run"))
        depends_on("py-bsb-test@6:", type="test")
        depends_on("py-bsb-hdf5@6:", type="test")

    build_directory = "packages/bsb-neuron"

    @run_after("install")
    @on_package_attributes(run_tests=True)
    def install_test(self):
        with working_dir(self.build_directory):
            python("-m", "unittest", "discover", "-v", "-s", "./tests/")
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyBsbTest(PythonPackage):
    """A plugin of the BSB that contains configurations and tools to test BSB code."""

    homepage = "https://github.com/dbbs-lab/bsb/tree/main/packages/bsb-test"
    url = "https://github.com/dbbs-lab/bsb/archive/refs/tags/v6.0.7.tar.gz"

    license("GPL-3.0-only")
    maintainers = ["filimarc","drodarie","helveg"]

    version("6.0.7", sha256="1f8391f5a658e97758d42d9b06a098ef0f2a6887992469c80215ba3acc09f01c")

    depends_on("py-flit-core@3.2:3", type="build")
    depends_on("python@3.10:", type=("build", "run"))
    depends_on("py-bsb-core@6:", type=("build", "run"))

    build_directory = "packages/bsb-test"

    # can't run because of circular test dependencies (tests need packages that depend on py-bsb-test itself)
    #@run_after("install")
    #@on_package_attributes(run_tests=True)
    #def install_test(self):
    #    with working_dir(self.build_directory):
    #        python("-m", "unittest", "discover", "-v", "-s", "./tests/")
@@ -10,14 +10,27 @@ class PyBsbYaml(PythonPackage):
    """A plugin that allows the user to write their models' configuration in the yaml format, for the BSB framework."""

    homepage = "https://github.com/dbbs-lab/bsb-yaml"
    pypi = "bsb-yaml/bsb_yaml-4.2.2.tar.gz"
    #pypi = "bsb-yaml/bsb_yaml-4.2.2.tar.gz"
    url = "https://github.com/dbbs-lab/bsb/archive/refs/tags/v6.0.7.tar.gz"

    license("GPL-3.0-only")
    maintainers = ["helveg","filimarc","drodarie"]
    maintainers = ["filimarc","drodarie","helveg"]

    version("6.0.7", sha256="1f8391f5a658e97758d42d9b06a098ef0f2a6887992469c80215ba3acc09f01c")
    version("6.0.0", sha256="c9f73e7efa1ed175a30db78b17d6c01301843b9ddc0fd4ce6cfe86b99c18724f")
    version("4.2.2", sha256="c5614bc5fe57b78a445303756819a8d4ba032924484f88a07f6c26dd7e5afbec")

    depends_on("py-flit-core@3.2:4.0", type="build")
    depends_on("py-bsb-core@5.0.0:")
    depends_on("py-pyyaml@6.0:")
    depends_on("py-shortuuid")
    depends_on("py-flit-core@3.2:3", type="build")
    depends_on("py-bsb-core@5", type=("build", "run"), when="@4.2.2")
    depends_on("py-bsb-core@6:", type=("build", "run"), when="@6:")
    depends_on("py-pyyaml@6.0:", type=("build", "run"))
    depends_on("py-shortuuid", type=("build", "run"))
    depends_on("py-pydantic", type=("build", "run"), when="@6:")

    build_directory = "packages/bsb-yaml"

    @run_after("install")
    @on_package_attributes(run_tests=True)
    def install_test(self):
        with working_dir(self.build_directory):
            python("-m", "unittest", "discover", "-v", "-s", "./tests/")
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyBsb(PythonPackage):
    """
    The BSB is a component framework for neural modeling, which focuses on component
    declarations to piece together a model.
    """

    homepage = "https://bsb.readthedocs.io"
    pypi = "bsb/bsb-6.0.0.tar.gz"

    license("GPL-3.0-only")
    maintainers = ["filimarc","drodarie","helveg"]

    version("6.0.7", sha256="af0a6de04f3217de6b2b10c7a6a9ba9cb3f3248f022ffb2564e378699a5c15f8")
    version("6.0.0", sha256="c453b6dafc724136c7ba5a599da37ab1882bb73113d4dda75bcd827bc12d3957")

    variant("nest", default=True,
            description="Build with NEST interface")
    variant("neuron", default=True,
            description="Build with NEURON interface")
    variant("arbor", default=True,
            description="Build with ARBOR interface")

    depends_on("python@3.9:", type=("build", "run"))
    depends_on("py-setuptools", type="build")
    depends_on("py-bsb-core@6:+parallel", type=("build","run"))
    depends_on("py-bsb-hdf5@6:", type=("build","run"), when="@6:")
    depends_on("py-bsb-json@6:", type=("build","run"), when="@6:")
    depends_on("py-bsb-yaml@6:", type=("build","run"), when="@6:")
    depends_on("py-bsb-neuron@6:", type=("build","run"), when="@6:+neuron")
    depends_on("py-bsb-nest@6:", type=("build","run"), when="@6:+nest")
    depends_on("py-bsb-arbor@6:", type=("build","run"), when="@6:+arbor")

    def setup_build_environment(self, env):
        env.set("SPATIALINDEX_C_LIBRARY", self.spec["libspatialindex"].libs[0])

    def setup_run_environment(self, env):
        self.setup_build_environment(env) 
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyCmdstanpy(PythonPackage):
    """
    Python interface to CmdStan
    """

    homepage = "https://github.com/stan-dev/cmdstanpy"
    pypi = 'cmdstanpy/cmdstanpy-1.2.5.tar.gz'

    maintainers = ['ldomide', 'paulapopa', 'teodoramisan']

    version('1.2.5', sha256='53314e934ac63d894affb36263ac062b1e0e483c368a7b691087726803917b3b')

    # python_requires
    depends_on('python@3.8:', type=('build', 'run'))

    # build_requires
    depends_on('py-setuptools', type='build')

    # install_requires
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-pandas', type=('build', 'run'))
    depends_on('py-stanio', type=('build', 'run'))
    depends_on('py-tqdm', type=('build', 'run'))

    @run_after('install')
    @on_package_attributes(run_tests=True)
    def install_test(self):
        python("-c", 'import cmdstanpy')
@@ -23,6 +23,8 @@ class PyCobrawap(PythonPackage):
    version("0.1.1", sha256="73d0c2c7053948d377030b29ff7839c5a346d4e65bb91d50173993e70e75d204")
    version("0.2.0", sha256="5ee0dc064d60b95dde75f693817b2b59c1de3601ba19b7d70dae240da14d71d2")
    version("0.2.1", sha256="1dffdcfb8b82eae2a4d67b0c2b97842f21a30333803df05c57a12392a905c2f2")
    version("0.2.2", sha256="d424d4ee053ad9e77be775c132298f45effca5442139c442306a530fe70a112f")
    version("0.2.3", sha256="38f3cb9519a062b576fd102180a3ce8c7c85e01cebd53a5a6a94b5306dd19ae6")

    depends_on("python@3.8:", type=("build", "run"), when="@0.1:")

@@ -53,6 +55,6 @@ class PyCobrawap(PythonPackage):
    def init_test(self):
        # run test here
        cobrawap = Executable(self.prefix.bin + "/cobrawap")
        cobrawap("init", "--output_path", "/tmp/output_path", "--config_path", "/tmp/config_path")
        cobrawap("init", "--output_path", "/tmp/output_path", "--config_path", "/tmp/config_path", "--force_overwrite")
        cobrawap("settings")
        # test_dataset_url = "https://object.cscs.ch/v1/AUTH_63ea6845b1d34ad7a43c8158d9572867/hbp-d000036_m-SWA-WF-wide-regions-v2_pub/m-SWA-WF-wide-regions/GCaMP6f_Ketamine%20I/t3.rar"
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyDeepcomparer(PythonPackage):
    homepage = "https://github.com/n1nj4t4nuk1/deepcomparer.py"
    pypi = 'deepcomparer/deepcomparer-0.4.0.tar.gz'

    maintainers = ['ldomide', 'paulapopa', 'teodoramisan']

    version('0.4.0', sha256='c99a1fd328b350bbdfcf597f61e1cfb76d150c38dedf46ec709bf5eeeb7b88de')

    depends_on('python@3.8:', type=('build', 'run'))

    depends_on('py-setuptools', type='build')

    @run_after('install')
    @on_package_attributes(run_tests=True)
    def install_test(self):
        with working_dir('spack-test', create=True):
            python('-c', 'from deepcomparer import deep_compare')
@@ -14,6 +14,7 @@ class PyEbrainsKgCore(PythonPackage):

    maintainers = ["ioannistsanaktsidis", "olinux"]

    version('0.9.20', sha256='df0f087b1abaf02710adb0a9343c18d2c2d6e89c99f1b527d9f6fab5c4728266')
    version('0.9.15', sha256='c672815ebcd6968f090620f68d85f0fbf282a83636c3a1845b078ef5ac5a06be')
    version('0.9.14', sha256='e898761abb999b09e5da49b25d13ffcadebaec7acd87ed079abe23f3bb12a5e7')
    version('0.9.13', sha256='25e155e9eb6c516dd33e29d9ff433c996630989a0f017c5920b66fe5334f2d82')
@@ -24,4 +25,5 @@ class PyEbrainsKgCore(PythonPackage):

    depends_on("py-requests", type=("build", "run"))
    depends_on("py-pydantic", type=("build", "run"))
    depends_on("py-pydantic@:1.10.10", when="@:0.9.15", type=("build", "run"))
    depends_on("py-jinja2", type=("build", "run"))
diff --git a/elephant/test/test_asset.py b/elephant/test/test_asset.py
index e9309e4..fad6d75 100644
--- a/elephant/test/test_asset.py
+++ b/elephant/test/test_asset.py
@@ -22,6 +22,7 @@ from numpy.testing import assert_array_almost_equal, assert_array_equal
 
 from elephant import statistics, kernels
 from elephant.spike_train_generation import homogeneous_poisson_process
+from elephant.utils import get_cuda_capability_major, get_opencl_capability
 
 try:
     import sklearn
@@ -33,17 +34,8 @@ else:
     HAVE_SKLEARN = True
     stretchedmetric2d = asset._stretched_metric_2d
 
-try:
-    import pyopencl
-    HAVE_PYOPENCL = asset.get_opencl_capability()
-except ImportError:
-    HAVE_PYOPENCL = False
-
-try:
-    import pycuda
-    HAVE_CUDA = asset.get_cuda_capability_major() > 0
-except ImportError:
-    HAVE_CUDA = False
+HAVE_PYOPENCL = get_opencl_capability()
+HAVE_CUDA = get_cuda_capability_major() != 0
 
 
 class AssetBinningTestCase(unittest.TestCase):
@@ -513,6 +505,7 @@ class AssetTestCase(unittest.TestCase):
 
     #  regression test Issue #481
     #  see: https://github.com/NeuralEnsemble/elephant/issues/481
+    @unittest.skipIf(HAVE_CUDA, "CUDA available, will be used instead of CPU")
     def test_asset_choose_backend_opencl(self):
         class TestClassBackend(asset._GPUBackend):
 
diff --git a/elephant/utils.py b/elephant/utils.py
index b4ddfee..dfdd575 100644
--- a/elephant/utils.py
+++ b/elephant/utils.py
@@ -337,40 +337,30 @@ def round_binning_errors(values, tolerance=1e-8):
 
 def get_cuda_capability_major():
     """
-    Extracts CUDA capability major version of the first available Nvidia GPU
-    card, if detected. Otherwise, return 0.
+    If PyCUDA is available, extracts CUDA capability major version of the
+    first available Nvidia GPU card, if detected. Otherwise, returns 0.
 
     Returns
     -------
     int
         CUDA capability major version.
     """
-    cuda_success = 0
-    for libname in ("libcuda.so", "libcuda.dylib", "cuda.dll"):
-        try:
-            cuda = ctypes.CDLL(libname)
-        except OSError:
-            continue
-        else:
-            break
-    else:
-        # not found
-        return 0
-    result = cuda.cuInit(0)
-    if result != cuda_success:
+    try:
+        import pycuda.driver as cuda
+    except ImportError:
         return 0
-    device = ctypes.c_int()
-    # parse the first GPU card only
-    result = cuda.cuDeviceGet(ctypes.byref(device), 0)
-    if result != cuda_success:
+
+    try:
+        import pycuda.autoinit
+    except (cuda.RuntimeError, AttributeError):
         return 0
 
-    cc_major = ctypes.c_int()
-    cc_minor = ctypes.c_int()
-    cuda.cuDeviceComputeCapability(
-        ctypes.byref(cc_major), ctypes.byref(cc_minor), device
-    )
-    return cc_major.value
+    try:
+        device = cuda.Device(0)
+        major, _ = device.compute_capability()
+        return major
+    except cuda.Error:
+        return 0
 
 
 def get_opencl_capability():
@@ -380,7 +370,7 @@ def get_opencl_capability():
     Returns
     -------
     bool
-        True: if openCL platform detected and at least one device is found,
+        True: if OpenCL platform detected and at least one device is found,
         False: if OpenCL is not found or if no OpenCL devices are found
     """
     try:
diff --git a/elephant/asset/asset.py b/elephant/asset/asset.py
index 4f464d7..6b793db 100644
--- a/elephant/asset/asset.py
+++ b/elephant/asset/asset.py
@@ -1146,10 +1146,11 @@ class _PMatNeighbors(_GPUBackend):
     """
 
     def __init__(self, filter_shape, n_largest, max_chunk_size=None,
-                 verbose=None):
+                 cuda_threads=None, verbose=None):
         super().__init__(max_chunk_size=max_chunk_size)
         self.n_largest = n_largest
         self.max_chunk_size = max_chunk_size
+        self.cuda_threads = cuda_threads
         if verbose is not None:
             warnings.warn("The 'verbose' parameter is deprecated and will be "
                           "removed in the future. Its functionality is still "
@@ -1290,7 +1291,6 @@ class _PMatNeighbors(_GPUBackend):
         self._check_input(mat)
 
         device = pycuda.autoinit.device
-        n_threads = device.MAX_THREADS_PER_BLOCK
 
         filt_size = self.filter_kernel.shape[0]
         filt_rows, filt_cols = self.filter_kernel.nonzero()
@@ -1348,13 +1348,63 @@ class _PMatNeighbors(_GPUBackend):
 
             drv.Context.synchronize()
 
+            kernel = module.get_function("pmat_neighbors")
+
+            # Adjust number of threads depending on the number of registers
+            # needed for the kernel, to avoid exceeding the resources
+            if self.cuda_threads:
+                # Override with the number in the parameter `cuda_threads`
+                n_threads = min(self.cuda_threads,
+                                device.MAX_THREADS_PER_BLOCK)
+            else:
+                # Automatically determine the number of threads based on
+                # the register count.
+                regs_per_thread = kernel.NUM_REGS
+                max_regs_per_block = device.MAX_REGISTERS_PER_BLOCK
+                max_threads_by_regs = max_regs_per_block // regs_per_thread
+
+                # A safety margin of 10% with respect to the number of threads
+                # computed for the kernel is used in order to account for a
+                # fraction of registers that might be used by the GPU for
+                # control purposes.
+                max_threads_by_regs = int(max_threads_by_regs * 0.9)
+
+                n_threads = min(max_threads_by_regs,
+                                device.MAX_THREADS_PER_BLOCK)
+
+            if n_threads > device.WARP_SIZE:
+                # It's more efficient to make the number of threads
+                # a multiple of the warp size (32).
+                n_threads -= n_threads % device.WARP_SIZE
+
             grid_size = math.ceil(it_todo / n_threads)
+
+            if logger.level == logging.DEBUG:
+                logger.debug(f"Registers per thread: {kernel.NUM_REGS}")
+
+                shared_memory = kernel.SHARED_SIZE_BYTES
+                local_memory = kernel.LOCAL_SIZE_BYTES
+                const_memory = kernel.CONST_SIZE_BYTES
+                logger.debug(f"Memory: shared = {shared_memory}; "
+                             f"local = {local_memory}, const = {const_memory}")
+
+                logger.debug("Maximum per block: threads = "
+                             f"{device.MAX_THREADS_PER_BLOCK}; "
+                             "registers = "
+                             f"{device.MAX_REGISTERS_PER_BLOCK}; "
+                             "shared memory = "
+                             f"{device.MAX_SHARED_MEMORY_PER_BLOCK}")
+
+                logger.debug(f"It_todo: {it_todo}")
+                logger.debug(f"N threads: {n_threads}")
+                logger.debug(f"Max grid X: {device.MAX_GRID_DIM_X}")
+                logger.debug(f"Grid size: {grid_size}")
+
             if grid_size > device.MAX_GRID_DIM_X:
                 raise ValueError("Cannot launch a CUDA kernel with "
                                  f"{grid_size} num. of blocks. Adjust the "
                                  "'max_chunk_size' parameter.")
 
-            kernel = module.get_function("pmat_neighbors")
             kernel(lmat_gpu.gpudata, mat_gpu, grid=(grid_size, 1),
                    block=(n_threads, 1, 1))
 
@@ -2483,7 +2533,7 @@ class ASSET(object):
             Double floating-point precision is typically x4 times slower than
             the single floating-point equivalent.
             Default: 'float'
-        cuda_threads : int, optional
+        cuda_threads : int or tuple of int, optional
             [CUDA/OpenCL performance parameter that does not influence the
             result.]
             The number of CUDA/OpenCL threads per block (in X axis) between 1
@@ -2492,6 +2542,18 @@ class ASSET(object):
             Old GPUs (Tesla K80) perform faster with `cuda_threads` larger
             than 64 while new series (Tesla T4) with capabilities 6.x and more
             work best with 32 threads.
+            The computation of the joint probability matrix consists of two
+            GPU-accelerated steps. In the first step, the optimal number of
+            CUDA threads is determined automatically. The `cuda_threads`
+            parameter primarily controls the number of threads used in the
+            second (main) computation step. However, if the `n_largest`
+            parameter is set to a high value, the first step may fail with a
+            "too many resources" CUDA error due to excessive register usage.
+            To avoid this, you can explicitly specify the number of threads
+            for both steps using a tuple for `cuda_threads`. In this case, the
+            first element of the tuple sets the thread count for the main
+            computation, and the second element overrides the automatically
+            determined thread count for the first step.
             Default: 64
         cuda_cwr_loops : int, optional
             [CUDA/OpenCL performance parameter that does not influence the
@@ -2539,11 +2601,21 @@ class ASSET(object):
 
         logger.info("Finding neighbors in probability matrix...")
 
+        # Get any override in the number of CUDA threads
+        if isinstance(cuda_threads, tuple) and len(cuda_threads) == 2:
+            jsf_threads, pmat_threads = cuda_threads
+        elif isinstance(cuda_threads, int):
+            jsf_threads = cuda_threads
+            pmat_threads = None
+        else:
+            raise ValueError("'cuda_threads' must be int or a tuple of int.")
+
         # Find for each P_ij in the probability matrix its neighbors and
         # maximize them by the maximum value 1-p_value_min
         pmat = np.asarray(pmat, dtype=np.float32)
         pmat_neighb_obj = _PMatNeighbors(filter_shape=filter_shape,
-                                         n_largest=n_largest)
+                                         n_largest=n_largest,
+                                         cuda_threads=pmat_threads)
         pmat_neighb = pmat_neighb_obj.compute(pmat)
 
         logger.info("Finding unique set of values...")
@@ -2564,7 +2636,7 @@ class ASSET(object):
                 w + 1)  # number of entries covered by kernel
         jsf = _JSFUniformOrderStat3D(n=n, d=pmat_neighb.shape[1],
                                      precision=precision,
-                                     cuda_threads=cuda_threads,
+                                     cuda_threads=jsf_threads,
                                      cuda_cwr_loops=cuda_cwr_loops,
                                      tolerance=tolerance)
         jpvmat = jsf.compute(u=pmat_neighb)
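
The register-based sizing added in the pmat_neighbors hunk above boils down to a small calculation; a worked example with illustrative (not measured) numbers:

# Worked example of the thread-sizing rule (illustrative numbers only).
regs_per_thread = 64          # kernel.NUM_REGS
max_regs_per_block = 65536    # device.MAX_REGISTERS_PER_BLOCK
max_threads_per_block = 1024  # device.MAX_THREADS_PER_BLOCK
warp_size = 32                # device.WARP_SIZE

# Threads allowed by register pressure, with a 10% safety margin.
n_threads = min(int((max_regs_per_block // regs_per_thread) * 0.9),
                max_threads_per_block)     # min(921, 1024) -> 921
if n_threads > warp_size:
    n_threads -= n_threads % warp_size     # round down to a warp multiple -> 896
print(n_threads)
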
diff --git a/elephant/test/test_asset.py b/elephant/test/test_asset.py
index e9309e4..d3d25e8 100644
--- a/elephant/test/test_asset.py
+++ b/elephant/test/test_asset.py
@@ -334,6 +334,36 @@ class AssetTestCase(unittest.TestCase):
                     array_file=Path(tmpdir) / f"test_dist_{working_memory}")
                 assert_array_equal(cmat, cmat_true)
 
+    def test_pmat_neighbors_gpu_threads(self):
+        # The number of threads must not influence the result.
+        np.random.seed(12)
+        n_largest = 3
+        pmat1 = np.random.random_sample((40, 40)).astype(np.float32)
+        np.fill_diagonal(pmat1, 0.5)
+        pmat2 = np.random.random_sample((70, 23)).astype(np.float32)
+        pmat3 = np.random.random_sample((27, 93)).astype(np.float32)
+        for pmat in (pmat1, pmat2, pmat3):
+            for filter_size in (4, 11):
+                filter_shape = (filter_size, 3)
+                # Check numbers for automatic (None) to more than the maximum
+                # number of threads (2048), and one value that is not a factor
+                # of the warp size (500 % 32 != 0)
+                for n_threads in (None, 64, 128, 256, 500, 512, 1024, 2048):
+                    with warnings.catch_warnings():
+                        # ignore even filter sizes
+                        warnings.simplefilter('ignore', UserWarning)
+                        pmat_neigh = asset._PMatNeighbors(
+                            filter_shape=filter_shape, n_largest=n_largest,
+                            cuda_threads=n_threads
+                        )
+                    lmat_true = pmat_neigh.cpu(pmat)
+                    if HAVE_PYOPENCL:
+                        lmat_opencl = pmat_neigh.pyopencl(pmat)
+                        assert_array_almost_equal(lmat_opencl, lmat_true)
+                    if HAVE_CUDA:
+                        lmat_cuda = pmat_neigh.pycuda(pmat)
+                        assert_array_almost_equal(lmat_cuda, lmat_true)
+
     def test_pmat_neighbors_gpu(self):
         np.random.seed(12)
         n_largest = 3
@@ -730,6 +760,97 @@ class TestJSFUniformOrderStat3D(unittest.TestCase):
         self.assertWarns(UserWarning, jsf.compute, u)
 
 
+@unittest.skipUnless(HAVE_SKLEARN and (HAVE_CUDA or HAVE_PYOPENCL),
+                     'requires sklearn and a GPU')
+class AssetTestJointProbabilityMatrixGPUThreads(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        # Save the state of the environment variables
+        cls.use_cuda = os.getenv("ELEPHANT_USE_CUDA", None)
+        cls.use_opencl = os.getenv("ELEPHANT_USE_OPENCL", None)
+
+        # Force using CPU to compute expected values
+        os.environ["ELEPHANT_USE_CUDA"] = "0"
+        os.environ["ELEPHANT_USE_OPENCL"] = "0"
+
+        # Generate spike train data
+        np.random.seed(1)
+        n_spiketrains = 50
+        rate = 50 * pq.Hz
+        spiketrains = [homogeneous_poisson_process(rate, t_stop=100 * pq.ms)
+                       for _ in range(n_spiketrains)]
+
+        # Initialize ASSET object and compute IMAT/PMAT
+        bin_size = 3 * pq.ms
+        kernel_width = 9 * pq.ms
+
+        asset_obj = asset.ASSET(spiketrains, bin_size=bin_size)
+        imat = asset_obj.intersection_matrix()
+        cls.pmat = asset_obj.probability_matrix_analytical(
+            kernel_width=kernel_width)
+
+        cls.filter_shape = (5, 1)
+        cls.n_largest = 3
+        cls.expected_jmat = asset_obj.joint_probability_matrix(
+            cls.pmat,
+            filter_shape=cls.filter_shape,
+            n_largest=cls.n_largest,
+        )
+        cls.asset_obj = asset_obj
+
+    def test_invalid_threads_parameter(self):
+        for cuda_threads in ("64", (64, 64, 64)):
+            with self.assertRaises(ValueError):
+                self.asset_obj.joint_probability_matrix(
+                    self.pmat,
+                    filter_shape=self.filter_shape,
+                    n_largest=self.n_largest,
+                    cuda_threads=cuda_threads,
+                )
+
+    @unittest.skipUnless(HAVE_CUDA, "CUDA not available")
+    def test_cuda_threads(self):
+        os.environ["ELEPHANT_USE_CUDA"] = "1"
+        os.environ["ELEPHANT_USE_OPENCL"] = "0"
+
+        for cuda_threads in (64, (64, None), (64, 512)):
+            jmat = self.asset_obj.joint_probability_matrix(
+                self.pmat,
+                filter_shape=self.filter_shape,
+                n_largest=self.n_largest,
+                cuda_threads=cuda_threads,
+            )
+            assert_array_almost_equal(jmat, self.expected_jmat)
+
+    @unittest.skipUnless(HAVE_PYOPENCL, "PyOpenCL not available")
+    def test_pyopencl_threads(self):
+        os.environ["ELEPHANT_USE_CUDA"] = "0"
+        os.environ["ELEPHANT_USE_OPENCL"] = "1"
+
+        for cuda_threads in (64, (64, None), (64, 512)):
+            jmat = self.asset_obj.joint_probability_matrix(
+                self.pmat,
+                filter_shape=self.filter_shape,
+                n_largest=self.n_largest,
+                cuda_threads=cuda_threads,
+            )
+            assert_array_almost_equal(jmat, self.expected_jmat)
+
+    @classmethod
+    def tearDownClass(cls):
+        # Restore environment flags
+        if cls.use_cuda:
+            os.environ["ELEPHANT_USE_CUDA"] = cls.use_cuda
+        else:
+            os.environ.pop("ELEPHANT_USE_CUDA")
+
+        if cls.use_opencl:
+            os.environ["ELEPHANT_USE_OPENCL"] = cls.use_opencl
+        else:
+            os.environ.pop("ELEPHANT_USE_OPENCL")
+
+
 @unittest.skipUnless(HAVE_SKLEARN, 'requires sklearn')
 class AssetTestIntegration(unittest.TestCase):
     def setUp(self):
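
A hedged usage sketch of the tuple form of `cuda_threads` documented in the asset.py hunk above; it assumes an `asset.ASSET` object `asset_obj` and a probability matrix `pmat` prepared as in the new test case, and is not part of the patch:

# First element: threads for the main _JSFUniformOrderStat3D step.
# Second element: override for the automatically sized _PMatNeighbors step
# (pass None to keep the automatic choice for that step).
jmat = asset_obj.joint_probability_matrix(
    pmat,
    filter_shape=(5, 1),
    n_largest=3,
    cuda_threads=(64, 512),
)
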
@@ -13,7 +13,7 @@ class PyElephant(PythonPackage, CudaPackage):
     pypi = "elephant/elephant-0.11.0.tar.gz"
     git = "https://github.com/NeuralEnsemble/elephant.git"
 
-    maintainers = ["moritzkern"]
+    maintainers = ["denker"]
 
     version('develop', branch='master')
 
@@ -39,6 +39,9 @@ class PyElephant(PythonPackage, CudaPackage):
         "extras", default=False, description="Build with extras for GPFA, ASSET", when="@0.6.4:"
     )
 
+    patch("fix_cuda_backend_selection.patch", when="@1.1.1")
+    patch("fix_cuda_too_many_resources.patch", when="@1.1.1")
+
     depends_on("py-setuptools", type="build")
     depends_on("python@3.7:", type=("build", "run"), when="@0.11.0:")
     depends_on("py-neo@0.3.4:", type=("build", "run"), when="@0.3.0:0.4.1")
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyHatchJupyterBuilder(PythonPackage):
    """A hatch plugin to help build Jupyter packages."""

    homepage = "https://github.com/jupyterlab/hatch-jupyter-builder"
    pypi = "hatch_jupyter_builder/hatch_jupyter_builder-0.8.3.tar.gz"

    license("BSD-3-Clause")

    # begin EBRAINS (added): add version
    version("0.9.1", sha256="79278198d124c646b799c5e8dca8504aed9dcaaa88d071a09eb0b5c2009a58ad")
    # end EBRAINS
    version("0.8.3", sha256="0dbd14a8aef6636764f88a8fd1fcc9a91921e5c50356e6aab251782f264ae960")

    depends_on("npm", type="run")

    depends_on("python@3.8:", type=("build", "run"))
    depends_on("py-hatchling@1.5:", type=("build", "run"))
    # begin EBRAINS (added): add version
    depends_on("py-hatchling@1.17:", type=("build", "run"), when="@0.9.1:")
    # end EBRAINS
+0 −25
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyHbpArchive(PythonPackage):
    """A high-level API for interacting with the Human Brain Project
        archival storage at CSCS
    """

    homepage = "https://hbp-archive.readthedocs.io/"
    pypi     = "hbp-archive/hbp_archive-1.1.1.tar.gz"
    git      = "https://github.com/HumanBrainProject/hbp_archive.git"

    version('1.1.1', sha256='08059d3bcca4dcbc36951c82e75513bc50d8d59f1720a28bebf57a622d869f41')

    depends_on('python@3.6.9:')
    depends_on('py-setuptools',                   type=('build'))
    depends_on('py-lxml@4.6.4:',                  type=('build', 'run'))
    depends_on('py-keystoneauth1@4.3.1:',         type=('build', 'run'))
    depends_on('py-python-keystoneclient@4.2.0:', type=('build', 'run'))
    depends_on('py-python-swiftclient@3.12.0:',   type=('build', 'run'))
@@ -14,20 +14,30 @@ class PyIpycanvas(PythonPackage):

    license("BSD-3-Clause")

    # begin EBRAINS (added): add version
    version("0.12.0", sha256="3984339cef0c15674e347dd65ffb0cd1edc62e37869cbb5efea46f3259e976f3")
    # end EBRAINS
    version("0.14.1", sha256="921f1482258b5929b599317b5c129931d80e16be35fa38300a32e7aa4cfe9f89")
    version("0.10.2", sha256="a02c494834cb3c60509801172e7429beae837b3cb6c61d3becf8b586c5a66004")
    version("0.9.0", sha256="f29e56b93fe765ceace0676c3e75d44e02a3ff6c806f3b7e5b869279f470cc43")

    depends_on("python@3.5:", type=("build", "run"))
    # begin EBRAINS (added)
    depends_on("python@3.6:", when="@0.10:", type=("build", "run"))
    depends_on("python@3.7:", when="@0.11:", type=("build", "run"))
    patch("typescript_version.patch", when="@0.14.1")
    # end EBRAINS

    with default_args(type="build"):
        depends_on("py-hatchling", when="@0.14:")
        depends_on("py-jupyterlab@3:4", when="@0.14:")
        depends_on("py-jupyterlab@3", when="@:0.10")
        # begin EBRAINS (added)
        depends_on("yarn")
        # end EBRAINS

        # Historical dependencies
        depends_on("py-setuptools@40.8:", when="@:0.10")
        depends_on("py-jupyter-packaging@0.7", when="@:0.10")

    with default_args(type=("build", "run")):
        depends_on("py-ipywidgets@7.6:8")
        depends_on("py-numpy")
        depends_on("pil@6:")
        # begin EBRAINS (added)
        depends_on('py-jupyter-core')
        # end EBRAINS
    depends_on("py-setuptools@40.8:", type="build")
    depends_on("py-jupyter-packaging@0.7", type="build")
    depends_on("py-jupyterlab@3.0:3", type="build")
    depends_on("py-ipywidgets@7.6:", type=("build", "run"))
    depends_on("pil@6:", type=("build", "run"))
    depends_on("py-numpy", type=("build", "run"))
diff --git a/package.json b/package.json
index 91cd7bf..854d994 100644
--- a/package.json
+++ b/package.json
@@ -74,7 +74,7 @@
     "rimraf": "^2.6.2",
     "source-map-loader": "^0.2.4",
     "ts-loader": "^5.2.1",
-    "typescript": "^5",
+    "typescript": "~5.6.2",
     "webpack": "^5",
     "webpack-cli": "^4"
   },
+32 −0
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyIpydatawidgets(PythonPackage):
    """A set of widgets to help facilitate reuse of large datasets across widgets"""

    homepage = "https://github.com/vidartf/ipydatawidgets"
    pypi = "ipydatawidgets/ipydatawidgets-4.3.5.tar.gz"

    version("4.3.5", sha256="394f2489576587cfd755377a09a067f46cad22081965092021fd1abcbe7852a8")
    version("4.3.4", sha256="124c0fc7b10e2d1ad5c20942f3e5a7e6c28cc8283f1e1bb7283a2d0ee55832d9")
    version("4.3.3", sha256="4fb2ce693fb268cdae900374cfa1a91641e22d9514d1ec1e62da747050924f76")
    version("4.3.2", sha256="2f2b997f6569d3ee1f4f7e35db0cb1da08c077b21a88f1c01c59f5b986a31aa6")
    version("4.3.1", sha256="a934e51a2ad84380c63ff55ecf09bcd23d56e2c47c55c9c6f068cc91bdc1a379")
    version("4.2.0", sha256="d0e4b58b59b508165e8562b8f5d1dbfcd739855847ec0477bd9185a5e9b7c5bc")

    depends_on("py-setuptools@40.8:", type="build")
    depends_on("py-wheel", type="build")
    depends_on("py-jupyterlab@3", type="build")
    depends_on("py-jupyter-packaging", type="build")
    depends_on("yarn", type="build")

    depends_on("py-numpy", type=("build", "run"))
    depends_on("py-six", when="@:4.3.1", type=("build", "run"))
    depends_on("py-traittypes@0.2:", type=("build", "run"))
    depends_on("py-ipywidgets@7:", type=("build", "run"))
    depends_on("py-jupyter-core", type=("build", "run"))

packages/py-ipympl/package.py

deleted 100644 → 0
+0 −62
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyIpympl(PythonPackage):
    """Matplotlib Jupyter Extension."""

    homepage = "https://github.com/matplotlib/ipympl"
    pypi = "ipympl/ipympl-0.8.8.tar.gz"
    maintainers("haralmha")

    license("BSD-3-Clause")

    version("0.9.4", sha256="cfb53c5b4fcbcee6d18f095eecfc6c6c474303d5b744e72cc66e7a2804708907")
    # Build failures
    version(
        "0.8.8",
        sha256="5bf5d780b07fafe7924922ac6b2f3abd22721f341e5e196b3b82737dfbd0e1c9",
        # begin EBRAINS (modified): don't deprecate to keep compatibility with jupyterlab@3
        deprecated=False,
        # end EBRAINS
    )

    with default_args(type="build"):
        with when("@0.9:"):
            depends_on("py-hatchling")
            depends_on("py-jupyterlab@4")
            depends_on("py-hatch-nodejs-version@0.3.2:")

        # Historical dependencies
        with when("@:0.8"):
            depends_on("py-jupyter-packaging@0.7")
            depends_on("py-jupyterlab@3")
            depends_on("py-setuptools@40.8:")
            depends_on("yarn")

    with default_args(type=("build", "run")):
        depends_on("py-ipython@:8")
        depends_on("py-ipython-genutils")
        depends_on("py-ipywidgets@7.6:8", when="@0.9:")
        depends_on("py-ipywidgets@7.6:7", when="@:0.8")
        depends_on("py-matplotlib@3.4:3", when="@0.9:")
        depends_on("py-matplotlib@2:3", when="@:0.8")
        depends_on("py-numpy")
        depends_on("pil")
        depends_on("py-traitlets@:5")
        # begin EBRAINS (added): add missing dependency
        depends_on("py-tomli")
        # end EBRAINS

        # Necessary for jupyter extension env vars
        depends_on("py-jupyter-core")

    # begin EBRAINS (added): use newer typescript
    # (see https://github.com/DefinitelyTyped/DefinitelyTyped/issues/69932)
    patch("typescript_version.patch", when="@0.8.8")
    # end EBRAINS
+0 −14
diff --git a/package.json b/package.json
index 3f0ce55..c40ac99 100644
--- a/package.json
+++ b/package.json
@@ -76,7 +76,7 @@
         "style-loader": "^1.0.0",
         "ts-jest": "^26.0.0",
         "ts-loader": "^8.0.0",
-        "typescript": "~4.1.3",
+        "typescript": "~4.8.0",
         "webpack": "^5.0.0",
         "webpack-cli": "^4.0.0"
     },
+59 −0
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyIpyparallel(PythonPackage):
    """IPython's architecture for parallel and distributed computing."""

    homepage = "https://github.com/ipython/ipyparallel"
    pypi = "ipyparallel/ipyparallel-7.1.0.tar.gz"

    # begin EBRAINS (added): add new version
    version("9.0.1", sha256="2e592cad2200c5a94fbbff639bff36e6ec9122f34b36b2fc6b4d678d9e98f29c")
    # end EBRAINS
    version("8.4.1", sha256="670bbe05755381742e1ea01177dc428ff8f3e94af1f0d5642c9d19f37ca8289b")
    version("8.0.0", sha256="95305a886f2c42e9603c034ea684e5c031d9d4222c66ed6d85eb3ae15d631e4b")
    version("7.1.0", sha256="ea756df0d2485bac19cccb0dbf4cafbc855c922b9b5905b4906e6cfac8b3c648")
    version("6.3.0", sha256="0a97b276c62db633e9e97a816282bdd166f9df74e28204f0c8fa54b71944cfdc")
    version("6.2.5", sha256="33416179665f9c2f567011ab1a618232bc32c0845c0a3a5c388f6c71048bc053")
    version("6.2.4", sha256="76c7b028962b0ba762e4e45b450ee3a4353e7221526a8af812e817d7ef6ac065")

    depends_on("python@3.6:", type=("build", "run"), when="@7.1:")
    depends_on("python@3.5:", type=("build", "run"), when="@6.3:")
    depends_on("python@2.7,3.4:", type=("build", "run"))

    # begin EBRAINS (modified): update dependencies for version 9.0.1
    depends_on("py-jupyterlab@4", type="build", when="@9:")
    depends_on("py-jupyterlab@3", type="build", when="@7.1:8")
    # end EBRAINS
    depends_on("py-packaging", type="build", when="@7.1:8.0.0")
    depends_on("py-setuptools@40.8:", type="build", when="@7.1:8.2")
    depends_on("py-setuptools@:60", type="build", when="@:8.2.0")
    depends_on("py-hatchling@0.25:", type="build", when="@8.4:")

    depends_on("py-ipython-genutils", type=("build", "run"), when="@:6.3")
    depends_on("py-entrypoints", type=("build", "run"), when="@7.1:")
    depends_on("py-decorator", type=("build", "run"))
    # begin EBRAINS (modified): update dependencies for version 9.0.1
    depends_on("py-pyzmq@25:", type=("build", "run"), when="@9:")
    depends_on("py-pyzmq@18:", type=("build", "run"), when="@7.1:8")
    depends_on("py-pyzmq@13:", type=("build", "run"))
    depends_on("py-traitlets@5:", type=("build", "run"), when="@9:")
    depends_on("py-traitlets@4.3:", type=("build", "run"))
    depends_on("py-ipython@5:", type=("build", "run"), when="@9:")
    depends_on("py-ipython@4:", type=("build", "run"))
    depends_on("py-jupyter-client@7:", type=("build", "run"), when="@9:")
    depends_on("py-jupyter-client", type=("build", "run"))
    depends_on("py-ipykernel@6.9.1:", type=("build", "run"), when="@9:")
    depends_on("py-ipykernel@4.4:", type=("build", "run"))
    depends_on("py-tornado@6.1:", type=("build", "run"), when="@9:")
    depends_on("py-tornado@5.1:", type=("build", "run"), when="@7.1:8")
    depends_on("py-tornado@4:", type=("build", "run"))
    depends_on("py-psutil", type=("build", "run"), when="@7.1:")
    depends_on("py-python-dateutil@2.1:", type=("build", "run"))
    depends_on("py-tqdm", type=("build", "run"), when="@7.1:")
    # end EBRAINS
+24 −0
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyIpyreact(PythonPackage):
    """React for ipywidgets that just works"""

    homepage = "https://github.com/widgetti/ipyreact"
    pypi = "ipyreact/ipyreact-0.5.0.tar.gz"

    version("0.5.0", sha256="398b37c57abbf3d453a0fb4bb34d39956f2de212276de053630ad907eabde9e5")
    version("0.4.2", sha256="a74b9f61176ca75c1c80ccfc46c35a7ad5591a30612d0e1e6fdbf91549fbf6a6")
    version("0.4.1", sha256="80560311eb4946a793d1108d21c893f697f45af71df52afe4a3cddfa0a9fadd4")
    version("0.4.0", sha256="e1900052a14c548ff4943f01c5eec09881c29c96845e70dcd0fe25b6c234d268")

    depends_on("py-hatchling", type="build")
    depends_on("py-hatch-jupyter-builder", type="build")
    depends_on("py-ipywidgets@7:", type=("build", "run"))
    depends_on("py-anywidget@0.2:", type=("build", "run"))
    depends_on("py-jupyter-core", type=("build", "run"))
+151 −0
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


from spack.package import *


class PyJax(PythonPackage):
    """Differentiate, compile, and transform Numpy code.

    JAX is a Python library for accelerator-oriented array computation and program transformation,
    designed for high-performance numerical computing and large-scale machine learning.
    """

    homepage = "https://github.com/jax-ml/jax"
    pypi = "jax/jax-0.4.27.tar.gz"

    license("Apache-2.0")
    maintainers("adamjstewart", "jonas-eschle")

    version("0.5.2", sha256="2aef7d1912df329470c47ce8f2e6521c105e84aa620311494048c391235087c6")
    version("0.5.1", sha256="c098f74846ee718165bbfa83521ae10cd52cf50b47f043f8b33a6cfd3c20ddfd")
    version("0.5.0", sha256="49df70bf293a345a7fb519f71193506d37a024c4f850b358042eb32d502c81c8")
    version("0.4.38", sha256="43bae65881628319e0a2148e8f81a202fbc2b8d048e35c7cb1df2416672fa4a8")
    version("0.4.37", sha256="7774f3d9e23fe199c65589c680c5a5be87a183b89598421a632d8245222b637b")
    version("0.4.36", sha256="088bff0575d01fc82682a9af4eb07433d60de7e5164686bd2cea3439492e608a")
    version("0.4.35", sha256="c0c986993026b10bf6f607fecb7417377460254640766ce40f1fef3fd139c12e")
    version("0.4.34", sha256="44196854f40c5f9cea3142824b9f1051f85afc3fcf7593ec5479fc8db01c58db")
    version("0.4.33", sha256="f0d788692fc0179653066c9e1c64e57311b8c15a389837fd7baf328abefcbb92")
    version("0.4.32", sha256="eb703909968da161894fb6135a931c5f3d2aab64fff7cba5fcb803ce6d968e08")
    version("0.4.31", sha256="fd2d470643a0073d822737f0788f71391656af7e62cc5b2e7995ee390ceac287")
    version("0.4.30", sha256="94d74b5b2db0d80672b61d83f1f63ebf99d2ab7398ec12b2ca0c9d1e97afe577")
    version("0.4.29", sha256="12904571eaefddcdc8c3b8d4936482b783d5a216e99ef5adcd3522fdfb4fc186")
    version("0.4.28", sha256="dcf0a44aff2e1713f0a2b369281cd5b79d8c18fc1018905c4125897cb06b37e9")
    version("0.4.27", sha256="f3d7f19bdc0a17ccdb305086099a5a90c704f904d4272a70debe06ae6552998c")
    version("0.4.26", sha256="2cce025d0a279ec630d550524749bc8efe25d2ff47240d2a7d4cfbc5090c5383")
    version("0.4.25", sha256="a8ee189c782de2b7b2ffb64a8916da380b882a617e2769aa429b71d79747b982")
    version("0.4.24", sha256="4a6b6fd026ddd22653c7fa2fac1904c3de2dbe845b61ede08af9a5cc709662ae")
    version("0.4.23", sha256="2a229a5a758d1b803891b2eaed329723f6b15b4258b14dc0ccb1498c84963685")
    version("0.4.22", sha256="801434dda6e14f82a45fff753969a33281ab22fb2a50fe801b651390321057ba")
    version("0.4.21", sha256="c97fd0d2751d6e1eb15aa2052ff7cfdc129f8fafc2c14cd779720658926a587b")
    version("0.4.20", sha256="ea96a763a8b1a9374639d1159ab4de163461d01cd022f67c34c09581b71ed2ac")
    version("0.4.19", sha256="29f87f9a50964d3ca5eeb2973de3462f0e8b4eca6d46027894a0e9a903420601")
    version("0.4.18", sha256="776cf33890100803e98f45f9af10aa727271c6993d4e766c069118733c928132")
    version("0.4.17", sha256="d7508a69e87835f534cb07a2f21d79cc1cb8c4cfdcf7fb010927267ef7355f1d")
    version("0.4.16", sha256="e2ca82c9bf973c2c1c01f5340a583692b31f277aa3abd0544229c1fe5fa44b02")
    version("0.4.15", sha256="2aa123ccef591e355dea94a6e714b6559f8e1d6368a576a223f97d031ece0d15")
    version("0.4.14", sha256="18fed3881f26e8b13c8cb46eeeea3dba9eb4d48e3714d8e8f2304dd6e237083d")
    version("0.4.13", sha256="03bfe6749dfe647f16f15f6616638adae6c4a7ca7167c75c21961ecfd3a3baaa")
    version("0.4.12", sha256="d2de9a2388ffe002f16506d3ad1cc6e34d7536b98948e49c7e05bbcfe8e57998")
    version("0.4.11", sha256="8b1cd443b698339df8d8807578ee141e5b67e36125b3945b146f600177d60d79")
    version("0.4.10", sha256="1bf0f2720f778f2937301a16a4d5cd3497f13a4d6c970c24a88918a81816a888")
    version("0.4.9", sha256="1ed135cd08f48e4baf10f6eafdb4a4cdae781f9052b5838c09c91a9f4fa75f09")
    version("0.4.8", sha256="08116481f7336db16c24812bfb5e6f9786915f4c2f6ff4028331fa69e7535202")
    version("0.4.7", sha256="5e7002d74db25f97c99b979d4ba1233b1ef26e1597e5fc468ad11d1c8a9dc4f8")
    version("0.4.6", sha256="d06ea8fba4ed315ec55110396058cb48c8edb2ab0b412f28c8a123beee9e58ab")
    version("0.4.5", sha256="1633e56d34b18ddfa7d2a216ce214fa6fa712d36552532aaa71da416aede7268")
    version("0.4.4", sha256="39b07e07343ed7c74492ee5e75db77456d3afdd038a322671f09fc748f6392cb")
    version("0.4.3", sha256="d43f08f940aa30eb339965cfb3d6bee2296537b0dc2f0c65ccae3009279529ae")

    depends_on("py-setuptools", type="build")

    with default_args(type=("build", "run")):
        # setup.py
        depends_on("python@3.10:", when="@0.4.31:")
        depends_on("python@3.9:", when="@0.4.14:")
        depends_on("py-ml-dtypes@0.4:", when="@0.4.29,0.4.35:")
        depends_on("py-ml-dtypes@0.2:", when="@0.4.14:")
        depends_on("py-ml-dtypes@0.1:", when="@0.4.9:")
        depends_on("py-ml-dtypes@0.0.3:", when="@0.4.7:")
        depends_on("py-numpy@1.25:", when="@0.5:")
        depends_on("py-numpy@1.24:", when="@0.4.31:")
        depends_on("py-numpy@1.22:", when="@0.4.14:")
        depends_on("py-numpy@1.21:", when="@0.4.7:")
        depends_on("py-numpy@1.20:", when="@0.3:")
        # https://github.com/google/jax/issues/19246
        depends_on("py-numpy@:1", when="@:0.4.25")
        depends_on("py-opt-einsum")
        depends_on("py-scipy@1.11.1:", when="@0.5:")
        depends_on("py-scipy@1.10:", when="@0.4.31:")
        depends_on("py-scipy@1.9:", when="@0.4.19:")
        depends_on("py-scipy@1.7:", when="@0.4.7:")
        depends_on("py-scipy@1.5:", when="@0.3:")

        # jax/_src/lib/__init__.py
        # https://github.com/google/jax/commit/8be057de1f50756fe7522f7e98b2f30fad56f7e4
        for v in [
            # "0.5.0",
            # "0.4.38",
            # "0.4.37",
            # "0.4.36",
            # "0.4.35",
            # "0.4.34",
            # "0.4.33",
            # "0.4.32",
            "0.4.31",
            "0.4.30",
            "0.4.29",
            "0.4.28",
            "0.4.27",
            "0.4.26",
            "0.4.25",
            "0.4.24",
            "0.4.23",
            "0.4.22",
            "0.4.21",
            "0.4.20",
            "0.4.19",
            "0.4.18",
            "0.4.17",
            "0.4.16",
            "0.4.15",
            "0.4.14",
            "0.4.13",
            "0.4.12",
            "0.4.11",
            "0.4.10",
            "0.4.9",
            "0.4.8",
            "0.4.7",
            "0.4.6",
            "0.4.5",
            "0.4.4",
            "0.4.3",
        ]:
            depends_on(f"py-jaxlib@:{v}", when=f"@{v}")

        # See _minimum_jaxlib_version in jax/version.py
        # depends_on("py-jaxlib@0.5:", when="@0.5:")
        # depends_on("py-jaxlib@0.4.38:", when="@0.4.38:")
        # depends_on("py-jaxlib@0.4.36:", when="@0.4.36:")
        # depends_on("py-jaxlib@0.4.35:", when="@0.4.35:")
        # depends_on("py-jaxlib@0.4.34:", when="@0.4.34:")
        # depends_on("py-jaxlib@0.4.33:", when="@0.4.33:")
        # depends_on("py-jaxlib@0.4.32:", when="@0.4.32:")
        depends_on("py-jaxlib@0.4.30:", when="@0.4.31:")
        depends_on("py-jaxlib@0.4.27:", when="@0.4.28:")
        depends_on("py-jaxlib@0.4.23:", when="@0.4.27:")
        depends_on("py-jaxlib@0.4.20:", when="@0.4.25:")
        depends_on("py-jaxlib@0.4.19:", when="@0.4.21:")
        depends_on("py-jaxlib@0.4.14:", when="@0.4.15:")
        depends_on("py-jaxlib@0.4.11:", when="@0.4.12:")
        depends_on("py-jaxlib@0.4.7:", when="@0.4.8:")
        depends_on("py-jaxlib@0.4.6:", when="@0.4.7:")
        depends_on("py-jaxlib@0.4.4:", when="@0.4.5:")
        depends_on("py-jaxlib@0.4.2:", when="@0.4.3:")
        depends_on("py-jaxlib@0.4.1:", when="@0.4.2:")

        # Historical dependencies
        depends_on("py-importlib-metadata@4.6:", when="@0.4.11:0.4.30 ^python@:3.9")
diff --git a/build/tools/utils.py b/build/tools/utils.py
index f38dc5b..255dd98 100644
--- a/build/tools/utils.py
+++ b/build/tools/utils.py
@@ -208,7 +208,7 @@ def get_gcc_major_version(gcc_path: str):
     capture_output=True,
     text=True,
   )
-  major_version = int(gcc_version_proc.stdout)
+  major_version = int(gcc_version_proc.stdout.split(".")[0])
 
   return major_version
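
The one-line change above guards against GCC builds that report a dotted version string; a standalone illustration of the parsing difference (the version string below is hypothetical):

raw_output = "13.3.0"  # hypothetical output of the gcc version query

# int(raw_output) would raise ValueError on a dotted string; taking the
# first dot-separated field yields the major version in either case.
major_version = int(raw_output.split(".")[0])
print(major_version)  # -> 13
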
 
+100 −0
From 8fce7378ed8ce994107568449806cd99274ab22b Mon Sep 17 00:00:00 2001
From: Andrew Elble <aweits@rit.edu>
Date: Mon, 21 Oct 2024 19:42:31 -0400
Subject: [PATCH] patchit

---
 ...ch-for-Abseil-to-fix-build-on-Jetson.patch | 68 +++++++++++++++++++
 third_party/xla/workspace.bzl                 |  1 +
 2 files changed, 69 insertions(+)
 create mode 100644 third_party/xla/0001-Add-patch-for-Abseil-to-fix-build-on-Jetson.patch

diff --git a/third_party/xla/0001-Add-patch-for-Abseil-to-fix-build-on-Jetson.patch b/third_party/xla/0001-Add-patch-for-Abseil-to-fix-build-on-Jetson.patch
new file mode 100644
index 000000000000..5138a045082b
--- /dev/null
+++ b/third_party/xla/0001-Add-patch-for-Abseil-to-fix-build-on-Jetson.patch
@@ -0,0 +1,68 @@
+From 40da87a0476436ca1da2eafe08935787a05e9a61 Mon Sep 17 00:00:00 2001
+From: David Dunleavy <ddunleavy@google.com>
+Date: Mon, 5 Aug 2024 11:42:53 -0700
+Subject: [PATCH] Add patch for Abseil to fix build on Jetson
+
+Patches in https://github.com/abseil/abseil-cpp/commit/372124e6af36a540e74a2ec31d79d7297a831f98
+
+PiperOrigin-RevId: 659627531
+---
+ .../tsl/third_party/absl/nvidia_jetson.patch  | 35 +++++++++++++++++++
+ .../tsl/third_party/absl/workspace.bzl        |  1 +
+ 2 files changed, 36 insertions(+)
+ create mode 100644 third_party/tsl/third_party/absl/nvidia_jetson.patch
+
+diff --git a/third_party/tsl/third_party/absl/nvidia_jetson.patch b/third_party/tsl/third_party/absl/nvidia_jetson.patch
+new file mode 100644
+index 000000000000..5328c3a0d605
+--- /dev/null
++++ b/third_party/tsl/third_party/absl/nvidia_jetson.patch
+@@ -0,0 +1,35 @@
++From 372124e6af36a540e74a2ec31d79d7297a831f98 Mon Sep 17 00:00:00 2001
++From: =?UTF-8?q?Fr=C3=A9d=C3=A9ric=20Bastien?= <frederic.bastien@gmail.com>
++Date: Thu, 1 Aug 2024 12:38:52 -0700
++Subject: [PATCH] PR #1732: Fix build on NVIDIA Jetson board. Fix #1665
++
++Imported from GitHub PR https://github.com/abseil/abseil-cpp/pull/1732
++
++Fix build on NVIDIA Jetson board. Fix #1665
++
++This patch is already used by the spark project.
++I'm fixing this as this break the build of Tensorflow and JAX on Jetson board.
++Merge 7db2d2ab9fbed1f0fabad10a6ec73533ba71bfff into 6b8ebb35c0414ef5a2b6fd4a0f59057e41beaff9
++
++Merging this change closes #1732
++
++COPYBARA_INTEGRATE_REVIEW=https://github.com/abseil/abseil-cpp/pull/1732 from nouiz:fix_neon_on_jetson 7db2d2ab9fbed1f0fabad10a6ec73533ba71bfff
++PiperOrigin-RevId: 658501520
++Change-Id: If502ede4efc8c877fb3fed227eca6dc7622dd181
++---
++ absl/base/config.h | 2 +-
++ 1 file changed, 1 insertion(+), 1 deletion(-)
++
++diff --git a/absl/base/config.h b/absl/base/config.h
++index 97c9a22a109..ab1e9860a91 100644
++--- a/absl/base/config.h
+++++ b/absl/base/config.h
++@@ -926,7 +926,7 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
++ // https://llvm.org/docs/CompileCudaWithLLVM.html#detecting-clang-vs-nvcc-from-code
++ #ifdef ABSL_INTERNAL_HAVE_ARM_NEON
++ #error ABSL_INTERNAL_HAVE_ARM_NEON cannot be directly set
++-#elif defined(__ARM_NEON) && !defined(__CUDA_ARCH__)
+++#elif defined(__ARM_NEON) && !(defined(__NVCC__) && defined(__CUDACC__))
++ #define ABSL_INTERNAL_HAVE_ARM_NEON 1
++ #endif
++ 
+diff --git a/third_party/tsl/third_party/absl/workspace.bzl b/third_party/tsl/third_party/absl/workspace.bzl
+index 06f75166ce4b..9565a82c3319 100644
+--- a/third_party/tsl/third_party/absl/workspace.bzl
++++ b/third_party/tsl/third_party/absl/workspace.bzl
+@@ -44,4 +44,5 @@ def repo():
+         system_link_files = SYS_LINKS,
+         strip_prefix = "abseil-cpp-{commit}".format(commit = ABSL_COMMIT),
+         urls = tf_mirror_urls("https://github.com/abseil/abseil-cpp/archive/{commit}.tar.gz".format(commit = ABSL_COMMIT)),
++        patch_file = ["//third_party/absl:nvidia_jetson.patch"],
+     )
+-- 
+2.31.1
+
diff --git a/third_party/xla/workspace.bzl b/third_party/xla/workspace.bzl
index af52e7671507..70481bc970a5 100644
--- a/third_party/xla/workspace.bzl
+++ b/third_party/xla/workspace.bzl
@@ -29,6 +29,7 @@ def repo():
         name = "xla",
         sha256 = XLA_SHA256,
         strip_prefix = "xla-{commit}".format(commit = XLA_COMMIT),
+	patch_file = ["//third_party/xla:0001-Add-patch-for-Abseil-to-fix-build-on-Jetson.patch"],
         urls = tf_mirror_urls("https://github.com/openxla/xla/archive/{commit}.tar.gz".format(commit = XLA_COMMIT)),
     )
 
-- 
2.31.1
+59 −0
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

from spack.package import *


class PyJupyterCore(PythonPackage):
    """Core Jupyter functionality"""

    homepage = "https://jupyter-core.readthedocs.io/"
    pypi = "jupyter-core/jupyter_core-4.6.0.tar.gz"
    git = "https://github.com/jupyter/jupyter_core.git"

    license("BSD-3-Clause")

    # begin EBRAINS (added): add version
    version("5.7.0", sha256="cb8d3ed92144d2463a3c5664fdd686a3f0c1442ea45df8babb1c1a9e6333fe03")
    # end EBRAINS
    version("5.3.0", sha256="6db75be0c83edbf1b7c9f91ec266a9a24ef945da630f3120e1a0046dc13713fc")
    version("5.1.0", sha256="a5ae7c09c55c0b26f692ec69323ba2b62e8d7295354d20f6cd57b749de4a05bf")
    version("4.11.1", sha256="2e5f244d44894c4154d06aeae3419dd7f1b0ef4494dc5584929b398c61cfd314")
    version("4.9.2", sha256="d69baeb9ffb128b8cd2657fcf2703f89c769d1673c851812119e3a2a0e93ad9a")
    version("4.7.1", sha256="79025cb3225efcd36847d0840f3fc672c0abd7afd0de83ba8a1d3837619122b4")
    version("4.6.3", sha256="394fd5dd787e7c8861741880bdf8a00ce39f95de5d18e579c74b882522219e7e")
    version("4.6.1", sha256="a183e0ec2e8f6adddf62b0a3fc6a2237e3e0056d381e536d3e7c7ecc3067e244")
    version("4.6.0", sha256="85103cee6548992780912c1a0a9ec2583a4a18f1ef79a248ec0db4446500bce3")
    version("4.4.0", sha256="ba70754aa680300306c699790128f6fbd8c306ee5927976cbe48adacf240c0b7")
    version("4.2.0", sha256="44ec837a53bebf4e937112d3f9ccf31fee4f8db3e406dd0dd4f0378a354bed9c")
    version("4.1.1", sha256="ae0e69435258126466c86cd989e465a9c334c50107ef4f257decc8693650bf4c")
    version("4.1.0", sha256="146af0679c33c56db4b85b785f3dacd933ffaca97e7d2d56ff577a5485c2bd13")
    version("4.0.6", sha256="96a68a3b1d018ff7776270b26b7cb0cfd7a18a53ef2061421daff435707d198c")
    version("4.0.5", sha256="9f6581b827f56cfa1771d7b1bd8ecc1274afa7f6e3e1046b7e0d4e05d52bf6e8")
    version("4.0.4", sha256="fcf45478025f34174943993947f51a41ad871ac998a14bf1cb87d8eb61e75c6d")
    version("4.0.3", sha256="12258d8c593c53bb08e09f3da63a418d7cb5b5852b3d0ffa29639402f56dcbdb")
    version("4.0.2", sha256="13a46b3c493ac63bd75048d6d2142cfc44258bc6c260d96c506f0214fcd78a70")
    version("4.0.1", sha256="7c165f7de7a063596f8be1bcfc86e9ba6897e38baf24e8510514690963600122")
    version("4.0.0", sha256="9025208cdfc40718c7e3ab62b5e17aacf68e3fc66e34ff21fe032d553620122a")

    depends_on("python@3.8:", when="@5:", type=("build", "run"))
    depends_on("py-hatchling@1.4:", when="@4.11.1:", type="build")

    depends_on("py-platformdirs@2.5:", when="@5.1:", type=("build", "run"))
    depends_on("py-traitlets@5.3:", when="@5.1:", type=("build", "run"))
    depends_on("py-traitlets", type=("build", "run"))
    # additional pywin32>=300 dependency for windows

    # Historical dependencies
    depends_on("py-setuptools", when="@:4.9.2", type=("build", "run"))

    def setup_dependent_run_environment(self, env, dependent_spec):
        # https://docs.jupyter.org/en/stable/use/jupyter-directories.html
        if os.path.exists(dependent_spec.prefix.etc.jupyter):
            env.prepend_path("JUPYTER_CONFIG_PATH", dependent_spec.prefix.etc.jupyter)
        if os.path.exists(dependent_spec.prefix.share.jupyter):
            env.prepend_path("JUPYTER_PATH", dependent_spec.prefix.share.jupyter)
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyJupyterServerProxy(PythonPackage):
    """
    Jupyter Server Proxy lets you run arbitrary external processes
    (such as RStudio, Shiny Server, Syncthing, PostgreSQL, Code Server, etc)
    alongside your notebook server and provide authenticated web access to them
    using a path like /rstudio next to others like /lab.
    """

    homepage = "https://github.com/jupyterhub/jupyter-server-proxy"
    pypi = "jupyter-server-proxy/jupyter_server_proxy-4.4.0.tar.gz"

    license("BSD-3-Clause")

    # begin EBRAINS (added): add new version (compatible with jupyterlab 4)
    version("4.4.0", sha256="e5732eb9c810c0caa997f90a2f15f7d09af638e7eea9c67eb5c43e9c1f0e1157")
    # end EBRAINS
    version("3.2.2", sha256="54690ea9467035d187c930c599e76065017baf16e118e6eebae0d3a008c4d946")

    # begin EBRAINS (modified): update dependencies for version 4.4.0
    depends_on("py-jupyter-packaging@0.7.9:0.7", when="@:3", type="build")
    depends_on("py-jupyterlab@3.0:3", when="@:3", type="build")
    depends_on("py-setuptools@40.8.0:", when="@:3", type="build")

    depends_on("py-hatch-jupyter-builder@0.8.3:", when="@4.4:", type="build")
    depends_on("py-hatchling@1.18:", when="@4.4:", type="build")
    depends_on("py-jupyterlab@4.0.6:", when="@4.4:", type="build")

    depends_on("py-aiohttp", type=("build", "run"))
    depends_on("py-jupyter-server@1.0:", type=("build", "run"))
    depends_on("py-jupyter-server@1.24:", when="@4.4:", type=("build", "run"))
    depends_on("py-simpervisor@0.4:", type=("build", "run"))
    depends_on("py-simpervisor@1:", when="@4.4:", type=("build", "run"))
    depends_on("py-tornado@6.1:", when="@4.4:", type=("build", "run"))
    depends_on("py-traitlets@5.1:", when="@4.4:", type=("build", "run"))
    # end EBRAINS

    # begin EBRAINS (added): add missing py-jupyter-core dependency
    depends_on('py-jupyter-core', type=("build", "run"))
    # end EBRAINS
Original line number Diff line number Diff line
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyJupyterlabWidgets(PythonPackage):
    """A JupyterLab extension."""

    homepage = "https://github.com/jupyter-widgets/ipywidgets"
    # Source is also available, but I'm having issues getting it to build:
    # https://github.com/jupyter-widgets/ipywidgets/issues/3324
    url = "https://files.pythonhosted.org/packages/py3/j/jupyterlab_widgets/jupyterlab_widgets-1.0.2-py3-none-any.whl"

    license("BSD-3-Clause")

    # begin EBRAINS (added): add version
    version("3.0.15", sha256="d59023d7d7ef71400d51e6fee9a88867f6e65e10a4201605d2d7f3e8f012a31c")
    # end EBRAINS
    version("3.0.3", sha256="6aa1bc0045470d54d76b9c0b7609a8f8f0087573bae25700a370c11f82cb38c8")
    version("1.1.0", sha256="c2a9bd3789f120f64d73268c066ed3b000c56bc1dda217be5cdc43e7b4ebad3f")
    version("1.0.2", sha256="f5d9efface8ec62941173ba1cffb2edd0ecddc801c11ae2931e30b50492eb8f7")

    depends_on("python@3.6:", type=("build", "run"))
    depends_on("python@3.7:", when="@3.0.3:", type=("build", "run"))
@@ -8,7 +8,7 @@ from spack.package import *
 
 class PyLems(PythonPackage):
     """
-    Requirement necessary for py-tvb-library package.
+    Requirement necessary for py-tvb-contrib package.
     """
 
     homepage = "https://pypi.org/project/PyLEMS/"
+34 −30

File changed: preview size limit exceeded, changes collapsed.