
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (21)
Showing 284 additions and 121 deletions
@@ -3,31 +3,34 @@ stages:
   - test
 variables:
-  BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/tc/ebrains-spack-build-env/base:devel
+  BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:24.12
   SPACK_VERSION: v0.21.1
   SPACK_PATH_GITLAB: /mnt/spack_v0.21.1
   SYSTEMNAME: ebrainslab
-  OC_PROJECT: jupyterhub
 # ===================================================================
 # LAB DEPLOYMENTS
 # ===================================================================
-# start an OpenShift Job that will build the Spack environment
+# start a k8s Job that will build the Spack environment
 .deploy-build-environment:
   stage: build
+  tags:
+    - docker-runner
+    - read-only
+  image: alpine:3.21.0
+  before_script:
+    - apk add kubectl
   script:
-    # login and select project in openshift
-    - oc login "$OPENSHIFT_SERVER" --token="$OPENSHIFT_TOKEN"
-    - oc project $OC_PROJECT
+    # use the site-specific kubectl context
+    - kubectl config use-context $KUBE_CONTEXT
     # create job description file
-    - chmod a+x create_job.sh
-    - ./create_job.sh $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_VERSION $SPACK_ENV $CI_COMMIT_BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT
+    - sh create_job.sh $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_VERSION $SPACK_ENV $CI_COMMIT_BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT
     - cat simplejob.yml
     # start the deploy job
-    - oc create -f simplejob.yml
+    - kubectl create -f simplejob.yml
     # wait for job to finish to get the logs
-    - while true; do sleep 300; x=$(oc get pods | grep simplejob${CI_PIPELINE_ID} | awk '{ print $3}'); if [ $x != "Running" ]; then break; fi; done
+    - while true; do sleep 300; x=$(kubectl get pods -l job-name=simplejob${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.phase}'); if [ $x != "Running" ]; then break; fi; done
     # # copy logs of failed packages locally, to keep as job artifacts
     # - oc rsync $(oc get pods -l job-name=simplejob${CI_PIPELINE_ID} -o name):/tmp ./ --include="*/" --include="spack/spack-stage/*/*.txt" --exclude="*"
     # - mv tmp/spack/spack-stage spack_logs
@@ -36,12 +39,10 @@ variables:
     # - oc rsync $(oc get pods -l job-name=simplejob${CI_PIPELINE_ID} -o name):$LAB_KERNEL_PATH ./
     # - mv .$LAB_KERNEL_PATH kernel_specs
     # if spack install has failed, fail the pipeline
-    - oc logs jobs/simplejob${CI_PIPELINE_ID} | tee log.txt
-    - if [ $(cat log.txt | grep "No module available for package" | wc -l) -gt 0 ]; then exit 1; fi;
-    # delete the job from OpenShift as we have the logs here
-    - oc delete job simplejob${CI_PIPELINE_ID} || true
-  tags:
-    - shell-runner
+    - kubectl logs jobs/simplejob${CI_PIPELINE_ID} | tee log.txt
+    - if [ $(kubectl get pods -l job-name=simplejob${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.containerStatuses[0].state.terminated.exitCode}') -ne 0 ]; then exit 1; fi;
+    # delete the job, as we have the logs here
+    - kubectl delete job simplejob${CI_PIPELINE_ID} || true
 #  artifacts:
 #    paths:
 #      - spack_logs
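The reworked job above amounts to: create the Kubernetes Job, poll the pod phase every five minutes, gate the pipeline on the build container's exit code, then clean up. Purely as an illustration of that wait-and-check logic (not part of this repository), the same steps could be sketched with the official Python kubernetes client; the context name, namespace, and pipeline id below are placeholders:
```
# Illustration only: mirrors the kubectl polling loop and exit-code check above.
# Context, namespace and job name are placeholders, not values used by the pipeline.
import time
from kubernetes import client, config

config.load_kube_config(context="cineca-int")       # placeholder kube context
core = client.CoreV1Api()
namespace = "jupyterhub"                             # placeholder namespace
selector = "job-name=simplejob12345"                 # placeholder pipeline id

# poll until the job's pod leaves the Running phase (assumes the pod already started)
while True:
    pod = core.list_namespaced_pod(namespace, label_selector=selector).items[0]
    if pod.status.phase != "Running":
        break
    time.sleep(300)

# fail if the build container terminated with a non-zero exit code
exit_code = pod.status.container_statuses[0].state.terminated.exit_code
if exit_code != 0:
    raise SystemExit(exit_code)
```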
@@ -66,52 +67,26 @@ variables:
     LAB_KERNEL_ROOT: /srv/jupyterlab_kernels/prod
     INSTALLATION_ROOT: /srv/main-spack-instance-2402
-# deploy to the dev lab environment at CSCS
-.deploy-dev-server-cscs:
-  extends: .deploy-dev-server
-  variables:
-    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
-    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_DEV_TOKEN
-    BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/tc/ebrains-spack-build-env/okd:okd_23.06
-    OC_PROJECT: jupyterhub-int
-  resource_group: shared-NFS-mount-dev-cscs
-  tags: # this is just to ensure that the two jobs will run on different runners
-    - read-write # to avoid issues with common environment variables
-    - shell-runner
-# deploy to the prod lab environment at CSCS
-.deploy-prod-server-cscs:
-  extends: .deploy-prod-server
-  variables:
-    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
-    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_PROD_TOKEN
-    BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/tc/ebrains-spack-build-env/okd:okd_23.06
-  resource_group: shared-NFS-mount-prod-cscs
-  tags: # this is just to ensure that the two jobs will run on different runners
-    - read-write # to avoid issues with common environment variables
-    - shell-runner
 # deploy to the dev lab environment at CINECA
 .deploy-dev-server-cineca:
   extends: .deploy-dev-server
   variables:
-    OPENSHIFT_SERVER: $CINECA_K8S_DEV_SERVER
-    OPENSHIFT_TOKEN: $CINECA_K8S_DEV_TOKEN
+    KUBE_CONTEXT: cineca-int
   resource_group: shared-NFS-mount-dev-cineca
-  tags: # this is just to ensure that the two jobs will run on different runners
-    - read-only # to avoid issues with common environment variables
-    - shell-runner
 # deploy to the prod lab environment at JSC
 .deploy-prod-server-jsc:
   extends: .deploy-prod-server
   variables:
-    OPENSHIFT_SERVER: $JSC_K8S_PROD_SERVER
-    OPENSHIFT_TOKEN: $JSC_K8S_PROD_TOKEN
+    KUBE_CONTEXT: jsc-prod
   resource_group: shared-NFS-mount-prod-jsc
-  tags: # this is just to ensure that the two jobs will run on different runners
-    - read-only # to avoid issues with common environment variables
-    - shell-runner
+# deploy to the prod lab environment at CINECA
+.deploy-prod-server-cineca:
+  extends: .deploy-prod-server
+  variables:
+    KUBE_CONTEXT: cineca-prod
+  resource_group: shared-NFS-mount-prod-cineca
 # -------------------------------------------------------------------
 # Release types: test, experimental and official releases
@@ -156,47 +131,29 @@ variables:
 # Lab deployment jobs
 # -------------------------------------------------------------------
-# deploy int release to dev environment at CSCS
-deploy-int-release-dev-cscs:
-  extends:
-    - .deploy-int-release
-    - .deploy-dev-server-cscs
 # deploy int release to dev environment at CINECA
 deploy-int-release-dev-cineca:
   extends:
     - .deploy-int-release
     - .deploy-dev-server-cineca
-# deploy exp release to dev environment at CSCS
-deploy-exp-release-dev-cscs:
-  extends:
-    - .deploy-exp-dev-release
-    - .deploy-dev-server-cscs
 # deploy exp release to dev environment at CINECA
 deploy-exp-release-dev-cineca:
   extends:
     - .deploy-exp-dev-release
     - .deploy-dev-server-cineca
-# deploy exp release to prod environment at CSCS
-deploy-exp-release-prod-cscs:
-  extends:
-    - .deploy-exp-prod-release
-    - .deploy-prod-server-cscs
 # deploy exp release to prod environment at JSC
 deploy-exp-release-prod-jsc:
   extends:
     - .deploy-exp-prod-release
     - .deploy-prod-server-jsc
-# deploy prod release to prod environment at CSCS
-deploy-prod-release-prod-cscs:
-  extends:
-    - .deploy-prod-release
-    - .deploy-prod-server-cscs
+# deploy exp release to prod environment at CINECA
+deploy-exp-release-prod-cineca:
+  extends:
+    - .deploy-exp-prod-release
+    - .deploy-prod-server-cineca
 # deploy prod release to prod environment at JSC
 deploy-prod-release-prod-jsc:
@@ -204,6 +161,12 @@ deploy-prod-release-prod-jsc:
     - .deploy-prod-release
     - .deploy-prod-server-jsc
+# deploy prod release to prod environment at CINECA
+deploy-prod-release-prod-cineca:
+  extends:
+    - .deploy-prod-release
+    - .deploy-prod-server-cineca
 # ===================================================================
 # GITLAB RUNNER DEPLOYMENTS
 # ===================================================================
@@ -224,11 +187,7 @@ build-spack-env-on-runner:
     - >
      echo " view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
     # run installation script
-    - . install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $SPACK_VERSION $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB
-    # re-activate envionment and run tests
-    - spack env activate $SPACK_DEV_ENV
-    # TODO: run all tests when test dependency issue is fixed
-    # - spack test run -x wf-brainscales2-demos wf-multi-area-model
+    - bash install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $SPACK_VERSION $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB
   after_script:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
     # for succesfully installed packages: keep the spack logs for any package modified during this CI job
@@ -263,10 +222,10 @@ sync-gitlab-spack-instance:
     # get latest state of EBRAINS repo
     - rm -rf $SPACK_REPO_PATH && cp -r $CI_PROJECT_DIR $SPACK_REPO_PATH
     # run installation script
-    - . install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_VERSION $SPACK_REPO_PATH $SPACK_NFS_ENV
+    - bash install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_VERSION $SPACK_REPO_PATH $SPACK_NFS_ENV
     # create kernel spec, so that the environment can be used in gitlab CI jobs
     - RELEASE_NAME=$(case $CI_COMMIT_BRANCH in experimental_rel) echo ebrains-experimental;; ebrains*) echo ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11};; *) echo $CI_COMMIT_BRANCH;; esac);
-    - . create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env
+    - bash create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env
   after_script:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
     # for succesfully installed packages: keep the spack logs for any package modified during this CI job
...
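The RELEASE_NAME line above turns a branch name into a release name with bash substring slicing. A minimal Python sketch of the same rule, using hypothetical branch names purely for illustration:
```
# Illustration only: the branch-to-release-name mapping done by the case statement above.
def release_name(branch: str) -> str:
    if branch == "experimental_rel":
        return "ebrains-experimental"
    if branch.startswith("ebrains"):
        # ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11} keeps the first 10 characters,
        # drops the character at index 10, and appends the remainder after a dot
        return branch[0:10] + "." + branch[11:]
    return branch

# hypothetical branch names, shown only to make the slicing concrete
assert release_name("experimental_rel") == "ebrains-experimental"
assert release_name("ebrains-24-04") == "ebrains-24.04"
```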
@@ -40,7 +40,7 @@ git clone --branch {branch-name} https://gitlab.ebrains.eu/ri/tech-hub/platform/
 Clone Spack. We currently use version v0.21.1:
 ```
-git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch v0.20.0 https://github.com/spack/spack
+git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch v0.21.1 https://github.com/spack/spack
 ```
 Activate Spack:
@@ -53,11 +53,6 @@ Add the project repository to your Spack environment:
 spack repo add ebrains-spack-builds
 ```
-Create the environment:
-```
-spack env create -d ebrains-spack-builds/
-```
 Define your site-specific configurations:
 ```
 export SYSTEMNAME=<your-system-name>
...
@@ -7,6 +7,8 @@
 # loaded by all users.
 # ===========================================================================================================
+set -euo pipefail
 INSTALLATION_ROOT=$1
 EBRAINS_SPACK_ENV=$2
 RELEASE_NAME=$3
...
@@ -58,21 +58,19 @@ spec:
          # reset spack repository dir by cloning the selected version
          rm -rf \$EBRAINS_REPO_PATH
-         git clone https://gitlab.ebrains.eu/technical-coordination/project-internal/devops/platform/ebrains-spack-builds.git --branch \$BRANCH \$EBRAINS_REPO_PATH
+         git clone ${CI_PROJECT_URL} --branch \$BRANCH \$EBRAINS_REPO_PATH
          # run installation script
-         . \$EBRAINS_REPO_PATH/install_spack_env.sh \$SPACK_JOBS \$INSTALLATION_ROOT \$SPACK_VERSION \$EBRAINS_REPO_PATH \$EBRAINS_SPACK_ENV
+         bash \$EBRAINS_REPO_PATH/install_spack_env.sh \$SPACK_JOBS \$INSTALLATION_ROOT \$SPACK_VERSION \$EBRAINS_REPO_PATH \$EBRAINS_SPACK_ENV
          if [ \$? -eq 0 ]
          then
            # build process succeeded - create or update kernel on the NFS based on the current spack environment
-           chmod +x \$EBRAINS_REPO_PATH/create_JupyterLab_kernel.sh
-           \$EBRAINS_REPO_PATH/create_JupyterLab_kernel.sh \$INSTALLATION_ROOT \$EBRAINS_SPACK_ENV \$RELEASE_NAME \$LAB_KERNEL_ROOT
-           exit 0
+           bash \$EBRAINS_REPO_PATH/create_JupyterLab_kernel.sh \$INSTALLATION_ROOT \$EBRAINS_SPACK_ENV \$RELEASE_NAME \$LAB_KERNEL_ROOT && exit 0
          else
            # build process failed - keep spack build logs and fail the pipeline
-           cp -r /tmp/spack/spack-stage/* \$SPACK_BUILD_LOGS
-           exit
+           cp -r /tmp/spack/spack-stage/* \$BUILD_LOGS_DIR
+           exit 1
          fi
      env:
      - name: SYSTEMNAME
...
@@ -7,6 +7,8 @@
 # (if the specified spack instance doesn't exist, it also creates it)
 # =========================================================================================================================================
+set -eo pipefail
 SPACK_JOBS=$1 # number of jobs
 INSTALLATION_ROOT=$2 # where to set up the installation
 SPACK_VERSION=$3 # which spack version to use
@@ -15,7 +17,7 @@ EBRAINS_SPACK_ENV=$5 # name of EBRAINS Spack environment to be created/updated
 UPSTREAM_INSTANCE=$6 # path to Spack instance to use as upstream (optional)
 SPACK_REPO=https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/spack.git
-SPACK_VERSION_EBRAINS=${SPACK_VERSION}_ebrains24.04
+SPACK_VERSION_EBRAINS=${SPACK_VERSION}_ebrains24.11
 # specify location of .spack dir (by default in ~)
 # this is where cache and configuration settings are stored
...
@@ -15,6 +15,20 @@ import spack.build_environment
 class BuildBrainscales(WafPackage):
     """Common stuff for BrainScaleS packages..."""
+    version(
+        "9.0-a8",
+        git="https://github.com/electronicvisions/releases-ebrains",
+        tag="ebrains-9.0-a8",
+        commit="44323be431da4b4b43890815f453c27207dee0b2",
+        submodules=True,
+    )
+    version(
+        "9.0-a7",
+        git="https://github.com/electronicvisions/releases-ebrains",
+        tag="ebrains-9.0-a7",
+        commit="2337adc6a33f907900d2b8be5d9f0b15872a200a",
+        submodules=True,
+    )
     version(
         "9.0-a6",
         git="https://github.com/electronicvisions/releases-ebrains",
@@ -38,6 +52,8 @@ class BuildBrainscales(WafPackage):
     )
     # common dependencies of BuildBrainscales-derived packages
+    depends_on('oppulance@9.0-a8', when='@9.0-a8', type=('build', 'link', 'run', 'test'))
+    depends_on('oppulance@9.0-a7', when='@9.0-a7', type=('build', 'link', 'run', 'test'))
     depends_on('oppulance@9.0-a6', when='@9.0-a6', type=('build', 'link', 'run', 'test'))
     depends_on('oppulance@9.0-a5', when='@9.0-a5', type=('build', 'link', 'run', 'test'))
     depends_on('oppulance@9.0-a4', when='@9.0-a4', type=('build', 'link', 'run', 'test'))
...
@@ -10,16 +10,16 @@ class Log4cxx(CMakePackage):
     """A C++ port of Log4j"""
     homepage = "https://logging.apache.org/log4cxx/latest_stable/"
-    url = "https://dlcdn.apache.org/logging/log4cxx/0.12.0/apache-log4cxx-0.12.0.tar.gz"
+    url = "https://github.com/apache/logging-log4cxx/archive/refs/tags/rel/v1.2.0.tar.gz"
     maintainers("nicmcd")
     # begin EBRAINS (added): bring upstream (ref. spack@0.21.2)
-    version("1.2.0", sha256="09f4748aa5675ef5c0770bedbf5e00488668933c5a935a43ac5b85be2436c48a")
-    version("1.1.0", sha256="1fc7d82697534184bc0f757348d969d24852b948f63d6b17283fd1ee29c2c28a")
+    version("1.2.0", sha256="3e0af426011718c634194200cdd79b49ec13c322697bdcddef3d8b2ac9efd7b6")
+    version("1.1.0", sha256="feb425ce35a391cf0927356bebb7da53f96c8a7aaf634aaf740e011203c732bb")
     # end EBRAINS
-    version("0.12.1", sha256="7bea5cb477f0e31c838f0e1f4f498cc3b30c2eae74703ddda923e7e8c2268d22")
-    version("0.12.0", sha256="bd5b5009ca914c8fa7944b92ea6b4ca6fb7d146f65d526f21bf8b3c6a0520e44")
+    version("0.12.1", sha256="567a4200c5b005a816c401e798d98294782950c7750eb3e285e851b970c8beed")
+    version("0.12.0", sha256="31730a17b8ff3f416256755b7aa6d7e95b167c670eb469eb9ff99aa006376e79")
     variant("cxxstd", default="17", description="C++ standard", values=("11", "17"), multi=False)
     # begin EBRAINS (added)
...
@@ -21,6 +21,20 @@ class Oppulance(Package):
     depends_on('wget')
     depends_on('gmp')
+    version(
+        "9.0-a8",
+        git="https://github.com/electronicvisions/releases-ebrains",
+        tag="ebrains-9.0-a8",
+        commit="44323be431da4b4b43890815f453c27207dee0b2",
+        submodules=True,
+    )
+    version(
+        "9.0-a7",
+        git="https://github.com/electronicvisions/releases-ebrains",
+        tag="ebrains-9.0-a7",
+        commit="2337adc6a33f907900d2b8be5d9f0b15872a200a",
+        submodules=True,
+    )
     version(
         "9.0-a6",
         git="https://github.com/electronicvisions/releases-ebrains",
...
@@ -13,11 +13,12 @@ class PyNestml(PythonPackage):
     """
     homepage = 'https://nestml.readthedocs.org/'
-    url = 'https://pypi.org/packages/py3/N/NESTML/NESTML-7.0.2-py3-none-any.whl'
+    url = 'https://pypi.org/packages/py3/N/NESTML/NESTML-8.0.0-py3-none-any.whl'
     git = 'https://github.com/nest/nestml/'
     maintainers = ['clinssen', 'pnbabu']
+    version('8.0.0', sha256='bb2182fadd5f3ff7fa538e7f9865bafefb8be67938c83028174491768e88ef09', expand=False)
     version('7.0.2', sha256='3611239ff8436bf1c74b878562564007285c6da5df0317c6cd52f65e6bcd3f8b', expand=False)
     version('7.0.1', sha256='38392bdd06c5be5af65050153f34fb78dee6032158b268b83599bd70aab5c030', expand=False)
     version('7.0.0', sha256='4e271048b4a9ad2c161dda98d3ee25e143537649a264d521deb0ff5543020d73', expand=False)
...
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyNnmt(PythonPackage):
"""NNMT is an open-source, community centered Python package for collecting
reusable implementations of analytical methods for neuronal network model analysis
based on mean-field theory."""
homepage = "https://nnmt.readthedocs.io/en/latest/index.html"
pypi = "nnmt/nnmt-1.3.0.tar.gz"
maintainers = ["rshimoura", "terhorstd"]
version("1.3.0", sha256="0cb4f7c58e08520e383506b5b15fb0a9552801adc03fd1006b9e3dd17b1b636d")
depends_on("py-setuptools@23.1.0:", type="build")
depends_on("py-numpy@1.8:", type=("build", "run"))
depends_on("py-scipy@0.14:", type=("build", "run"))
depends_on("py-cython@0.20:", type=("build", "run"))
depends_on("py-h5py@2.5:", type=("build", "run"))
depends_on("py-matplotlib@2.0:", type=("build", "run"))
depends_on("py-pint", type=("build", "run"))
depends_on("py-pyyaml", type=("build", "run"))
depends_on("py-requests", type=("build", "run"))
depends_on("py-mpmath", type=("build", "run"))
depends_on("py-decorator", type=("build", "run"))
depends_on("py-pytest@5.4:", type=("build", "run"))
depends_on("py-pytest-mock@3.1:", type=("build", "run"))
depends_on("python@3:", type=("build", "run"))
@@ -13,8 +13,17 @@ class PySpalloc(PythonPackage):
     homepage = "https://github.com/SpiNNakerManchester/spalloc"
     pypi = "spalloc/spalloc-1!7.0.0.tar.gz"
+    def url_for_version(self, version):
+        url = "https://pypi.org/packages/source/s/spalloc/spalloc-1!{}.tar.gz"
+        return url.format(version)
+    version('7.3.0', sha256='5664546187a57c87743c8bf1db812a2ab1c14db15fb0f44ee64f5f72d1cdedac')
     version('7.0.0', sha256='e141a0e661efd6fd634f3793752d8d6deef56ee37a21fa8e3d7208f4edd86f51')
-    depends_on("python@3.7:", type=("build", "run"))
+    depends_on("python@3.8:", type=("build", "run"), when="@7.3.0:")
+    depends_on("python@3.7:", type=("build", "run"), when="@7.0.0:")
+    depends_on("py-spinnutilities@7.3.0", type=("build", "run"), when="@7.3.0")
+    depends_on("py-spinnutilities@7.0.0", type=("build", "run"), when="@7.0.0")
     depends_on("py-jsonschema", type=("build", "run"))
-    depends_on("py-spinnutilities@7.0.0", type=("build", "run"))
@@ -13,9 +13,21 @@ class PySpinnakerPacman(PythonPackage):
     homepage = "https://github.com/SpiNNakerManchester/PACMAN"
     pypi = "SpiNNaker_PACMAN/SpiNNaker_PACMAN-1!7.0.0.tar.gz"
+    def url_for_version(self, version):
+        name = "spinnaker_pacman" if version >= Version("7.2.0") else "SpiNNaker_PACMAN"
+        url = "https://pypi.org/packages/source/s/SpiNNaker_PACMAN/{}-1!{}.tar.gz"
+        return url.format(name, version)
+    version("7.3.0", sha256="ef597e14aac9877c676181082e11e77ea3d4b0dfb5977b0d3ce78020229fb055")
     version("7.0.0", sha256="d9e7e620d02fda88f57a8cf157cc9421b5606d453230847f3d35985eae4c074d")
-    depends_on("python@3.7:", type=("build", "run"))
+    depends_on("python@3.8:", type=("build", "run"), when="@7.3.0:")
+    depends_on("python@3.7:", type=("build", "run"), when="@7.0.0:")
+    depends_on("py-spinnutilities@7.3.0", type=("build", "run"), when="@7.3.0")
+    depends_on("py-spinnmachine@7.3.0", type=("build", "run"), when="@7.3.0")
+    depends_on("py-spinnutilities@7.0.0", type=("build", "run"), when="@7.0.0")
+    depends_on("py-spinnmachine@7.0.0", type=("build", "run"), when="@7.0.0")
     depends_on("py-jsonschema", type=("build", "run"))
-    depends_on("py-spinnutilities@7.0.0", type=("build", "run"))
-    depends_on("py-spinnmachine@7.0.0", type=("build", "run"))
@@ -6,8 +6,11 @@
 from spack.package import *
-_JAR_URL = "https://github.com/SpiNNakerManchester/JavaSpiNNaker/releases/download/7.0.0/spinnaker-exe.jar"
-_JAR_SHA256 = "2d909c7fb3aa15886acf26febb1bd48e25db0c347a231944aa6a5f86107bb55b"
+_JAR_URL_7_3_0 = "https://github.com/SpiNNakerManchester/JavaSpiNNaker/releases/download/7.3.0/spinnaker-exe.jar"
+_JAR_SHA256_7_3_0 = "8fea399e835d053eb9b9b8b6f4752475d19cc3995389ca544f3ad1758007edbf"
+_JAR_URL_7_0_0 = "https://github.com/SpiNNakerManchester/JavaSpiNNaker/releases/download/7.0.0/spinnaker-exe.jar"
+_JAR_SHA256_7_0_0 = "2d909c7fb3aa15886acf26febb1bd48e25db0c347a231944aa6a5f86107bb55b"
 class PySpinnfrontendcommon(PythonPackage):
     """This package provides utilities for specifying binary data
@@ -16,18 +19,32 @@ class PySpinnfrontendcommon(PythonPackage):
     homepage = "https://github.com/SpiNNakerManchester/SpiNNFrontEndCommon"
     pypi = "SpiNNFrontEndCommon/SpiNNFrontEndCommon-1!7.0.0.tar.gz"
+    def url_for_version(self, version):
+        name = "spinnfrontendcommon" if version >= Version("7.2.0") else "SpiNNFrontEndCommon"
+        url = "https://pypi.org/packages/source/s/SpiNNFrontEndCommon/{}-1!{}.tar.gz"
+        return url.format(name, version)
+    version("7.3.0", sha256="c3aea0160525c4f08bc74244f219a9664a06aa70876cfb68944c7d6378daf161")
     version("7.0.0", sha256="07539734ed0105472d06d655bbd92e149ef44c77c388fcca28857558faa6dd10")
-    depends_on("python@3.7:", type=("build", "run"))
-    depends_on("py-spinnman@7.0.0", type=("build", "run"))
-    depends_on("py-spinnaker-pacman@7.0.0", type=("build", "run"))
-    depends_on("py-spalloc@7.0.0", type=("build", "run"))
+    depends_on("python@3.8:", type=("build", "run"), when="@7.3.0:")
+    depends_on("python@3.7:", type=("build", "run"), when="@7.0.0:")
+    depends_on("py-spinnman@7.3.0", type=("build", "run"), when="@7.3.0")
+    depends_on("py-spinnaker-pacman@7.3.0", type=("build", "run"), when="@7.3.0")
+    depends_on("py-spalloc@7.3.0", type=("build", "run"), when="@7.3.0")
+    depends_on("py-spinnman@7.0.0", type=("build", "run"), when="@7.0.0")
+    depends_on("py-spinnaker-pacman@7.0.0", type=("build", "run"), when="@7.0.0")
+    depends_on("py-spalloc@7.0.0", type=("build", "run"), when="@7.0.0")
     depends_on("py-scipy@0.16.0:", type=("build", "run"))
     depends_on("py-ebrains-drive@0.5.1:", type=("build", "run"))
     depends_on("java@11:")
-    resource(name="spinnaker-exe.jar", url=_JAR_URL, checksum=_JAR_SHA256, expand=False, placement="resource_root/JavaSpiNNaker/SpiNNaker-front-end/target")
+    resource(name="spinnaker-exe.jar", url=_JAR_URL_7_0_0, checksum=_JAR_SHA256_7_0_0, expand=False, placement="resource_root/JavaSpiNNaker/SpiNNaker-front-end/target", when="@7.0.0")
+    resource(name="spinnaker-exe.jar", url=_JAR_URL_7_3_0, checksum=_JAR_SHA256_7_3_0, expand=False, placement="resource_root/JavaSpiNNaker/SpiNNaker-front-end/target", when="@7.3.0")
     def install(self, spec, prefix):
         super(PySpinnfrontendcommon, self).install(spec, prefix)
...
@@ -13,7 +13,16 @@ class PySpinnmachine(PythonPackage):
     homepage = "https://github.com/SpiNNakerManchester/SpiNNMachine"
     pypi = "SpiNNMachine/SpiNNMachine-1!7.0.0.tar.gz"
+    def url_for_version(self, version):
+        name = "spinnmachine" if version >= Version("7.2.0") else "SpiNNMachine"
+        url = "https://pypi.org/packages/source/s/SpiNNMachine/{}-1!{}.tar.gz"
+        return url.format(name, version)
+    version("7.3.0", sha256="7c23def7deac54d56e23f4679c2317ddd053e6f6632c81ddf497fe021f37960c")
     version("7.0.0", sha256="5da374fd9208287799fbc324136fe5954dd1b370792ea81ea10d4537643272ad")
-    depends_on("python@3.7:", type=("build", "run"))
-    depends_on("py-spinnutilities@7.0.0", type=("build", "run"))
+    depends_on("python@3.8:", type=("build", "run"), when="@7.3.0:")
+    depends_on("python@3.7:", type=("build", "run"), when="@7.0.0:")
+    depends_on("py-spinnutilities@7.3.0", type=("build", "run"), when="@7.3.0")
+    depends_on("py-spinnutilities@7.0.0", type=("build", "run"), when="@7.0.0")
@@ -13,8 +13,18 @@ class PySpinnman(PythonPackage):
     homepage = "https://github.com/SpiNNakerManchester/SpiNNMan"
     pypi = "SpiNNMan/SpiNNMan-1!7.0.0.tar.gz"
+    def url_for_version(self, version):
+        name = "spinnman" if version >= Version("7.2.0") else "SpiNNMan"
+        url = "https://pypi.org/packages/source/s/SpiNNMan/{}-1!{}.tar.gz"
+        return url.format(name, version)
+    version("7.3.0", sha256="8b4924ee31cae35845164da8d2da69391d306246772c706fdcd935e4ce8535db")
     version("7.0.0", sha256="61bc8934e4ad6798b48c02ff6c8a3ef5c8e080a5ee2f4b88fc9cd587ed1b1ae6")
-    depends_on("python@3.7:", type=("build", "run"))
-    depends_on("py-spinnmachine@7.0.0", type=("build", "run"))
+    depends_on("python@3.8:", type=("build", "run"), when="@7.3.0:")
+    depends_on("python@3.7:", type=("build", "run"), when="@7.0.0:")
+    depends_on("py-spinnmachine@7.3.0", type=("build", "run"), when="@7.3.0")
+    depends_on("py-spinnmachine@7.0.0", type=("build", "run"), when="@7.0.0")
     depends_on("py-websocket-client", type=("build", "run"))
@@ -14,9 +14,17 @@ class PySpinnutilities(PythonPackage):
     homepage = "https://github.com/SpiNNakerManchester/SpiNNUtils"
     pypi = "SpiNNUtilities/SpiNNUtilities-1!7.0.0.tar.gz"
+    def url_for_version(self, version):
+        name = "spinnutilities" if version >= Version("7.2.0") else "SpiNNUtilities"
+        url = "https://pypi.org/packages/source/s/SpiNNUtilities/{}-1!{}.tar.gz"
+        return url.format(name, version)
+    version("7.3.0", sha256="5343004fd2aeec0124267e91c2649356b20bf8f2a5d33c9d7cd5ea6cce7dd86b")
     version("7.0.0", sha256="662855395ec367008735047a66a7ca75d1e5070e309ca3aa6ba3a843fb722841")
-    depends_on("python@3.7:", type=("build", "run"))
+    depends_on("python@3.8:", type=("build", "run"), when="@7.3.0:")
+    depends_on("python@3.7:", type=("build", "run"), when="@7.0.0:")
     depends_on("py-appdirs", type=("build", "run"))
     depends_on("py-numpy", type=("build", "run"))
     depends_on("py-pyyaml", type=("build", "run"))
...
@@ -15,10 +15,18 @@ class PySpynnaker(PythonPackage):
     maintainers = ["rowley"]
+    def url_for_version(self, version):
+        name = "spynnaker" if version >= Version("7.2.0") else "sPyNNaker"
+        url = "https://pypi.org/packages/source/s/sPyNNaker/{}-1!{}.tar.gz"
+        return url.format(name, version)
+    version("7.3.0", sha256="f052a50b8a31b526f0249b7aa1f7fe77c2f34fc35838600bef17c43e9d3bf9a9")
     version("7.0.0", sha256="caeaa624e3fdbca3b938c9be7ea4c78a51a037e659389fb01952822f069664db")
-    depends_on("python@3.7:", type=("build", "run"))
-    depends_on("py-spinnfrontendcommon@7.0.0", type=("build", "run"))
+    depends_on("python@3.8:", type=("build", "run"), when="@7.3.0:")
+    depends_on("python@3.7:", type=("build", "run"), when="@7.0.0:")
+    depends_on("py-spinnfrontendcommon@7.3.0", type=("build", "run"), when="@7.3.0")
+    depends_on("py-spinnfrontendcommon@7.0.0", type=("build", "run"), when="@7.0.0")
     depends_on("py-matplotlib", type=("build", "run"))
     depends_on("py-quantities", type=("build", "run"))
     depends_on("py-pynn", type=("build", "run"))
...
@@ -47,8 +47,7 @@ class PyTvbWidgets(PythonPackage):
     depends_on('py-traitlets@5.7.1:', type=('build', 'run'))
     depends_on('py-toml', type=('build', 'run'))
     depends_on('py-bokeh', type=('build', 'run'))
-    depends_on('vtk@=9.2.6', type=('build', 'run'))
+    depends_on('vtk@9:9.3', type=('build', 'run'))
     depends_on('py-pytest', type='test')
     depends_on('py-pytest-mock', type='test')
...
@@ -16,6 +16,8 @@ class WfBrainscales2Demos(Package):
     maintainers = ["emuller", "muffgaga"]
     # ECM: we probably should build the ipynb file in this package
+    version("9.0-a8", tag="jupyter-notebooks-9.0-a8")
+    version("9.0-a7", tag="jupyter-notebooks-9.0-a7")
     version("9.0-a6", tag="jupyter-notebooks-9.0-a6")
     version("9.0-a5", tag="jupyter-notebooks-9.0-a5")
     version("9.0-a4", tag="jupyter-notebooks-9.0-a4")
...
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class WfHumanMultiAreaModel(Package):
"""Meta-package to collect all dependencies of the Human Multi-Area-Model."""
git = "https://github.com/INM-6/human-multi-area-model"
maintainer = ["rshimoura", "terhorstd"]
version("2.0.1", tag="v2.0.1")
version("master", branch="master")
depends_on("nest@3.8:", type=("run", "test"))
depends_on("py-joblib@1.2.0:", type=("run", "test"))
depends_on("py-xlrd@2.0.1:", type=("run", "test"))
depends_on("py-matplotlib@3.7.3:", type=("run", "test"))
depends_on("py-pyyaml@6.0:", type=("run", "test"))
depends_on("py-numpy@1.23.5:", type=("run", "test"))
depends_on("py-seaborn@0.12.2:", type=("run", "test"))
depends_on("python@3.8:", type=("run", "test"))
depends_on("py-pandas@2.0.3:", type=("run", "test"))
depends_on("py-scipy@1.10.1:", type=("run", "test"))
depends_on("py-nnmt@1.3.0:", type=("run", "test"))
depends_on("py-dicthash@0.0.1:", type=("run", "test"))
depends_on("py-networkx@3.1:", type=("run", "test"))
depends_on("py-notebook@6.5.4:", type=("run", "test"))
depends_on("py-future@0.18.2:", type=("run", "test"))
def install(self, spec, prefix):
install_tree(".", join_path(prefix, "notebooks"))
def _nbconvert(self, nb, nb_out):
jupyter = Executable("jupyter")
args = [
"nbconvert",
"--ExecutePreprocessor.kernel_name=python3",
"--execute",
"--to",
"notebook",
nb,
"--output",
nb_out
]
try:
# execute notebook and save
jupyter(*args)
except Exception as e:
# if the above fails, re-run notebook to produce output with error
jupyter(*(args+["--allow-errors"]))
raise
def _run_notebooks(self, output_dir):
mkdirp(output_dir)
self._nbconvert(join_path(self.prefix, "notebooks", "humam_tutorial.ipynb"), join_path(output_dir, "humam_tutorial.ipynb"))
@run_after("install")
@on_package_attributes(run_tests=True)
def installcheck(self):
self._run_notebooks(join_path(self.stage.path, ".install_time_tests"))
copy_tree(join_path(self.stage.path, ".install_time_tests"), join_path(self.prefix, '.build'))
def test_notebook(self):
self._run_notebooks(join_path(self.test_suite.stage, self.spec.format("out-{name}-{version}-{hash:7}")))
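For reference, the nbconvert invocation that _nbconvert() wraps can also be run by hand against an installed copy of the notebooks; a sketch with placeholder paths (this is not part of the package):
```
# Illustration only: the same jupyter nbconvert call the package uses for its install check.
import subprocess

notebook = "/path/to/prefix/notebooks/humam_tutorial.ipynb"  # placeholder install prefix
output = "humam_tutorial_out.ipynb"

subprocess.run(
    [
        "jupyter", "nbconvert",
        "--ExecutePreprocessor.kernel_name=python3",
        "--execute",
        "--to", "notebook",
        notebook,
        "--output", output,
    ],
    check=True,  # raise CalledProcessError if executing the notebook fails
)
```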