Skip to content
Snippets Groups Projects
Commit b2a4169c authored by Shailesh Appukuttan's avatar Shailesh Appukuttan
Browse files

Merge branch 'master' into 'ADD_ebrains_drive'

# Conflicts:
#   spack.yaml
parents 72ed678f dd0e51d7
No related branches found
No related tags found
1 merge request!62Add ebrains drive
__pycache__
*.pyc
......@@ -4,12 +4,11 @@ stages:
# start an OpenShift Job that will build the Spack environment
.deploy-build-environment:
stage: deploy
before_script:
script:
- oc login "$OPENSHIFT_SERVER" --token="$OPENSHIFT_TOKEN"
- tar czf ${SPACK_ENV_TAR_FILE} packages/ repo.yaml spack.yaml create_JupyterLab_kernel.sh
- mkdir copy_folder
- mv ${SPACK_ENV_TAR_FILE} copy_folder
script:
# create job description file
- chmod a+x create_job.sh
- ./create_job.sh $INSTALLATION_ROOT $SPACKIFIED_ENV $OP $SPACK_ENV_TAR_FILE $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $LAB_KERNEL_PATH $OKD_CLUSTER_UID
......@@ -35,9 +34,11 @@ stages:
tags:
- shell-runner
# Deploy in the lab-int environment the version of the tools to be
# tested before released to production (push pipeline)
# deploy on the dev environment of the okd dev cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-dev-environment-cscs:
deploy-int-release-dev-cscs:
extends: .deploy-build-environment
variables:
OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
......@@ -54,10 +55,14 @@ deploy-dev-environment-cscs:
resource_group: shared-NFS-mount-dev-cscs
only:
- master
except:
variables:
- $CI_PIPELINE_SOURCE == "schedule"
# Deploy the production release of tools (manual pipeline)
# deploy on the production environment of the okd prod cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-prod-environment-cscs:
deploy-prod-release-prod-cscs:
extends: .deploy-build-environment
variables:
OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
......@@ -77,9 +82,10 @@ deploy-prod-environment-cscs:
when: manual
allow_failure: false
# Deploy the production release of tools (manual pipeline)
# deploy on the production environment of the okd prod cluster at JSC
# runs on protected branches only as the token variable is protected
deploy-prod-environment-jsc:
deploy-prod-release-prod-jsc:
extends: .deploy-build-environment
variables:
OPENSHIFT_SERVER: $JSC_OPENSHIFT_PROD_SERVER
......@@ -98,3 +104,126 @@ deploy-prod-environment-jsc:
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH =~ /release/'
when: manual
allow_failure: false
# Deploy the experimental release of tools (scheduled pipeline)
# once a week from latest working version of integration release
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the dev environment of the okd dev cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-exp-release-dev-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_DEV_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_DEV
    SPACKIFIED_ENV: experimental
    OP: update
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_DEV
    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/int/experimental
    OKD_CLUSTER_UID: $CSCS_OKD_DEV_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub-int
  before_script:
    # Drop the default kernel-registration tail of the helper script and
    # append a variant that registers an "experimental_release" kernel
    # spec (kernel.json) instead.  The backslash-escaped $ and the nested
    # EOF/EOS heredocs are deliberate: they must survive into the
    # generated create_JupyterLab_kernel.sh unexpanded.
    - |
      head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
      cat << EOS >> create_JupyterLab_kernel.sh
      mkdir \$LAB_KERNEL_PATH/experimental_release
      cat <<EOF >\$LAB_KERNEL_PATH/experimental_release/kernel.json
      {
      "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
      "display_name": "EBRAINS_experimental_release",
      "name": "experimental_release",
      "language": "python"
      }
      EOF
      EOS
  # serialize jobs sharing the dev NFS mount to avoid concurrent writes
  resource_group: shared-NFS-mount-dev-cscs
  only:
    refs:
      - schedules
    variables:
      - $RELEASE == "experimental-dev"
  allow_failure: false
# Deploy the experimental release of tools (scheduled pipeline)
# once a week from latest working version of integration release
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the prod environment of the okd prod cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-exp-release-prod-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: experimental
    OP: update
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/prod/experimental
    OKD_CLUSTER_UID: $CSCS_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  before_script:
    # Drop the default kernel-registration tail of the helper script and
    # append a variant that registers an "experimental_release" kernel
    # spec (kernel.json) instead.  The backslash-escaped $ and the nested
    # EOF/EOS heredocs are deliberate: they must survive into the
    # generated create_JupyterLab_kernel.sh unexpanded.
    - |
      head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
      cat << EOS >> create_JupyterLab_kernel.sh
      mkdir \$LAB_KERNEL_PATH/experimental_release
      cat <<EOF >\$LAB_KERNEL_PATH/experimental_release/kernel.json
      {
      "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
      "display_name": "EBRAINS_experimental_release",
      "name": "experimental_release",
      "language": "python"
      }
      EOF
      EOS
  # serialize jobs sharing the prod NFS mount to avoid concurrent writes
  resource_group: shared-NFS-mount-prod-cscs
  only:
    refs:
      - schedules
    variables:
      - $RELEASE == "experimental"
  allow_failure: false
# Deploy the experimental release of tools (scheduled pipeline)
# once a week from latest working version of integration release
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the prod environment of the okd prod cluster at JSC
# runs on protected branches only as the token variable is protected
deploy-exp-release-prod-jsc:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $JSC_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $JSC_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $JSC_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: experimental
    OP: update
    BUILD_ENV_DOCKER_IMAGE: $JSC_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/prod/experimental
    OKD_CLUSTER_UID: $JSC_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  before_script:
    # Drop the default kernel-registration tail of the helper script and
    # append a variant that registers an "experimental_release" kernel
    # spec (kernel.json) instead.  The backslash-escaped $ and the nested
    # EOF/EOS heredocs are deliberate: they must survive into the
    # generated create_JupyterLab_kernel.sh unexpanded.
    - |
      head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
      cat << EOS >> create_JupyterLab_kernel.sh
      mkdir \$LAB_KERNEL_PATH/experimental_release
      cat <<EOF >\$LAB_KERNEL_PATH/experimental_release/kernel.json
      {
      "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
      "display_name": "EBRAINS_experimental_release",
      "name": "experimental_release",
      "language": "python"
      }
      EOF
      EOS
  # serialize jobs sharing the JSC prod NFS mount to avoid concurrent writes
  resource_group: shared-NFS-mount-prod-jsc
  only:
    refs:
      - schedules
    variables:
      - $RELEASE == "experimental"
  allow_failure: false
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Arbor(CMakePackage, CudaPackage):
    """Spack package for Arbor, a high-performance library for
    computational neuroscience simulations.

    NOTE(review): indentation of this class was lost in transit; the
    directives below are restored to canonical Spack package layout with
    all values unchanged.
    """

    homepage = 'https://arbor-sim.org'
    git = 'https://github.com/arbor-sim/arbor.git'
    url = 'https://github.com/arbor-sim/arbor/releases/download/v0.6/arbor-v0.6-full.tar.gz'
    maintainers = ['bcumming', 'brenthuisman', 'haampie', 'schmitts']

    # The "-full" release tarballs bundle submodules, so released versions
    # use explicit urls; master needs submodules fetched via git.
    version('master', branch='master', submodules=True)
    version('0.6', sha256='4cd333b18effc8833428ddc0b99e7dc976804771bc85da90034c272c7019e1e8', url='https://github.com/arbor-sim/arbor/releases/download/v0.6/arbor-v0.6-full.tar.gz')
    version('0.5.2', sha256='290e2ad8ca8050db1791cabb6b431e7c0409c305af31b559e397e26b300a115d', url='https://github.com/arbor-sim/arbor/releases/download/v0.5.2/arbor-v0.5.2-full.tar.gz')
    version('0.5', sha256='d0c8a4c7f97565d7c30493c66249be794d1dc424de266fc79cecbbf0e313df59', url='https://github.com/arbor-sim/arbor/releases/download/v0.5/arbor-v0.5-full.tar.gz')

    variant('assertions', default=False, description='Enable arb_assert() assertions in code.')
    variant('doc', default=False, description='Build documentation.')
    variant('mpi', default=False, description='Enable MPI support')
    variant('neuroml', default=True, description='Build NeuroML support library.')
    variant('python', default=True, description='Enable Python frontend support')
    variant('vectorize', default=False, description='Enable vectorization of computational kernels')

    # https://docs.arbor-sim.org/en/latest/install/build_install.html?highlight=requirements#compilers
    conflicts('%gcc@:8.3')
    conflicts('%clang@:7')
    # Cray compiler v9.2 and later is Clang-based.
    conflicts('%cce@:9.1')
    conflicts('%intel')

    depends_on('cmake@3.12:', type='build')

    # misc dependencies
    depends_on('fmt@7.1:', when='@0.5.3:')  # required by the modcc compiler
    depends_on('nlohmann-json')
    depends_on('cuda@10:', when='+cuda')
    depends_on('libxml2', when='+neuroml')

    # mpi
    depends_on('mpi', when='+mpi')
    depends_on('py-mpi4py', when='+mpi+python', type=('build', 'run'))

    # python (bindings)
    extends('python', when='+python')
    depends_on('python@3.6:', when="+python", type=('build', 'run'))
    depends_on('py-numpy', when='+python', type=('build', 'run'))
    depends_on('py-pybind11@2.8.1:', when='+python', type=('build', 'run'))

    # sphinx based documentation
    depends_on('python@3.6:', when="+doc", type='build')
    depends_on('py-sphinx', when="+doc", type='build')
    depends_on('py-svgwrite', when='+doc', type='build')

    @property
    def build_targets(self):
        # The 'html' target builds the Sphinx docs alongside the library.
        return ['all', 'html'] if '+doc' in self.spec else ['all']

    def cmake_args(self):
        """Translate the Spack variants into Arbor's CMake options."""
        args = [
            self.define_from_variant('ARB_WITH_ASSERTIONS', 'assertions'),
            self.define_from_variant('ARB_WITH_MPI', 'mpi'),
            self.define_from_variant('ARB_WITH_NEUROML', 'neuroml'),
            self.define_from_variant('ARB_WITH_PYTHON', 'python'),
            self.define_from_variant('ARB_VECTORIZE', 'vectorize'),
        ]

        if '+cuda' in self.spec:
            args.append('-DARB_GPU=cuda')

        # query spack for the architecture-specific compiler flags set by its wrapper
        args.append('-DARB_ARCH=none')
        opt_flags = self.spec.target.optimization_flags(
            self.spec.compiler.name,
            self.spec.compiler.version)
        args.append('-DARB_CXX_FLAGS_TARGET=' + opt_flags)

        return args
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class BiobbCommon(PythonPackage):
    """Spack package for biobb_common, the base package required to use
    the biobb (BioExcel Building Blocks) packages.

    NOTE(review): indentation of this class was lost in transit; the
    directives below are restored to canonical Spack package layout with
    all values unchanged.
    """

    # Homepage and download url
    homepage = "https://github.com/bioexcel/biobb_common"
    #url = "https://github.com/bioexcel/biobb_common/tarball/v3.7.0"
    git = 'https://github.com/bioexcel/biobb_common.git'

    # FIXME: Add a list of GitHub accounts to
    # notify when the package is updated.
    # maintainers = ['github_user1', 'github_user2']

    # Versions
    # NOTE(review): branch='master' is a moving target for a fixed version
    # number; pin a tag or commit for reproducible builds — TODO confirm
    # upstream tags a v3.7.0 release.
    version('3.7.0', branch='master')

    # Dependencies
    depends_on('py-setuptools')
    depends_on('python@3.7:', type=('build', 'run'))
    depends_on('py-pyyaml', type=('build', 'run'))
    depends_on('py-requests', type=('build', 'run'))
    depends_on('py-biopython@1.78:1.80', type=('build', 'run'))
......@@ -49,7 +49,7 @@ class MetaBrainscales(Package):
depends_on('py-matplotlib')
depends_on('py-nose')
depends_on('py-numpy')
depends_on('py-pybind11@2.6.0:2.6.999') # workaround concretization error (py-scipy doesn't like 2.7)
depends_on('py-pybind11')
depends_on('py-pybind11-stubgen')
depends_on('py-pycodestyle')
depends_on('py-pyelftools')
......
diff --git a/README.md b/README.md
index 93a4727f2..c8be0e019 100644
--- a/README.md
+++ b/README.md
@@ -3,18 +3,18 @@
[![Documentation](https://img.shields.io/readthedocs/nest-simulator?logo=readthedocs&logo=Read%20the%20Docs&label=Documentation)](https://nest-simulator.org/documentation)
[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/2218/badge)](https://bestpractices.coreinfrastructure.org/projects/2218)
[![License](http://img.shields.io/:license-GPLv2+-green.svg)](http://www.gnu.org/licenses/gpl-2.0.html)
-[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.4739103.svg)](https://doi.org/10.5281/zenodo.4739103)
+[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.5886894.svg)](https://doi.org/10.5281/zenodo.5886894)
[![Latest release](https://img.shields.io/github/release/nest/nest-simulator.svg?color=brightgreen&label=latest%20release)](https://github.com/nest/nest-simulator/releases)
[![GitHub contributors](https://img.shields.io/github/contributors/nest/nest-simulator?logo=github)](https://github.com/nest/nest-simulator)
[![GitHub commit activity](https://img.shields.io/github/commit-activity/y/nest/nest-simulator?logo=github&color=%23ff6633)](https://github.com/nest/nest-simulator)
-[![Ubuntu version](https://img.shields.io/badge/ubuntu-v2.20.0%20(PPA)-blue?logo=debian)](https://nest-simulator.readthedocs.io/en/latest/installation/)
+[![Ubuntu version](https://img.shields.io/badge/ubuntu-(PPA)-blue?logo=debian)](https://nest-simulator.readthedocs.io/en/latest/installation/)
[![Fedora package](https://img.shields.io/fedora/v/nest?logo=fedora)](https://src.fedoraproject.org/rpms/nest)
[![Conda version](https://img.shields.io/conda/vn/conda-forge/nest-simulator.svg?logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/nest-simulator)
[![Homebrew version](https://img.shields.io/homebrew/v/nest.svg?logo=apple)](https://formulae.brew.sh/formula/nest)
-[![Docker Image Version](https://img.shields.io/docker/v/nestsim/nest/2.20.0?label=docker&logo=docker&logoColor=white)](https://hub.docker.com/r/nestsim/nest)
-[![Virtual applicance](https://img.shields.io/badge/VM-v2.20.0-blue?logo=CodeSandbox)](https://nest-simulator.readthedocs.io/en/latest/download.html#download-livemedia)
+[![Docker Image Version](https://img.shields.io/docker/v/nestsim/nest?label=docker&sort=semver&logo=docker&logoColor=white)](https://hub.docker.com/r/nestsim/nest)
+[![Virtual appliance](https://img.shields.io/badge/VM-v3.1-blue?logo=CodeSandbox)](https://nest-simulator.readthedocs.io/en/latest/download.html#download-livemedia)
[![YouTube Video Views](https://img.shields.io/youtube/views/K7KXmIv6ROY?style=social)](https://www.youtube.com/results?search_query=nest-simulator+neurons)
[![Twitter Follow](https://img.shields.io/twitter/follow/nestsimulator?style=social)](https://twitter.com/nestsimulator)
diff --git a/cmake/NestVersionInfo.cmake b/cmake/NestVersionInfo.cmake
index 158cd49cf..5efd991e3 100644
--- a/cmake/NestVersionInfo.cmake
+++ b/cmake/NestVersionInfo.cmake
@@ -58,7 +58,7 @@ macro(get_version_info)
endif()
if (NOT NEST_VERSION_BRANCH)
- set(NEST_VERSION_BRANCH "UNKNOWN")
+ set(NEST_VERSION_BRANCH "3.2")
endif()
string(SUBSTRING "${NEST_VERSION_BRANCH}" 0 5 isRelease)
diff --git a/doc/userdoc/release_notes/index.rst b/doc/userdoc/release_notes/index.rst
index 5c7b46726..44560fe29 100644
--- a/doc/userdoc/release_notes/index.rst
+++ b/doc/userdoc/release_notes/index.rst
@@ -10,3 +10,4 @@ transition your simulation code to the new versions.
* :ref:`NEST 3.0 (June 10, 2021 ) <release_3.0>`
* :ref:`NEST 3.1 (September 15, 2021 ) <release_3.1>`
+* :ref:`NEST 3.2 (January 21, 2022 ) <release_3.2>`
......@@ -14,12 +14,21 @@ class Nest(CMakePackage):
homepage = "http://www.nest-simulator.org"
urls = [
'https://github.com/nest/nest-simulator/releases/download/v2.12.0/nest-2.12.0.tar.gz',
'https://github.com/nest/nest-simulator/archive/v3.0.tar.gz',
#
# note: for early nest releases the refs/tags/xxx.tar.gz is different
# from download/v2.x.y/...tar.gz! The download one already had the
# `make dist` boot-strapping done.
#
#'https://github.com/nest/nest-simulator/releases/download/v2.12.0/nest-2.12.0.tar.gz',
'https://github.com/nest/nest-simulator/archive/refs/tags/v2.12.0.tar.gz',
'https://github.com/nest/nest-simulator/archive/refs/tags/v3.0.tar.gz'
]
git = "https://github.com/nest/nest-simulator.git"
version('master', branch='master')
version('3.2', sha256='583d5725882ad5e8fd4fc7ffab425da97cbbb91fadbc327e940c184e8892b958')
patch('nest-simulator-3.2-p1-VersionNumber.patch', when='@3.2')
version('3.1', sha256='5c11dd6b451c4c6bf93037bf29d5231c6c75a0e1a8863344f6fb9bb225f279ca')
version('3.0', sha256='d481ea67f3251fe3aadf5252ab0a999172f0cd5536c5985366d271d772e686e6')
patch('2021-07-17_fix-pyexecdir.patch', when='@3.0')
......@@ -50,6 +59,8 @@ class Nest(CMakePackage):
description="Enable GNU Scientific Library")
variant('shared', default=True,
description="Build shared libraries")
variant('boost', default=True,
description="Enable optimizations provided via Boost library algorithms and containers")
# TODO add variants for neurosim and music when these are in spack
conflicts('~gsl', when='@:2.10.99',
......@@ -68,6 +79,7 @@ class Nest(CMakePackage):
depends_on('py-cython@0.19.2:', when='+python', type='build')
depends_on('py-nose', when='+python', type='test')
depends_on('py-setuptools', when='+python', type='build')
depends_on('boost', when="@2.16:+boost", type='build')
depends_on('py-setuptools@:44.99.99', when='@:2.15.99+python', type='build')
depends_on('mpi', when='+mpi')
......@@ -121,10 +133,11 @@ class Nest(CMakePackage):
def cmake_args(self):
args = []
if '+mpi' in self.spec:
args.append('-Dwith-mpi=ON')
else:
args.append('-Dwith-mpi=OFF')
for flag in "boost mpi openmp optimize".split():
if '+' + flag in self.spec:
args.append('-Dwith-'+flag+'=ON')
else:
args.append('-Dwith-'+flag+'=OFF')
if '+gsl' in self.spec:
args.append('-Dwith-gsl=' + self.spec['gsl'].prefix)
......@@ -140,16 +153,6 @@ class Nest(CMakePackage):
args.append('-Dwith-python=OFF')
args.append('-Dcythonize-pynest=OFF')
if '+optimize' in self.spec:
args.append('-Dwith-optimize=ON')
else:
args.append('-Dwith-optimize=OFF')
if '+openmp' in self.spec:
args.append('-Dwith-openmp=ON')
else:
args.append('-Dwith-openmp=OFF')
if '+shared' in self.spec:
args.append('-Dstatic-libraries=OFF')
else:
......
......@@ -2,9 +2,6 @@ spack:
specs:
# Base
- python@3.8.11 %gcc@10.3.0
# R
- r@4.1.0 %gcc@10.3.0
- r-irkernel@1.2 %gcc@10.3.0
# Notebook
- py-jupyter %gcc@10.3.0
- py-ipython %gcc@10.3.0
......@@ -20,8 +17,8 @@ spack:
- py-seaborn %gcc@10.3.0
- py-matplotlib %gcc@10.3.0
# EBRAINS simulators
- nest@3.0 +python +gsl +mpi %gcc@10.3.0
- arbor +mpi ^python@3:3.9 %gcc@10.3.0
- nest@3.2 +python +gsl +mpi %gcc@10.3.0
- arbor +python +mpi ^python@3:3.9 %gcc@10.3.0
- neuron +mpi %gcc@10.3.0
- py-pynn@0.9.6 %gcc@10.3.0
- tvb-data %gcc@10.3.0
......@@ -33,10 +30,11 @@ spack:
- meta-brainscales %gcc@10.3.0
- pynn-brainscales@2.0-rc1 ^log4cxx@0.10.0 ^googletest@1.11.0:+gmock %gcc@10.3.0
#- py-lfpy@2.2.3 %gcc@10.3.0
- biobb-common %gcc@10.3.0
# demo for codejam12
- funniest1022 %gcc@10.3.0
# NRP
- py-torch~mkldnn~cuda~cudnn~onnx_ml~rocm~tensorpipe~mpi~distributed ^protobuf@:3.17.999 %gcc@10.3.0
#- py-torch~mkldnn~cuda~cudnn~onnx_ml~rocm~tensorpipe~mpi~distributed ^protobuf@:3.17.999 %gcc@10.3.0
# Storage access
- py-ebrains-drive@0.4.0 %gcc@10.3.0
concretization: together
# release_v0.1_202109
# release_v0.1_202109_hotfix
Spack upstream commit sha -> 9853fd50e2dc6253a2c80b38fc3bad8f226ce94e
"display_name": "EBRAINS_release_v0.1_202109"
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment