Commit 4a2c3dd6 authored by Maximilian Schmidt

Implement Siegert integration with python/scipy, edit network_scaling test

parent b3c9c9fa
1 merge request: !1 Add all necessary files for the multi-area model
@@ -18,6 +18,7 @@ procedure.
 import json
 import pprint
+import multiprocessing
 import nest
 import numpy as np
@@ -25,8 +26,10 @@ from copy import copy
 from .default_params import nested_update, theory_params
 from .default_params import check_custom_params
 from dicthash import dicthash
+from functools import partial
 from .multiarea_helpers import create_mask, create_vector_mask, dict_to_vector
 from .theory_helpers import d_nu_d_mu_fb_numeric, d_nu_d_sigma_fb_numeric
+from .theory_helpers import nu0_fb


 class Theory():
@@ -61,7 +64,7 @@ class Theory():
     def __hash__(self):
         return hash(self.label)

-    def integrate_siegert(self, full_output=True):
+    def integrate_siegert_nest(self, full_output=True):
         """
         Integrate siegert formula to obtain stationary rates. See Eq. (3)
         and following in Schuecker, Schmidt et al. (2017).
@@ -184,6 +187,99 @@ class Theory():
         else:
             return self.network.structure_vec, rates

+    def integrate_siegert_python(self, full_output=True, parallel=True):
+        """
+        Integrate siegert formula to obtain stationary rates. See Eq. (3)
+        and following in Schuecker, Schmidt et al. (2017).
+        Use Runge-Kutta in Python.
+        """
+        dt = self.params['dt']
+        T = self.params['T']
+        K = copy(self.network.K_matrix)
+        tau = self.NP['tau_m'] * 1e-3
+        dim = np.shape(K)[0]
+        # Set initial rates of neurons:
+        # If defined as None, set them 0
+        if self.params['initial_rates'] is None:
+            num_iter = 1
+            gen = (np.zeros(dim) for ii in range(num_iter))
+        # iterate over different initial conditions drawn from a random distribution
+        elif self.params['initial_rates'] == 'random_uniform':
+            gen = self.initial_rates(self.params['initial_rates_iter'],
+                                     dim,
+                                     mode=self.params['initial_rates'],
+                                     rate_max=1000.)
+            num_iter = self.params['initial_rates_iter']
+        # initial rates are explicitly defined in self.params
+        elif isinstance(self.params['initial_rates'], np.ndarray):
+            num_iter = 1
+            gen = (self.params['initial_rates'] for ii in range(num_iter))
+        rates = []
+        # Loop over all iterations of different initial conditions
+        for nsim in range(num_iter):
+            print("Iteration {}".format(nsim))
+            initial_rates = next(gen)
+            self.t = np.arange(0, T, dt)
+            y = np.zeros((len(self.t), dim), dtype=float)
+            y[0, :] = initial_rates
+            # Integration loop
+            for i, tl in enumerate(self.t[:-1]):
+                print(tl)
+                delta_y, new_mu, new_sigma = self.Phi(
+                    y[i, :], parallel=parallel, return_mu_sigma=True)
+                k1 = delta_y
+                k2 = self.Phi(
+                    y[i, :] + dt * 1e-3 / tau / 2 * k1, parallel=parallel, return_mu_sigma=False)
+                k3 = self.Phi(
+                    y[i, :] + dt * 1e-3 / tau / 2 * k2, parallel=parallel, return_mu_sigma=False)
+                k4 = self.Phi(
+                    y[i, :] + dt * 1e-3 / tau * k3, parallel=parallel, return_mu_sigma=False)
+                y[i + 1, :] = y[i, :].copy() + dt * 1e-3 / 6. * (k1 + 2 * k2 + 2 * k3 + k4) / tau
+            if full_output:
+                rates.append(y.T)
+            else:
+                # Keep only initial and final rates
+                rates.append(y.T[:, [0, -1]])
+        if num_iter == 1:
+            return self.network.structure_vec, rates[0]
+        else:
+            return self.network.structure_vec, rates
+
+    def nu0_fb(self, arg):
+        mu = arg[0]
+        sigma = arg[1]
+        return nu0_fb(mu, sigma,
+                      self.NP['tau_m'] * 1e-3,
+                      self.NP['tau_syn'] * 1e-3,
+                      self.NP['t_ref'] * 1e-3,
+                      self.NP['theta'],
+                      self.NP['V_reset'])
+
+    def Phi(self, rates, parallel=False,
+            return_mu_sigma=False, return_leak=True):
+        mu, sigma = self.mu_sigma(rates, external=True)
+        if parallel:
+            pool = multiprocessing.Pool(processes=4)
+            new_rates = np.array(
+                pool.map(self.nu0_fb, zip(mu, sigma)))
+            pool.close()
+            pool.join()
+        else:
+            new_rates = [self.nu0_fb((m, s)) for m, s in zip(mu, sigma)]
+        if return_leak:
+            new_rates -= rates
+        if return_mu_sigma:
+            return new_rates, mu, sigma
+        else:
+            return new_rates
+
     def replace_cc_input(self):
         """
         Helper function to replace cortico-cortical input by different variants.
@@ -233,7 +329,7 @@ class Theory():
             n += 1

     def mu_sigma(self, rates, external=True, matrix_filter=None,
-                 vector_filter=None, replace_cc=None):
+                 vector_filter=None):
         """
         Calculates mean and variance according to the
         theory.
@@ -246,8 +342,9 @@ class Theory():
         else:
             K = self.network.K_matrix
            J = self.network.J_matrix
-        if replace_cc is not None:
-            mu_CC, sigma2_CC = self.replace_cc_input(replace_cc)
+        if (self.network.params['connection_params']['replace_cc'] in
+                ['hom_poisson_stat', 'het_poisson_stat']):
+            mu_CC, sigma2_CC = self.replace_cc_input()
             mask = create_mask(self.network.structure, cortico_cortical=True, external=False)
             K[mask] = 0.
         else:
@@ -297,7 +394,7 @@ class Theory():
         # Connection probabilities between populations
         C = 1. - (1.-1./(N_pre * N_post))**(K*N_post)

-        mu, sigma = self.mu_sigma(rates, replace_cc=replace_cc)
+        mu, sigma = self.mu_sigma(rates)
         if np.any(vector_filter is not None):
             mu = mu[vector_filter]
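A minimal, self-contained sketch of the scheme behind the new integrate_siegert_python: the rates are relaxed according to tau * dnu/dt = -nu + Phi(nu) with a classical fourth-order Runge-Kutta step, where Phi maps the current rates to the Siegert rates via nu0_fb. The toy transfer function phi and all parameter values below are illustrative assumptions, not the committed code.

# Minimal sketch of the RK4 relaxation used by integrate_siegert_python.
# phi is a toy stand-in for the Siegert formula nu0_fb; names and values
# here are illustrative only.
import numpy as np

def phi(nu):
    # toy sigmoidal transfer function (spikes/s)
    return 50. / (1. + np.exp(-0.1 * (nu - 10.)))

def rk4_relax(nu0, tau=0.01, dt=1e-4, steps=2000):
    # integrate tau * dnu/dt = -nu + phi(nu) with classical RK4
    nu = np.asarray(nu0, dtype=float)
    f = lambda x: (-x + phi(x)) / tau
    for _ in range(steps):
        k1 = f(nu)
        k2 = f(nu + dt / 2. * k1)
        k3 = f(nu + dt / 2. * k2)
        k4 = f(nu + dt * k3)
        nu = nu + dt / 6. * (k1 + 2. * k2 + 2. * k3 + k4)
    return nu

print(rk4_relax(np.zeros(4)))  # converges to the self-consistent rate nu* = phi(nu*)

In the committed method the step dt is given in ms and converted to seconds via the 1e-3 factor before being divided by the membrane time constant tau, which is why the factor dt * 1e-3 / tau appears in the k2-k4 evaluations and in the final update.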
...@@ -26,7 +26,7 @@ import scipy.stats ...@@ -26,7 +26,7 @@ import scipy.stats
import scipy.special import scipy.special
def nu0_fb(tau_m, tau_s, tau_r, V_th, V_r, mu, sigma): def nu0_fb(mu, sigma, tau_m, tau_s, tau_r, V_th, V_r):
""" """
Compute the stationary firing rate of a neuron with synaptic Compute the stationary firing rate of a neuron with synaptic
filter of time constant tau_s driven by Gaussian white noise, from filter of time constant tau_s driven by Gaussian white noise, from
......
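The reordering puts (mu, sigma) first, which matches the new Theory.nu0_fb wrapper that unpacks a single (mu, sigma) tuple so the function can be mapped over zipped inputs with multiprocessing.Pool.map. For reference, a sketch of the expression this function is expected to evaluate, namely the standard colored-noise rate formula of Fourcaud & Brunel (2002); the exact form in the repository may differ:

% Sketch of the stationary rate with synaptic filtering (assumption, not
% copied from the repository); alpha shifts the integration boundaries.
\nu_0 = \left[ \tau_r + \tau_m \sqrt{\pi}
        \int_{\frac{V_r - \mu}{\sigma} + \frac{\alpha}{2}\sqrt{\tau_s/\tau_m}}
            ^{\frac{V_{\mathrm{th}} - \mu}{\sigma} + \frac{\alpha}{2}\sqrt{\tau_s/\tau_m}}
        e^{u^2}\,\bigl(1 + \operatorname{erf}(u)\bigr)\, du \right]^{-1},
\qquad \alpha = \sqrt{2}\,\lvert \zeta(1/2) \rvert .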
@@ -19,7 +19,7 @@ K0 = M0.K_matrix
 W0 = M0.W_matrix
 N0 = M0.N_vec
 syn0 = M0.syn_matrix
-p, r0 = M0.theory.integrate_siegert()
+p, r0 = M0.theory.integrate_siegert_nest()

 d = vector_to_dict(r0[:, -1],
                    M0.area_list,
@@ -31,14 +31,15 @@ with open('mf_rates.json', 'w') as f:
 network_params = {'N_scaling': .1,
                   'K_scaling': .1,
                   'fullscale_rates': 'mf_rates.json'}
-theory_params = {'initial_rates': r0[:, -1]}
+theory_params = {'initial_rates': r0[:, -1],
+                 'T': 50.}
 M = MultiAreaModel(network_params, theory=True, theory_spec=theory_params)

 K = M.K_matrix
 W = M.W_matrix
 N = M.N_vec
 syn = M.syn_matrix
-p, r = M.theory.integrate_siegert()
+p, r = M.theory.integrate_siegert_nest()

 assert(np.allclose(K, network_params['K_scaling'] * K0))
 assert(np.allclose(N, network_params['N_scaling'] * N0))
@@ -56,6 +57,10 @@ mu = 1e-3 * tau_m * np.dot(M.K_matrix * M.J_matrix, r0_extend) + tau_m / C_m * M
 sigma0 = np.sqrt(1e-3 * tau_m * np.dot(M0.K_matrix * M0.J_matrix**2, r0_extend))
 sigma = np.sqrt(1e-3 * tau_m * np.dot(M.K_matrix * M.J_matrix**2, r0_extend))

+p, r0_py = M0.theory.integrate_siegert_python(parallel=False)
+p, r_py = M.theory.integrate_siegert_python(parallel=False)
+
 assert(np.allclose(mu, mu0))
 assert(np.allclose(sigma, sigma0))
 assert(np.allclose(r[:, -1], r0[:, -1]))
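The hunk above computes r0_py and r_py with the new Python integrator but shows no assertions on them. A natural follow-up check, hypothetical and not part of this diff, would compare the Python fixed points across scalings and against the NEST-based integrator, analogous to the existing assertions (the tolerance is an assumption):

# Hypothetical follow-up checks, not part of the committed test:
# scaled and full-scale Python fixed points should agree, and both
# integration backends should converge to the same rates.
assert(np.allclose(r_py[:, -1], r0_py[:, -1]))
assert(np.allclose(r_py[:, -1], r[:, -1], rtol=1e-3))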