diff --git a/multiarea_model/theory.py b/multiarea_model/theory.py
index d584df91204fcd955eb305a456d426017208fa7a..c0a08088165583f6d6315ea211c3cba6191dc20d 100644
--- a/multiarea_model/theory.py
+++ b/multiarea_model/theory.py
@@ -64,7 +64,7 @@ class Theory():
     def __hash__(self):
         return hash(self.label)
 
-    def integrate_siegert_nest(self, full_output=True):
+    def integrate_siegert(self, full_output=True):
         """
         Integrate siegert formula to obtain stationary rates. See Eq. (3)
         and following in Schuecker, Schmidt et al. (2017).
@@ -187,99 +187,6 @@ class Theory():
         else:
             return self.network.structure_vec, rates
 
-    def integrate_siegert_python(self, full_output=True, parallel=True):
-        """
-        Integrate siegert formula to obtain stationary rates. See Eq. (3)
-        and following in Schuecker, Schmidt et al. (2017).
-
-        Use Runge-Kutta in Python.
-        """
-        dt = self.params['dt']
-        T = self.params['T']
-        K = copy(self.network.K_matrix)
-        tau = self.NP['tau_m'] * 1e-3
-        dim = np.shape(K)[0]
-
-        # Set initial rates of neurons:
-        # If defined as None, set them 0
-        if self.params['initial_rates'] is None:
-            num_iter = 1
-            gen = (np.zeros(dim) for ii in range(num_iter))
-        # iterate over different initial conditions drawn from a random distribution
-        elif self.params['initial_rates'] == 'random_uniform':
-            gen = self.initial_rates(self.params['initial_rates_iter'],
-                                     dim,
-                                     mode=self.params['initial_rates'],
-                                     rate_max=1000.)
-            num_iter = self.params['initial_rates_iter']
-        # initial rates are explicitly defined in self.params
-        elif isinstance(self.params['initial_rates'], np.ndarray):
-            num_iter = 1
-            gen = (self.params['initial_rates'] for ii in range(num_iter))
-
-        rates = []
-        # Loop over all iterations of different initial conditions
-        for nsim in range(num_iter):
-            print("Iteration {}".format(nsim))
-            initial_rates = next(gen)
-
-            self.t = np.arange(0, T, dt)
-            y = np.zeros((len(self.t), dim), dtype=float)
-            y[0, :] = initial_rates
-
-            # Integration loop
-            for i, tl in enumerate(self.t[:-1]):
-                print(tl)
-                delta_y, new_mu, new_sigma = self.Phi(
-                    y[i, :], parallel=parallel, return_mu_sigma=True)
-                k1 = delta_y
-                k2 = self.Phi(
-                    y[i, :] + dt * 1e-3 / tau / 2 * k1, parallel=parallel, return_mu_sigma=False)
-                k3 = self.Phi(
-                    y[i, :] + dt * 1e-3 / tau / 2 * k2, parallel=parallel, return_mu_sigma=False)
-                k4 = self.Phi(
-                    y[i, :] + dt * 1e-3 / tau * k3, parallel=parallel, return_mu_sigma=False)
-                y[i + 1, :] = y[i, :].copy() + dt * 1e-3 / 6. * (k1 + 2 * k2 + 2 * k3 + k4) / tau
-
-            if full_output:
-                rates.append(y.T)
-            else:
-                # Keep only initial and final rates
-                rates.append(y.T[:, [0, -1]])
-
-        if num_iter == 1:
-            return self.network.structure_vec, rates[0]
-        else:
-            return self.network.structure_vec, rates
-
-    def nu0_fb(self, arg):
-        mu = arg[0]
-        sigma = arg[1]
-        return nu0_fb(mu, sigma,
-                      self.NP['tau_m'] * 1e-3,
-                      self.NP['tau_syn'] * 1e-3,
-                      self.NP['t_ref'] * 1e-3,
-                      self.NP['theta'],
-                      self.NP['V_reset'])
-
-    def Phi(self, rates, parallel=False,
-            return_mu_sigma=False, return_leak=True):
-        mu, sigma = self.mu_sigma(rates, external=True)
-        if parallel:
-            pool = multiprocessing.Pool(processes=4)
-            new_rates = np.array(
-                pool.map(self.nu0_fb, zip(mu, sigma)))
-            pool.close()
-            pool.join()
-        else:
-            new_rates = [self.nu0_fb((m, s)) for m, s in zip(mu, sigma)]
-        if return_leak:
-            new_rates -= rates
-        if return_mu_sigma:
-            return (new_rates), mu, sigma
-        else:
-            return (new_rates)
-
     def replace_cc_input(self):
         """
         Helper function to replace cortico-cortical input by different variants.
diff --git a/tests/test_meanfield.py b/tests/test_meanfield.py
index bf0a694ef946c1d995f210fd41dab7c7f257496f..3c670469780556a465a465a1409bcbbb33b05943 100644
--- a/tests/test_meanfield.py
+++ b/tests/test_meanfield.py
@@ -10,4 +10,4 @@ def test_meanfield():
     network_params = {}
     theory_params = {}
     M0 = MultiAreaModel(network_params, theory=True, theory_spec=theory_params)
-    p, r0 = M0.theory.integrate_siegert_nest()
+    p, r0 = M0.theory.integrate_siegert()
diff --git a/tests/test_network_scaling.py b/tests/test_network_scaling.py
index 1f870a1b1eaefbfb658a7bf7e7d72c94bd0cbe4f..7854c8da5ab3990b2917f09c5ca2dea304dd4b71 100644
--- a/tests/test_network_scaling.py
+++ b/tests/test_network_scaling.py
@@ -19,7 +19,7 @@ K0 = M0.K_matrix
 W0 = M0.W_matrix
 N0 = M0.N_vec
 syn0 = M0.syn_matrix
-p, r0 = M0.theory.integrate_siegert_nest()
+p, r0 = M0.theory.integrate_siegert()
 
 d = vector_to_dict(r0[:, -1],
                    M0.area_list,
@@ -39,7 +39,7 @@ K = M.K_matrix
 W = M.W_matrix
 N = M.N_vec
 syn = M.syn_matrix
-p, r = M.theory.integrate_siegert_nest()
+p, r = M.theory.integrate_siegert()
 
 assert(np.allclose(K, network_params['K_scaling'] * K0))
 assert(np.allclose(N, network_params['N_scaling'] * N0))
@@ -58,8 +58,24 @@ sigma0 = np.sqrt(1e-3 * tau_m * np.dot(M0.K_matrix * M0.J_matrix**2,
                                        r0_extend))
 sigma = np.sqrt(1e-3 * tau_m * np.dot(M.K_matrix * M.J_matrix**2,
                                       r0_extend))
-p, r0_py = M0.theory.integrate_siegert_python(parallel=False)
-p, r_py = M.theory.integrate_siegert_python(parallel=False)
+r0_alex_loaded = np.load('test_network_scaling_r0.npy')
+r_alex_loaded = np.load('test_network_scaling_r.npy')
+
+network_params = {}
+theory_params = {'initial_rates': r0_alex_loaded[:, -1],
+                 'T': 50.}
+M0_alex = MultiAreaModel(network_params, theory=True, theory_spec=theory_params)
+p, r0_alex = M0_alex.theory.integrate_siegert()
+
+network_params = {}
+theory_params = {'initial_rates': r_alex_loaded[:, -1],
+                 'T': 50.}
+M0_alex = MultiAreaModel(network_params, theory=True, theory_spec=theory_params)
+p, r_alex = M0_alex.theory.integrate_siegert()
+
+
+# p, r0_py = M0.theory.integrate_siegert_python(parallel=False)
+# p, r_py = M.theory.integrate_siegert_python(parallel=False)
 
 assert(np.allclose(mu, mu0))
 assert(np.allclose(sigma, sigma0))
diff --git a/tests/test_replace_cc.py b/tests/test_replace_cc.py
index 421361112c3abaa8a9fb9b181eb25704248a5fdf..2b2791d5bd6ebea87e7a3856cbedc0d7ea096600 100644
--- a/tests/test_replace_cc.py
+++ b/tests/test_replace_cc.py
@@ -16,7 +16,7 @@ def test_het_poisson_stat_mf():
     network_params = {}
     theory_params = {}
     M0 = MultiAreaModel(network_params, theory=True, theory_spec=theory_params)
-    p, r0 = M0.theory.integrate_siegert_nest()
+    p, r0 = M0.theory.integrate_siegert()
     rates = vector_to_dict(r0[:, -1], M0.area_list, M0.structure)
     with open('mf_rates.json', 'w') as f:
@@ -26,7 +26,7 @@ def test_het_poisson_stat_mf():
         'replace_cc_input_source': 'mf_rates.json'}}
     theory_params = {}
     M = MultiAreaModel(network_params, theory=True, theory_spec=theory_params)
-    p, r = M.theory.integrate_siegert_nest()
+    p, r = M.theory.integrate_siegert()
     assert(np.allclose(r0[:, -1], r[:, -1]))
 
 
@@ -35,7 +35,7 @@ def test_hom_poisson_stat_mf():
     network_params = {'connection_params': {'replace_cc': 'hom_poisson_stat'}}
     theory_params = {}
     M = MultiAreaModel(network_params, theory=True, theory_spec=theory_params)
-    p, r = M.theory.integrate_siegert_nest()
+    p, r = M.theory.integrate_siegert()
     mu, sigma = M.theory.replace_cc_input()
 
     # Test for V1
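
For reference, this is a minimal usage sketch of the renamed entry point, following the call pattern exercised by the updated tests above. The empty parameter dictionaries and the `(structure_vec, rates)` return value come from the tests; the import path and the final `print` are illustrative assumptions, not part of this patch.

```python
import numpy as np
from multiarea_model import MultiAreaModel  # assumed import path

# Build a model with mean-field theory enabled, as in the tests above.
network_params = {}
theory_params = {}
M = MultiAreaModel(network_params, theory=True, theory_spec=theory_params)

# integrate_siegert() (formerly integrate_siegert_nest) returns the
# population structure vector and the integrated rate trajectories.
p, r = M.theory.integrate_siegert()

# The last column holds the stationary rates, as used by the tests.
final_rates = r[:, -1]
print(np.mean(final_rates))
```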