diff --git a/README.md b/README.md
index 63d8edb5863da2dda858d8798e654a3e9f440ba0..98ab245b5a0575fc74d30ac7c7d6b8ed23a57809 100644
--- a/README.md
+++ b/README.md
@@ -87,7 +87,7 @@ basic analysis and plotting.
 
 ## Analysis and figure scripts for [1-3]
 
 The `figures` folder contains subfolders with all scripts necessary to produce
-the figures from [1-3]. If Snakemake (Köster J & Rahmann S, Bioinformatics (2012) 28(19): 2520-2522)
+the figures from [1-3]. If Snakemake (Köster J & Rahmann S, Bioinformatics (2012) 28(19): 2520-2522)
 is installed, the figures can be produced by executing `snakemake` in the
 respective folder, e.g.:
@@ -203,7 +203,7 @@ conveniently run by executing `pytest` in the `tests/` folder:
 ## Requirements
 Python 3, python\_dicthash ([https://github.com/INM-6/python-dicthash](https://github.com/INM-6/python-dicthash)),
 correlation\_toolbox ([https://github.com/INM-6/correlation-toolbox](https://github.com/INM-6/correlation-toolbox)),
-pandas, numpy, nested_dict, matplotlib (2.1.2), scipy, NEST 2.14.0
+pandas, numpy, nested_dict, matplotlib (2.1.2), scipy, pytest, NEST 2.14.0
 
 Optional: seaborn, Sumatra
 
diff --git a/figures/Schmidt2018_dyn/Snakefile b/figures/Schmidt2018_dyn/Snakefile
index 10ec12318b913e420321b2e605a5a8c0d1dbf0de..5b7edcbebd6c5a8093d2afc5de0cca9413f7c731 100644
--- a/figures/Schmidt2018_dyn/Snakefile
+++ b/figures/Schmidt2018_dyn/Snakefile
@@ -26,7 +26,7 @@ ORIGINAL_SIM_LABELS = {'all': ['533d73357fbe99f6178029e6054b571b485f40f6',
                        'Fig2': ['533d73357fbe99f6178029e6054b571b485f40f6',
                                 '0adda4a542c3d5d43aebf7c30d876b6c5fd1d63e',
                                 '33fb5955558ba8bb15a3fdce49dfd914682ef3ea'],
-                       'Fig3': '33fb5955558ba8bb15a3fdce49dfd914682ef3ea',
+                       'Fig3': ['33fb5955558ba8bb15a3fdce49dfd914682ef3ea'],
                        'Fig4': ['33fb5955558ba8bb15a3fdce49dfd914682ef3ea',
                                 '1474e1884422b5b2096d3b7a20fd4bdf388af7e0',
                                 '99c0024eacc275d13f719afd59357f7d12f02b77',
@@ -88,17 +88,17 @@ rule Fig2_bistability:
         'Fig2_bistability.eps'
     shell:
         'python3 Fig2_bistability.py'
-        
+
 rule Fig3_ground_state_chi1:
     input:
-        os.path.join(DATA_DIR, SIM_LABELS['Fig3'], 'Analysis/pop_rates.json'),
-        os.path.join(DATA_DIR, SIM_LABELS['Fig3'], 'Analysis/pop_LvR.json'),
-        os.path.join(DATA_DIR, SIM_LABELS['Fig3'], 'Analysis/corrcoeff.json'),
-        expand(os.path.join(DATA_DIR, SIM_LABELS['Fig3'], 'recordings', '-'.join((SIM_LABELS['Fig3'], 'spikes-{area}-{pop}.npy'))),
+        os.path.join(DATA_DIR, SIM_LABELS['Fig3'][0], 'Analysis/pop_rates.json'),
+        os.path.join(DATA_DIR, SIM_LABELS['Fig3'][0], 'Analysis/pop_LvR.json'),
+        os.path.join(DATA_DIR, SIM_LABELS['Fig3'][0], 'Analysis/corrcoeff.json'),
+        expand(os.path.join(DATA_DIR, SIM_LABELS['Fig3'][0], 'recordings', '-'.join((SIM_LABELS['Fig3'][0], 'spikes-{area}-{pop}.npy'))),
               area=['V1', 'V2', 'FEF'], pop=population_list),
-        expand(os.path.join(DATA_DIR, SIM_LABELS['Fig3'], 'Analysis', 'rate_time_series_full', 'rate_time_series_full_{area}.npy'),
+        expand(os.path.join(DATA_DIR, SIM_LABELS['Fig3'][0], 'Analysis', 'rate_time_series_full', 'rate_time_series_full_{area}.npy'),
               area=['V1', 'V2', 'FEF']),
-        expand(os.path.join(DATA_DIR, SIM_LABELS['Fig3'], 'Analysis', 'rate_time_series_auto_kernel', 'rate_time_series_auto_kernel_{area}.npy'), area=['V1', 'V2', 'FEF']),
+        expand(os.path.join(DATA_DIR, SIM_LABELS['Fig3'][0], 'Analysis', 'rate_time_series_auto_kernel', 'rate_time_series_auto_kernel_{area}.npy'), area=['V1', 'V2', 'FEF']),
     output:
         'Fig3_ground_state_chi1.eps'
     shell:
@@ -108,7 +108,7 @@ rule Fig4_theory_calculations:
     output:
         'Fig4_theory_data/results_{cc_weights_factor}.npy'
     shell:
-        'python3 Fig4_theory.py {wildcards.cc_weights_factor}' 
+        'python3 Fig4_theory.py {wildcards.cc_weights_factor}'
 
 rule Fig4_metastability:
     input:
@@ -120,7 +120,7 @@ rule Fig4_metastability:
         'Fig4_metastability.eps'
     shell:
         'python3 Fig4_metastability.py'
-        
+
 rule Fig5_ground_state:
     input:
         os.path.join(DATA_DIR, SIM_LABELS['Fig5'][1], 'Analysis', 'pop_rates.json'),
@@ -189,7 +189,7 @@ rule Fig8_interactions:
         'Fig8_interactions.eps'
     shell:
         'python3 Fig8_interactions.py'
-        
+
 rule Fig9_laminar_interactions:
     input:
         expand(os.path.join(DATA_DIR, '{simulation}', 'Analysis', 'granger_causality', 'granger_causality_{area}_{pop}.json'),
@@ -242,7 +242,7 @@ rule Fig9_path_analysis:
         'Fig9_tex_files/{simulation}_lw_HZ_paths.tex'
     shell:
         'python3 Fig9_path_analysis.py {} {{wildcards.simulation}}'.format(DATA_DIR)
-        
+
 rule Fig9_paths:
     input:
         'Fig9_tex_files/{simulation}_lw_{type}_paths.tex'
diff --git a/figures/Schmidt2018_dyn/compute_granger_causality.py b/figures/Schmidt2018_dyn/compute_granger_causality.py
index e4575826e9bf083a7fc50a8da1fbd1b923e062cd..6d725aba9abc10e35cbee45b1f52d9c0b511d469 100644
--- a/figures/Schmidt2018_dyn/compute_granger_causality.py
+++ b/figures/Schmidt2018_dyn/compute_granger_causality.py
@@ -49,7 +49,7 @@ connection_params = {'g': -11.,
 network_params = {'connection_params': connection_params}
 M = MultiAreaModel(network_params)
 # We exclude external input from the analysis
-K = M.K_matrix[:, :-1]
+K = M.K[:, :-1]
 
 
 def indices_to_population(structure, indices):
@@ -150,7 +150,7 @@ else:
             test = (np.nan, np.nan)
             res_red = np.nan
 
-        gc[source_area][source_pop] = (cause, test[1]) 
+        gc[source_area][source_pop] = (cause, test[1])
 
 fn = os.path.join(save_path,
                   'granger_causality_{}_{}.json'.format(area, pop))
diff --git a/multiarea_model/analysis_helpers.py b/multiarea_model/analysis_helpers.py
index ce4c2e47e49425a9d85e0334354af7db95ed864c..546ef8012fd3a32101ffb2f2bbf306bf3c1ecfd4 100644
--- a/multiarea_model/analysis_helpers.py
+++ b/multiarea_model/analysis_helpers.py
@@ -87,7 +87,7 @@ def model_iter(mode='single',
         Cartesian product of 2 ('single' mode) or 4 ('double' mode) lists
     """
     if mode == 'single':
-        assert((areas2 is None) and (pops2 is 'complete'))
+        assert((areas2 is None) and (pops2 == 'complete'))
     if pops is None or pops2 is None:
         assert((pops is None) and (pops2 is None) or mode == 'single')
     if pops == 'complete':
@@ -538,7 +538,7 @@ def pop_cv_isi(data_array, t_min, t_max):
         for i in np.unique(data_array[:, 0]):
             intervals = np.diff(data_array[indices][
                 np.where(data_array[indices, 0] == i), 1])
-            if (len(intervals) > 0):
+            if intervals.size > 0:
                 cv_isi.append(np.std(intervals) / np.mean(intervals))
         if len(cv_isi) > 0:
             return np.mean(cv_isi)
@@ -668,8 +668,11 @@ def synchrony(data_array, num_neur, t_min, t_max, resolution=1.0):
     spike_count_histogramm = pop_rate_time_series(
         data_array, num_neur, t_min, t_max, resolution=resolution)
     mean = np.mean(spike_count_histogramm)
-    variance = np.var(spike_count_histogramm)
-    synchrony = variance / mean
+    std_dev = np.std(spike_count_histogramm)
+    try:
+        synchrony = std_dev / mean
+    except ZeroDivisionError:
+        synchrony = np.inf
     return synchrony
 
 
diff --git a/multiarea_model/data_multiarea/Model.py b/multiarea_model/data_multiarea/Model.py
index 6d68fba7906b192ba3631f498599ababfcf5f8b3..8e5cc5e0fdcb3d4dc404f429fcfcf32fbd613823 100644
--- a/multiarea_model/data_multiarea/Model.py
+++ b/multiarea_model/data_multiarea/Model.py
@@ -20,7 +20,6 @@ Sacha van Albada
 import numpy as np
 import json
 import re
-import sys
 import os
 import scipy
 import scipy.integrate
@@ -262,8 +261,8 @@ def compute_Model_params(out_label='', mode='default'):
     """
     def integrand(r, R, sig):
         gauss = np.exp(-r ** 2 / (2 * sig ** 2))
-        x1 = scipy.arctan(np.sqrt((2 * R - r) / (2 * R + r)))
-        x2 = scipy.sin(4 * scipy.arctan(np.sqrt((2 * R - r) / (2 * R + r))))
+        x1 = np.arctan(np.sqrt((2 * R - r) / (2 * R + r)))
+        x2 = np.sin(4 * np.arctan(np.sqrt((2 * R - r) / (2 * R + r))))
         factor = 4 * x1 - x2
         return r * gauss * factor
 
@@ -876,7 +875,7 @@ def compute_Model_params(out_label='', mode='default'):
             ext_syn = External_synapses[area]
             vector[-1] = ext_syn
             solution, residues, rank, s = np.linalg.lstsq(
-                nonvisual_fraction_matrix, vector)
+                nonvisual_fraction_matrix, vector, rcond=-1)
             for i, pop in enumerate(structure[area]):
                 synapse_numbers[area][pop]['external'] = {
                     'external': solution[i] * neuronal_numbers[area][pop]}
diff --git a/multiarea_model/default_params.py b/multiarea_model/default_params.py
index d1158c4db3d1cea7087392a1f22779850edc6a45..499bfaeffd8ca0ab699e4200ad6dd768928190d9 100644
--- a/multiarea_model/default_params.py
+++ b/multiarea_model/default_params.py
@@ -122,11 +122,11 @@ General connection parameters
 """
 connection_params = {
     # Whether to apply the stabilization method of
-    # Schuecker, Schmidt et al. (2017). Default is False.
+    # Schuecker, Schmidt et al. (2017). Default is None.
     # Options are True to perform the stabilization or
     # a string that specifies the name of a binary
     # numpy file containing the connectivity matrix
-    'K_stable': False,
+    'K_stable': None,
 
     # Whether to replace all cortico-cortical connections by stationary
     # Poisson input with population-specific rates (het_poisson_stat)
diff --git a/multiarea_model/multiarea_helpers.py b/multiarea_model/multiarea_helpers.py
index 7bf2fe3a5738d49ae07df0cc8325ef48aadf6b60..787af3d79ae081f13702d2a3136f482b89396da3 100644
--- a/multiarea_model/multiarea_helpers.py
+++ b/multiarea_model/multiarea_helpers.py
@@ -32,7 +32,10 @@ import json
 import numpy as np
 import os
 from itertools import product
-import collections
+try:
+    from collections.abc import Iterable
+except ImportError:
+    from collections import Iterable
 
 from config import base_path
 from .default_params import complete_area_list, population_list
@@ -229,7 +232,7 @@ def matrix_to_dict(m, area_list, structure, external=None):
             x = x.reshape((8, 8))
         for i, pop in enumerate(population_list):
             for j, pop2 in enumerate(population_list):
-                if x[i][j] < 1e-20:
+                if np.isclose(0., x[i][j]):
                     x[i][j] = 0.
                 dic[area][pop][area2][pop2] = x[i][j]
         if external is not None:
@@ -306,7 +309,7 @@ def dict_to_vector(d, area_list, structure):
     for target_area in area_list:
         if target_area in structure:
             for target_pop in structure[target_area]:
-                if isinstance(d[target_area][target_pop], collections.Iterable):
+                if isinstance(d[target_area][target_pop], Iterable):
                     V[i] = d[target_area][target_pop][0]
                 else:
                     V[i] = d[target_area][target_pop]
diff --git a/requirements.txt b/requirements.txt
index 334dadee1be7c79946682bab225daab7af9f1f8a..8e4d8fb814917e5c4f3877817701810bcb215559 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,5 +3,6 @@ numpy
 matplotlib
 pandas
 scipy
+pytest
 nested_dict
 dicthash