Unverified commit a94c6674 authored by Didi Hou, committed by GitHub

Delete multiarea_model/.ipynb_checkpoints directory

parent 365d2cee
1 merge request: !35 Pre-release MAM v1.1.0
"""
default_params.py
=================
This script defines the default values of all
parameters and provides functions to compute
single-neuron and synapse parameters and to
properly set the seed of the random number generators.
Authors
-------
Maximilian Schmidt
"""
from config import base_path
import json
import os
import nest
import numpy as np
complete_area_list = ['V1', 'V2', 'VP', 'V3', 'V3A', 'MT', 'V4t', 'V4', 'VOT', 'MSTd',
'PIP', 'PO', 'DP', 'MIP', 'MDP', 'VIP', 'LIP', 'PITv', 'PITd',
'MSTl', 'CITv', 'CITd', 'FEF', 'TF', 'AITv', 'FST', '7a', 'STPp',
'STPa', '46', 'AITd', 'TH']
population_list = ['23E', '23I', '4E', '4I', '5E', '5I', '6E', '6I']
with open(os.path.join(base_path, 'multiarea_model/data_multiarea',
                       'viscortex_raw_data.json'), 'r') as f1:
    raw_data = json.load(f1)
av_indegree_Cragg = raw_data['av_indegree_Cragg']
av_indegree_OKusky = raw_data['av_indegree_OKusky']
"""
Simulation parameters
"""
sim_params = {
# master seed for random number generators
'rng_seed': 1,
# simulation step (in ms)
'dt': 0.1,
# simulated time (in ms)
't_sim': 10.0,
# no. of MPI processes:
'num_processes': 1,
# no. of threads per MPI process
'local_num_threads': 1,
# Areas represented in the network
'areas_simulated': complete_area_list,
}
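
# Illustrative sketch (not part of the original parameter file): a custom
# simulation dictionary only needs to list the entries that deviate from the
# defaults above; `nested_update` (defined at the end of this file) merges it
# into a copy of `sim_params`. The concrete values below are arbitrary
# examples, not recommended settings.
def _example_custom_sim_params():
    from copy import deepcopy
    custom_sim = {'t_sim': 2000.0,         # simulate 2 s instead of 10 ms
                  'num_processes': 4,      # 4 MPI processes
                  'local_num_threads': 2}  # 2 threads per MPI process
    merged = deepcopy(sim_params)
    nested_update(merged, custom_sim)
    return merged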
"""
Network parameters
"""
network_params = {
# Surface area of each area in mm^2
'surface': 1.0,
# Scaling of population sizes
'N_scaling': 1.,
# Scaling of indegrees
'K_scaling': 1.,
# Absolute path to the file holding full-scale rates for scaling
# synaptic weights
'fullscale_rates': None,
# Check whether NEST 2 or 3 is used. There is no straightforward
# way of checking this, but PrintNetwork was removed in NEST 3,
# so checking for its existence should suffice.
'USING_NEST_3': 'PrintNetwork' not in dir(nest)
}
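
# Illustrative sketch (not part of the original parameter file): a down-scaled
# configuration reduces 'N_scaling' and 'K_scaling' and must also point
# 'fullscale_rates' to a JSON file with population-averaged rates of the
# full-scale model (MultiAreaModel raises a KeyError otherwise). The file
# path below is a placeholder.
def _example_downscaled_network_params():
    from copy import deepcopy
    params = deepcopy(network_params)
    nested_update(params, {'N_scaling': 0.01,
                           'K_scaling': 0.01,
                           'fullscale_rates': '/path/to/fullscale_rates.json'})
    return params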
"""
Single-neuron parameters
"""
sim_params.update(
{
'initial_state': {
# mean of initial membrane potential (in mV)
'V_m_mean': -58.0,
# std of initial membrane potential (in mV)
'V_m_std': 10.0
}
})
# dictionary defining single-cell parameters
single_neuron_dict = {
# Leak potential of the neurons (in mV).
'E_L': -65.0,
# Threshold potential of the neurons (in mV).
'V_th': -50.0,
# Membrane potential after a spike (in mV).
'V_reset': -65.0,
# Membrane capacitance (in pF).
'C_m': 250.0,
# Membrane time constant (in ms).
'tau_m': 10.0,
# Time constant of postsynaptic excitatory currents (in ms).
'tau_syn_ex': 0.5,
# Time constant of postsynaptic inhibitory currents (in ms).
'tau_syn_in': 0.5,
# Refractory period of the neurons after a spike (in ms).
't_ref': 2.0}
neuron_params = {
# neuron model
'neuron_model': 'iaf_psc_exp',
# neuron parameters
'single_neuron_dict': single_neuron_dict,
# Mean and standard deviation for the
# distribution of initial membrane potentials
'V0_mean': -100.,
'V0_sd': 50.}
network_params.update({'neuron_params': neuron_params})
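
# Illustrative sketch (not part of the original parameter file): synaptic
# strengths are specified below as peak PSPs in mV (e.g. 'PSP_e'), while NEST
# expects PSC amplitudes in pA; the model relates the two quantities via
# multiarea_helpers.convert_syn_weight. The helper below merely illustrates
# that relation for the integrate-and-fire neuron with exponential PSCs
# defined above, by evaluating the analytical response to a 1-pA PSC on a
# time grid.
def _example_psp_peak_per_unit_psc(dt=0.001, t_max=50.0):
    p = single_neuron_dict
    t = np.arange(0.0, t_max, dt)  # time grid in ms
    # membrane response (in mV) to an exponential 1-pA PSC, starting from V = 0
    v = (1.0 / p['C_m'] * p['tau_m'] * p['tau_syn_ex'] /
         (p['tau_m'] - p['tau_syn_ex']) *
         (np.exp(-t / p['tau_m']) - np.exp(-t / p['tau_syn_ex'])))
    # peak PSP in mV per pA; a PSP_e of 0.15 mV then corresponds to a PSC
    # amplitude of roughly 0.15 / _example_psp_peak_per_unit_psc() pA
    return float(np.max(v))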
"""
General connection parameters
"""
connection_params = {
# Whether to apply the stabilization method of
# Schuecker, Schmidt et al. (2017). Default is None.
# Options are True to perform the stabilization or
# a string that specifies the name of a binary
# numpy file containing the connectivity matrix
'K_stable': None,
# Whether to replace all cortico-cortical connections by stationary
# Poisson input with population-specific rates (het_poisson_stat)
# or by time-varying current input (het_current_nonstat)
# while still simulating all areas. In both cases, the data to replace
# the cortico-cortical input is loaded from `replace_cc_input_source`.
'replace_cc': False,
# Whether to replace non-simulated areas by Poisson sources
# with the same global rate rate_ext ('hom_poisson_stat'),
# by population-specific rates ('het_poisson_stat'),
# or by population-specific time-varying currents ('het_current_nonstat').
# In the latter two cases, the data to replace the cortico-cortical
# input is loaded from `replace_cc_input_source`.
'replace_non_simulated_areas': None,
# Source of the input rates to replace cortico-cortical input
# Either a json file (has to end in .json) holding a scalar value
# for each population, or
# a base name such that files with names
# $(replace_cc_input_source)-area-population.npy
# (e.g. '$(replace_cc_input_source)-V1-23E.npy')
# contain the time series for each population.
# We recommend using absolute paths rather than relative paths.
'replace_cc_input_source': None,
# whether to redistribute CC synapses to meet the literature value
# of E-specificity
'E_specificity': True,
# Relative inhibitory synaptic strength (dimensionless).
'g': -16.,
# compute average indegree in V1 from data
'av_indegree_V1': np.mean([av_indegree_Cragg, av_indegree_OKusky]),
# synaptic volume density
# 'area-specific' --> conserves the average in-degree
# 'constant' --> conserves the synaptic volume density
'rho_syn': 'constant',
# factors to increase the external Poisson indegree onto 5E and 6E
'fac_nu_ext_5E': 1.,
'fac_nu_ext_6E': 1.,
# factor to increase the external input onto 23E and 5E in area TH
'fac_nu_ext_TH': 1.,
# synapse weight parameters for current-based neurons
# excitatory intracortical synaptic weight (mV)
'PSP_e': 0.15,
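# increased excitatory synaptic weight (mV) for connections from 4E onto 23E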
'PSP_e_23_4': 0.3,
# synaptic weight (mV) for external input
'PSP_ext': 0.15,
# relative SD of normally distributed synaptic weights
'PSC_rel_sd_normal': 0.1,
# relative SD of lognormally distributed synaptic weights
'PSC_rel_sd_lognormal': 3.0,
# scaling factor for cortico-cortical connections (chi)
'cc_weights_factor': 1.,
# factor to scale cortico-cortical inh. weights in relation
# to exc. weights (chi_I)
'cc_weights_I_factor': 1.,
# switch whether to distribute weights lognormally
'lognormal_weights': False,
# switch whether to distribute only the EE weights lognormally if
# 'lognormal_weights' is True
'lognormal_EE_only': False,
}
network_params.update({'connection_params': connection_params})
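
# Illustrative sketch (not part of the original parameter file): when
# cortico-cortical input is replaced by population-specific time series
# ('het_current_nonstat'), the data for each population is expected in files
# named '<replace_cc_input_source>-<area>-<population>.npy', as described in
# the comments above. The base path used here is a placeholder.
def _example_cc_input_filename(area='V1', pop='23E'):
    base = '/path/to/cc_input'  # placeholder for 'replace_cc_input_source'
    return '{}-{}-{}.npy'.format(base, area, pop)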
"""
Delays
"""
delay_params = {
# Local dendritic delay for excitatory transmission [ms]
'delay_e': 1.5,
# Local dendritic delay for inhibitory transmission [ms]
'delay_i': 0.75,
# Relative standard deviation for both local and inter-area delays
'delay_rel': 0.5,
# Axonal transmission speed to compute interareal delays [mm/ms]
'interarea_speed': 3.5
}
network_params.update({'delay_params': delay_params})
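
# Illustrative sketch (not part of the original parameter file): the mean
# inter-areal delay follows from the axonal distance and 'interarea_speed',
# and delays are drawn with relative spread 'delay_rel'. The distance value
# and the use of numpy's default_rng are illustrative only; the actual
# distances are part of the model data and the delays are drawn during
# network setup.
def _example_interarea_delays(distance_mm=40.0, size=5, seed=1):
    mean_delay = distance_mm / delay_params['interarea_speed']  # in ms
    sd_delay = delay_params['delay_rel'] * mean_delay
    rng = np.random.default_rng(seed)
    # clip at the simulation resolution so that all delays stay positive
    return np.maximum(rng.normal(mean_delay, sd_delay, size=size),
                      sim_params['dt'])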
"""
Input parameters
"""
input_params = {
# Whether to use Poisson or DC input (True or False)
'poisson_input': True,
# synapse type for Poisson input
'syn_type_ext': 'static_synapse_hpc',
# Rate of the Poissonian spike generator (in spikes/s).
'rate_ext': 10.,
# Whether to switch on time-dependent DC input
'dc_stimulus': False,
}
network_params.update({'input_params': input_params})
"""
Recording settings
"""
recording_dict = {
# Which areas to record spike data from
'areas_recorded': complete_area_list,
# voltmeter
'record_vm': False,
# Fraction of neurons to record membrane potentials from
# in each population if record_vm is True
'Nrec_vm_fraction': 0.01,
# Parameters for the spike detectors
'spike_dict': {
'label': 'spikes',
'start': 0.},
# Parameters for the voltmeters
'vm_dict': {
'label': 'vm',
'start': 0.,
'stop': 1000.,
'interval': 0.1}
}
if network_params['USING_NEST_3']:
recording_dict['spike_dict'].update({'record_to': 'ascii'})
recording_dict['vm_dict'].update({'record_to': 'ascii'})
else:
recording_dict['spike_dict'].update({'withtime': True,
'record_to': ['file']})
recording_dict['vm_dict'].update({'withtime': True,
'record_to': ['file']})
sim_params.update({'recording_dict': recording_dict})
"""
Theory params
"""
theory_params = {'neuron_params': neuron_params,
# Initial rates can be None (start integration at
# zero rates), a numpy.ndarray defining the initial
# rates or 'random_uniform' which leads to randomly
# drawn initial rates from a uniform distribution.
'initial_rates': None,
# If 'initial_rates' is set to 'random_uniform',
# 'initial_rates_iter' defines the number of
# different initial conditions
'initial_rates_iter': None,
# If 'initial_rates' is set to 'random_uniform',
# 'initial_rates_max' defines the maximum rate of the
# uniform distribution to draw the initial rates from
'initial_rates_max': 1000.,
# The simulation time of the mean-field theory integration
'T': 50.,
# The time step of the mean-field theory integration
'dt': 0.01,
# Time interval for recording the trajectory of the mean-field calculation.
# If None, the interval is set to dt.
'rec_interval': None}
"""
Helper functions to update the default parameters with custom
parameters and to check custom parameter dictionaries for unused keys
"""
def nested_update(d, d2):
for key in d2:
if isinstance(d2[key], dict) and key in d:
nested_update(d[key], d2[key])
else:
d[key] = d2[key]
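
# Illustrative sketch (not part of the original parameter file): nested_update
# merges a (possibly nested) custom dictionary into the defaults in place,
# overwriting only the keys that appear in the custom dictionary.
def _example_nested_update():
    from copy import deepcopy
    params = deepcopy(network_params)
    nested_update(params, {'N_scaling': 0.5,
                           'connection_params': {'g': -11.0}})
    # -> params['N_scaling'] == 0.5, params['connection_params']['g'] == -11.0,
    #    all other entries keep their default values
    return params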
def check_custom_params(d, def_d):
for key, val in d.items():
if isinstance(val, dict):
check_custom_params(d[key], def_d[key])
else:
try:
def_val = def_d[key]
except KeyError:
raise KeyError('Unused key in custom parameter dictionary: {}'.format(key))
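
# Illustrative sketch (not part of the original parameter file):
# check_custom_params walks a custom dictionary and raises a KeyError for any
# key that does not exist in the defaults, which catches misspelled parameter
# names before the model is built.
def _example_check_custom_params():
    check_custom_params({'connection_params': {'g': -11.0}}, network_params)  # passes silently
    try:
        check_custom_params({'connection_params': {'gg': -11.0}}, network_params)
    except KeyError as error:
        return error  # 'Unused key in custom parameter dictionary: gg'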
"""
multiarea_model
===============
Network class to instantiate and administer instances of the
multi-area model of macaque visual cortex by Schmidt et al. (2018).
Classes
-------
MultiAreaModel : Loads a parameter file that specifies custom parameters for a
particular instance of the model. An instance of the model has a unique hash
label. As members, it may contain three classes:
- simulation : contains all relevant parameters for a simulation of
the network
- theory : theory class that serves to estimate the stationary state
of the network using mean-field theory
Schuecker J, Schmidt M, van Albada SJ, Diesmann M, Helias M (2017)
Fundamental Activity Constraints Lead to Specific Interpretations of
the Connectome. PLoS Comput Biol 13(2): e1005179.
doi:10.1371/journal.pcbi.1005179
- analysis: provides methods to load data and perform data analysis
"""
import json
import numpy as np
import os
import pprint
import shutil
from .default_params import complete_area_list, nested_update, network_params
from .default_params import check_custom_params
from collections import OrderedDict
from copy import deepcopy
from .data_multiarea.Model import compute_Model_params
from .analysis import Analysis
from config import base_path
from dicthash import dicthash
from .multiarea_helpers import (
area_level_dict,
load_degree_data,
convert_syn_weight,
dict_to_matrix,
dict_to_vector,
indegree_to_synapse_numbers,
matrix_to_dict,
vector_to_dict,
)
from .simulation import Simulation
from .theory import Theory
# Set precision of dicthash library to 1e-4
# because this is sufficient for indegrees
# and neuron numbers and guarantees reproducibility
# of the class label despite inevitably imprecise float calculations
# in the data scripts.
dicthash.FLOAT_FACTOR = 1e4
dicthash.FLOOR_SMALL_FLOATS = True
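
# Illustrative sketch (not part of the original file): with FLOAT_FACTOR set
# to 1e4, floating-point entries are effectively hashed at a precision of
# 1e-4, so two parameter dictionaries that differ only below this precision
# should receive the same hash label. The toy dictionaries below are
# examples, not model parameters.
def _example_dicthash_precision():
    h1 = dicthash.generate_hash_from_dict({'indegree': 100.00001})
    h2 = dicthash.generate_hash_from_dict({'indegree': 100.00002})
    return h1 == h2  # expected to be True at 1e-4 precision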
class MultiAreaModel:
def __init__(self, network_spec, theory=False, simulation=False,
analysis=False, *args, **keywords):
"""
Multiarea model class.
An instance of the multiarea model with the given parameters.
Parameters
----------
network_spec : dict or str
Specify the network. If it is of type dict, the parameters defined
in the dictionary overwrite the default parameters defined in
default_params.py.
If it is of type str, the string defines the label of a previously
initialized model instance that is now loaded.
theory : bool
whether to create an instance of the theory class as member.
simulation : bool
whether to create an instance of the simulation class as member.
analysis : bool
whether to create an instance of the analysis class as member.
"""
self.params = deepcopy(network_params)
if isinstance(network_spec, dict):
print("Initializing network from dictionary.")
check_custom_params(network_spec, self.params)
self.custom_params = network_spec
p_ = 'multiarea_model/data_multiarea/custom_data_files'
# Draw a random integer label for the data script to avoid clashes
# with concurrently created class instances
rand_data_label = np.random.randint(10000)
print("RAND_DATA_LABEL", rand_data_label)
tmp_parameter_fn = os.path.join(base_path,
p_,
'custom_{}_parameter_dict.json'.format(rand_data_label))
tmp_data_fn = os.path.join(base_path,
p_,
'custom_Data_Model_{}.json'.format(rand_data_label))
with open(tmp_parameter_fn, 'w') as f:
json.dump(self.custom_params, f)
# Execute Data script
compute_Model_params(out_label=str(rand_data_label),
mode='custom')
else:
print("Initializing network from label.")
parameter_fn = os.path.join(base_path,
'config_files',
'{}_config'.format(network_spec))
tmp_data_fn = os.path.join(base_path,
'config_files',
'custom_Data_Model_{}.json'.format(network_spec))
with open(parameter_fn, 'r') as f:
self.custom_params = json.load(f)
nested_update(self.params, self.custom_params)
with open(tmp_data_fn, 'r') as f:
dat = json.load(f)
self.structure = OrderedDict()
for area in dat['area_list']:
self.structure[area] = dat['structure'][area]
self.N = dat['neuron_numbers']
self.synapses = dat['synapses']
self.W = dat['synapse_weights_mean']
self.W_sd = dat['synapse_weights_sd']
self.area_list = complete_area_list
self.distances = dat['distances']
ind, inda, out, outa = load_degree_data(tmp_data_fn)
# If K_stable is specified in the params, load the stabilized matrix
# TODO: Extend this by calling the stabilization method
if self.params['connection_params']['K_stable'] is None:
self.K = ind
else:
if not isinstance(self.params['connection_params']['K_stable'], str):
raise TypeError("Not supported. Please store the "
"matrix in a binary numpy file and define "
"the path to the file as the parameter value.")
# Assume that the parameter defines a filename containing the matrix
K_stable = np.load(self.params['connection_params']['K_stable'])
ext = {area: {pop: ind[area][pop]['external'] for pop in
self.structure['V1']} for area in self.area_list}
self.K = matrix_to_dict(
K_stable, self.area_list, self.structure, external=ext)
self.synapses = indegree_to_synapse_numbers(self.K, self.N)
self.vectorize()
if self.params['K_scaling'] != 1. or self.params['N_scaling'] != 1.:
if self.params['fullscale_rates'] is None:
raise KeyError('For downscaling, you have to define a file'
' with fullscale rates.')
self.scale_network()
self.K_areas = area_level_dict(self.K, self.N)
self.label = dicthash.generate_hash_from_dict({'params': self.params,
'K': self.K,
'N': self.N,
'structure': self.structure},
blacklist=[('params', 'fullscale_rates'),
('params',
'connection_params',
'K_stable'),
('params',
'connection_params',
'replace_cc_input_source')])
if isinstance(network_spec, dict):
parameter_fn = os.path.join(base_path,
'config_files',
'{}_config'.format(self.label))
data_fn = os.path.join(base_path,
'config_files',
'custom_Data_Model_{}.json'.format(self.label))
shutil.move(tmp_parameter_fn,
parameter_fn)
shutil.move(tmp_data_fn,
data_fn)
elif isinstance(network_spec, str):
assert(network_spec == self.label)
# Initialize member classes
if theory:
if 'theory_spec' not in keywords:
theory_spec = {}
else:
theory_spec = keywords['theory_spec']
self.init_theory(theory_spec)
if simulation:
if 'sim_spec' not in keywords:
sim_spec = {}
else:
sim_spec = keywords['sim_spec']
self.init_simulation(sim_spec)
if analysis:
assert hasattr(self, 'simulation')
if 'ana_spec' not in keywords:
ana_spec = {}
else:
ana_spec = keywords['ana_spec']
self.init_analysis(ana_spec)
def __str__(self):
s = "Multi-area network {} with custom parameters: \n".format(self.label)
s += pprint.pformat(self.params, width=1)
return s
def __eq__(self, other):
return self.label == other.label
def __hash__(self):
return hash(self.label)
def init_theory(self, theory_spec):
self.theory = Theory(self, theory_spec)
def init_simulation(self, sim_spec):
self.simulation = Simulation(self, sim_spec)
def init_analysis(self, ana_spec):
assert(hasattr(self, 'simulation'))
if 'load_areas' in ana_spec:
load_areas = ana_spec['load_areas']
else:
load_areas = None
if 'data_list' in ana_spec:
data_list = ana_spec['data_list']
else:
data_list = ['spikes']
self.analysis = Analysis(self, self.simulation,
data_list=data_list,
load_areas=load_areas)
def scale_network(self):
"""
Scale the network if `N_scaling` and/or `K_scaling` differ from 1.
This function:
- adjusts the synaptic weights such that the population-averaged
stationary spike rates approximately match the given `fullscale_rates`.
- scales the population sizes with `N_scaling` and indegrees with `K_scaling`.
- scales the synapse numbers with `N_scaling`*`K_scaling`.
"""
# population sizes
self.N_vec *= self.params['N_scaling']
# Scale the synaptic weights before the indegrees to use full-scale indegrees
self.adj_W_to_K()
# Then scale the indegrees and synapse numbers
self.K_matrix *= self.params['K_scaling']
self.syn_matrix *= self.params['K_scaling'] * self.params['N_scaling']
# Finally recreate dictionaries
self.N = vector_to_dict(self.N_vec, self.area_list, self.structure)
self.K = matrix_to_dict(self.K_matrix[:, :-1], self.area_list,
self.structure, external=self.K_matrix[:, -1])
self.W = matrix_to_dict(self.W_matrix[:, :-1], self.area_list,
self.structure, external=self.W_matrix[:, -1])
self.synapses = matrix_to_dict(self.syn_matrix, self.area_list, self.structure)
def vectorize(self):
"""
Create matrix and vector version of neuron numbers, synapses
and synapse weight dictionaries.
"""
self.N_vec = dict_to_vector(self.N, self.area_list, self.structure)
self.syn_matrix = dict_to_matrix(self.synapses, self.area_list, self.structure)
self.K_matrix = dict_to_matrix(self.K, self.area_list, self.structure)
self.W_matrix = dict_to_matrix(self.W, self.area_list, self.structure)
self.J_matrix = convert_syn_weight(self.W_matrix,
self.params['neuron_params']['single_neuron_dict'])
self.structure_vec = ['-'.join((area, pop)) for area in
self.area_list for pop in self.structure[area]]
self.add_DC_drive = np.zeros_like(self.N_vec)
def adj_W_to_K(self):
"""
Adjust weights to scaling of neuron numbers and indegrees.
The recurrent and external weights are adjusted to the scaling
of the indegrees. Extra DC input is added to compensate the scaling
and preserve the mean and variance of the input.
"""
tau_m = self.params['neuron_params']['single_neuron_dict']['tau_m']
C_m = self.params['neuron_params']['single_neuron_dict']['C_m']
if isinstance(self.params['fullscale_rates'], np.ndarray):
raise ValueError("Not supported. Please store the "
"rates in a file and define the path to the file as "
"the parameter value.")
else:
with open(self.params['fullscale_rates'], 'r') as f:
d = json.load(f)
full_mean_rates = dict_to_vector(d, self.area_list, self.structure)
rate_ext = self.params['input_params']['rate_ext']
J_ext = self.J_matrix[:, -1]
K_ext = self.K_matrix[:, -1]
x1_ext = 1e-3 * tau_m * J_ext * K_ext * rate_ext
x1 = 1e-3 * tau_m * np.dot(self.J_matrix[:, :-1] * self.K_matrix[:, :-1], full_mean_rates)
K_scaling = self.params['K_scaling']
self.J_matrix /= np.sqrt(K_scaling)
self.add_DC_drive = C_m / tau_m * ((1. - np.sqrt(K_scaling)) * (x1 + x1_ext))
neuron_params = self.params['neuron_params']['single_neuron_dict']
self.W_matrix = (1. / convert_syn_weight(1., neuron_params) * self.J_matrix)
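
# Illustrative usage sketch (not part of the original file): build a strongly
# down-scaled instance of the model and attach a Simulation member. The
# scaling factors, the path to the full-scale rate file, the simulated areas
# and the simulation time are placeholders chosen for illustration only.
def _example_create_downscaled_model():
    custom_network_spec = {'N_scaling': 0.01,
                           'K_scaling': 0.01,
                           'fullscale_rates': '/path/to/fullscale_rates.json',
                           'connection_params': {'g': -11.0}}
    M = MultiAreaModel(custom_network_spec,
                       simulation=True,
                       sim_spec={'areas_simulated': ['V1', 'V2'],
                                 't_sim': 1000.0})
    return M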