Commit 6929f804 authored by Jari Pronold, committed by GitHub

Merge pull request #19 from jarsi/nest_2_and_3

Enable use of NEST 3
parents ecd67114 fbae2073
@@ -69,7 +69,7 @@ technical parameters such as the number of parallel MPI processes and
threads. The simulation uses the network simulator NEST
(https://www.nest-simulator.org). For the simulations in [2, 3], we
used NEST version 2.8.0. The code in this repository runs with a
later release of NEST, version 2.14.0.
later release of NEST, version 2.14.0, as well as NEST 3.0.
`Theory`
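A quick way to confirm which code path the scripts will take is the same heuristic the default parameters in this commit rely on: `PrintNetwork` exists only in NEST 2. A minimal sketch:

```python
# Minimal sketch: detect the NEST major version with the repository's own
# heuristic (PrintNetwork was removed in NEST 3).
import nest

USING_NEST_3 = 'PrintNetwork' not in dir(nest)
print('NEST 3 detected' if USING_NEST_3 else 'NEST 2 detected')
```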
@@ -203,7 +203,7 @@ conveniently run by executing `pytest` in the `tests/` folder:
## Requirements
Python 3, python\_dicthash ([https://github.com/INM-6/python-dicthash](https://github.com/INM-6/python-dicthash)),
correlation\_toolbox ([https://github.com/INM-6/correlation-toolbox](https://github.com/INM-6/correlation-toolbox)),
pandas, numpy, nested_dict, matplotlib (2.1.2), scipy, pytest, NEST 2.14.0
pandas, numpy, nested_dict, matplotlib (2.1.2), scipy, pytest, NEST 2.14.0 or NEST 3.0
Optional: seaborn, Sumatra
@@ -128,17 +128,23 @@ class Analysis:
data[area][pop] = np.load(fn, allow_pickle=True)
except FileNotFoundError:
if not hasattr(self, 'all_spikes'):
csv_args = {'names': columns,
'sep': '\t',
'index_col': False}
if self.network.params['USING_NEST_3']:
csv_args.update({'skiprows': 3})
file_ending = 'dat'
else:
file_ending = 'gdf'
fp = '.'.join(('-'.join((self.simulation.label,
self.simulation.params[
'recording_dict'][d]['label'],
'*')),
'gdf'))
file_ending))
files = glob.glob(os.path.join(rec_dir, fp))
dat = pd.DataFrame(columns=columns)
for f in files:
dat = dat.append(pd.read_csv(f,
names=columns, sep='\t',
index_col=False),
dat = dat.append(pd.read_csv(f, **csv_args),
ignore_index=True)
self.all_spikes = dat
print(area, pop)
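The branch above reflects the two on-disk formats: NEST 2 spike detectors write headerless `.gdf` files, while the NEST 3 ASCII backend writes `.dat` files with a three-line header (hence `skiprows=3`). A standalone sketch of reading both into one DataFrame (file paths and column labels are illustrative):

```python
# Sketch: collect spike files from both NEST versions into a single DataFrame.
import glob
import pandas as pd

columns = ['senders', 'times']              # illustrative column labels
frames = []
for fn in glob.glob('recordings/*.dat'):    # NEST 3 ASCII backend, 3 header lines
    frames.append(pd.read_csv(fn, names=columns, sep='\t',
                              index_col=False, skiprows=3))
for fn in glob.glob('recordings/*.gdf'):    # NEST 2 spike_detector output
    frames.append(pd.read_csv(fn, names=columns, sep='\t',
                              index_col=False))
all_spikes = pd.concat(frames, ignore_index=True)
```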
@@ -14,6 +14,7 @@ Maximilian Schmidt
from config import base_path
import json
import os
import nest
import numpy as np
@@ -37,7 +38,7 @@ Simulation parameters
"""
sim_params = {
# master seed for random number generators
'master_seed': 0,
'rng_seed': 1,
# simulation step (in ms)
'dt': 0.1,
# simulated time (in ms)
@@ -62,7 +63,11 @@ network_params = {
'K_scaling': 1.,
# Absolute path to the file holding full-scale rates for scaling
# synaptic weights
'fullscale_rates': None
'fullscale_rates': None,
# Check whether NEST 2 or NEST 3 is used. There is no direct way to query
# the version, but PrintNetwork was removed in NEST 3, so checking for its
# absence suffices.
'USING_NEST_3': 'PrintNetwork' not in dir(nest)
}
@@ -246,18 +251,22 @@ recording_dict = {
# Parameters for the spike detectors
'spike_dict': {
'label': 'spikes',
'withtime': True,
'record_to': ['file'],
'start': 0.},
# Parameters for the voltmeters
'vm_dict': {
'label': 'vm',
'start': 0.,
'stop': 1000.,
'interval': 0.1,
'withtime': True,
'record_to': ['file']}
'interval': 0.1}
}
if network_params['USING_NEST_3']:
recording_dict['spike_dict'].update({'record_to': 'ascii'})
recording_dict['vm_dict'].update({'record_to': 'ascii'})
else:
recording_dict['spike_dict'].update({'withtime': True,
'record_to': ['file']})
recording_dict['vm_dict'].update({'withtime': True,
'record_to': ['file']})
sim_params.update({'recording_dict': recording_dict})
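For reference, a sketch of how these dictionaries translate into device creation (assumes `network_params` and `recording_dict` from above are in scope; device names follow the two NEST versions):

```python
# Sketch: create the spike recording device from the parameters defined above.
import nest

device = 'spike_recorder' if network_params['USING_NEST_3'] else 'spike_detector'
sd = nest.Create(device, params=recording_dict['spike_dict'])
```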
"""
@@ -145,7 +145,7 @@ class Simulation:
Prepare NEST Kernel.
"""
nest.ResetKernel()
master_seed = self.params['master_seed']
rng_seed = self.params['rng_seed']
num_processes = self.params['num_processes']
local_num_threads = self.params['local_num_threads']
vp = num_processes * local_num_threads
@@ -153,15 +153,18 @@
'total_num_virtual_procs': vp,
'overwrite_files': True,
'data_path': os.path.join(self.data_dir, 'recordings'),
'print_time': False,
'grng_seed': master_seed,
'rng_seeds': list(range(master_seed + 1,
master_seed + vp + 1))})
'print_time': False})
if self.network.params['USING_NEST_3']:
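# NEST 3 needs only a single kernel seed; per-virtual-process RNGs are derived from it internally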
nest.SetKernelStatus({'rng_seed': rng_seed})
else:
nest.SetKernelStatus({'grng_seed': rng_seed,
'rng_seeds': list(range(rng_seed + 1,
rng_seed + vp + 1))})
self.pyrngs = [np.random.RandomState(s) for s in list(range(
rng_seed + vp + 1, rng_seed + 2 * (vp + 1)))]
nest.SetDefaults(self.network.params['neuron_params']['neuron_model'],
self.network.params['neuron_params']['single_neuron_dict'])
self.pyrngs = [np.random.RandomState(s) for s in list(range(
master_seed + vp + 1, master_seed + 2 * (vp + 1)))]
def create_recording_devices(self):
"""
@@ -170,7 +173,10 @@
- spike detector
- voltmeter
"""
self.spike_detector = nest.Create('spike_detector', 1)
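# 'spike_detector' was renamed to 'spike_recorder' in NEST 3; fall back to the old name under NEST 2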
try:
self.spike_detector = nest.Create('spike_recorder')
except Exception:
self.spike_detector = nest.Create('spike_detector')
status_dict = deepcopy(self.params['recording_dict']['spike_dict'])
label = '-'.join((self.label,
status_dict['label']))
@@ -347,10 +353,16 @@ class Simulation:
'network_gids.txt'), 'w') as f:
for area in self.areas:
for pop in self.network.structure[area.name]:
if self.network.params['USING_NEST_3']:
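# NEST 3 stores a NodeCollection per population; indexing yields a one-element collection whose 'global_id' is the neuron ID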
first_id = area.gids[pop][0].get()['global_id']
last_id = area.gids[pop][-1].get()['global_id']
else:
first_id = area.gids[pop][0]
last_id = area.gids[pop][1]
f.write("{area},{pop},{g0},{g1}\n".format(area=area.name,
pop=pop,
g0=area.gids[pop][0],
g1=area.gids[pop][1]))
g0=first_id,
g1=last_id))
def register_runtime(self):
if sumatra_found:
@@ -443,31 +455,39 @@ class Area:
I_e += DC
nest.SetStatus(gid, {'I_e': I_e})
# Store first and last GID of each population
self.gids[pop] = (gid[0], gid[-1])
if self.network.params['USING_NEST_3']:
# Store GIDCollection of each population
self.gids[pop] = gid
else:
# Store first and last GID of each population
self.gids[pop] = (gid[0], gid[-1])
# Initialize membrane potentials
# This could also be done after creating all areas, which
# might yield better performance, but this has not been tested.
for t in np.arange(nest.GetKernelStatus('local_num_threads')):
local_nodes = np.array(nest.GetNodes(
[0], {
'model': self.network.params['neuron_params']['neuron_model'],
'thread': t
}, local_only=True
)[0])
local_nodes_pop = local_nodes[(np.logical_and(local_nodes >= gid[0],
local_nodes <= gid[-1]))]
if len(local_nodes_pop) > 0:
vp = nest.GetStatus([local_nodes_pop[0]], 'vp')[0]
# vp is the same for all local nodes on the same thread
nest.SetStatus(
list(local_nodes_pop), 'V_m', self.simulation.pyrngs[vp].normal(
self.network.params['neuron_params']['V0_mean'],
self.network.params['neuron_params']['V0_sd'],
len(local_nodes_pop))
)
self.num_local_nodes += len(local_nodes_pop)
if self.network.params['USING_NEST_3']:
gid.V_m = nest.random.normal(self.network.params['neuron_params']['V0_mean'],
self.network.params['neuron_params']['V0_sd'])
else:
for t in np.arange(nest.GetKernelStatus('local_num_threads')):
local_nodes = np.array(nest.GetNodes(
[0], {
'model': self.network.params['neuron_params']['neuron_model'],
'thread': t
}, local_only=True
)[0])
local_nodes_pop = local_nodes[(np.logical_and(local_nodes >= gid[0],
local_nodes <= gid[-1]))]
if len(local_nodes_pop) > 0:
vp = nest.GetStatus([local_nodes_pop[0]], 'vp')[0]
# vp is the same for all local nodes on the same thread
nest.SetStatus(
list(local_nodes_pop), 'V_m', self.simulation.pyrngs[vp].normal(
self.network.params['neuron_params']['V0_mean'],
self.network.params['neuron_params']['V0_sd'],
len(local_nodes_pop))
)
self.num_local_nodes += len(local_nodes_pop)
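In the NEST 3 branch, a single assignment with a parameter object replaces the per-thread loop. A self-contained sketch of the idiom (neuron model and values are illustrative, not taken from the model):

```python
# Sketch: randomize initial membrane potentials on a NodeCollection (NEST 3).
import nest

nest.ResetKernel()
pop = nest.Create('iaf_psc_exp', 100)
pop.V_m = nest.random.normal(mean=-58., std=10.)   # one draw per neuron
print(pop.V_m[:5])                                  # inspect a few values
```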
def connect_populations(self):
"""
@@ -482,28 +502,41 @@ class Area:
for pop in self.populations:
# Always record spikes from all neurons to get correct
# statistics
nest.Connect(tuple(range(self.gids[pop][0], self.gids[pop][1] + 1)),
self.simulation.spike_detector)
if self.network.params['USING_NEST_3']:
nest.Connect(self.gids[pop],
self.simulation.spike_detector)
else:
nest.Connect(tuple(range(self.gids[pop][0], self.gids[pop][1] + 1)),
self.simulation.spike_detector)
if self.simulation.params['recording_dict']['record_vm']:
for pop in self.populations:
nrec = int(self.simulation.params['recording_dict']['Nrec_vm_fraction'] *
self.neuron_numbers[pop])
nest.Connect(self.simulation.voltmeter,
tuple(range(self.gids[pop][0], self.gids[pop][0] + nrec + 1)))
if self.network.params['USING_NEST_3']:
nest.Connect(self.simulation.voltmeter,
self.gids[pop][:nrec])
else:
nest.Connect(self.simulation.voltmeter,
tuple(range(self.gids[pop][0], self.gids[pop][0] + nrec + 1)))
if self.network.params['input_params']['poisson_input']:
self.poisson_generators = []
for pop in self.populations:
K_ext = self.external_synapses[pop]
W_ext = self.network.W[self.name][pop]['external']['external']
pg = nest.Create('poisson_generator', 1)
pg = nest.Create('poisson_generator')
nest.SetStatus(
pg, {'rate': self.network.params['input_params']['rate_ext'] * K_ext})
syn_spec = {'weight': W_ext}
nest.Connect(pg,
tuple(
range(self.gids[pop][0], self.gids[pop][1] + 1)),
syn_spec=syn_spec)
if self.network.params['USING_NEST_3']:
nest.Connect(pg,
self.gids[pop],
syn_spec=syn_spec)
else:
nest.Connect(pg,
tuple(
range(self.gids[pop][0], self.gids[pop][1] + 1)),
syn_spec=syn_spec)
self.poisson_generators.append(pg[0])
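Recording from a fraction of a population in NEST 3 reduces to slicing the NodeCollection, as in `self.gids[pop][:nrec]` above. A minimal sketch (model and numbers illustrative):

```python
# Sketch: record membrane potentials from the first nrec neurons of a population.
import nest

nest.ResetKernel()
pop = nest.Create('iaf_psc_exp', 1000)
voltmeter = nest.Create('voltmeter')
nrec = 100
nest.Connect(voltmeter, pop[:nrec])
```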
def create_additional_input(self, input_type, source_area_name, cc_input):
@@ -543,7 +576,7 @@ class Area:
K = synapses[pop][source_pop] / self.neuron_numbers[pop]
if input_type == 'het_current_nonstat':
curr_gen = nest.Create('step_current_generator', 1)
curr_gen = nest.Create('step_current_generator')
dt = self.simulation.params['dt']
T = self.simulation.params['t_sim']
assert(len(cc_input[source_pop]) == int(T))
@@ -551,17 +584,27 @@
'amplitude_times': np.arange(dt,
T + dt,
1.)})
nest.Connect(curr_gen,
tuple(
range(self.gids[pop][0], self.gids[pop][1] + 1)),
syn_spec=syn_spec)
if self.network.params['USING_NEST_3']:
nest.Connect(curr_gen,
self.gids[pop],
syn_spec=syn_spec)
else:
nest.Connect(curr_gen,
tuple(
range(self.gids[pop][0], self.gids[pop][1] + 1)),
syn_spec=syn_spec)
elif 'poisson_stat' in input_type: # hom. and het. poisson lead here
pg = nest.Create('poisson_generator', 1)
pg = nest.Create('poisson_generator')
nest.SetStatus(pg, {'rate': K * cc_input[source_pop]})
nest.Connect(pg,
tuple(
range(self.gids[pop][0], self.gids[pop][1] + 1)),
syn_spec=syn_spec)
if self.network.params['USING_NEST_3']:
nest.Connect(pg,
self.gids[pop],
syn_spec=syn_spec)
else:
nest.Connect(pg,
tuple(
range(self.gids[pop][0], self.gids[pop][1] + 1)),
syn_spec=syn_spec)
def connect(simulation,
@@ -579,6 +622,7 @@ def connect(simulation,
source_area : Area instance
Source area of the projection
"""
network = simulation.network
synapses = extract_area_dict(network.synapses,
network.structure,
@@ -592,37 +636,80 @@
network.structure,
target_area.name,
source_area.name)
for target in target_area.populations:
for source in source_area.populations:
conn_spec = {'rule': 'fixed_total_number',
'N': int(synapses[target][source])}
syn_weight = {'distribution': 'normal_clipped',
'mu': W[target][source],
'sigma': W_sd[target][source]}
if target_area == source_area:
if 'E' in source:
syn_weight.update({'low': 0.})
mean_delay = network.params['delay_params']['delay_e']
elif 'I' in source:
syn_weight.update({'high': 0.})
mean_delay = network.params['delay_params']['delay_i']
else:
v = network.params['delay_params']['interarea_speed']
s = network.distances[target_area.name][source_area.name]
mean_delay = s / v
syn_delay = {'distribution': 'normal_clipped',
'low': simulation.params['dt'],
'mu': mean_delay,
'sigma': mean_delay * network.params['delay_params']['delay_rel']}
syn_spec = {'weight': syn_weight,
'delay': syn_delay,
'model': 'static_synapse'}
nest.Connect(tuple(range(source_area.gids[source][0],
source_area.gids[source][1] + 1)),
tuple(range(target_area.gids[target][0],
target_area.gids[target][1] + 1)),
conn_spec,
syn_spec)
if network.params['USING_NEST_3']:
for target in target_area.populations:
for source in source_area.populations:
conn_spec = {'rule': 'fixed_total_number',
'N': int(synapses[target][source])}
if target_area == source_area:
if 'E' in source:
w_min = 0.
w_max = np.Inf
mean_delay = network.params['delay_params']['delay_e']
elif 'I' in source:
w_min = np.NINF
w_max = 0.
mean_delay = network.params['delay_params']['delay_i']
else:
w_min = 0.
w_max = np.Inf
v = network.params['delay_params']['interarea_speed']
s = network.distances[target_area.name][source_area.name]
mean_delay = s / v
syn_spec = {
'synapse_model': 'static_synapse',
'weight': nest.math.redraw(
nest.random.normal(mean=W[target][source],
std=W_sd[target][source]),
min=w_min,
max=w_max),
'delay': nest.math.redraw(
nest.random.normal(
mean=mean_delay,
std=(mean_delay *
network.params['delay_params']['delay_rel'])),
min=simulation.params['dt'],
max=np.Inf)
}
nest.Connect(source_area.gids[source],
target_area.gids[target],
conn_spec,
syn_spec)
else:
for target in target_area.populations:
for source in source_area.populations:
conn_spec = {'rule': 'fixed_total_number',
'N': int(synapses[target][source])}
syn_weight = {'distribution': 'normal_clipped',
'mu': W[target][source],
'sigma': W_sd[target][source]}
if target_area == source_area:
if 'E' in source:
syn_weight.update({'low': 0.})
mean_delay = network.params['delay_params']['delay_e']
elif 'I' in source:
syn_weight.update({'high': 0.})
mean_delay = network.params['delay_params']['delay_i']
else:
v = network.params['delay_params']['interarea_speed']
s = network.distances[target_area.name][source_area.name]
mean_delay = s / v
syn_delay = {'distribution': 'normal_clipped',
'low': simulation.params['dt'],
'mu': mean_delay,
'sigma': mean_delay * network.params['delay_params']['delay_rel']}
syn_spec = {'weight': syn_weight,
'delay': syn_delay,
'model': 'static_synapse'}
nest.Connect(tuple(range(source_area.gids[source][0],
source_area.gids[source][1] + 1)),
tuple(range(target_area.gids[target][0],
target_area.gids[target][1] + 1)),
conn_spec,
syn_spec)
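NEST 3 drops the 'normal_clipped' distribution used in the old branch; the new branch emulates it with `nest.math.redraw`, which resamples any value falling outside `[min, max]`. A minimal sketch of the pattern for an excitatory weight (values illustrative):

```python
# Sketch: clipped-normal weights via redraw (NEST 3 parameter objects).
import numpy as np
import nest

w_mean, w_sd = 87.8, 8.8     # illustrative weight statistics
syn_spec = {
    'synapse_model': 'static_synapse',
    # resample until the weight is non-negative, i.e. excitatory
    'weight': nest.math.redraw(nest.random.normal(mean=w_mean, std=w_sd),
                               min=0., max=np.inf),
}
```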
@@ -89,8 +89,11 @@ class Theory:
# external drive
syn_dict = {'drift_factor': tau * np.array([K[:, -1] * J[:, -1]]).transpose(),
'diffusion_factor': tau * np.array([K[:, -1] * J[:, -1]**2]).transpose(),
'model': 'diffusion_connection',
'receptor_type': 0}
if self.network.params['USING_NEST_3']:
syn_dict['synapse_model'] = 'diffusion_connection'
else:
syn_dict['model'] = 'diffusion_connection'
nest.Connect(drive, neurons, 'all_to_all', syn_dict)
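The same 'model' vs 'synapse_model' key switch recurs for every diffusion connection below; a small hypothetical helper (not part of this commit) could keep it in one place:

```python
# Hypothetical helper, not in the repository: pick the syn_spec key by NEST version.
def synapse_model_key(network_params):
    return 'synapse_model' if network_params['USING_NEST_3'] else 'model'

# usage sketch:
# syn_dict[synapse_model_key(self.network.params)] = 'diffusion_connection'
```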
# external DC drive (expressed in mV)
@@ -101,8 +104,11 @@
syn_dict = {'drift_factor': 1e3 * tau / C_m * np.array(
self.network.add_DC_drive).reshape(dim, 1),
'diffusion_factor': 0.,
'model': 'diffusion_connection',
'receptor_type': 0}
if self.network.params['USING_NEST_3']:
syn_dict['synapse_model'] = 'diffusion_connection'
else:
syn_dict['model'] = 'diffusion_connection'
nest.Connect(DC_drive, neurons, 'all_to_all', syn_dict)
# handle switches for cortico-cortical connectivity
if (self.network.params['connection_params']['replace_cc'] in
@@ -115,8 +121,11 @@
add_drive = nest.Create('siegert_neuron', 1, params={'rate': 1., 'mean': 1.})
syn_dict = {'drift_factor': np.array([mu_CC]).transpose(),
'diffusion_factor': np.array([sigma2_CC]).transpose(),
'model': 'diffusion_connection',
'receptor_type': 0}
if self.network.params['USING_NEST_3']:
syn_dict['synapse_model'] = 'diffusion_connection'
else:
syn_dict['model'] = 'diffusion_connection'
nest.Connect(add_drive, neurons, 'all_to_all', syn_dict)
elif self.network.params['connection_params']['replace_cc'] == 'het_current_nonstat':
raise NotImplementedError('Replacing the cortico-cortical input by'
@@ -125,8 +134,11 @@
# network connections
syn_dict = {'drift_factor': tau * K[:, :-1] * J[:, :-1],
'diffusion_factor': tau * K[:, :-1] * J[:, :-1]**2,
'model': 'diffusion_connection',
'receptor_type': 0}
if self.network.params['USING_NEST_3']:
syn_dict['synapse_model'] = 'diffusion_connection'
else:
syn_dict['model'] = 'diffusion_connection'
nest.Connect(neurons, neurons, 'all_to_all', syn_dict)
# create recording device
@@ -134,11 +146,14 @@
if interval is None:
interval = dt
multimeter = nest.Create('multimeter', params={'record_from': ['rate'],
'interval': interval,
'to_screen': False,
'to_file': False,
'to_memory': True})
multimeter_params = {'record_from': ['rate'], 'interval': interval}
if self.network.params['USING_NEST_3']:
multimeter_params.update({'record_to': 'memory'})
else:
multimeter_params.update({'to_screen': False,
'to_file': False,
'to_memory': True})
multimeter = nest.Create('multimeter', params=multimeter_params)
# multimeter
nest.Connect(multimeter, neurons)
@@ -168,17 +183,26 @@ class Theory:
initial_rates = next(gen)
print("Iteration: {}".format(iteration))
for i in range(dim):
nest.SetStatus([neurons[i]], {'rate': initial_rates[i]})
if self.network.params['USING_NEST_3']:
nest.SetStatus(neurons[i], {'rate': initial_rates[i]})
else:
nest.SetStatus([neurons[i]], {'rate': initial_rates[i]})
# simulate
nest.Simulate(T + dt)
data = nest.GetStatus(multimeter)[0]['events']
# Extract the data of this iteration
ind = np.where(np.logical_and(data['times'] > total_time,
data['times'] <= total_time + T))
res = np.array([np.insert(data['rate'][ind][np.where(data['senders'][ind] == n)],
0,
initial_rates[i])
for i, n in enumerate(neurons)])
if self.network.params['USING_NEST_3']:
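# In NEST 3, iterating the NodeCollection yields nodes whose global_id matches the 'senders' values recorded by the multimeter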
res = np.array([np.insert(data['rate'][ind][np.where(data['senders'][ind] == n.global_id)],
0,
initial_rates[i])
for i, n in enumerate(neurons)])
else:
res = np.array([np.insert(data['rate'][ind][np.where(data['senders'][ind] == n)],
0,
initial_rates[i])
for i, n in enumerate(neurons)])
rates.append(res)
iteration += 1
total_time += T