Commit 4a2c3dd6, authored 7 years ago by Maximilian Schmidt

Implement Siegert integration with python/scipy, edit network_scaling test

Parent: b3c9c9fa
1 merge request: !1 Add all necessary files for the multi-area-model

Showing 3 changed files with 111 additions and 9 deletions

    multiarea_model/theory.py           +102  −5
    multiarea_model/theory_helpers.py     +1  −1
    tests/test_network_scaling.py         +8  −3

multiarea_model/theory.py  +102 −5

@@ -18,6 +18,7 @@ provedure.
 import json
 import pprint
+import multiprocessing
 import nest
 import numpy as np
@@ -25,8 +26,10 @@ from copy import copy
 from .default_params import nested_update, theory_params
 from .default_params import check_custom_params
 from dicthash import dicthash
+from functools import partial
 from .multiarea_helpers import create_mask, create_vector_mask, dict_to_vector
 from .theory_helpers import d_nu_d_mu_fb_numeric, d_nu_d_sigma_fb_numeric
+from .theory_helpers import nu0_fb


 class Theory():
@@ -61,7 +64,7 @@ class Theory():
     def __hash__(self):
         return hash(self.label)
 
-    def integrate_siegert(self, full_output=True):
+    def integrate_siegert_nest(self, full_output=True):
         """
         Integrate siegert formula to obtain stationary rates. See Eq. (3)
         and following in Schuecker, Schmidt et al. (2017).
@@ -184,6 +187,99 @@ class Theory():
         else:
             return self.network.structure_vec, rates
 
+    def integrate_siegert_python(self, full_output=True, parallel=True):
+        """
+        Integrate siegert formula to obtain stationary rates. See Eq. (3)
+        and following in Schuecker, Schmidt et al. (2017).
+        Use Runge-Kutta in Python.
+        """
+        dt = self.params['dt']
+        T = self.params['T']
+        K = copy(self.network.K_matrix)
+        tau = self.NP['tau_m'] * 1e-3
+        dim = np.shape(K)[0]
+        # Set initial rates of neurons:
+        # If defined as None, set them 0
+        if self.params['initial_rates'] is None:
+            num_iter = 1
+            gen = (np.zeros(dim) for ii in range(num_iter))
+        # iterate over different initial conditions drawn from a random distribution
+        elif self.params['initial_rates'] == 'random_uniform':
+            gen = self.initial_rates(self.params['initial_rates_iter'],
+                                     dim,
+                                     mode=self.params['initial_rates'],
+                                     rate_max=1000.)
+            num_iter = self.params['initial_rates_iter']
+        # initial rates are explicitly defined in self.params
+        elif isinstance(self.params['initial_rates'], np.ndarray):
+            num_iter = 1
+            gen = (self.params['initial_rates'] for ii in range(num_iter))
+        rates = []
+        # Loop over all iterations of different initial conditions
+        for nsim in range(num_iter):
+            print("Iteration {}".format(nsim))
+            initial_rates = next(gen)
+            self.t = np.arange(0, T, dt)
+            y = np.zeros((len(self.t), dim), dtype=float)
+            y[0, :] = initial_rates
+            # Integration loop
+            for i, tl in enumerate(self.t[:-1]):
+                print(tl)
+                delta_y, new_mu, new_sigma = self.Phi(y[i, :],
+                                                      parallel=parallel,
+                                                      return_mu_sigma=True)
+                k1 = delta_y
+                k2 = self.Phi(y[i, :] + dt * 1e-3 / tau / 2 * k1,
+                              parallel=parallel,
+                              return_mu_sigma=False)
+                k3 = self.Phi(y[i, :] + dt * 1e-3 / tau / 2 * k2,
+                              parallel=parallel,
+                              return_mu_sigma=False)
+                k4 = self.Phi(y[i, :] + dt * 1e-3 / tau * k3,
+                              parallel=parallel,
+                              return_mu_sigma=False)
+                y[i + 1, :] = y[i, :].copy() + dt * 1e-3 / 6. * (k1 + 2 * k2 + 2 * k3 + k4) / tau
+            if full_output:
+                rates.append(y.T)
+            else:
+                # Keep only initial and final rates
+                rates.append(y.T[:, [0, -1]])
+        if num_iter == 1:
+            return self.network.structure_vec, rates[0]
+        else:
+            return self.network.structure_vec, rates
+
+    def nu0_fb(self, arg):
+        mu = arg[0]
+        sigma = arg[1]
+        return nu0_fb(mu, sigma,
+                      self.NP['tau_m'] * 1e-3,
+                      self.NP['tau_syn'] * 1e-3,
+                      self.NP['t_ref'] * 1e-3,
+                      self.NP['theta'],
+                      self.NP['V_reset'])
+
+    def Phi(self, rates, parallel=False, return_mu_sigma=False, return_leak=True):
+        mu, sigma = self.mu_sigma(rates, external=True)
+        if parallel:
+            pool = multiprocessing.Pool(processes=4)
+            new_rates = np.array(pool.map(self.nu0_fb, zip(mu, sigma)))
+            pool.close()
+            pool.join()
+        else:
+            new_rates = [self.nu0_fb((m, s)) for m, s in zip(mu, sigma)]
+        if return_leak:
+            new_rates -= rates
+        if return_mu_sigma:
+            return (new_rates), mu, sigma
+        else:
+            return (new_rates)
+
     def replace_cc_input(self):
         """
         Helper function to replace cortico-cortical input by different variants.
@@ -233,7 +329,7 @@ class Theory():
                 n += 1
 
     def mu_sigma(self, rates, external=True, matrix_filter=None,
-                 vector_filter=None, replace_cc=None):
+                 vector_filter=None):
         """
         Calculates mean and variance according to the
         theory.
@@ -246,8 +342,9 @@ class Theory():
         else:
             K = self.network.K_matrix
             J = self.network.J_matrix
-        if replace_cc is not None:
-            mu_CC, sigma2_CC = self.replace_cc_input(replace_cc)
+        if (self.network.params['connection_params']['replace_cc'] in
+                ['hom_poisson_stat', 'het_poisson_stat']):
+            mu_CC, sigma2_CC = self.replace_cc_input()
             mask = create_mask(self.network.structure, cortico_cortical=True, external=False)
             K[mask] = 0.
         else:
@@ -297,7 +394,7 @@ class Theory():
         # Connection probabilities between populations
         C = 1. - (1. - 1. / (N_pre * N_post)) ** (K * N_post)
 
-        mu, sigma = self.mu_sigma(rates, replace_cc=replace_cc)
+        mu, sigma = self.mu_sigma(rates)
         if np.any(vector_filter is not None):
             mu = mu[vector_filter]
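
Note on the new integrate_siegert_python method above: since Phi is called with
return_leak=True, each k term is Phi(nu) - nu evaluated at the shifted rate vector,
so the loop is a classical fourth-order Runge-Kutta step for
dnu/dt = (Phi(nu) - nu) / tau_m, driving nu towards the fixed point nu = Phi(nu).
A minimal standalone sketch of the same scheme, with a toy gain function standing
in for the Siegert formula (the function rk4_rate_integration and all parameter
values below are illustrative assumptions, not part of this commit):

    import numpy as np

    def rk4_rate_integration(phi, y0, tau=0.01, dt=0.1, T=50.0):
        """Integrate dnu/dt = (phi(nu) - nu) / tau with classical RK4.
        dt and T are in ms, tau in s; the 1e-3 factors mirror the diff."""
        def f(v):
            # leak-subtracted drive, cf. Phi(..., return_leak=True)
            return phi(v) - v
        t = np.arange(0, T, dt)
        y = np.zeros((len(t), len(y0)))
        y[0] = y0
        for i in range(len(t) - 1):
            k1 = f(y[i])
            k2 = f(y[i] + dt * 1e-3 / tau / 2 * k1)
            k3 = f(y[i] + dt * 1e-3 / tau / 2 * k2)
            k4 = f(y[i] + dt * 1e-3 / tau * k3)
            y[i + 1] = y[i] + dt * 1e-3 / 6. * (k1 + 2 * k2 + 2 * k3 + k4) / tau
        return t, y

    # Toy usage: a saturating gain function instead of the Siegert formula.
    t, y = rk4_rate_integration(lambda v: 10. * np.tanh(v / 10.) + 1., np.zeros(4))
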

multiarea_model/theory_helpers.py  +1 −1

@@ -26,7 +26,7 @@ import scipy.stats
 import scipy.special
 
 
-def nu0_fb(tau_m, tau_s, tau_r, V_th, V_r, mu, sigma):
+def nu0_fb(mu, sigma, tau_m, tau_s, tau_r, V_th, V_r):
     """
     Compute the stationary firing rate of a neuron with synaptic
     filter of time constant tau_s driven by Gaussian white noise, from
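
Note on the reordered nu0_fb signature above: putting the per-population inputs
(mu, sigma) ahead of the fixed neuron parameters matches the new Theory.nu0_fb
wrapper in theory.py, which fills in the time constants and thresholds from
self.NP and is mapped over (mu, sigma) pairs via multiprocessing.Pool in Phi.
A small sketch of that pattern, assuming a toy siegert_rate placeholder (not the
actual Siegert formula) and made-up parameter values:

    import multiprocessing
    import numpy as np

    def siegert_rate(mu, sigma, tau_m=0.01, tau_s=0.0005, tau_r=0.002,
                     V_th=15., V_r=0.):
        # Toy placeholder with the same argument order as the new nu0_fb;
        # it is NOT the Siegert formula.
        return 1. / (tau_r + tau_m * (1. + np.exp((V_th - mu) / sigma)))

    def rate_from_tuple(arg):
        # pool.map passes a single argument per call, so mu and sigma
        # are bundled into one tuple, as in Phi's zip(mu, sigma).
        mu, sigma = arg
        return siegert_rate(mu, sigma)

    if __name__ == '__main__':
        mu = np.array([10., 20., 5.])
        sigma = np.array([4., 6., 3.])
        with multiprocessing.Pool(processes=2) as pool:
            rates = pool.map(rate_from_tuple, zip(mu, sigma))
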

tests/test_network_scaling.py  +8 −3

@@ -19,7 +19,7 @@ K0 = M0.K_matrix
 W0 = M0.W_matrix
 N0 = M0.N_vec
 syn0 = M0.syn_matrix
 
-p, r0 = M0.theory.integrate_siegert()
+p, r0 = M0.theory.integrate_siegert_nest()
 
 d = vector_to_dict(r0[:, -1],
                    M0.area_list,
@@ -31,14 +31,15 @@ with open('mf_rates.json', 'w') as f:
 network_params = {'N_scaling': .1,
                   'K_scaling': .1,
                   'fullscale_rates': 'mf_rates.json'}
-theory_params = {'initial_rates': r0[:, -1]}
+theory_params = {'initial_rates': r0[:, -1],
+                 'T': 50.}
 
 M = MultiAreaModel(network_params, theory=True, theory_spec=theory_params)
 K = M.K_matrix
 W = M.W_matrix
 N = M.N_vec
 syn = M.syn_matrix
 
-p, r = M.theory.integrate_siegert()
+p, r = M.theory.integrate_siegert_nest()
 
 assert(np.allclose(K, network_params['K_scaling'] * K0))
 assert(np.allclose(N, network_params['N_scaling'] * N0))
@@ -56,6 +57,10 @@ mu = 1e-3 * tau_m * np.dot(M.K_matrix * M.J_matrix, r0_extend) + tau_m / C_m * M
 sigma0 = np.sqrt(1e-3 * tau_m * np.dot(M0.K_matrix * M0.J_matrix ** 2, r0_extend))
 sigma = np.sqrt(1e-3 * tau_m * np.dot(M.K_matrix * M.J_matrix ** 2, r0_extend))
 
+p, r0_py = M0.theory.integrate_siegert_python(parallel=False)
+p, r_py = M.theory.integrate_siegert_python(parallel=False)
+
 assert(np.allclose(mu, mu0))
 assert(np.allclose(sigma, sigma0))
 assert(np.allclose(r[:, -1], r0[:, -1]))
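
Note on the test changes above: the test now calls the renamed NEST back end
(integrate_siegert_nest) for both the fullscale and the scaled model, limits the
integration time via 'T': 50. in theory_params, and additionally runs the new
integrate_siegert_python(parallel=False) for both models; the assertions shown
compare the scaled matrices against the originals and the final NEST rates of the
two models. If one later wanted to check that the two back ends agree, a sketch of
such a consistency test could look like this (the tolerances are assumptions, not
taken from this commit):

    import numpy as np

    def backends_agree(r_nest, r_python, rtol=1e-2, atol=0.1):
        """Compare the final (stationary) rates, i.e. the last column,
        returned by the NEST and Python Siegert integrators."""
        return np.allclose(r_nest[:, -1], r_python[:, -1], rtol=rtol, atol=atol)

    # e.g. assert backends_agree(r0, r0_py); assert backends_agree(r, r_py)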