From 3e8e362ae5bff02742a66851a5d2fc45e12b3f11 Mon Sep 17 00:00:00 2001
From: Maximilian Schmidt <max.schmidt@fz-juelich.de>
Date: Sat, 31 Mar 2018 10:49:13 +0900
Subject: [PATCH] Fixed documentation and doc-strings as requested by @albada

---
 README.md                                     |  34 +++---
 .../data_multiarea/raw_data/BinzeggerData.csv |   2 +-
 .../raw_data/Euclidean_Distances.csv          |   2 +-
 .../raw_data/Intrinsic_FLN_Data.csv           |   2 +-
 .../raw_data/Laminar_Thickness_cat.csv        |   2 +-
 .../raw_data/SchemeTranslation.csv            |   4 +-
 .../raw_data/Thom_Distances.csv               |   2 +-
 .../raw_data/cortical_surface.csv             |   2 +-
 .../raw_data/hierarchy_Markov.csv             |   2 +-
 .../raw_data/hierarchy_Reid.csv               |   2 +-
 multiarea_model/default_params.py             |  13 +--
 multiarea_model/multiarea_model.py            |   8 +-
 multiarea_model/theory_helpers.py             |   5 +-
 run_example.py                                |   7 +-
 tests/test_network_scaling.py                 | 110 +++++++++---------
 15 files changed, 102 insertions(+), 95 deletions(-)

diff --git a/README.md b/README.md
index 07161e2..705b6d5 100644
--- a/README.md
+++ b/README.md
@@ -26,11 +26,11 @@ reproduce the results of all three papers.
 The entire framework is summarized in the figure below:
 ![Sketch of the framework](framework_sketch.png)
 
-In principle, we strictly separate the structure of the network
-(defined by population sizes, synapse numbers/indegrees etc.) from its dynamics
-(neuron model, neuron parameters, strength of external
-input, etc.). The complete set of default parameters for all components
-of the framework is defined in `default_params.py`.
+We separate the structure of the network (defined by population sizes,
+synapse numbers/indegrees, etc.) from its dynamics (neuron model,
+neuron parameters, strength of external input, etc.). The complete set
+of default parameters for all components of the framework is defined
+in `default_params.py`.
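+
+As a minimal sketch of how custom parameters override these defaults
+(illustrative values; the import assumes the class is exposed by the
+`multiarea_model` package):
+
+	from multiarea_model import MultiAreaModel
+
+	# Structural overrides; everything not given here falls back to
+	# the defaults in default_params.py.
+	network_params = {'N_scaling': 0.1,   # scale population sizes
+	                  'K_scaling': 0.1}   # scale indegrees
+	M = MultiAreaModel(network_params, theory=True)
+	p, r = M.theory.integrate_siegert()   # mean-field estimate of stationary rates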
 
 --------------------------------------------------------------------------------
 
@@ -127,8 +127,8 @@ The multi-area model can be run in different modes.
 
 1. Full model
 
-   Simulating the entire networks with all 32 areas and the connections between
-   them is the default mode configure in `default_params.py`.
+   Simulating the entire network of all 32 areas with the connectivity
+   defined in `default_params.py` is the default mode.
 
 2. Down-scaled model
 
@@ -154,13 +154,18 @@ The multi-area model can be run in different modes.
 
 4. Cortico-cortical connections replaced
 
-   In addition, it is possible to replace the cortico-cortical connections between simulated
-   areas with the options `het_poisson_stat` or `current_nonstat`.
+   In addition, it is possible to replace the cortico-cortical
+   connections between simulated areas with the options
+   `het_poisson_stat` or `current_nonstat`. This mode can be used
+   with the full network of 32 areas or with a subset of them, thereby
+   combining it with the previous mode, 'Subset of the network' (see
+   the sketch below).
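+
+For this mode, a minimal configuration sketch (assuming the
+`replace_cc` and `replace_cc_input_source` entries of
+`connection_params` described in `default_params.py`; the rate file
+name is only a placeholder):
+
+	conn_params = {'replace_cc': 'het_poisson_stat',
+	               'replace_cc_input_source': 'cc_rates.json'}
+	network_params = {'connection_params': conn_params}
+	M = MultiAreaModel(network_params)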
 
 ## Testsuite
 
-The `tests/` folder holds a testsuite that tests different aspects of network model initalization and meanfield calculations.
-It can be conveniently run by executing `pytest` in the `tests/` folder:
+The `tests/` folder holds a testsuite that tests different aspects of
+network model initialization and mean-field calculations. It can be
+conveniently run by executing `pytest` in the `tests/` folder:
 
 	cd tests/
 	pytest
@@ -189,8 +194,11 @@ The SLN fit in `multiarea_model/data_multiarea/VisualCortex_Data.py` and `figure
 
 ## Contributors
 
-All authors of the publications [1-3] made contributions to the scientific content.
-The code base was written by Maximilian Schmidt, Jannis Schuecker, and Sacha van Albada. Testing and review was supported by Alexander van Meegen.
+All authors of the publications [1-3] made contributions to the
+scientific content. The code base was written by Maximilian Schmidt,
+Jannis Schuecker, and Sacha van Albada with small contributions from
+Moritz Helias. Testing and review were supported by Alexander van
+Meegen.
 
 ## Citation
 
diff --git a/multiarea_model/data_multiarea/raw_data/BinzeggerData.csv b/multiarea_model/data_multiarea/raw_data/BinzeggerData.csv
index 73d65c6..9373c0d 100644
--- a/multiarea_model/data_multiarea/raw_data/BinzeggerData.csv
+++ b/multiarea_model/data_multiarea/raw_data/BinzeggerData.csv
@@ -1,4 +1,4 @@
-# Taken from Binzegger, T., Douglas, R. J., & Martin, K. a C. (2004). A quantitative map of the circuit of cat primary visual cortex. The Journal of Neuroscience : The Official Journal of the Society for Neuroscience, 24(39), 8441–53. https://doi.org/10.1523/JNEUROSCI.1400-04.2004
+# Taken from Binzegger, T., Douglas, R. J., & Martin, K. A. C. (2004). A quantitative map of the circuit of cat primary visual cortex. The Journal of Neuroscience: The Official Journal of the Society for Neuroscience, 24(39), 8441–53. https://doi.org/10.1523/JNEUROSCI.1400-04.2004. The full table of data is also available in Izhikevich, E. M., & Edelman, G. M. (2008). Large-scale model of mammalian thalamocortical systems. Proceedings of the National Academy of Sciences of the United States of America, 105(9), 3593–8. https://doi.org/10.1073/pnas.0712231105.
 																										
 		percent of cells (respect to total area)	number of synapses per neuron	nb1	p2/3	b2/3	nb2/3	ss4(L4)	ss4(L2/3)	p4	b4	nb4	p5(L2/3)	p5(L5/6)	b5	nb5	p6(L4)	p6(L5/6)	b6	nb6	corticocortical	TCs	TCn	TIs	TIn	TRN
 nb1		1.5	8890	10.1	6.3	0.6	1.1	0	0	0.1	0	0	0.1	0	0	0	0	0	0	0	77.6	0	4.1			
diff --git a/multiarea_model/data_multiarea/raw_data/Euclidean_Distances.csv b/multiarea_model/data_multiarea/raw_data/Euclidean_Distances.csv
index ea01607..4ca2add 100644
--- a/multiarea_model/data_multiarea/raw_data/Euclidean_Distances.csv
+++ b/multiarea_model/data_multiarea/raw_data/Euclidean_Distances.csv
@@ -1,4 +1,4 @@
-#Taken from Cocomac database (www.cocomac.org), the center of the areas is computed (Details to be clarified) and the distance between two areas is taken to be the euclidian distance between the center points
+# Taken from the CoCoMac database (www.cocomac.org); the distance between two areas is taken to be the Euclidean distance between the center points of the two areas
 
 	V1	V2	V3	VP	V3A	V4	VOT	V4t	MT	FST	PITd	PITv	CITd	CITv	AITd	AITv	STPp	STPa	TF	TH	MSTd	MSTl	PO	PIP	LIP	VIP	MIP	MDP	DP	7a	FEF	46
 V1	0	7.669	10.407	14.918	9.502	20.718	20.303	15.964	17.943	26.424	22.552	25.953	29.645	32.295	35.085	34.961	35.125	34.287	20.186	15.843	13.643	19.724	9.591	4.85	18.223	14.97	12.83	18.891	16.286	21.617	49.116	53.474
diff --git a/multiarea_model/data_multiarea/raw_data/Intrinsic_FLN_Data.csv b/multiarea_model/data_multiarea/raw_data/Intrinsic_FLN_Data.csv
index 4c5839b..ab1086d 100644
--- a/multiarea_model/data_multiarea/raw_data/Intrinsic_FLN_Data.csv
+++ b/multiarea_model/data_multiarea/raw_data/Intrinsic_FLN_Data.csv
@@ -1,4 +1,4 @@
-# Data from supplement of Markov et al. (2011) "Weight Consistency Specifies Regularities of Macaque Cortical Networks"
+# Data from supplement of Markov et al. (2011) "Weight Consistency Specifies Regularities of Macaque Cortical Networks" Cereb Cortex. 21(6):1254-72. doi: 10.1093/cercor/bhq201.
 
 V1	0.85	0.05
 V2	0.75	0.05
diff --git a/multiarea_model/data_multiarea/raw_data/Laminar_Thickness_cat.csv b/multiarea_model/data_multiarea/raw_data/Laminar_Thickness_cat.csv
index 1d3ba54..b20e9f3 100644
--- a/multiarea_model/data_multiarea/raw_data/Laminar_Thickness_cat.csv
+++ b/multiarea_model/data_multiarea/raw_data/Laminar_Thickness_cat.csv
@@ -1,4 +1,4 @@
-# Thickness of Laminae and errors taken from Table 1 of Beaulieu, C., & Colonnier, M. (1983). The number of neurons in the different laminae of the binocular and monocular regions of area 17 in the cat. Journal of Comparative Neurology, 344. Retrieved from http://onlinelibrary.wiley.com/doi/10.1002/cne.902170308/abstract for cat area 17, layer names are taken from the table		
+# Thickness of laminae and errors for cat area 17, taken from Table 1 of Beaulieu, C., & Colonnier, M. (1983). The number of neurons in the different laminae of the binocular and monocular regions of area 17 in the cat. Journal of Comparative Neurology, 344. Retrieved from http://onlinelibrary.wiley.com/doi/10.1002/cne.902170308/abstract; layer names are taken from the table
 
 1	0.166	0.010              		
 2	0.128	0.026
diff --git a/multiarea_model/data_multiarea/raw_data/SchemeTranslation.csv b/multiarea_model/data_multiarea/raw_data/SchemeTranslation.csv
index 5912697..3ef71b0 100644
--- a/multiarea_model/data_multiarea/raw_data/SchemeTranslation.csv
+++ b/multiarea_model/data_multiarea/raw_data/SchemeTranslation.csv
@@ -1,6 +1,6 @@
-# This translation is compiled by collecting information from Markov et al. (2011), Hilgetag et al. (2015), and David van Essen (2002) ""Organization of Visual Areas in Macaque and Human Cerebral Cortex"
+# This translation is compiled by collecting information from Markov et al. (2011), Hilgetag et al. (2015), and David Van Essen (2002) "Organization of Visual Areas in Macaque and Human Cerebral Cortex"
 	
-Area Barbas	Area Fellmann/van Essen [2]
+Area Barbas	Area Felleman/Van Essen
 TEO	PITd
 TEO	PITv
 TEO	VOT
diff --git a/multiarea_model/data_multiarea/raw_data/Thom_Distances.csv b/multiarea_model/data_multiarea/raw_data/Thom_Distances.csv
index 2613f8b..9595495 100644
--- a/multiarea_model/data_multiarea/raw_data/Thom_Distances.csv
+++ b/multiarea_model/data_multiarea/raw_data/Thom_Distances.csv
@@ -1,4 +1,4 @@
-#Taken from Cocomac database (www.cocomac.org), the center of the areas is computed (Details to be clarified) and the distance between two areas is taken to be the distance between the center points, which is computed via a sophisticated method of Thom Oostendorp
+# Taken from the CoCoMac database (www.cocomac.org); the distance between two areas is taken to be the distance between the center points of the two areas, computed with a method developed by Thom Oostendorp
 
 	V1	V2	V3	VP	V3A	V4	VOT	V4t	MT	FST	PITd	PITv	CITd	CITv	AITd	AITv	STPp	STPa	TF	TH	MSTd	MSTl	PO	PIP	LIP	VIP	MIP	MDP	DP	7a	FEF	46
 V1	0	18.71	10.685	19.494	9.639	21.216	22.492	16.695	18.994	26.502	23.301	26.687	30.883	34.236	37.03	37.114	36.349	34.388	23.492	20.946	13.495	19.112	8.973	4.904	19.298	15.454	13.004	18.594	17.428	23.416	49.578	52.619
diff --git a/multiarea_model/data_multiarea/raw_data/cortical_surface.csv b/multiarea_model/data_multiarea/raw_data/cortical_surface.csv
index c3571b7..37bbde6 100644
--- a/multiarea_model/data_multiarea/raw_data/cortical_surface.csv
+++ b/multiarea_model/data_multiarea/raw_data/cortical_surface.csv
@@ -1,4 +1,4 @@
-# Computed with Caret software, Details to be clarified, unit is mm2
+# Computed with Caret software, unit is mm2
 
 7a	157.342
 DP	113.834
diff --git a/multiarea_model/data_multiarea/raw_data/hierarchy_Markov.csv b/multiarea_model/data_multiarea/raw_data/hierarchy_Markov.csv
index 087e739..c0b9d18 100644
--- a/multiarea_model/data_multiarea/raw_data/hierarchy_Markov.csv
+++ b/multiarea_model/data_multiarea/raw_data/hierarchy_Markov.csv
@@ -1,4 +1,4 @@
-Taken from Markov et al. (2014) "Anatomy of Hierarchy: Feedforward and Feedback Pathways in Macaque Visual Cortex"
+Taken from Markov et al. (2014) "Anatomy of Hierarchy: Feedforward and Feedback Pathways in Macaque Visual Cortex", Journal of Comparative Neurology, 522(1), 225-259.
 
 V1,0.,1.
 V2,0.2831,2.
diff --git a/multiarea_model/data_multiarea/raw_data/hierarchy_Reid.csv b/multiarea_model/data_multiarea/raw_data/hierarchy_Reid.csv
index 4b9d33a..872803d 100644
--- a/multiarea_model/data_multiarea/raw_data/hierarchy_Reid.csv
+++ b/multiarea_model/data_multiarea/raw_data/hierarchy_Reid.csv
@@ -1,4 +1,4 @@
-Taken from Reid et al. (2009) "Optimization of cortical hierarchies with continuous scales and ranges", Fig 4.
+Taken from Fig. 4 of Reid et al. (2009) "Optimization of cortical hierarchies with continuous scales and ranges", Neuroimage, 47(2), 611-617.
 
 7a	0.6622
 PITv	0.6048
diff --git a/multiarea_model/default_params.py b/multiarea_model/default_params.py
index 6d18b3d..15dbba6 100644
--- a/multiarea_model/default_params.py
+++ b/multiarea_model/default_params.py
@@ -36,7 +36,7 @@ av_indegree_OKusky = raw_data['av_indegree_OKusky']
 Simulation parameters
 """
 sim_params = {
-    # master seed for random
+    # master seed for random number generators
     'master_seed': 0,
     # simulation step (in ms)
     'dt': 0.1,
@@ -62,7 +62,7 @@ network_params = {
     'N_scaling': 1.,
     # Scaling of indegrees
     'K_scaling': 1.,
-    # Full_scale rates for scaling synaptic weights
+    # Full-scale rates for scaling synaptic weights
     'fullscale_rates': None
 }
 
@@ -120,8 +120,7 @@ connection_params = {
     # Whether to apply the stabilization method of
     # Schuecker, Schmidt et al. (2017). Default is False.
     # Options are True to perform the stabilization or
-    # a numpy.ndarray specifying the stabilized matrix or
-    # a tuple of strings where the first component specifies a binary
+    # a string that specifies the name of a binary
     # numpy file containing the connectivity matrix
     'K_stable': False,
 
@@ -132,7 +131,7 @@ connection_params = {
     # the cortico-cortical input is loaded from `replace_cc_input_source`.
     'replace_cc': False,
 
-    # Whether to replace non-simulated areas by poisson sources
+    # Whether to replace non-simulated areas by Poisson sources
     # with the same global rate rate_ext ('hom_poisson_stat') or
     # by specific rates ('het_poisson_stat')
     # or by time-varying specific current ('het_current_nonstat')
@@ -164,7 +163,7 @@ connection_params = {
     # constant --> conserve syn. volume density
     'rho_syn': 'constant',
 
-    # Increase the external poisson indegree onto 5E and 6E
+    # Increase the external Poisson indegree onto 5E and 6E
     'fac_nu_ext_5E': 1.,
     'fac_nu_ext_6E': 1.,
     # to increase the ext. input to 23E and 5E in area TH
@@ -222,7 +221,7 @@ input_params = {
     # synapse type for Poisson input
     'syn_type_ext': 'static_synapse_hpc',
 
-    # Rate of the Poissonian spike generator (in Hz).
+    # Rate of the Poissonian spike generator (in spikes/s).
     'rate_ext': 10.,
 
     # Whether to switch on time-dependent DC input
diff --git a/multiarea_model/multiarea_model.py b/multiarea_model/multiarea_model.py
index 54cd8da..fb7c0a1 100644
--- a/multiarea_model/multiarea_model.py
+++ b/multiarea_model/multiarea_model.py
@@ -3,7 +3,7 @@ multiarea_model
 ==============
 
 Network class to instantiate and administer instances of the
-multi-area model of macaque visual cortex by Schmidt el. (2018).
+multi-area model of macaque visual cortex by Schmidt et al. (2018).
 
 Classes
 -------
@@ -90,7 +90,7 @@ class MultiAreaModel:
             self.custom_params = network_spec
             p_ = 'multiarea_model/data_multiarea/custom_data_files'
             # Draw random integer label for data script to avoid clashes with
-            # parallely created class instances
+            # class instances created in parallel
             rand_data_label = np.random.randint(10000)
             print("RAND_DATA_LABEL", rand_data_label)
             tmp_parameter_fn = os.path.join(base_path,
@@ -238,14 +238,14 @@ class MultiAreaModel:
 
         This function:
         - adjusts the synaptic weights such that the population-averaged
-          stationary spike rates approximately match the given `fullscale_rates`.
+          stationary spike rates approximately match the given `fullscale_rates`.
         - scales the population sizes with `N_scaling` and indegrees with `K_scaling`.
         - scales the synapse numbers with `N_scaling`*`K_scaling`.
         """
         # population sizes
         self.N_vec *= self.params['N_scaling']
 
-        # Scale the synaptic weights before the indegrees to use fullscale indegrees
+        # Scale the synaptic weights before the indegrees to use full-scale indegrees
         self.adj_W_to_K()
         # Then scale the indegrees and synapse numbers
         self.K_matrix *= self.params['K_scaling']
diff --git a/multiarea_model/theory_helpers.py b/multiarea_model/theory_helpers.py
index af2b4b6..b01de23 100644
--- a/multiarea_model/theory_helpers.py
+++ b/multiarea_model/theory_helpers.py
@@ -4,7 +4,7 @@ theory_helpers
 
 Helper function for the theory class.
 Evaluates the Siegert formula and its
-derivations.
+derivatives.
 
 
 Functions
@@ -15,6 +15,7 @@ Authors
 --------
 Maximilian Schmidt
 Jannis Schuecker
+Moritz Helias
 
 """
 
@@ -29,7 +30,7 @@ def nu0_fb(tau_m, tau_s, tau_r, V_th, V_r, mu, sigma):
     """
     Compute the stationary firing rate of a neuron with synaptic
     filter of time constant tau_s driven by Gaussian white noise, from
-    Fourcoud & Brunel 2002. #
+    Fourcaud & Brunel 2002.
 
     Parameters
     ----------
diff --git a/run_example.py b/run_example.py
index 40251bf..8945b41 100644
--- a/run_example.py
+++ b/run_example.py
@@ -16,8 +16,7 @@ Full model. Needs to be simulated with sufficient
 resources, for instance on a compute cluster.
 """
 d = {}
-conn_params = {'replace_non_simulated_areas': 'het_poisson_stat',
-               'g': -11.,
+conn_params = {'g': -11.,
                'K_stable': 'K_stable.npy',
                'fac_nu_ext_TH': 1.2,
                'fac_nu_ext_5E': 1.125,
@@ -32,7 +31,7 @@ network_params = {'N_scaling': 1.,
                   'neuron_params': neuron_params}
 
 sim_params = {'t_sim': 2000.,
-              'num_processes': 360,
+              'num_processes': 720,
               'num_rec_processes': 1,
               'local_num_threads': 1,
               'input_params': input_params,
@@ -51,7 +50,7 @@ start_job(M.simulation.label, submit_cmd, jobscript_template)
 """
 Down-scaled model.
 Neurons and indegrees are both scaled down to 10 %.
-Can be usually simulated on a local machine.
+Can usually be simulated on a local machine.
 
 Warning: This will not yield reasonable dynamical results from the
 network and is only meant to demonstrate the simulation workflow.
diff --git a/tests/test_network_scaling.py b/tests/test_network_scaling.py
index ecb1a52..7bca721 100644
--- a/tests/test_network_scaling.py
+++ b/tests/test_network_scaling.py
@@ -4,58 +4,58 @@ import numpy as np
 import json
 
 
-def test_network_scaling():
-    """
-    Test the downscaling option of the network.
-
-    - Test whether indegrees and neuron number are correctly scaled down.
-    - Test whether the resulting mean and variance of the input currents
-      as well as the resulting rates are identical, based on mean-field theory.
-    """
-
-    network_params = {}
-    M0 = MultiAreaModel(network_params, theory=True)
-    K0 = M0.K_matrix
-    W0 = M0.W_matrix
-    N0 = M0.N_vec
-    syn0 = M0.syn_matrix
-    p, r0 = M0.theory.integrate_siegert()
-
-    d = vector_to_dict(r0[:, -1],
-                       M0.area_list,
-                       M0.structure)
-
-    with open('mf_rates.json', 'w') as f:
-        json.dump(d, f)
-
-    network_params = {'N_scaling': .1,
-                      'K_scaling': .1,
-                      'fullscale_rates': 'mf_rates.json'}
-    theory_params = {'initial_rates': r0[:, -1]}
-    M = MultiAreaModel(network_params, theory=True, theory_spec=theory_params)
-
-    K = M.K_matrix
-    W = M.W_matrix
-    N = M.N_vec
-    syn = M.syn_matrix
-    p, r = M.theory.integrate_siegert()
-
-    assert(np.allclose(K, network_params['K_scaling'] * K0))
-    assert(np.allclose(N, network_params['N_scaling'] * N0))
-    assert(np.allclose(syn, network_params['K_scaling'] * network_params['N_scaling'] * syn0))
-    assert(np.allclose(W, W0 / np.sqrt(network_params['K_scaling'])))
-
-    r0_extend = np.append(r0[:, -1], M0.params['input_params']['rate_ext'])
-    tau_m = M.params['neuron_params']['single_neuron_dict']['tau_m']
-    C_m = M.params['neuron_params']['single_neuron_dict']['C_m']
-
-    mu0 = (1e-3 * tau_m * np.dot(M0.K_matrix * M0.J_matrix, r0_extend)
-           + tau_m / C_m * M0.add_DC_drive)
-    mu = 1e-3 * tau_m * np.dot(M.K_matrix * M.J_matrix, r0_extend) + tau_m / C_m * M.add_DC_drive
-
-    sigma0 = np.sqrt(1e-3 * tau_m * np.dot(M0.K_matrix * M0.J_matrix**2, r0_extend))
-    sigma = np.sqrt(1e-3 * tau_m * np.dot(M.K_matrix * M.J_matrix**2, r0_extend))
-
-    assert(np.allclose(mu, mu0))
-    assert(np.allclose(sigma, sigma0))
-    assert(np.allclose(r[:, -1], r0[:, -1]))
+def test_network_scaling():
+    """
+    Test the downscaling option of the network.
+
+    - Test whether indegrees and neuron number are correctly scaled down.
+    - Test whether the resulting mean and variance of the input currents
+      as well as the resulting rates are identical, based on mean-field theory.
+    """
+
+    network_params = {}
+    M0 = MultiAreaModel(network_params, theory=True)
+    K0 = M0.K_matrix
+    W0 = M0.W_matrix
+    N0 = M0.N_vec
+    syn0 = M0.syn_matrix
+    p, r0 = M0.theory.integrate_siegert()
+
+    d = vector_to_dict(r0[:, -1],
+                       M0.area_list,
+                       M0.structure)
+
+    with open('mf_rates.json', 'w') as f:
+        json.dump(d, f)
+
+    network_params = {'N_scaling': .1,
+                      'K_scaling': .1,
+                      'fullscale_rates': 'mf_rates.json'}
+    theory_params = {'initial_rates': r0[:, -1]}
+    M = MultiAreaModel(network_params, theory=True, theory_spec=theory_params)
+
+    K = M.K_matrix
+    W = M.W_matrix
+    N = M.N_vec
+    syn = M.syn_matrix
+    p, r = M.theory.integrate_siegert()
+
+    assert(np.allclose(K, network_params['K_scaling'] * K0))
+    assert(np.allclose(N, network_params['N_scaling'] * N0))
+    assert(np.allclose(syn, network_params['K_scaling'] * network_params['N_scaling'] * syn0))
+    assert(np.allclose(W, W0 / np.sqrt(network_params['K_scaling'])))
+
+    r0_extend = np.append(r0[:, -1], M0.params['input_params']['rate_ext'])
+    tau_m = M.params['neuron_params']['single_neuron_dict']['tau_m']
+    C_m = M.params['neuron_params']['single_neuron_dict']['C_m']
+
+    mu0 = (1e-3 * tau_m * np.dot(M0.K_matrix * M0.J_matrix, r0_extend)
+           + tau_m / C_m * M0.add_DC_drive)
+    mu = 1e-3 * tau_m * np.dot(M.K_matrix * M.J_matrix, r0_extend) + tau_m / C_m * M.add_DC_drive
+
+    sigma0 = np.sqrt(1e-3 * tau_m * np.dot(M0.K_matrix * M0.J_matrix**2, r0_extend))
+    sigma = np.sqrt(1e-3 * tau_m * np.dot(M.K_matrix * M.J_matrix**2, r0_extend))
+
+    assert(np.allclose(mu, mu0))
+    assert(np.allclose(sigma, sigma0))
+    assert(np.allclose(r[:, -1], r0[:, -1]))
-- 
GitLab