From 48ad369d8d32a08c70f11247754c17557cd7930c Mon Sep 17 00:00:00 2001
From: Thorsten Hater <24411438+thorstenhater@users.noreply.github.com>
Date: Thu, 16 Jun 2022 12:02:17 +0200
Subject: [PATCH] Adopt Black for Python. (#1906)

---
 .github/workflows/lint.yml                    |  22 +
 doc/conf.py                                   |  50 +-
 doc/contrib/coding-style.rst                  |  21 +
 doc/scripts/gen-labels.py                     | 451 +++++++------
 doc/scripts/inputs.py                         | 578 +++++++++++++---
 doc/scripts/make_images.py                    | 577 +++++++++++-----
 doc/scripts/representation.py                 |  79 ++-
 example/lfp/neuron_lfp_example.py             | 130 ++--
 example/lfp/plot-lfp.py                       |  80 ++-
 python/__init__.py                            |   6 +-
 python/example/brunel.py                      | 192 +++++-
 python/example/dynamic-catalogue.py           |  12 +-
 python/example/gap_junctions.py               |  69 +-
 python/example/network_ring.py                |  76 ++-
 python/example/network_ring_mpi.py            |  72 +-
 python/example/network_ring_mpi_plot.py       |   6 +-
 python/example/single_cell_allen.py           | 115 ++--
 python/example/single_cell_cable.py           | 131 ++--
 python/example/single_cell_detailed.py        |  71 +-
 python/example/single_cell_detailed_recipe.py |  76 ++-
 .../single_cell_extracellular_potentials.py   | 157 +++--
 python/example/single_cell_model.py           |  29 +-
 python/example/single_cell_nml.py             |  59 +-
 python/example/single_cell_recipe.py          |  31 +-
 python/example/single_cell_stdp.py            |  67 +-
 python/example/single_cell_swc.py             |  61 +-
 python/example/two_cell_gap_junctions.py      | 102 +--
 python/setup.py                               |  41 +-
 python/test/cases.py                          |  14 +-
 python/test/fixtures.py                       |  49 +-
 python/test/unit/test_cable_probes.py         |  40 +-
 python/test/unit/test_catalogues.py           |  43 +-
 python/test/unit/test_clear_samplers.py       |  10 +-
 python/test/unit/test_contexts.py             |  35 +-
 python/test/unit/test_decor.py                |  13 +-
 .../test/unit/test_domain_decompositions.py   |  71 +-
 python/test/unit/test_event_generators.py     |  12 +-
 python/test/unit/test_identifiers.py          |   6 +-
 python/test/unit/test_morphology.py           |  54 +-
 python/test/unit/test_multiple_connections.py | 623 ++++++++++--------
 python/test/unit/test_profiling.py            |  38 +-
 python/test/unit/test_schedules.py            | 174 ++---
 python/test/unit/test_spikes.py               |   5 +-
 .../unit_distributed/test_contexts_arbmpi.py  |  17 +-
 .../unit_distributed/test_contexts_mpi4py.py  |  18 +-
 .../test_domain_decompositions.py             | 160 +++--
 .../test/unit_distributed/test_simulator.py   |  19 +-
 scripts/build-catalogue.in                    | 195 +++---
 scripts/patchwheel.py                         |  38 +-
 scripts/test-catalogue.py                     |   8 +-
 scripts/where.py                              |  24 +-
 setup.py                                      |  94 +--
 spack/package.py                              | 118 ++--
 validation/ref/neuron/ball_and_3stick.py      |  17 +-
 validation/ref/neuron/ball_and_squiggle.py    |  16 +-
 validation/ref/neuron/ball_and_stick.py       |  11 +-
 validation/ref/neuron/ball_and_taper.py       |  11 +-
 validation/ref/neuron/nrn_validation.py       | 181 ++---
 validation/ref/neuron/simple_exp2_synapse.py  |  11 +-
 validation/ref/neuron/simple_exp_synapse.py   |  10 +-
 validation/ref/neuron/soma.py                 |   5 +-
 61 files changed, 3484 insertions(+), 2017 deletions(-)
 create mode 100644 .github/workflows/lint.yml

diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 00000000..b1c643f4
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,22 @@
+name: Sanitize
+
+on:
+  pull_request:
+    branches: [ master ]
+
+jobs:
+  build:
+    name: Lint
+    runs-on: ubuntu-20.04
+    strategy:
+      fail-fast: false
+    steps:
+      - name: Clone w/ submodules
+        uses: actions/checkout@v2
+        with:
+          submodules: recursive
+      - name: Python Formatting
+        uses: psf/black@stable
+        with:
+          options: --check --extend-exclude '/(ext|python/pybind11|doc/scripts/.*_theme)'
+          src: scripts/build-catalogue.in .
diff --git a/doc/conf.py b/doc/conf.py
index 7f56382a..e6c2dd34 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -5,59 +5,61 @@ import subprocess as sp
 from tempfile import TemporaryDirectory
 
 # Add /scripts to path. Used for Furo theme and to generate images
-this_path=os.path.split(os.path.abspath(__file__))[0]
-script_path=this_path+'/scripts'
+this_path = os.path.split(os.path.abspath(__file__))[0]
+script_path = this_path + "/scripts"
 sys.path.append(script_path)
 
-html_static_path = ['static']
-html_css_files = ['htmldiag.css']
-html_js_files = ['domarrow.js']
+html_static_path = ["static"]
+html_css_files = ["htmldiag.css"]
+html_js_files = ["domarrow.js"]
+
 
 def setup(app):
-    app.add_object_type('generic', 'gen', 'pair: %s; generic')
-    app.add_object_type('label', 'lab', 'pair: %s; label')
+    app.add_object_type("generic", "gen", "pair: %s; generic")
+    app.add_object_type("label", "lab", "pair: %s; label")
     app.add_js_file("latest-warning.js")
 
+
 extensions = [
-    'sphinx.ext.intersphinx',
-    'sphinx.ext.autodoc',
-    'sphinx.ext.todo',
-    'sphinx.ext.mathjax',
-    'divio_docs_theme'
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.autodoc",
+    "sphinx.ext.todo",
+    "sphinx.ext.mathjax",
+    "divio_docs_theme",
 ]
-source_suffix = '.rst'
-master_doc = 'index'
+source_suffix = ".rst"
+master_doc = "index"
 
-html_logo = 'images/arbor-lines-proto-colour.svg'
-html_favicon = 'images/arbor-lines-proto-colour-notext.svg'
+html_logo = "images/arbor-lines-proto-colour.svg"
+html_favicon = "images/arbor-lines-proto-colour-notext.svg"
 
 intersphinx_mapping = {
-    'lfpykit': ('https://lfpykit.readthedocs.io/en/latest/', None),
+    "lfpykit": ("https://lfpykit.readthedocs.io/en/latest/", None),
 }
 
-project = 'Arbor'
-copyright = '2017-2022, ETHZ & FZJ'
-author = 'ETHZ & FZJ'
+project = "Arbor"
+copyright = "2017-2022, ETHZ & FZJ"
+author = "ETHZ & FZJ"
 todo_include_todos = True
 
 html_theme = "divio_docs_theme"
-html_theme_options = {
-    }
+html_theme_options = {}
 
 # This style makes the source code pop out a bit more
 # from the background text, without being overpowering.
-pygments_style = 'perldoc'
+pygments_style = "perldoc"
 
 # Generate images for the documentation.
 print("--- generating images ---")
 
 # Output path for generated images
 # Dump inputs.py into tmpdir
-img_path=this_path+'/gen-images'
+img_path = this_path + "/gen-images"
 if not os.path.exists(img_path):
     os.mkdir(img_path)
 
 import make_images
+
 make_images.generate(img_path)
 
 print("-------------------------")
diff --git a/doc/contrib/coding-style.rst b/doc/contrib/coding-style.rst
index 21dd9c6d..db83fce3 100644
--- a/doc/contrib/coding-style.rst
+++ b/doc/contrib/coding-style.rst
@@ -3,6 +3,27 @@
 Coding Guidelines
 =================
 
+Python
+------
+
+We follow the `black <https://black.readthedocs.io/en/stable/index.html>`__
+coding style. It is enforced by an automated check on each pull request. You can
+run the following commands to apply it:
+
+.. code::
+
+   # Install the formatter if not present
+   pip install black
+   # Automatically apply style. If unsure what this does, read on.
+   black --extend-exclude '/(ext|python/pybind11|doc/scripts/.*_theme)' . scripts/build-catalogue.in
+
+The formatter can also be run with ``--check`` to list offending files and
+``--diff`` to preview changes. Most editors can `integrate with black
+<https://black.readthedocs.io/en/stable/integrations/editors.html>`__.
+
+C++
+---
+
 The main development language of Arbor is C++. For Arbor we start with
 the community guidelines set out in the `C++ Core
 Guidelines <http://isocpp.github.io/CppCoreGuidelines/>`__. These
diff --git a/doc/scripts/gen-labels.py b/doc/scripts/gen-labels.py
index cf8466f6..8747e327 100644
--- a/doc/scripts/gen-labels.py
+++ b/doc/scripts/gen-labels.py
@@ -4,84 +4,89 @@ import os.path
 import sys
 from math import sqrt
 
+
 def is_collocated(l, r):
-    return l[0]==r[0] and l[1]==r[1]
+    return l[0] == r[0] and l[1] == r[1]
+
 
 def write_morphology(name, morph):
-    string = 'tmp = ['.format(name)
+    string = "tmp = [".format(name)
     for i in range(morph.num_branches):
         first = True
-        sections = '['
+        sections = "["
         for seg in morph.branch_segments(i):
             if not first:
                 if is_collocated((seg.prox.x, seg.prox.y), (last_dist.x, last_dist.y)):
-                    sections += ', '
+                    sections += ", "
                 else:
-                    sections += '], ['
+                    sections += "], ["
 
             first = False
             p = seg.prox
             d = seg.dist
-            sections += 'Segment(({}, {}, {}), ({}, {}, {}), {})'.format(p.x, p.y, p.radius, d.x, d.y, d.radius, seg.tag)
+            sections += "Segment(({}, {}, {}), ({}, {}, {}), {})".format(
+                p.x, p.y, p.radius, d.x, d.y, d.radius, seg.tag
+            )
             last_dist = seg.dist
-        sections += ']'
+        sections += "]"
 
-        string += '\n    [{}],'.format(sections)
-    string += ']\n'
-    string += '{} = representation.make_morph(tmp)\n\n'.format(name)
+        string += "\n    [{}],".format(sections)
+    string += "]\n"
+    string += "{} = representation.make_morph(tmp)\n\n".format(name)
     return string
 
+
 # Describe the morphologies
 
 mnpos = arbor.mnpos
 
 # The morphology used for all of the region/locset illustrations
 label_tree = arbor.segment_tree()
-label_tree.append(mnpos, mpoint(0,   0.0, 0, 2.0), mpoint( 4,  0.0, 0, 2.0), tag=1)
-label_tree.append(0,     mpoint(4,   0.0, 0, 0.8), mpoint( 8,  0.0, 0, 0.8), tag=3)
-label_tree.append(1,     mpoint(8,   0.0, 0, 0.8), mpoint(12, -0.5, 0, 0.8), tag=3)
-label_tree.append(2,     mpoint(12, -0.5, 0, 0.8), mpoint(20,  4.0, 0, 0.4), tag=3)
-label_tree.append(3,     mpoint(20,  4.0, 0, 0.4), mpoint(26,  6.0, 0, 0.2), tag=3)
-label_tree.append(2,     mpoint(12, -0.5, 0, 0.5), mpoint(19, -3.0, 0, 0.5), tag=3)
-label_tree.append(5,     mpoint(19, -3.0, 0, 0.5), mpoint(24, -7.0, 0, 0.2), tag=3)
-label_tree.append(5,     mpoint(19, -3.0, 0, 0.5), mpoint(23, -1.0, 0, 0.2), tag=3)
-label_tree.append(7,     mpoint(23, -1.0, 0, 0.2), mpoint(26, -2.0, 0, 0.2), tag=3)
-label_tree.append(mnpos, mpoint(0,   0.0, 0, 2.0), mpoint(-7,  0.0, 0, 0.4), tag=2)
-label_tree.append(9,     mpoint(-7,  0.0, 0, 0.4), mpoint(-10, 0.0, 0, 0.4), tag=2)
+label_tree.append(mnpos, mpoint(0, 0.0, 0, 2.0), mpoint(4, 0.0, 0, 2.0), tag=1)
+label_tree.append(0, mpoint(4, 0.0, 0, 0.8), mpoint(8, 0.0, 0, 0.8), tag=3)
+label_tree.append(1, mpoint(8, 0.0, 0, 0.8), mpoint(12, -0.5, 0, 0.8), tag=3)
+label_tree.append(2, mpoint(12, -0.5, 0, 0.8), mpoint(20, 4.0, 0, 0.4), tag=3)
+label_tree.append(3, mpoint(20, 4.0, 0, 0.4), mpoint(26, 6.0, 0, 0.2), tag=3)
+label_tree.append(2, mpoint(12, -0.5, 0, 0.5), mpoint(19, -3.0, 0, 0.5), tag=3)
+label_tree.append(5, mpoint(19, -3.0, 0, 0.5), mpoint(24, -7.0, 0, 0.2), tag=3)
+label_tree.append(5, mpoint(19, -3.0, 0, 0.5), mpoint(23, -1.0, 0, 0.2), tag=3)
+label_tree.append(7, mpoint(23, -1.0, 0, 0.2), mpoint(26, -2.0, 0, 0.2), tag=3)
+label_tree.append(mnpos, mpoint(0, 0.0, 0, 2.0), mpoint(-7, 0.0, 0, 0.4), tag=2)
+label_tree.append(9, mpoint(-7, 0.0, 0, 0.4), mpoint(-10, 0.0, 0, 0.4), tag=2)
 
 label_morph = arbor.morphology(label_tree)
 
 # The label morphology with some gaps (at start of dendritic tree and remove the axon hillock)
 label_tree = arbor.segment_tree()
-label_tree.append(mnpos, mpoint(0,   0.0, 0, 2.0), mpoint( 4,  0.0, 0, 2.0), tag=1)
-label_tree.append(0,    mpoint(5,   0.0, 0, 0.8), mpoint( 8,  0.0, 0, 0.8), tag=3)
-label_tree.append(1,    mpoint(8,   0.0, 0, 0.8), mpoint(12, -0.5, 0, 0.8), tag=3)
-label_tree.append(2,    mpoint(12, -0.5, 0, 0.8), mpoint(20,  4.0, 0, 0.4), tag=3)
-label_tree.append(3,    mpoint(20,  4.0, 0, 0.4), mpoint(26,  6.0, 0, 0.2), tag=3)
-label_tree.append(2,    mpoint(12, -0.5, 0, 0.5), mpoint(19, -3.0, 0, 0.5), tag=3)
-label_tree.append(5,    mpoint(19, -3.0, 0, 0.5), mpoint(24, -7.0, 0, 0.2), tag=3)
-label_tree.append(5,    mpoint(19, -3.0, 0, 0.5), mpoint(23, -1.0, 0, 0.2), tag=3)
-label_tree.append(7,    mpoint(23, -1.0, 0, 0.2), mpoint(26, -2.0, 0, 0.2), tag=3)
-label_tree.append(mnpos, mpoint(-2,  0.0, 0, 0.4), mpoint(-10, 0.0, 0, 0.4), tag=2)
+label_tree.append(mnpos, mpoint(0, 0.0, 0, 2.0), mpoint(4, 0.0, 0, 2.0), tag=1)
+label_tree.append(0, mpoint(5, 0.0, 0, 0.8), mpoint(8, 0.0, 0, 0.8), tag=3)
+label_tree.append(1, mpoint(8, 0.0, 0, 0.8), mpoint(12, -0.5, 0, 0.8), tag=3)
+label_tree.append(2, mpoint(12, -0.5, 0, 0.8), mpoint(20, 4.0, 0, 0.4), tag=3)
+label_tree.append(3, mpoint(20, 4.0, 0, 0.4), mpoint(26, 6.0, 0, 0.2), tag=3)
+label_tree.append(2, mpoint(12, -0.5, 0, 0.5), mpoint(19, -3.0, 0, 0.5), tag=3)
+label_tree.append(5, mpoint(19, -3.0, 0, 0.5), mpoint(24, -7.0, 0, 0.2), tag=3)
+label_tree.append(5, mpoint(19, -3.0, 0, 0.5), mpoint(23, -1.0, 0, 0.2), tag=3)
+label_tree.append(7, mpoint(23, -1.0, 0, 0.2), mpoint(26, -2.0, 0, 0.2), tag=3)
+label_tree.append(mnpos, mpoint(-2, 0.0, 0, 0.4), mpoint(-10, 0.0, 0, 0.4), tag=2)
 
 detached_morph = arbor.morphology(label_tree)
 
 # soma with "stacked cylinders"
 stacked_tree = arbor.segment_tree()
-stacked_tree.append(mnpos, mpoint(0,   0.0, 0, 0.5), mpoint( 1,  0.0, 0, 1.5), tag=1)
-stacked_tree.append(0,    mpoint(1,   0.0, 0, 1.5), mpoint( 2,  0.0, 0, 2.5), tag=1)
-stacked_tree.append(1,    mpoint(2,   0.0, 0, 2.5), mpoint( 3,  0.0, 0, 2.5), tag=1)
-stacked_tree.append(2,    mpoint(3,   0.0, 0, 2.5), mpoint( 4,  0.0, 0, 1.2), tag=1)
-stacked_tree.append(3,    mpoint(4,   0.0, 0, 0.8), mpoint( 8,  0.0, 0, 0.8), tag=3)
-stacked_tree.append(4,    mpoint(8,   0.0, 0, 0.8), mpoint(12, -0.5, 0, 0.8), tag=3)
-stacked_tree.append(5,    mpoint(12, -0.5, 0, 0.8), mpoint(20,  4.0, 0, 0.4), tag=3)
-stacked_tree.append(6,    mpoint(20,  4.0, 0, 0.4), mpoint(26,  6.0, 0, 0.2), tag=3)
-stacked_tree.append(5,    mpoint(12, -0.5, 0, 0.5), mpoint(19, -3.0, 0, 0.5), tag=3)
-stacked_tree.append(8,    mpoint(19, -3.0, 0, 0.5), mpoint(24, -7.0, 0, 0.2), tag=3)
-stacked_tree.append(8,    mpoint(19, -3.0, 0, 0.5), mpoint(23, -1.0, 0, 0.2), tag=3)
-stacked_tree.append(10,   mpoint(23, -1.0, 0, 0.2), mpoint(26, -2.0, 0, 0.2), tag=3)
-stacked_tree.append(mnpos, mpoint(0,   0.0, 0, 0.4), mpoint(-7,  0.0, 0, 0.4), tag=2)
-stacked_tree.append(12,   mpoint(-7,  0.0, 0, 0.4), mpoint(-10, 0.0, 0, 0.4), tag=2)
+stacked_tree.append(mnpos, mpoint(0, 0.0, 0, 0.5), mpoint(1, 0.0, 0, 1.5), tag=1)
+stacked_tree.append(0, mpoint(1, 0.0, 0, 1.5), mpoint(2, 0.0, 0, 2.5), tag=1)
+stacked_tree.append(1, mpoint(2, 0.0, 0, 2.5), mpoint(3, 0.0, 0, 2.5), tag=1)
+stacked_tree.append(2, mpoint(3, 0.0, 0, 2.5), mpoint(4, 0.0, 0, 1.2), tag=1)
+stacked_tree.append(3, mpoint(4, 0.0, 0, 0.8), mpoint(8, 0.0, 0, 0.8), tag=3)
+stacked_tree.append(4, mpoint(8, 0.0, 0, 0.8), mpoint(12, -0.5, 0, 0.8), tag=3)
+stacked_tree.append(5, mpoint(12, -0.5, 0, 0.8), mpoint(20, 4.0, 0, 0.4), tag=3)
+stacked_tree.append(6, mpoint(20, 4.0, 0, 0.4), mpoint(26, 6.0, 0, 0.2), tag=3)
+stacked_tree.append(5, mpoint(12, -0.5, 0, 0.5), mpoint(19, -3.0, 0, 0.5), tag=3)
+stacked_tree.append(8, mpoint(19, -3.0, 0, 0.5), mpoint(24, -7.0, 0, 0.2), tag=3)
+stacked_tree.append(8, mpoint(19, -3.0, 0, 0.5), mpoint(23, -1.0, 0, 0.2), tag=3)
+stacked_tree.append(10, mpoint(23, -1.0, 0, 0.2), mpoint(26, -2.0, 0, 0.2), tag=3)
+stacked_tree.append(mnpos, mpoint(0, 0.0, 0, 0.4), mpoint(-7, 0.0, 0, 0.4), tag=2)
+stacked_tree.append(12, mpoint(-7, 0.0, 0, 0.4), mpoint(-10, 0.0, 0, 0.4), tag=2)
 
 stacked_morph = arbor.morphology(stacked_tree)
 
@@ -97,122 +102,124 @@ branch_morph1 = arbor.morphology(tree)
 
 # single branch: multiple segments, continuous radius
 tree = arbor.segment_tree()
-tree.append(mnpos, mpoint( 0.0,  0.0,  0.0, 1.0), mpoint( 3.0,  0.2,  0.0, 0.8), tag=1)
-tree.append(0,     mpoint( 3.0,  0.2,  0.0, 0.8), mpoint( 5.0, -0.1,  0.0, 0.7), tag=2)
-tree.append(1,     mpoint( 5.0, -0.1,  0.0, 0.7), mpoint( 8.0,  0.0,  0.0, 0.6), tag=2)
-tree.append(2,     mpoint( 8.0,  0.0,  0.0, 0.6), mpoint(10.0,  0.0,  0.0, 0.5), tag=3)
+tree.append(mnpos, mpoint(0.0, 0.0, 0.0, 1.0), mpoint(3.0, 0.2, 0.0, 0.8), tag=1)
+tree.append(0, mpoint(3.0, 0.2, 0.0, 0.8), mpoint(5.0, -0.1, 0.0, 0.7), tag=2)
+tree.append(1, mpoint(5.0, -0.1, 0.0, 0.7), mpoint(8.0, 0.0, 0.0, 0.6), tag=2)
+tree.append(2, mpoint(8.0, 0.0, 0.0, 0.6), mpoint(10.0, 0.0, 0.0, 0.5), tag=3)
 branch_morph2 = arbor.morphology(tree)
 
 # single branch: multiple segments, gaps
 tree = arbor.segment_tree()
-tree.append(mnpos, mpoint( 0.0,  0.0,  0.0, 1.0), mpoint(3.0,  0.2,  0.0, 0.8), tag=1)
-tree.append(0,     mpoint( 3.0,  0.2,  0.0, 0.8), mpoint(5.0, -0.1,  0.0, 0.7), tag=2)
-tree.append(1,     mpoint( 6.0, -0.1,  0.0, 0.7), mpoint(9.0, 0.0,  0.0, 0.6), tag=2)
-tree.append(2,     mpoint( 9.0,  0.0,  0.0, 0.6), mpoint(11.0, 0.0,  0.0, 0.5), tag=3)
+tree.append(mnpos, mpoint(0.0, 0.0, 0.0, 1.0), mpoint(3.0, 0.2, 0.0, 0.8), tag=1)
+tree.append(0, mpoint(3.0, 0.2, 0.0, 0.8), mpoint(5.0, -0.1, 0.0, 0.7), tag=2)
+tree.append(1, mpoint(6.0, -0.1, 0.0, 0.7), mpoint(9.0, 0.0, 0.0, 0.6), tag=2)
+tree.append(2, mpoint(9.0, 0.0, 0.0, 0.6), mpoint(11.0, 0.0, 0.0, 0.5), tag=3)
 branch_morph3 = arbor.morphology(tree)
 
 # single branch: multiple segments, discontinuous radius
 tree = arbor.segment_tree()
-tree.append(mnpos, mpoint( 0.0,  0.0,  0.0, 1.0), mpoint( 3.0,  0.2,  0.0, 0.8), tag=1)
-tree.append(0,     mpoint( 3.0,  0.2,  0.0, 0.8), mpoint( 5.0, -0.1,  0.0, 0.7), tag=2)
-tree.append(1,     mpoint( 5.0, -0.1,  0.0, 0.7), mpoint( 8.0,  0.0,  0.0, 0.5), tag=2)
-tree.append(2,     mpoint( 8.0,  0.0,  0.0, 0.3), mpoint(10.0,  0.0,  0.0, 0.5), tag=3)
+tree.append(mnpos, mpoint(0.0, 0.0, 0.0, 1.0), mpoint(3.0, 0.2, 0.0, 0.8), tag=1)
+tree.append(0, mpoint(3.0, 0.2, 0.0, 0.8), mpoint(5.0, -0.1, 0.0, 0.7), tag=2)
+tree.append(1, mpoint(5.0, -0.1, 0.0, 0.7), mpoint(8.0, 0.0, 0.0, 0.5), tag=2)
+tree.append(2, mpoint(8.0, 0.0, 0.0, 0.3), mpoint(10.0, 0.0, 0.0, 0.5), tag=3)
 branch_morph4 = arbor.morphology(tree)
 
 tree = arbor.segment_tree()
-tree.append(mnpos, mpoint( 0.0, 0.0, 0.0, 1.0), mpoint(10.0, 0.0, 0.0, 0.5), tag= 3)
-tree.append(0,     mpoint(15.0, 3.0, 0.0, 0.2), tag= 3)
-tree.append(0,     mpoint(15.0,-3.0, 0.0, 0.2), tag= 3)
+tree.append(mnpos, mpoint(0.0, 0.0, 0.0, 1.0), mpoint(10.0, 0.0, 0.0, 0.5), tag=3)
+tree.append(0, mpoint(15.0, 3.0, 0.0, 0.2), tag=3)
+tree.append(0, mpoint(15.0, -3.0, 0.0, 0.2), tag=3)
 yshaped_morph = arbor.morphology(tree)
 
 tree = arbor.segment_tree()
-tree.append(mnpos, mpoint(-3.0, 0.0, 0.0, 3.0), mpoint( 3.0, 0.0, 0.0, 3.0), tag=1)
-tree.append(0, mpoint( 4.0, -1.0,  0.0, 0.6), mpoint(10.0,  -2.0,  0.0, 0.5), tag=3)
-tree.append(1, mpoint(15.0, -1.0,  0.0, 0.5), tag=3)
-tree.append(2, mpoint(18.0, -5.0,  0.0, 0.3), tag=3)
-tree.append(2, mpoint(20.0,  2.0,  0.0, 0.3), tag=3)
+tree.append(mnpos, mpoint(-3.0, 0.0, 0.0, 3.0), mpoint(3.0, 0.0, 0.0, 3.0), tag=1)
+tree.append(0, mpoint(4.0, -1.0, 0.0, 0.6), mpoint(10.0, -2.0, 0.0, 0.5), tag=3)
+tree.append(1, mpoint(15.0, -1.0, 0.0, 0.5), tag=3)
+tree.append(2, mpoint(18.0, -5.0, 0.0, 0.3), tag=3)
+tree.append(2, mpoint(20.0, 2.0, 0.0, 0.3), tag=3)
 ysoma_morph1 = arbor.morphology(tree)
 
 tree = arbor.segment_tree()
-tree.append(mnpos, mpoint(-3.0, 0.0, 0.0, 3.0),   mpoint( 3.0, 0.0, 0.0, 3.0), tag=1)
-tree.append(0,     mpoint( 4.0, -1.0,  0.0, 0.6), mpoint(10.0,-2.0,  0.0, 0.5), tag=3)
-tree.append(1,     mpoint(15.0, -1.0,  0.0, 0.5), tag= 3)
-tree.append(2,     mpoint(18.0, -5.0,  0.0, 0.3), tag= 3)
-tree.append(2,     mpoint(20.0,  2.0,  0.0, 0.3), tag= 3)
-tree.append(0,     mpoint( 2.0,  1.0,  0.0, 0.6), mpoint(12.0, 4.0,  0.0, 0.5), tag=3)
-tree.append(5,     mpoint(18.0,  4.0,  0.0, 0.3), tag= 3)
-tree.append(5,     mpoint(16.0,  9.0,  0.0, 0.1), tag= 3)
-tree.append(mnpos, mpoint(-3.5,  0.0,  0.0, 1.5), mpoint(-6.0,-0.2,  0.0, 0.5), tag=2)
-tree.append(8,     mpoint(-15.0,-0.1,  0.0, 0.5), tag=2)
+tree.append(mnpos, mpoint(-3.0, 0.0, 0.0, 3.0), mpoint(3.0, 0.0, 0.0, 3.0), tag=1)
+tree.append(0, mpoint(4.0, -1.0, 0.0, 0.6), mpoint(10.0, -2.0, 0.0, 0.5), tag=3)
+tree.append(1, mpoint(15.0, -1.0, 0.0, 0.5), tag=3)
+tree.append(2, mpoint(18.0, -5.0, 0.0, 0.3), tag=3)
+tree.append(2, mpoint(20.0, 2.0, 0.0, 0.3), tag=3)
+tree.append(0, mpoint(2.0, 1.0, 0.0, 0.6), mpoint(12.0, 4.0, 0.0, 0.5), tag=3)
+tree.append(5, mpoint(18.0, 4.0, 0.0, 0.3), tag=3)
+tree.append(5, mpoint(16.0, 9.0, 0.0, 0.1), tag=3)
+tree.append(mnpos, mpoint(-3.5, 0.0, 0.0, 1.5), mpoint(-6.0, -0.2, 0.0, 0.5), tag=2)
+tree.append(8, mpoint(-15.0, -0.1, 0.0, 0.5), tag=2)
 ysoma_morph2 = arbor.morphology(tree)
 
 tree = arbor.segment_tree()
-tree.append(mnpos, mpoint(-3.0, 0.0, 0.0, 3.0),   mpoint( 3.0, 0.0, 0.0, 3.0), tag=1)
-tree.append(0,     mpoint( 3.0,  0.0,  0.0, 0.6), mpoint(9.0,-1.0,  0.0, 0.5), tag=3)
-tree.append(1,     mpoint(14.0,  0.0,  0.0, 0.5), tag= 3)
-tree.append(2,     mpoint(17.0, -4.0,  0.0, 0.3), tag= 3)
-tree.append(2,     mpoint(19.0,  3.0,  0.0, 0.3), tag= 3)
-tree.append(0,     mpoint( 3.0,  0.0,  0.0, 0.6), mpoint(13.0, 3.0,  0.0, 0.5), tag=3)
-tree.append(5,     mpoint(19.0,  3.0,  0.0, 0.3), tag= 3)
-tree.append(5,     mpoint(17.0,  8.0,  0.0, 0.1), tag= 3)
-tree.append(mnpos, mpoint(-3.0,  0.0,  0.0, 1.5), mpoint(-5.5,-0.2,  0.0, 0.5), tag=2)
-tree.append(8,     mpoint(-14.5,-0.1,  0.0, 0.5), tag=2)
+tree.append(mnpos, mpoint(-3.0, 0.0, 0.0, 3.0), mpoint(3.0, 0.0, 0.0, 3.0), tag=1)
+tree.append(0, mpoint(3.0, 0.0, 0.0, 0.6), mpoint(9.0, -1.0, 0.0, 0.5), tag=3)
+tree.append(1, mpoint(14.0, 0.0, 0.0, 0.5), tag=3)
+tree.append(2, mpoint(17.0, -4.0, 0.0, 0.3), tag=3)
+tree.append(2, mpoint(19.0, 3.0, 0.0, 0.3), tag=3)
+tree.append(0, mpoint(3.0, 0.0, 0.0, 0.6), mpoint(13.0, 3.0, 0.0, 0.5), tag=3)
+tree.append(5, mpoint(19.0, 3.0, 0.0, 0.3), tag=3)
+tree.append(5, mpoint(17.0, 8.0, 0.0, 0.1), tag=3)
+tree.append(mnpos, mpoint(-3.0, 0.0, 0.0, 1.5), mpoint(-5.5, -0.2, 0.0, 0.5), tag=2)
+tree.append(8, mpoint(-14.5, -0.1, 0.0, 0.5), tag=2)
 ysoma_morph3 = arbor.morphology(tree)
 
-fn = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__), "../fileformat/example.swc"))
+fn = os.path.realpath(
+    os.path.join(os.getcwd(), os.path.dirname(__file__), "../fileformat/example.swc")
+)
 swc_morph = arbor.load_swc_arbor(fn)
 
-regions  = {
-            'empty': '(region-nil)',
-            'all': '(all)',
-            'tag1': '(tag 1)',
-            'tag2': '(tag 2)',
-            'tag3': '(tag 3)',
-            'tag4': '(tag 4)',
-            'soma': '(region "tag1")',
-            'axon': '(region "tag2")',
-            'dend': '(join (region "tag3") (region "tag4"))',
-            'radlt5': '(radius-lt (all) 0.5)',
-            'radle5': '(radius-le (all) 0.5)',
-            'radgt5': '(radius-gt (all) 0.5)',
-            'radge5': '(radius-ge (all) 0.5)',
-            'rad36':  '(intersect (radius-gt (all) 0.3) (radius-lt (all) 0.6))',
-            'branch0': '(branch 0)',
-            'branch3': '(branch 3)',
-            'segment0': '(segment 0)',
-            'segment3': '(segment 3)',
-            'cable_0_28': '(cable 0 0.2 0.8)',
-            'cable_1_01': '(cable 1 0 1)',
-            'cable_1_31': '(cable 1 0.3 1)',
-            'cable_1_37': '(cable 1 0.3 0.7)',
-            'proxint':     '(proximal-interval (locset "proxint_in") 5)',
-            'proxintinf':  '(proximal-interval (locset "proxint_in"))',
-            'distint':     '(distal-interval   (locset "distint_in") 5)',
-            'distintinf':  '(distal-interval   (locset "distint_in"))',
-            'lhs' : '(join (cable 0 0.5 1) (cable 1 0 0.5))',
-            'rhs' : '(branch 1)',
-            'and': '(intersect (region "lhs") (region "rhs"))',
-            'or':  '(join      (region "lhs") (region "rhs"))',
-          }
+regions = {
+    "empty": "(region-nil)",
+    "all": "(all)",
+    "tag1": "(tag 1)",
+    "tag2": "(tag 2)",
+    "tag3": "(tag 3)",
+    "tag4": "(tag 4)",
+    "soma": '(region "tag1")',
+    "axon": '(region "tag2")',
+    "dend": '(join (region "tag3") (region "tag4"))',
+    "radlt5": "(radius-lt (all) 0.5)",
+    "radle5": "(radius-le (all) 0.5)",
+    "radgt5": "(radius-gt (all) 0.5)",
+    "radge5": "(radius-ge (all) 0.5)",
+    "rad36": "(intersect (radius-gt (all) 0.3) (radius-lt (all) 0.6))",
+    "branch0": "(branch 0)",
+    "branch3": "(branch 3)",
+    "segment0": "(segment 0)",
+    "segment3": "(segment 3)",
+    "cable_0_28": "(cable 0 0.2 0.8)",
+    "cable_1_01": "(cable 1 0 1)",
+    "cable_1_31": "(cable 1 0.3 1)",
+    "cable_1_37": "(cable 1 0.3 0.7)",
+    "proxint": '(proximal-interval (locset "proxint_in") 5)',
+    "proxintinf": '(proximal-interval (locset "proxint_in"))',
+    "distint": '(distal-interval   (locset "distint_in") 5)',
+    "distintinf": '(distal-interval   (locset "distint_in"))',
+    "lhs": "(join (cable 0 0.5 1) (cable 1 0 0.5))",
+    "rhs": "(branch 1)",
+    "and": '(intersect (region "lhs") (region "rhs"))',
+    "or": '(join      (region "lhs") (region "rhs"))',
+}
 locsets = {
-            'root': '(root)',
-            'term': '(terminal)',
-            'rand_dend': '(uniform (region "dend") 0 50 0)',
-            'loc15': '(location 1 0.5)',
-            'loc05': '(location 0 0.5)',
-            'uniform0': '(uniform (tag 3) 0 9 0)',
-            'uniform1': '(uniform (tag 3) 0 9 1)',
-            'branchmid': '(on-branches 0.5)',
-            'distal':  '(distal   (region "rad36"))',
-            'proximal':'(proximal (region "rad36"))',
-            'distint_in': '(sum (location 1 0.5) (location 2 0.7) (location 5 0.1))',
-            'proxint_in': '(sum (location 1 0.8) (location 2 0.3))',
-            'loctest' : '(distal (complete (join (branch 1) (branch 0))))',
-            'restrict': '(restrict  (terminal) (tag 3))',
-            'proximal_translate': '(proximal-translate (terminal) 10)',
-            'distal_translate_single': '(distal-translate (location 0 0.5) 5)',
-            'distal_translate_multi':  '(distal-translate (location 0 0.5) 15)',
-          }
+    "root": "(root)",
+    "term": "(terminal)",
+    "rand_dend": '(uniform (region "dend") 0 50 0)',
+    "loc15": "(location 1 0.5)",
+    "loc05": "(location 0 0.5)",
+    "uniform0": "(uniform (tag 3) 0 9 0)",
+    "uniform1": "(uniform (tag 3) 0 9 1)",
+    "branchmid": "(on-branches 0.5)",
+    "distal": '(distal   (region "rad36"))',
+    "proximal": '(proximal (region "rad36"))',
+    "distint_in": "(sum (location 1 0.5) (location 2 0.7) (location 5 0.1))",
+    "proxint_in": "(sum (location 1 0.8) (location 2 0.3))",
+    "loctest": "(distal (complete (join (branch 1) (branch 0))))",
+    "restrict": "(restrict  (terminal) (tag 3))",
+    "proximal_translate": "(proximal-translate (terminal) 10)",
+    "distal_translate_single": "(distal-translate (location 0 0.5) 5)",
+    "distal_translate_multi": "(distal-translate (location 0 0.5) 15)",
+}
 
 labels = {**regions, **locsets}
 d = arbor.label_dict(labels)
@@ -225,34 +232,33 @@ cell = arbor.cable_cell(label_morph, d, arbor.decor())
 ###############################################################################
 
 tree = arbor.segment_tree()
-tree.append(mnpos, mpoint(0,   0.0, 0, 2.0), mpoint( 4,  0.0, 0, 2.0), tag=1)
-tree.append(0,     mpoint(4,   0.0, 0, 0.8), mpoint( 8,  0.0, 0, 0.8), tag=3)
-tree.append(1,     mpoint(8,   0.0, 0, 0.8), mpoint(12, -0.5, 0, 0.8), tag=3)
-tree.append(2,     mpoint(12, -0.5, 0, 0.8), mpoint(20,  4.0, 0, 0.4), tag=3)
-tree.append(3,     mpoint(20,  4.0, 0, 0.4), mpoint(26,  6.0, 0, 0.2), tag=3)
-tree.append(2,     mpoint(12, -0.5, 0, 0.5), mpoint(19, -3.0, 0, 0.5), tag=3)
-tree.append(5,     mpoint(19, -3.0, 0, 0.5), mpoint(24, -7.0, 0, 0.2), tag=4)
-tree.append(5,     mpoint(19, -3.0, 0, 0.5), mpoint(23, -1.0, 0, 0.2), tag=4)
-tree.append(7,     mpoint(23, -1.0, 0, 0.2), mpoint(36, -2.0, 0, 0.2), tag=4)
-tree.append(mnpos, mpoint(0,   0.0, 0, 2.0), mpoint(-7,  0.0, 0, 0.4), tag=2)
-tree.append(9,     mpoint(-7,  0.0, 0, 0.4), mpoint(-10, 0.0, 0, 0.4), tag=2)
+tree.append(mnpos, mpoint(0, 0.0, 0, 2.0), mpoint(4, 0.0, 0, 2.0), tag=1)
+tree.append(0, mpoint(4, 0.0, 0, 0.8), mpoint(8, 0.0, 0, 0.8), tag=3)
+tree.append(1, mpoint(8, 0.0, 0, 0.8), mpoint(12, -0.5, 0, 0.8), tag=3)
+tree.append(2, mpoint(12, -0.5, 0, 0.8), mpoint(20, 4.0, 0, 0.4), tag=3)
+tree.append(3, mpoint(20, 4.0, 0, 0.4), mpoint(26, 6.0, 0, 0.2), tag=3)
+tree.append(2, mpoint(12, -0.5, 0, 0.5), mpoint(19, -3.0, 0, 0.5), tag=3)
+tree.append(5, mpoint(19, -3.0, 0, 0.5), mpoint(24, -7.0, 0, 0.2), tag=4)
+tree.append(5, mpoint(19, -3.0, 0, 0.5), mpoint(23, -1.0, 0, 0.2), tag=4)
+tree.append(7, mpoint(23, -1.0, 0, 0.2), mpoint(36, -2.0, 0, 0.2), tag=4)
+tree.append(mnpos, mpoint(0, 0.0, 0, 2.0), mpoint(-7, 0.0, 0, 0.4), tag=2)
+tree.append(9, mpoint(-7, 0.0, 0, 0.4), mpoint(-10, 0.0, 0, 0.4), tag=2)
 tutorial_morph = arbor.morphology(tree)
 
-tutorial_regions  = {
-    'all': '(all)',
-    'soma': '(tag 1)',
-    'axon': '(tag 2)',
-    'dend': '(tag 3)',
-    'last': '(tag 4)',
-    'rad_gt': '(radius-ge (region "all") 1.5)',
-    'custom': '(join (region "last") (region "rad_gt"))'
+tutorial_regions = {
+    "all": "(all)",
+    "soma": "(tag 1)",
+    "axon": "(tag 2)",
+    "dend": "(tag 3)",
+    "last": "(tag 4)",
+    "rad_gt": '(radius-ge (region "all") 1.5)',
+    "custom": '(join (region "last") (region "rad_gt"))',
 }
 tutorial_locsets = {
-    'root': '(root)',
-    'terminal': '(terminal)',
-    'custom_terminal': '(restrict (locset "terminal") (region "custom"))',
-    'axon_terminal': '(restrict (locset "terminal") (region "axon"))'
-
+    "root": "(root)",
+    "terminal": "(terminal)",
+    "custom_terminal": '(restrict (locset "terminal") (region "custom"))',
+    "axon_terminal": '(restrict (locset "terminal") (region "axon"))',
 }
 
 tutorial_labels = {**tutorial_regions, **tutorial_locsets}
@@ -266,79 +272,106 @@ tutorial_cell = arbor.cable_cell(tutorial_morph, tutorial_dict, arbor.decor())
 ###############################################################################
 
 tree = arbor.segment_tree()
-s = tree.append(arbor.mnpos, arbor.mpoint(-12, 0, 0, 6), arbor.mpoint(0, 0, 0, 6), tag=1)
+s = tree.append(
+    arbor.mnpos, arbor.mpoint(-12, 0, 0, 6), arbor.mpoint(0, 0, 0, 6), tag=1
+)
 b0 = tree.append(s, arbor.mpoint(0, 0, 0, 2), arbor.mpoint(50, 0, 0, 2), tag=3)
-b1 = tree.append(b0, arbor.mpoint(50, 0, 0, 2), arbor.mpoint(50+50/sqrt(2), 50/sqrt(2), 0, 0.5), tag=3)
-b2 = tree.append(b0, arbor.mpoint(50, 0, 0, 1), arbor.mpoint(50+50/sqrt(2), -50/sqrt(2), 0, 1), tag=3)
+b1 = tree.append(
+    b0,
+    arbor.mpoint(50, 0, 0, 2),
+    arbor.mpoint(50 + 50 / sqrt(2), 50 / sqrt(2), 0, 0.5),
+    tag=3,
+)
+b2 = tree.append(
+    b0,
+    arbor.mpoint(50, 0, 0, 1),
+    arbor.mpoint(50 + 50 / sqrt(2), -50 / sqrt(2), 0, 1),
+    tag=3,
+)
 tutorial_network_ring_morph = arbor.morphology(tree)
 
-tutorial_network_ring_regions  = {
-    'soma': '(tag 1)',
-    'dend': '(tag 3)'
+tutorial_network_ring_regions = {"soma": "(tag 1)", "dend": "(tag 3)"}
+tutorial_network_ring_locsets = {"synapse_site": "(location 1 0.5)", "root": "(root)"}
+tutorial_network_ring_labels = {
+    **tutorial_network_ring_regions,
+    **tutorial_network_ring_locsets,
 }
-tutorial_network_ring_locsets = {
-    'synapse_site': '(location 1 0.5)',
-    'root': '(root)'
-}
-tutorial_network_ring_labels = {**tutorial_network_ring_regions, **tutorial_network_ring_locsets}
 tutorial_network_ring_dict = arbor.label_dict(tutorial_network_ring_labels)
 
 # Create a cell to concretise the region and locset definitions
-tutorial_network_ring_cell = arbor.cable_cell(tutorial_network_ring_morph, tutorial_network_ring_dict, arbor.decor())
+tutorial_network_ring_cell = arbor.cable_cell(
+    tutorial_network_ring_morph, tutorial_network_ring_dict, arbor.decor()
+)
 
 ################################################################################
 # Output all of the morphologies and region/locset definitions to a Python script
 # that can be run during the documentation build to generate images.
 ################################################################################
-f = open(sys.argv[1] + '/inputs.py', 'w')
-f.write('import representation\n')
-f.write('from representation import Segment\n')
-
-f.write('\n############# morphologies\n\n')
-f.write(write_morphology('label_morph',    label_morph))
-f.write(write_morphology('detached_morph', detached_morph))
-f.write(write_morphology('stacked_morph',  stacked_morph))
-f.write(write_morphology('sphere_morph',   sphere_morph))
-f.write(write_morphology('branch_morph1',  branch_morph1))
-f.write(write_morphology('branch_morph2',  branch_morph2))
-f.write(write_morphology('branch_morph3',  branch_morph3))
-f.write(write_morphology('branch_morph4',  branch_morph4))
-f.write(write_morphology('yshaped_morph',  yshaped_morph))
-f.write(write_morphology('ysoma_morph1',   ysoma_morph1))
-f.write(write_morphology('ysoma_morph2',   ysoma_morph2))
-f.write(write_morphology('ysoma_morph3',   ysoma_morph3))
-f.write(write_morphology('tutorial_morph', tutorial_morph))
-f.write(write_morphology('swc_morph',      swc_morph))
-f.write(write_morphology('tutorial_network_ring_morph', tutorial_network_ring_morph))
-
-f.write('\n############# locsets (label_morph)\n\n')
+f = open(sys.argv[1] + "/inputs.py", "w")
+f.write("import representation\n")
+f.write("from representation import Segment\n")
+
+f.write("\n############# morphologies\n\n")
+f.write(write_morphology("label_morph", label_morph))
+f.write(write_morphology("detached_morph", detached_morph))
+f.write(write_morphology("stacked_morph", stacked_morph))
+f.write(write_morphology("sphere_morph", sphere_morph))
+f.write(write_morphology("branch_morph1", branch_morph1))
+f.write(write_morphology("branch_morph2", branch_morph2))
+f.write(write_morphology("branch_morph3", branch_morph3))
+f.write(write_morphology("branch_morph4", branch_morph4))
+f.write(write_morphology("yshaped_morph", yshaped_morph))
+f.write(write_morphology("ysoma_morph1", ysoma_morph1))
+f.write(write_morphology("ysoma_morph2", ysoma_morph2))
+f.write(write_morphology("ysoma_morph3", ysoma_morph3))
+f.write(write_morphology("tutorial_morph", tutorial_morph))
+f.write(write_morphology("swc_morph", swc_morph))
+f.write(write_morphology("tutorial_network_ring_morph", tutorial_network_ring_morph))
+
+f.write("\n############# locsets (label_morph)\n\n")
 for label in locsets:
     locs = [(l.branch, l.pos) for l in cell.locations('"{}"'.format(label))]
-    f.write('ls_{}  = {{\'type\': \'locset\', \'value\': {}}}\n'.format(label, locs))
+    f.write("ls_{}  = {{'type': 'locset', 'value': {}}}\n".format(label, locs))
 
-f.write('\n############# regions (label_morph)\n\n')
+f.write("\n############# regions (label_morph)\n\n")
 for label in regions:
     comps = [(c.branch, c.prox, c.dist) for c in cell.cables('"{}"'.format(label))]
-    f.write('reg_{} = {{\'type\': \'region\', \'value\': {}}}\n'.format(label, comps))
+    f.write("reg_{} = {{'type': 'region', 'value': {}}}\n".format(label, comps))
 
-f.write('\n############# locsets (tutorial_morph)\n\n')
+f.write("\n############# locsets (tutorial_morph)\n\n")
 for label in tutorial_locsets:
     locs = [(l.branch, l.pos) for l in tutorial_cell.locations('"{}"'.format(label))]
-    f.write('tut_ls_{}  = {{\'type\': \'locset\', \'value\': {}}}\n'.format(label, locs))
+    f.write("tut_ls_{}  = {{'type': 'locset', 'value': {}}}\n".format(label, locs))
 
-f.write('\n############# regions (tutorial_morph)\n\n')
+f.write("\n############# regions (tutorial_morph)\n\n")
 for label in tutorial_regions:
-    comps = [(c.branch, c.prox, c.dist) for c in tutorial_cell.cables('"{}"'.format(label))]
-    f.write('tut_reg_{} = {{\'type\': \'region\', \'value\': {}}}\n'.format(label, comps))
+    comps = [
+        (c.branch, c.prox, c.dist) for c in tutorial_cell.cables('"{}"'.format(label))
+    ]
+    f.write("tut_reg_{} = {{'type': 'region', 'value': {}}}\n".format(label, comps))
 
-f.write('\n############# locsets (tutorial_network_ring_morph)\n\n')
+f.write("\n############# locsets (tutorial_network_ring_morph)\n\n")
 for label in tutorial_network_ring_locsets:
-    locs = [(l.branch, l.pos) for l in tutorial_network_ring_cell.locations('"{}"'.format(label))]
-    f.write('tut_network_ring_ls_{}  = {{\'type\': \'locset\', \'value\': {}}}\n'.format(label, locs))
-
-f.write('\n############# regions (tutorial_network_ring_morph)\n\n')
+    locs = [
+        (l.branch, l.pos)
+        for l in tutorial_network_ring_cell.locations('"{}"'.format(label))
+    ]
+    f.write(
+        "tut_network_ring_ls_{}  = {{'type': 'locset', 'value': {}}}\n".format(
+            label, locs
+        )
+    )
+
+f.write("\n############# regions (tutorial_network_ring_morph)\n\n")
 for label in tutorial_network_ring_regions:
-    comps = [(c.branch, c.prox, c.dist) for c in tutorial_network_ring_cell.cables('"{}"'.format(label))]
-    f.write('tut_network_ring_reg_{} = {{\'type\': \'region\', \'value\': {}}}\n'.format(label, comps))
+    comps = [
+        (c.branch, c.prox, c.dist)
+        for c in tutorial_network_ring_cell.cables('"{}"'.format(label))
+    ]
+    f.write(
+        "tut_network_ring_reg_{} = {{'type': 'region', 'value': {}}}\n".format(
+            label, comps
+        )
+    )
 
 f.close()
diff --git a/doc/scripts/inputs.py b/doc/scripts/inputs.py
index da750d9e..a59dd0ae 100644
--- a/doc/scripts/inputs.py
+++ b/doc/scripts/inputs.py
@@ -4,183 +4,573 @@ from representation import Segment
 ############# morphologies
 
 tmp = [
-    [[Segment((0.0, 0.0, 2.0), (4.0, 0.0, 2.0), 1), Segment((4.0, 0.0, 0.8), (8.0, 0.0, 0.8), 3), Segment((8.0, 0.0, 0.8), (12.0, -0.5, 0.8), 3)]],
-    [[Segment((12.0, -0.5, 0.8), (20.0, 4.0, 0.4), 3), Segment((20.0, 4.0, 0.4), (26.0, 6.0, 0.2), 3)]],
+    [
+        [
+            Segment((0.0, 0.0, 2.0), (4.0, 0.0, 2.0), 1),
+            Segment((4.0, 0.0, 0.8), (8.0, 0.0, 0.8), 3),
+            Segment((8.0, 0.0, 0.8), (12.0, -0.5, 0.8), 3),
+        ]
+    ],
+    [
+        [
+            Segment((12.0, -0.5, 0.8), (20.0, 4.0, 0.4), 3),
+            Segment((20.0, 4.0, 0.4), (26.0, 6.0, 0.2), 3),
+        ]
+    ],
     [[Segment((12.0, -0.5, 0.5), (19.0, -3.0, 0.5), 3)]],
     [[Segment((19.0, -3.0, 0.5), (24.0, -7.0, 0.2), 3)]],
-    [[Segment((19.0, -3.0, 0.5), (23.0, -1.0, 0.2), 3), Segment((23.0, -1.0, 0.2), (26.0, -2.0, 0.2), 3)]],
-    [[Segment((0.0, 0.0, 2.0), (-7.0, 0.0, 0.4), 2), Segment((-7.0, 0.0, 0.4), (-10.0, 0.0, 0.4), 2)]],]
+    [
+        [
+            Segment((19.0, -3.0, 0.5), (23.0, -1.0, 0.2), 3),
+            Segment((23.0, -1.0, 0.2), (26.0, -2.0, 0.2), 3),
+        ]
+    ],
+    [
+        [
+            Segment((0.0, 0.0, 2.0), (-7.0, 0.0, 0.4), 2),
+            Segment((-7.0, 0.0, 0.4), (-10.0, 0.0, 0.4), 2),
+        ]
+    ],
+]
 label_morph = representation.make_morph(tmp)
 
 tmp = [
-    [[Segment((0.0, 0.0, 2.0), (4.0, 0.0, 2.0), 1)], [Segment((5.0, 0.0, 0.8), (8.0, 0.0, 0.8), 3), Segment((8.0, 0.0, 0.8), (12.0, -0.5, 0.8), 3)]],
-    [[Segment((12.0, -0.5, 0.8), (20.0, 4.0, 0.4), 3), Segment((20.0, 4.0, 0.4), (26.0, 6.0, 0.2), 3)]],
+    [
+        [Segment((0.0, 0.0, 2.0), (4.0, 0.0, 2.0), 1)],
+        [
+            Segment((5.0, 0.0, 0.8), (8.0, 0.0, 0.8), 3),
+            Segment((8.0, 0.0, 0.8), (12.0, -0.5, 0.8), 3),
+        ],
+    ],
+    [
+        [
+            Segment((12.0, -0.5, 0.8), (20.0, 4.0, 0.4), 3),
+            Segment((20.0, 4.0, 0.4), (26.0, 6.0, 0.2), 3),
+        ]
+    ],
     [[Segment((12.0, -0.5, 0.5), (19.0, -3.0, 0.5), 3)]],
     [[Segment((19.0, -3.0, 0.5), (24.0, -7.0, 0.2), 3)]],
-    [[Segment((19.0, -3.0, 0.5), (23.0, -1.0, 0.2), 3), Segment((23.0, -1.0, 0.2), (26.0, -2.0, 0.2), 3)]],
-    [[Segment((-2.0, 0.0, 0.4), (-10.0, 0.0, 0.4), 2)]],]
+    [
+        [
+            Segment((19.0, -3.0, 0.5), (23.0, -1.0, 0.2), 3),
+            Segment((23.0, -1.0, 0.2), (26.0, -2.0, 0.2), 3),
+        ]
+    ],
+    [[Segment((-2.0, 0.0, 0.4), (-10.0, 0.0, 0.4), 2)]],
+]
 detached_morph = representation.make_morph(tmp)
 
 tmp = [
-    [[Segment((0.0, 0.0, 0.5), (1.0, 0.0, 1.5), 1), Segment((1.0, 0.0, 1.5), (2.0, 0.0, 2.5), 1), Segment((2.0, 0.0, 2.5), (3.0, 0.0, 2.5), 1), Segment((3.0, 0.0, 2.5), (4.0, 0.0, 1.2), 1), Segment((4.0, 0.0, 0.8), (8.0, 0.0, 0.8), 3), Segment((8.0, 0.0, 0.8), (12.0, -0.5, 0.8), 3)]],
-    [[Segment((12.0, -0.5, 0.8), (20.0, 4.0, 0.4), 3), Segment((20.0, 4.0, 0.4), (26.0, 6.0, 0.2), 3)]],
+    [
+        [
+            Segment((0.0, 0.0, 0.5), (1.0, 0.0, 1.5), 1),
+            Segment((1.0, 0.0, 1.5), (2.0, 0.0, 2.5), 1),
+            Segment((2.0, 0.0, 2.5), (3.0, 0.0, 2.5), 1),
+            Segment((3.0, 0.0, 2.5), (4.0, 0.0, 1.2), 1),
+            Segment((4.0, 0.0, 0.8), (8.0, 0.0, 0.8), 3),
+            Segment((8.0, 0.0, 0.8), (12.0, -0.5, 0.8), 3),
+        ]
+    ],
+    [
+        [
+            Segment((12.0, -0.5, 0.8), (20.0, 4.0, 0.4), 3),
+            Segment((20.0, 4.0, 0.4), (26.0, 6.0, 0.2), 3),
+        ]
+    ],
     [[Segment((12.0, -0.5, 0.5), (19.0, -3.0, 0.5), 3)]],
     [[Segment((19.0, -3.0, 0.5), (24.0, -7.0, 0.2), 3)]],
-    [[Segment((19.0, -3.0, 0.5), (23.0, -1.0, 0.2), 3), Segment((23.0, -1.0, 0.2), (26.0, -2.0, 0.2), 3)]],
-    [[Segment((0.0, 0.0, 0.4), (-7.0, 0.0, 0.4), 2), Segment((-7.0, 0.0, 0.4), (-10.0, 0.0, 0.4), 2)]],]
+    [
+        [
+            Segment((19.0, -3.0, 0.5), (23.0, -1.0, 0.2), 3),
+            Segment((23.0, -1.0, 0.2), (26.0, -2.0, 0.2), 3),
+        ]
+    ],
+    [
+        [
+            Segment((0.0, 0.0, 0.4), (-7.0, 0.0, 0.4), 2),
+            Segment((-7.0, 0.0, 0.4), (-10.0, 0.0, 0.4), 2),
+        ]
+    ],
+]
 stacked_morph = representation.make_morph(tmp)
 
 tmp = [
-    [[Segment((-2.0, 0.0, 2.0), (2.0, 0.0, 2.0), 1)]],]
+    [[Segment((-2.0, 0.0, 2.0), (2.0, 0.0, 2.0), 1)]],
+]
 sphere_morph = representation.make_morph(tmp)
 
 tmp = [
-    [[Segment((0.0, 0.0, 1.0), (10.0, 0.0, 0.5), 3)]],]
+    [[Segment((0.0, 0.0, 1.0), (10.0, 0.0, 0.5), 3)]],
+]
 branch_morph1 = representation.make_morph(tmp)
 
 tmp = [
-    [[Segment((0.0, 0.0, 1.0), (3.0, 0.2, 0.8), 1), Segment((3.0, 0.2, 0.8), (5.0, -0.1, 0.7), 2), Segment((5.0, -0.1, 0.7), (8.0, 0.0, 0.6), 2), Segment((8.0, 0.0, 0.6), (10.0, 0.0, 0.5), 3)]],]
+    [
+        [
+            Segment((0.0, 0.0, 1.0), (3.0, 0.2, 0.8), 1),
+            Segment((3.0, 0.2, 0.8), (5.0, -0.1, 0.7), 2),
+            Segment((5.0, -0.1, 0.7), (8.0, 0.0, 0.6), 2),
+            Segment((8.0, 0.0, 0.6), (10.0, 0.0, 0.5), 3),
+        ]
+    ],
+]
 branch_morph2 = representation.make_morph(tmp)
 
 tmp = [
-    [[Segment((0.0, 0.0, 1.0), (3.0, 0.2, 0.8), 1), Segment((3.0, 0.2, 0.8), (5.0, -0.1, 0.7), 2)], [Segment((6.0, -0.1, 0.7), (9.0, 0.0, 0.6), 2), Segment((9.0, 0.0, 0.6), (11.0, 0.0, 0.5), 3)]],]
+    [
+        [
+            Segment((0.0, 0.0, 1.0), (3.0, 0.2, 0.8), 1),
+            Segment((3.0, 0.2, 0.8), (5.0, -0.1, 0.7), 2),
+        ],
+        [
+            Segment((6.0, -0.1, 0.7), (9.0, 0.0, 0.6), 2),
+            Segment((9.0, 0.0, 0.6), (11.0, 0.0, 0.5), 3),
+        ],
+    ],
+]
 branch_morph3 = representation.make_morph(tmp)
 
 tmp = [
-    [[Segment((0.0, 0.0, 1.0), (3.0, 0.2, 0.8), 1), Segment((3.0, 0.2, 0.8), (5.0, -0.1, 0.7), 2), Segment((5.0, -0.1, 0.7), (8.0, 0.0, 0.5), 2), Segment((8.0, 0.0, 0.3), (10.0, 0.0, 0.5), 3)]],]
+    [
+        [
+            Segment((0.0, 0.0, 1.0), (3.0, 0.2, 0.8), 1),
+            Segment((3.0, 0.2, 0.8), (5.0, -0.1, 0.7), 2),
+            Segment((5.0, -0.1, 0.7), (8.0, 0.0, 0.5), 2),
+            Segment((8.0, 0.0, 0.3), (10.0, 0.0, 0.5), 3),
+        ]
+    ],
+]
 branch_morph4 = representation.make_morph(tmp)
 
 tmp = [
     [[Segment((0.0, 0.0, 1.0), (10.0, 0.0, 0.5), 3)]],
     [[Segment((10.0, 0.0, 0.5), (15.0, 3.0, 0.2), 3)]],
-    [[Segment((10.0, 0.0, 0.5), (15.0, -3.0, 0.2), 3)]],]
+    [[Segment((10.0, 0.0, 0.5), (15.0, -3.0, 0.2), 3)]],
+]
 yshaped_morph = representation.make_morph(tmp)
 
 tmp = [
-    [[Segment((-3.0, 0.0, 3.0), (3.0, 0.0, 3.0), 1)], [Segment((4.0, -1.0, 0.6), (10.0, -2.0, 0.5), 3), Segment((10.0, -2.0, 0.5), (15.0, -1.0, 0.5), 3)]],
+    [
+        [Segment((-3.0, 0.0, 3.0), (3.0, 0.0, 3.0), 1)],
+        [
+            Segment((4.0, -1.0, 0.6), (10.0, -2.0, 0.5), 3),
+            Segment((10.0, -2.0, 0.5), (15.0, -1.0, 0.5), 3),
+        ],
+    ],
     [[Segment((15.0, -1.0, 0.5), (18.0, -5.0, 0.3), 3)]],
-    [[Segment((15.0, -1.0, 0.5), (20.0, 2.0, 0.3), 3)]],]
+    [[Segment((15.0, -1.0, 0.5), (20.0, 2.0, 0.3), 3)]],
+]
 ysoma_morph1 = representation.make_morph(tmp)
 
 tmp = [
     [[Segment((-3.0, 0.0, 3.0), (3.0, 0.0, 3.0), 1)]],
-    [[Segment((4.0, -1.0, 0.6), (10.0, -2.0, 0.5), 3), Segment((10.0, -2.0, 0.5), (15.0, -1.0, 0.5), 3)]],
+    [
+        [
+            Segment((4.0, -1.0, 0.6), (10.0, -2.0, 0.5), 3),
+            Segment((10.0, -2.0, 0.5), (15.0, -1.0, 0.5), 3),
+        ]
+    ],
     [[Segment((15.0, -1.0, 0.5), (18.0, -5.0, 0.3), 3)]],
     [[Segment((15.0, -1.0, 0.5), (20.0, 2.0, 0.3), 3)]],
     [[Segment((2.0, 1.0, 0.6), (12.0, 4.0, 0.5), 3)]],
     [[Segment((12.0, 4.0, 0.5), (18.0, 4.0, 0.3), 3)]],
     [[Segment((12.0, 4.0, 0.5), (16.0, 9.0, 0.1), 3)]],
-    [[Segment((-3.5, 0.0, 1.5), (-6.0, -0.2, 0.5), 2), Segment((-6.0, -0.2, 0.5), (-15.0, -0.1, 0.5), 2)]],]
+    [
+        [
+            Segment((-3.5, 0.0, 1.5), (-6.0, -0.2, 0.5), 2),
+            Segment((-6.0, -0.2, 0.5), (-15.0, -0.1, 0.5), 2),
+        ]
+    ],
+]
 ysoma_morph2 = representation.make_morph(tmp)
 
 tmp = [
     [[Segment((-3.0, 0.0, 3.0), (3.0, 0.0, 3.0), 1)]],
-    [[Segment((3.0, 0.0, 0.6), (9.0, -1.0, 0.5), 3), Segment((9.0, -1.0, 0.5), (14.0, 0.0, 0.5), 3)]],
+    [
+        [
+            Segment((3.0, 0.0, 0.6), (9.0, -1.0, 0.5), 3),
+            Segment((9.0, -1.0, 0.5), (14.0, 0.0, 0.5), 3),
+        ]
+    ],
     [[Segment((14.0, 0.0, 0.5), (17.0, -4.0, 0.3), 3)]],
     [[Segment((14.0, 0.0, 0.5), (19.0, 3.0, 0.3), 3)]],
     [[Segment((3.0, 0.0, 0.6), (13.0, 3.0, 0.5), 3)]],
     [[Segment((13.0, 3.0, 0.5), (19.0, 3.0, 0.3), 3)]],
     [[Segment((13.0, 3.0, 0.5), (17.0, 8.0, 0.1), 3)]],
-    [[Segment((-3.0, 0.0, 1.5), (-5.5, -0.2, 0.5), 2), Segment((-5.5, -0.2, 0.5), (-14.5, -0.1, 0.5), 2)]],]
+    [
+        [
+            Segment((-3.0, 0.0, 1.5), (-5.5, -0.2, 0.5), 2),
+            Segment((-5.5, -0.2, 0.5), (-14.5, -0.1, 0.5), 2),
+        ]
+    ],
+]
 ysoma_morph3 = representation.make_morph(tmp)
 
 tmp = [
-    [[Segment((0.0, 0.0, 2.0), (4.0, 0.0, 2.0), 1), Segment((4.0, 0.0, 0.8), (8.0, 0.0, 0.8), 3), Segment((8.0, 0.0, 0.8), (12.0, -0.5, 0.8), 3)]],
-    [[Segment((12.0, -0.5, 0.8), (20.0, 4.0, 0.4), 3), Segment((20.0, 4.0, 0.4), (26.0, 6.0, 0.2), 3)]],
+    [
+        [
+            Segment((0.0, 0.0, 2.0), (4.0, 0.0, 2.0), 1),
+            Segment((4.0, 0.0, 0.8), (8.0, 0.0, 0.8), 3),
+            Segment((8.0, 0.0, 0.8), (12.0, -0.5, 0.8), 3),
+        ]
+    ],
+    [
+        [
+            Segment((12.0, -0.5, 0.8), (20.0, 4.0, 0.4), 3),
+            Segment((20.0, 4.0, 0.4), (26.0, 6.0, 0.2), 3),
+        ]
+    ],
     [[Segment((12.0, -0.5, 0.5), (19.0, -3.0, 0.5), 3)]],
     [[Segment((19.0, -3.0, 0.5), (24.0, -7.0, 0.2), 4)]],
-    [[Segment((19.0, -3.0, 0.5), (23.0, -1.0, 0.2), 4), Segment((23.0, -1.0, 0.2), (36.0, -2.0, 0.2), 4)]],
-    [[Segment((0.0, 0.0, 2.0), (-7.0, 0.0, 0.4), 2), Segment((-7.0, 0.0, 0.4), (-10.0, 0.0, 0.4), 2)]],]
+    [
+        [
+            Segment((19.0, -3.0, 0.5), (23.0, -1.0, 0.2), 4),
+            Segment((23.0, -1.0, 0.2), (36.0, -2.0, 0.2), 4),
+        ]
+    ],
+    [
+        [
+            Segment((0.0, 0.0, 2.0), (-7.0, 0.0, 0.4), 2),
+            Segment((-7.0, 0.0, 0.4), (-10.0, 0.0, 0.4), 2),
+        ]
+    ],
+]
 tutorial_morph = representation.make_morph(tmp)
 
 tmp = [
-    [[Segment((0.0, 0.0, 1.0), (2.0, 0.0, 1.0), 1), Segment((2.0, 0.0, 1.0), (20.0, 0.0, 1.0), 3)]],
-    [[Segment((0.0, 0.0, 1.0), (-3.0, 0.0, 0.7), 2)]],]
+    [
+        [
+            Segment((0.0, 0.0, 1.0), (2.0, 0.0, 1.0), 1),
+            Segment((2.0, 0.0, 1.0), (20.0, 0.0, 1.0), 3),
+        ]
+    ],
+    [[Segment((0.0, 0.0, 1.0), (-3.0, 0.0, 0.7), 2)]],
+]
 swc_morph = representation.make_morph(tmp)
 
 tmp = [
-    [[Segment((-12.0, 0.0, 6.0), (0.0, 0.0, 6.0), 1), Segment((0.0, 0.0, 2.0), (50.0, 0.0, 2.0), 3)]],
+    [
+        [
+            Segment((-12.0, 0.0, 6.0), (0.0, 0.0, 6.0), 1),
+            Segment((0.0, 0.0, 2.0), (50.0, 0.0, 2.0), 3),
+        ]
+    ],
     [[Segment((50.0, 0.0, 2.0), (85.35533905932738, 35.35533905932737, 0.5), 3)]],
-    [[Segment((50.0, 0.0, 1.0), (85.35533905932738, -35.35533905932737, 1.0), 3)]],]
+    [[Segment((50.0, 0.0, 1.0), (85.35533905932738, -35.35533905932737, 1.0), 3)]],
+]
 tutorial_network_ring_morph = representation.make_morph(tmp)
 
 
 ############# locsets (label_morph)
 
-ls_root  = {'type': 'locset', 'value': [(0, 0.0)]}
-ls_term  = {'type': 'locset', 'value': [(1, 1.0), (3, 1.0), (4, 1.0), (5, 1.0)]}
-ls_rand_dend  = {'type': 'locset', 'value': [(0, 0.5547193370156588), (0, 0.5841758202819731), (0, 0.607192003545501), (0, 0.6181091003428546), (0, 0.6190845627201184), (0, 0.7027325639263277), (0, 0.7616129092226993), (0, 0.9645150497869694), (1, 0.15382287505908834), (1, 0.2594719824047551), (1, 0.28087652335178354), (1, 0.3729681478609085), (1, 0.3959560134241004), (1, 0.4629424550242548), (1, 0.47346867377446744), (1, 0.5493486883630476), (1, 0.6227685370674116), (1, 0.6362196581003494), (1, 0.6646511214508091), (1, 0.7157318936458146), (1, 0.7464198558822775), (1, 0.77074507802833), (1, 0.7860238136304932), (1, 0.8988928261704698), (1, 0.9581259332943499), (2, 0.12773985425987294), (2, 0.3365926476076694), (2, 0.44454300804769703), (2, 0.5409466695719178), (2, 0.5767511435223905), (2, 0.6340206909931745), (2, 0.6354772583375223), (2, 0.6807941995943213), (2, 0.774655947503608), (3, 0.05020708596877571), (3, 0.25581431877212274), (3, 0.2958305460715556), (3, 0.296698184761692), (3, 0.509669134988683), (3, 0.7662305637426007), (3, 0.8565839889923518), (3, 0.8889077221517746), (4, 0.24311286693286885), (4, 0.4354361205546333), (4, 0.4467752481260171), (4, 0.5308169153994543), (4, 0.5701465671464049), (4, 0.670081739879954), (4, 0.6995486862583797), (4, 0.8186709628604206), (4, 0.9141224600171143)]}
-ls_loc15  = {'type': 'locset', 'value': [(1, 0.5)]}
-ls_loc05  = {'type': 'locset', 'value': [(0, 0.5)]}
-ls_uniform0  = {'type': 'locset', 'value': [(0, 0.5841758202819731), (1, 0.6362196581003494), (1, 0.7157318936458146), (1, 0.7464198558822775), (2, 0.6340206909931745), (2, 0.6807941995943213), (3, 0.296698184761692), (3, 0.509669134988683), (3, 0.7662305637426007), (4, 0.5701465671464049)]}
-ls_uniform1  = {'type': 'locset', 'value': [(0, 0.9778060763285382), (1, 0.19973428495790843), (1, 0.8310607916260988), (2, 0.9210229159315735), (2, 0.9244292525837472), (2, 0.9899772550845479), (3, 0.9924233395972087), (4, 0.3641426305909531), (4, 0.4787812247064867), (4, 0.5138656268861914)]}
-ls_branchmid  = {'type': 'locset', 'value': [(0, 0.5), (1, 0.5), (2, 0.5), (3, 0.5), (4, 0.5), (5, 0.5)]}
-ls_distal  = {'type': 'locset', 'value': [(1, 0.796025976329944), (3, 0.6666666666666667), (4, 0.39052429175127), (5, 1.0)]}
-ls_proximal  = {'type': 'locset', 'value': [(1, 0.29602597632994393), (2, 0.0), (5, 0.6124999999999999)]}
-ls_distint_in  = {'type': 'locset', 'value': [(1, 0.5), (2, 0.7), (5, 0.1)]}
-ls_proxint_in  = {'type': 'locset', 'value': [(1, 0.8), (2, 0.3)]}
-ls_loctest  = {'type': 'locset', 'value': [(1, 1.0), (2, 0.0), (5, 0.0)]}
-ls_restrict  = {'type': 'locset', 'value': [(1, 1.0), (3, 1.0), (4, 1.0)]}
-ls_proximal_translate  = {'type': 'locset', 'value': [(1, 0.35497750169352515), (2, 0.5160959062272675), (2, 0.6817468794150789), (5, 0.0)]}
-ls_distal_translate_single  = {'type': 'locset', 'value': [(0, 0.915588599565521)]}
-ls_distal_translate_multi  = {'type': 'locset', 'value': [(1, 0.5795163072671657), (3, 0.24228815992614555), (4, 0.20321157163712014)]}
+ls_root = {"type": "locset", "value": [(0, 0.0)]}
+ls_term = {"type": "locset", "value": [(1, 1.0), (3, 1.0), (4, 1.0), (5, 1.0)]}
+ls_rand_dend = {
+    "type": "locset",
+    "value": [
+        (0, 0.5547193370156588),
+        (0, 0.5841758202819731),
+        (0, 0.607192003545501),
+        (0, 0.6181091003428546),
+        (0, 0.6190845627201184),
+        (0, 0.7027325639263277),
+        (0, 0.7616129092226993),
+        (0, 0.9645150497869694),
+        (1, 0.15382287505908834),
+        (1, 0.2594719824047551),
+        (1, 0.28087652335178354),
+        (1, 0.3729681478609085),
+        (1, 0.3959560134241004),
+        (1, 0.4629424550242548),
+        (1, 0.47346867377446744),
+        (1, 0.5493486883630476),
+        (1, 0.6227685370674116),
+        (1, 0.6362196581003494),
+        (1, 0.6646511214508091),
+        (1, 0.7157318936458146),
+        (1, 0.7464198558822775),
+        (1, 0.77074507802833),
+        (1, 0.7860238136304932),
+        (1, 0.8988928261704698),
+        (1, 0.9581259332943499),
+        (2, 0.12773985425987294),
+        (2, 0.3365926476076694),
+        (2, 0.44454300804769703),
+        (2, 0.5409466695719178),
+        (2, 0.5767511435223905),
+        (2, 0.6340206909931745),
+        (2, 0.6354772583375223),
+        (2, 0.6807941995943213),
+        (2, 0.774655947503608),
+        (3, 0.05020708596877571),
+        (3, 0.25581431877212274),
+        (3, 0.2958305460715556),
+        (3, 0.296698184761692),
+        (3, 0.509669134988683),
+        (3, 0.7662305637426007),
+        (3, 0.8565839889923518),
+        (3, 0.8889077221517746),
+        (4, 0.24311286693286885),
+        (4, 0.4354361205546333),
+        (4, 0.4467752481260171),
+        (4, 0.5308169153994543),
+        (4, 0.5701465671464049),
+        (4, 0.670081739879954),
+        (4, 0.6995486862583797),
+        (4, 0.8186709628604206),
+        (4, 0.9141224600171143),
+    ],
+}
+ls_loc15 = {"type": "locset", "value": [(1, 0.5)]}
+ls_loc05 = {"type": "locset", "value": [(0, 0.5)]}
+ls_uniform0 = {
+    "type": "locset",
+    "value": [
+        (0, 0.5841758202819731),
+        (1, 0.6362196581003494),
+        (1, 0.7157318936458146),
+        (1, 0.7464198558822775),
+        (2, 0.6340206909931745),
+        (2, 0.6807941995943213),
+        (3, 0.296698184761692),
+        (3, 0.509669134988683),
+        (3, 0.7662305637426007),
+        (4, 0.5701465671464049),
+    ],
+}
+ls_uniform1 = {
+    "type": "locset",
+    "value": [
+        (0, 0.9778060763285382),
+        (1, 0.19973428495790843),
+        (1, 0.8310607916260988),
+        (2, 0.9210229159315735),
+        (2, 0.9244292525837472),
+        (2, 0.9899772550845479),
+        (3, 0.9924233395972087),
+        (4, 0.3641426305909531),
+        (4, 0.4787812247064867),
+        (4, 0.5138656268861914),
+    ],
+}
+ls_branchmid = {
+    "type": "locset",
+    "value": [(0, 0.5), (1, 0.5), (2, 0.5), (3, 0.5), (4, 0.5), (5, 0.5)],
+}
+ls_distal = {
+    "type": "locset",
+    "value": [
+        (1, 0.796025976329944),
+        (3, 0.6666666666666667),
+        (4, 0.39052429175127),
+        (5, 1.0),
+    ],
+}
+ls_proximal = {
+    "type": "locset",
+    "value": [(1, 0.29602597632994393), (2, 0.0), (5, 0.6124999999999999)],
+}
+ls_distint_in = {"type": "locset", "value": [(1, 0.5), (2, 0.7), (5, 0.1)]}
+ls_proxint_in = {"type": "locset", "value": [(1, 0.8), (2, 0.3)]}
+ls_loctest = {"type": "locset", "value": [(1, 1.0), (2, 0.0), (5, 0.0)]}
+ls_restrict = {"type": "locset", "value": [(1, 1.0), (3, 1.0), (4, 1.0)]}
+ls_proximal_translate = {
+    "type": "locset",
+    "value": [
+        (1, 0.35497750169352515),
+        (2, 0.5160959062272675),
+        (2, 0.6817468794150789),
+        (5, 0.0),
+    ],
+}
+ls_distal_translate_single = {"type": "locset", "value": [(0, 0.915588599565521)]}
+ls_distal_translate_multi = {
+    "type": "locset",
+    "value": [
+        (1, 0.5795163072671657),
+        (3, 0.24228815992614555),
+        (4, 0.20321157163712014),
+    ],
+}
 
 ############# regions (label_morph)
 
-reg_empty = {'type': 'region', 'value': []}
-reg_all = {'type': 'region', 'value': [(0, 0.0, 1.0), (1, 0.0, 1.0), (2, 0.0, 1.0), (3, 0.0, 1.0), (4, 0.0, 1.0), (5, 0.0, 1.0)]}
-reg_tag1 = {'type': 'region', 'value': [(0, 0.0, 0.3324708796524168)]}
-reg_tag2 = {'type': 'region', 'value': [(5, 0.0, 1.0)]}
-reg_tag3 = {'type': 'region', 'value': [(0, 0.3324708796524168, 1.0), (1, 0.0, 1.0), (2, 0.0, 1.0), (3, 0.0, 1.0), (4, 0.0, 1.0)]}
-reg_tag4 = {'type': 'region', 'value': []}
-reg_soma = {'type': 'region', 'value': [(0, 0.0, 0.3324708796524168)]}
-reg_axon = {'type': 'region', 'value': [(5, 0.0, 1.0)]}
-reg_dend = {'type': 'region', 'value': [(0, 0.3324708796524168, 1.0), (1, 0.0, 1.0), (2, 0.0, 1.0), (3, 0.0, 1.0), (4, 0.0, 1.0)]}
-reg_radlt5 = {'type': 'region', 'value': [(1, 0.44403896449491587, 1.0), (3, 0.0, 1.0), (4, 0.0, 1.0), (5, 0.65625, 1.0)]}
-reg_radle5 = {'type': 'region', 'value': [(1, 0.44403896449491587, 1.0), (2, 0.0, 1.0), (3, 0.0, 1.0), (4, 0.0, 1.0), (5, 0.65625, 1.0)]}
-reg_radgt5 = {'type': 'region', 'value': [(0, 0.0, 1.0), (1, 0.0, 0.44403896449491587), (5, 0.0, 0.65625)]}
-reg_radge5 = {'type': 'region', 'value': [(0, 0.0, 1.0), (1, 0.0, 0.44403896449491587), (2, 0.0, 1.0), (3, 0.0, 0.0), (4, 0.0, 0.0), (5, 0.0, 0.65625)]}
-reg_rad36 = {'type': 'region', 'value': [(1, 0.29602597632994393, 0.796025976329944), (2, 0.0, 1.0), (3, 0.0, 0.6666666666666667), (4, 0.0, 0.39052429175127), (5, 0.6124999999999999, 1.0)]}
-reg_branch0 = {'type': 'region', 'value': [(0, 0.0, 1.0)]}
-reg_branch3 = {'type': 'region', 'value': [(3, 0.0, 1.0)]}
-reg_segment0 = {'type': 'region', 'value': [(0, 0.0, 0.3324708796524168)]}
-reg_segment3 = {'type': 'region', 'value': [(1, 0.0, 0.5920519526598877)]}
-reg_cable_0_28 = {'type': 'region', 'value': [(0, 0.2, 0.8)]}
-reg_cable_1_01 = {'type': 'region', 'value': [(1, 0.0, 1.0)]}
-reg_cable_1_31 = {'type': 'region', 'value': [(1, 0.3, 1.0)]}
-reg_cable_1_37 = {'type': 'region', 'value': [(1, 0.3, 0.7)]}
-reg_proxint = {'type': 'region', 'value': [(0, 0.7697564611867647, 1.0), (1, 0.4774887508467626, 0.8), (2, 0.0, 0.3)]}
-reg_proxintinf = {'type': 'region', 'value': [(0, 0.0, 1.0), (1, 0.0, 0.8), (2, 0.0, 0.3)]}
-reg_distint = {'type': 'region', 'value': [(1, 0.5, 0.8225112491532374), (2, 0.7, 1.0), (3, 0.0, 0.432615327328525), (4, 0.0, 0.3628424955125098), (5, 0.1, 0.6)]}
-reg_distintinf = {'type': 'region', 'value': [(1, 0.5, 1.0), (2, 0.7, 1.0), (3, 0.0, 1.0), (4, 0.0, 1.0), (5, 0.1, 1.0)]}
-reg_lhs = {'type': 'region', 'value': [(0, 0.5, 1.0), (1, 0.0, 0.5)]}
-reg_rhs = {'type': 'region', 'value': [(1, 0.0, 1.0)]}
-reg_and = {'type': 'region', 'value': [(1, 0.0, 0.5)]}
-reg_or = {'type': 'region', 'value': [(0, 0.5, 1.0), (1, 0.0, 1.0)]}
+reg_empty = {"type": "region", "value": []}
+reg_all = {
+    "type": "region",
+    "value": [
+        (0, 0.0, 1.0),
+        (1, 0.0, 1.0),
+        (2, 0.0, 1.0),
+        (3, 0.0, 1.0),
+        (4, 0.0, 1.0),
+        (5, 0.0, 1.0),
+    ],
+}
+reg_tag1 = {"type": "region", "value": [(0, 0.0, 0.3324708796524168)]}
+reg_tag2 = {"type": "region", "value": [(5, 0.0, 1.0)]}
+reg_tag3 = {
+    "type": "region",
+    "value": [
+        (0, 0.3324708796524168, 1.0),
+        (1, 0.0, 1.0),
+        (2, 0.0, 1.0),
+        (3, 0.0, 1.0),
+        (4, 0.0, 1.0),
+    ],
+}
+reg_tag4 = {"type": "region", "value": []}
+reg_soma = {"type": "region", "value": [(0, 0.0, 0.3324708796524168)]}
+reg_axon = {"type": "region", "value": [(5, 0.0, 1.0)]}
+reg_dend = {
+    "type": "region",
+    "value": [
+        (0, 0.3324708796524168, 1.0),
+        (1, 0.0, 1.0),
+        (2, 0.0, 1.0),
+        (3, 0.0, 1.0),
+        (4, 0.0, 1.0),
+    ],
+}
+reg_radlt5 = {
+    "type": "region",
+    "value": [
+        (1, 0.44403896449491587, 1.0),
+        (3, 0.0, 1.0),
+        (4, 0.0, 1.0),
+        (5, 0.65625, 1.0),
+    ],
+}
+reg_radle5 = {
+    "type": "region",
+    "value": [
+        (1, 0.44403896449491587, 1.0),
+        (2, 0.0, 1.0),
+        (3, 0.0, 1.0),
+        (4, 0.0, 1.0),
+        (5, 0.65625, 1.0),
+    ],
+}
+reg_radgt5 = {
+    "type": "region",
+    "value": [(0, 0.0, 1.0), (1, 0.0, 0.44403896449491587), (5, 0.0, 0.65625)],
+}
+reg_radge5 = {
+    "type": "region",
+    "value": [
+        (0, 0.0, 1.0),
+        (1, 0.0, 0.44403896449491587),
+        (2, 0.0, 1.0),
+        (3, 0.0, 0.0),
+        (4, 0.0, 0.0),
+        (5, 0.0, 0.65625),
+    ],
+}
+reg_rad36 = {
+    "type": "region",
+    "value": [
+        (1, 0.29602597632994393, 0.796025976329944),
+        (2, 0.0, 1.0),
+        (3, 0.0, 0.6666666666666667),
+        (4, 0.0, 0.39052429175127),
+        (5, 0.6124999999999999, 1.0),
+    ],
+}
+reg_branch0 = {"type": "region", "value": [(0, 0.0, 1.0)]}
+reg_branch3 = {"type": "region", "value": [(3, 0.0, 1.0)]}
+reg_segment0 = {"type": "region", "value": [(0, 0.0, 0.3324708796524168)]}
+reg_segment3 = {"type": "region", "value": [(1, 0.0, 0.5920519526598877)]}
+reg_cable_0_28 = {"type": "region", "value": [(0, 0.2, 0.8)]}
+reg_cable_1_01 = {"type": "region", "value": [(1, 0.0, 1.0)]}
+reg_cable_1_31 = {"type": "region", "value": [(1, 0.3, 1.0)]}
+reg_cable_1_37 = {"type": "region", "value": [(1, 0.3, 0.7)]}
+reg_proxint = {
+    "type": "region",
+    "value": [
+        (0, 0.7697564611867647, 1.0),
+        (1, 0.4774887508467626, 0.8),
+        (2, 0.0, 0.3),
+    ],
+}
+reg_proxintinf = {
+    "type": "region",
+    "value": [(0, 0.0, 1.0), (1, 0.0, 0.8), (2, 0.0, 0.3)],
+}
+reg_distint = {
+    "type": "region",
+    "value": [
+        (1, 0.5, 0.8225112491532374),
+        (2, 0.7, 1.0),
+        (3, 0.0, 0.432615327328525),
+        (4, 0.0, 0.3628424955125098),
+        (5, 0.1, 0.6),
+    ],
+}
+reg_distintinf = {
+    "type": "region",
+    "value": [
+        (1, 0.5, 1.0),
+        (2, 0.7, 1.0),
+        (3, 0.0, 1.0),
+        (4, 0.0, 1.0),
+        (5, 0.1, 1.0),
+    ],
+}
+reg_lhs = {"type": "region", "value": [(0, 0.5, 1.0), (1, 0.0, 0.5)]}
+reg_rhs = {"type": "region", "value": [(1, 0.0, 1.0)]}
+reg_and = {"type": "region", "value": [(1, 0.0, 0.5)]}
+reg_or = {"type": "region", "value": [(0, 0.5, 1.0), (1, 0.0, 1.0)]}
 
 ############# locsets (tutorial_morph)
 
-tut_ls_root  = {'type': 'locset', 'value': [(0, 0.0)]}
-tut_ls_terminal  = {'type': 'locset', 'value': [(1, 1.0), (3, 1.0), (4, 1.0), (5, 1.0)]}
-tut_ls_custom_terminal  = {'type': 'locset', 'value': [(3, 1.0), (4, 1.0)]}
-tut_ls_axon_terminal  = {'type': 'locset', 'value': [(5, 1.0)]}
+tut_ls_root = {"type": "locset", "value": [(0, 0.0)]}
+tut_ls_terminal = {"type": "locset", "value": [(1, 1.0), (3, 1.0), (4, 1.0), (5, 1.0)]}
+tut_ls_custom_terminal = {"type": "locset", "value": [(3, 1.0), (4, 1.0)]}
+tut_ls_axon_terminal = {"type": "locset", "value": [(5, 1.0)]}
 
 ############# regions (tutorial_morph)
 
-tut_reg_all = {'type': 'region', 'value': [(0, 0.0, 1.0), (1, 0.0, 1.0), (2, 0.0, 1.0), (3, 0.0, 1.0), (4, 0.0, 1.0), (5, 0.0, 1.0)]}
-tut_reg_soma = {'type': 'region', 'value': [(0, 0.0, 0.3324708796524168)]}
-tut_reg_axon = {'type': 'region', 'value': [(5, 0.0, 1.0)]}
-tut_reg_dend = {'type': 'region', 'value': [(0, 0.3324708796524168, 1.0), (1, 0.0, 1.0), (2, 0.0, 1.0)]}
-tut_reg_last = {'type': 'region', 'value': [(3, 0.0, 1.0), (4, 0.0, 1.0)]}
-tut_reg_rad_gt = {'type': 'region', 'value': [(0, 0.0, 0.3324708796524168), (5, 0.0, 0.21875)]}
-tut_reg_custom = {'type': 'region', 'value': [(0, 0.0, 0.3324708796524168), (3, 0.0, 1.0), (4, 0.0, 1.0), (5, 0.0, 0.21875)]}
+tut_reg_all = {
+    "type": "region",
+    "value": [
+        (0, 0.0, 1.0),
+        (1, 0.0, 1.0),
+        (2, 0.0, 1.0),
+        (3, 0.0, 1.0),
+        (4, 0.0, 1.0),
+        (5, 0.0, 1.0),
+    ],
+}
+tut_reg_soma = {"type": "region", "value": [(0, 0.0, 0.3324708796524168)]}
+tut_reg_axon = {"type": "region", "value": [(5, 0.0, 1.0)]}
+tut_reg_dend = {
+    "type": "region",
+    "value": [(0, 0.3324708796524168, 1.0), (1, 0.0, 1.0), (2, 0.0, 1.0)],
+}
+tut_reg_last = {"type": "region", "value": [(3, 0.0, 1.0), (4, 0.0, 1.0)]}
+tut_reg_rad_gt = {
+    "type": "region",
+    "value": [(0, 0.0, 0.3324708796524168), (5, 0.0, 0.21875)],
+}
+tut_reg_custom = {
+    "type": "region",
+    "value": [
+        (0, 0.0, 0.3324708796524168),
+        (3, 0.0, 1.0),
+        (4, 0.0, 1.0),
+        (5, 0.0, 0.21875),
+    ],
+}
 
 ############# locsets (tutorial_network_ring_morph)
 
-tut_network_ring_ls_synapse_site  = {'type': 'locset', 'value': [(1, 0.5)]}
-tut_network_ring_ls_root  = {'type': 'locset', 'value': [(0, 0.0)]}
+tut_network_ring_ls_synapse_site = {"type": "locset", "value": [(1, 0.5)]}
+tut_network_ring_ls_root = {"type": "locset", "value": [(0, 0.0)]}
 
 ############# regions (tutorial_network_ring_morph)
 
-tut_network_ring_reg_soma = {'type': 'region', 'value': [(0, 0.0, 0.1935483870967742)]}
-tut_network_ring_reg_dend = {'type': 'region', 'value': [(0, 0.1935483870967742, 1.0), (1, 0.0, 1.0), (2, 0.0, 1.0)]}
+tut_network_ring_reg_soma = {"type": "region", "value": [(0, 0.0, 0.1935483870967742)]}
+tut_network_ring_reg_dend = {
+    "type": "region",
+    "value": [(0, 0.1935483870967742, 1.0), (1, 0.0, 1.0), (2, 0.0, 1.0)],
+}
diff --git a/doc/scripts/make_images.py b/doc/scripts/make_images.py
index 35369ec5..0c742a37 100644
--- a/doc/scripts/make_images.py
+++ b/doc/scripts/make_images.py
@@ -3,52 +3,53 @@ import svgwrite
 import math
 import inputs
 
-tag_colors_colorscheme = ['white', '#ffc2c2', 'gray', '#c2caff', '#81c8aa']
-tag_colors_bwscheme = ['lightgray']*len(tag_colors_colorscheme)
+tag_colors_colorscheme = ["white", "#ffc2c2", "gray", "#c2caff", "#81c8aa"]
+tag_colors_bwscheme = ["lightgray"] * len(tag_colors_colorscheme)
 
 #
 # ############################################
 #
 
+
 def translate(x, f, xshift):
-    return (f*x[0]+xshift, -f*x[1])
+    return (f * x[0] + xshift, -f * x[1])
+
 
 def translate_all(points, f, xshift):
     return [translate(x, f, xshift) for x in points]
 
+
 # Draw one or more morphologies, side by side.
 # Each morphology can be drawn as segments or branches.
 def morph_image(morphs, methods, filename, **kwargs):
-    assert(len(morphs)==len(methods))
+    assert len(morphs) == len(methods)
 
-    lab_sc = kwargs.get('lab_sc',2)
-    sc = kwargs.get('sc',20)
-    draw_labels = kwargs.get('draw_labels',True)
-    colors = kwargs.get('colors',True)
+    lab_sc = kwargs.get("lab_sc", 2)
+    sc = kwargs.get("sc", 20)
+    draw_labels = kwargs.get("draw_labels", True)
+    colors = kwargs.get("colors", True)
     tag_colors = tag_colors_colorscheme if colors else tag_colors_bwscheme
 
-    print('generating:', filename)
+    print("generating:", filename)
     dwg = svgwrite.Drawing(filename=filename, debug=True)
 
     # Width of lines and circle strokes.
-    line_width=0.1*sc
+    line_width = 0.1 * sc
 
     # Padding around image.
-    fudge=1.5*sc
-
-    linecolor='black'
-    pointcolor='red'
-    lines = dwg.add(dwg.g(id='lines',
-                          stroke=linecolor,
-                          fill='white',
-                          stroke_width=line_width))
-    points = dwg.add(dwg.g(id='points',
-                           stroke=pointcolor,
-                           fill=pointcolor,
-                           stroke_width=line_width))
-    numbers = dwg.add(dwg.g(id='numbers',
-                             text_anchor='middle',
-                             alignment_baseline='middle'))
+    fudge = 1.5 * sc
+
+    linecolor = "black"
+    pointcolor = "red"
+    lines = dwg.add(
+        dwg.g(id="lines", stroke=linecolor, fill="white", stroke_width=line_width)
+    )
+    points = dwg.add(
+        dwg.g(id="points", stroke=pointcolor, fill=pointcolor, stroke_width=line_width)
+    )
+    numbers = dwg.add(
+        dwg.g(id="numbers", text_anchor="middle", alignment_baseline="middle")
+    )
 
     minx = math.inf
     miny = math.inf
@@ -57,8 +58,8 @@ def morph_image(morphs, methods, filename, **kwargs):
 
     offset = 0
 
-    bcolor = 'mediumslateblue'
-    branchfillcolor = 'lightgray'
+    bcolor = "mediumslateblue"
+    branchfillcolor = "lightgray"
 
     nmorph = len(morphs)
 
@@ -73,74 +74,95 @@ def morph_image(morphs, methods, filename, **kwargs):
             branch = morph[i]
 
             lx, ux, ly, uy = branch.minmax()
-            minx = min(minx,  sc*lx+offset)
-            miny = min(miny,  sc*ly)
-            maxx = max(maxx,  sc*ux+offset)
-            maxy = max(maxy,  sc*uy)
+            minx = min(minx, sc * lx + offset)
+            miny = min(miny, sc * ly)
+            maxx = max(maxx, sc * ux + offset)
+            maxy = max(maxy, sc * uy)
 
-            if method=='segments':
+            if method == "segments":
                 for sec in branch.sections:
                     for seg in sec:
-                        if seg.length>0.00001: # only draw nonzero length segments
+                        if seg.length > 0.00001:  # only draw nonzero length segments
                             line = translate_all(seg.corners(), sc, offset)
-                            lines.add(dwg.polygon(points=line, fill=tag_colors[seg.tag]))
+                            lines.add(
+                                dwg.polygon(points=line, fill=tag_colors[seg.tag])
+                            )
 
                             pos = translate(seg.location(0.5), sc, offset)
                             if draw_labels:
-                                points.add(dwg.circle(center=pos,
-                                                    stroke='black',
-                                                    r=sc*0.55*lab_sc,
-                                                    fill='white',
-                                                    stroke_width=sc/20*lab_sc))
+                                points.add(
+                                    dwg.circle(
+                                        center=pos,
+                                        stroke="black",
+                                        r=sc * 0.55 * lab_sc,
+                                        fill="white",
+                                        stroke_width=sc / 20 * lab_sc,
+                                    )
+                                )
                                 # The svg alignment_baseline attribute:
                                 #   - works on Chrome/Chromium
                                 #   - doesn't work on Firefox
                                 # so for now we just shift the relative position by sc/3
-                                label_pos = (pos[0], pos[1]+sc/3)
-                                numbers.add(dwg.text(str(segid),
-                                                    insert=label_pos,
-                                                    stroke='black',
-                                                    fill='black',
-                                                    font_size=sc*0.55*lab_sc))
+                                label_pos = (pos[0], pos[1] + sc / 3)
+                                numbers.add(
+                                    dwg.text(
+                                        str(segid),
+                                        insert=label_pos,
+                                        stroke="black",
+                                        fill="black",
+                                        font_size=sc * 0.55 * lab_sc,
+                                    )
+                                )
                         segid += 1
 
-            elif method=='branches':
+            elif method == "branches":
                 for line in branch.outline():
-                    lines.add(dwg.polygon(points=translate_all(line, sc, offset),
-                                          fill=branchfillcolor))
+                    lines.add(
+                        dwg.polygon(
+                            points=translate_all(line, sc, offset), fill=branchfillcolor
+                        )
+                    )
 
                 if draw_labels:
                     pos = translate(branch.location(0.5), sc, offset)
-                    points.add(dwg.circle(center=pos,
-                                        stroke=bcolor,
-                                        r=sc*0.55*lab_sc,
-                                        fill=bcolor,
-                                        stroke_width=sc/20*lab_sc))
+                    points.add(
+                        dwg.circle(
+                            center=pos,
+                            stroke=bcolor,
+                            r=sc * 0.55 * lab_sc,
+                            fill=bcolor,
+                            stroke_width=sc / 20 * lab_sc,
+                        )
+                    )
                     # The svg alignment_baseline attribute:
                     #   - works on Chrome/Chromium
                     #   - doesn't work on Firefox
                     # so for now we just shift the relative position by sc/3
-                    label_pos = (pos[0], pos[1]+sc/3)
-                    numbers.add(dwg.text(str(i),
-                                        insert=label_pos,
-                                        stroke='white',
-                                        fill='white',
-                                        font_size=sc*0.55*lab_sc))
+                    label_pos = (pos[0], pos[1] + sc / 3)
+                    numbers.add(
+                        dwg.text(
+                            str(i),
+                            insert=label_pos,
+                            stroke="white",
+                            fill="white",
+                            font_size=sc * 0.55 * lab_sc,
+                        )
+                    )
         offset = maxx - minx + sc
 
-
     # Find extent of image.
     minx -= fudge
     miny -= fudge
     maxx += fudge
     maxy += fudge
-    width = maxx-minx
-    height = maxy-miny
+    width = maxx - minx
+    height = maxy - miny
     dwg.viewbox(minx, -maxy, width, height)
 
     # Write the image to file.
     dwg.save()
 
+
 # Generate an image that illustrates regions and locsets on a morphology.
 #
 # Can't handle morpholgies with gaps, where segemnts with a parent-child
@@ -149,30 +171,28 @@ def morph_image(morphs, methods, filename, **kwargs):
 # not bee too hard to support.
 def label_image(morphology, labels, filename, **kwargs):
 
-    loc_sc = kwargs.get('loc_sc',2)
-    sc = kwargs.get('sc',20)
-    drawroot = kwargs.get('drawroot',True)
+    loc_sc = kwargs.get("loc_sc", 2)
+    sc = kwargs.get("sc", 20)
+    drawroot = kwargs.get("drawroot", True)
 
     morph = morphology
-    print('generating:', filename)
+    print("generating:", filename)
     dwg = svgwrite.Drawing(filename=filename, debug=True)
 
     # Width of lines and circle strokes.
-    line_width=0.2*sc
+    line_width = 0.2 * sc
 
     # Padding around image.
-    fudge=1.5*sc
-
-    linecolor='black'
-    pointcolor='red'
-    lines = dwg.add(dwg.g(id='lines',
-                          stroke=linecolor,
-                          fill='white',
-                          stroke_width=line_width))
-    points = dwg.add(dwg.g(id='points',
-                           stroke=pointcolor,
-                           fill=pointcolor,
-                           stroke_width=line_width))
+    fudge = 1.5 * sc
+
+    linecolor = "black"
+    pointcolor = "red"
+    lines = dwg.add(
+        dwg.g(id="lines", stroke=linecolor, fill="white", stroke_width=line_width)
+    )
+    points = dwg.add(
+        dwg.g(id="points", stroke=pointcolor, fill=pointcolor, stroke_width=line_width)
+    )
 
     minx = math.inf
     miny = math.inf
@@ -181,7 +201,7 @@ def label_image(morphology, labels, filename, **kwargs):
 
     offset = 0
 
-    branchfillcolor = 'lightgray'
+    branchfillcolor = "lightgray"
 
     nimage = len(labels)
     for l in range(nimage):
@@ -194,44 +214,69 @@ def label_image(morphology, labels, filename, **kwargs):
             branch = morph[i]
 
             lx, ux, ly, uy = branch.minmax()
-            minx = min(minx,  sc*lx+offset)
-            miny = min(miny,  sc*ly)
-            maxx = max(maxx,  sc*ux+offset)
-            maxy = max(maxy,  sc*uy)
+            minx = min(minx, sc * lx + offset)
+            miny = min(miny, sc * ly)
+            maxx = max(maxx, sc * ux + offset)
+            maxy = max(maxy, sc * uy)
 
             for line in branch.outline():
-                lines.add(dwg.polygon(points=translate_all(line, sc, offset),
-                                      fill=branchfillcolor,
-                                      stroke=branchfillcolor))
+                lines.add(
+                    dwg.polygon(
+                        points=translate_all(line, sc, offset),
+                        fill=branchfillcolor,
+                        stroke=branchfillcolor,
+                    )
+                )
 
         # Draw the root
         root = translate(morph[0].location(0), sc, offset)
         if drawroot:
-            points.add(dwg.circle(center=root, stroke='red', r=sc/2.5*loc_sc, fill='white', stroke_width=sc/10*loc_sc))
-
-        if lab['type'] == 'locset':
-            for loc in lab['value']:
+            points.add(
+                dwg.circle(
+                    center=root,
+                    stroke="red",
+                    r=sc / 2.5 * loc_sc,
+                    fill="white",
+                    stroke_width=sc / 10 * loc_sc,
+                )
+            )
+
+        if lab["type"] == "locset":
+            for loc in lab["value"]:
                 bid = loc[0]
                 pos = loc[1]
 
                 loc = translate(morph[bid].location(pos), sc, offset)
-                points.add(dwg.circle(center=loc, stroke='black', r=sc/3*loc_sc, fill='white', stroke_width=sc/10*loc_sc))
-
-        if lab['type'] == 'region':
-            for cab in lab['value']:
+                points.add(
+                    dwg.circle(
+                        center=loc,
+                        stroke="black",
+                        r=sc / 3 * loc_sc,
+                        fill="white",
+                        stroke_width=sc / 10 * loc_sc,
+                    )
+                )
+
+        if lab["type"] == "region":
+            for cab in lab["value"]:
                 # skip zero length cables
-                bid  = cab[0]
+                bid = cab[0]
                 ppos = cab[1]
                 dpos = cab[2]
 
                 # Don't draw zero-length cables
                 # How should these be drawn: with a line or a circle?
-                if ppos==dpos: continue
+                if ppos == dpos:
+                    continue
 
                 for line in morph[bid].outline(ppos, dpos):
-                    lines.add(dwg.polygon(points=translate_all(line, sc, offset),
-                                          fill='black',
-                                          stroke=branchfillcolor))
+                    lines.add(
+                        dwg.polygon(
+                            points=translate_all(line, sc, offset),
+                            fill="black",
+                            stroke=branchfillcolor,
+                        )
+                    )
 
         offset = maxx - minx + sc
 
@@ -240,90 +285,288 @@ def label_image(morphology, labels, filename, **kwargs):
     miny -= fudge
     maxx += fudge
     maxy += fudge
-    width = maxx-minx
-    height = maxy-miny
+    width = maxx - minx
+    height = maxy - miny
     dwg.viewbox(minx, -maxy, width, height)
 
     # Write the image to file.
     dwg.save()
 
-def generate(path=''):
-
-    morph_image([inputs.branch_morph2], ['segments'], path+'/term_segments.svg', colors=False, draw_labels=False)
-    morph_image([inputs.branch_morph2], ['branches'], path+'/term_branch.svg', colors=False, draw_labels=False)
-    label_image(inputs.branch_morph2, [inputs.reg_cable_0_28], path+'/term_cable.svg', drawroot=False)
 
-    morph_image([inputs.label_morph],    ['branches'], path+'/label_branch.svg')
-
-    morph_image([inputs.label_morph],    ['segments'], path+'/label_seg.svg')
-    morph_image([inputs.detached_morph], ['segments'], path+'/detached_seg.svg')
-    morph_image([inputs.stacked_morph],  ['segments'], path+'/stacked_seg.svg',lab_sc=1.2)
-    morph_image([inputs.swc_morph],      ['segments'], path+'/swc_morph.svg',lab_sc=1.5)
-
-    morph_image([inputs.label_morph, inputs.label_morph], ['segments', 'branches'], path+'/label_morph.svg')
-    morph_image([inputs.detached_morph, inputs.detached_morph], ['segments', 'branches'], path+'/detached_morph.svg')
-    morph_image([inputs.stacked_morph, inputs.stacked_morph], ['segments', 'branches'], path+'/stacked_morph.svg')
-    morph_image([inputs.sphere_morph, inputs.sphere_morph], ['segments', 'branches'], path+'/sphere_morph.svg',lab_sc=1.5)
-    morph_image([inputs.branch_morph1, inputs.branch_morph1], ['segments', 'branches'], path+'/branch_morph1.svg',lab_sc=1)
-    morph_image([inputs.branch_morph2, inputs.branch_morph2], ['segments', 'branches'], path+'/branch_morph2.svg',lab_sc=1)
-    morph_image([inputs.branch_morph3, inputs.branch_morph3], ['segments', 'branches'], path+'/branch_morph3.svg',lab_sc=1)
-    morph_image([inputs.branch_morph4, inputs.branch_morph4], ['segments', 'branches'], path+'/branch_morph4.svg',lab_sc=1)
-    morph_image([inputs.yshaped_morph, inputs.yshaped_morph], ['segments', 'branches'], path+'/yshaped_morph.svg',lab_sc=1.5)
-    morph_image([inputs.ysoma_morph1,  inputs.ysoma_morph1],  ['segments', 'branches'], path+'/ysoma_morph1.svg')
-    morph_image([inputs.ysoma_morph2,  inputs.ysoma_morph2],  ['segments', 'branches'], path+'/ysoma_morph2.svg')
-    morph_image([inputs.ysoma_morph3,  inputs.ysoma_morph3],  ['segments', 'branches'], path+'/ysoma_morph3.svg')
+def generate(path=""):
+
+    morph_image(
+        [inputs.branch_morph2],
+        ["segments"],
+        path + "/term_segments.svg",
+        colors=False,
+        draw_labels=False,
+    )
+    morph_image(
+        [inputs.branch_morph2],
+        ["branches"],
+        path + "/term_branch.svg",
+        colors=False,
+        draw_labels=False,
+    )
+    label_image(
+        inputs.branch_morph2,
+        [inputs.reg_cable_0_28],
+        path + "/term_cable.svg",
+        drawroot=False,
+    )
+
+    morph_image([inputs.label_morph], ["branches"], path + "/label_branch.svg")
+
+    morph_image([inputs.label_morph], ["segments"], path + "/label_seg.svg")
+    morph_image([inputs.detached_morph], ["segments"], path + "/detached_seg.svg")
+    morph_image(
+        [inputs.stacked_morph], ["segments"], path + "/stacked_seg.svg", lab_sc=1.2
+    )
+    morph_image([inputs.swc_morph], ["segments"], path + "/swc_morph.svg", lab_sc=1.5)
+
+    morph_image(
+        [inputs.label_morph, inputs.label_morph],
+        ["segments", "branches"],
+        path + "/label_morph.svg",
+    )
+    morph_image(
+        [inputs.detached_morph, inputs.detached_morph],
+        ["segments", "branches"],
+        path + "/detached_morph.svg",
+    )
+    morph_image(
+        [inputs.stacked_morph, inputs.stacked_morph],
+        ["segments", "branches"],
+        path + "/stacked_morph.svg",
+    )
+    morph_image(
+        [inputs.sphere_morph, inputs.sphere_morph],
+        ["segments", "branches"],
+        path + "/sphere_morph.svg",
+        lab_sc=1.5,
+    )
+    morph_image(
+        [inputs.branch_morph1, inputs.branch_morph1],
+        ["segments", "branches"],
+        path + "/branch_morph1.svg",
+        lab_sc=1,
+    )
+    morph_image(
+        [inputs.branch_morph2, inputs.branch_morph2],
+        ["segments", "branches"],
+        path + "/branch_morph2.svg",
+        lab_sc=1,
+    )
+    morph_image(
+        [inputs.branch_morph3, inputs.branch_morph3],
+        ["segments", "branches"],
+        path + "/branch_morph3.svg",
+        lab_sc=1,
+    )
+    morph_image(
+        [inputs.branch_morph4, inputs.branch_morph4],
+        ["segments", "branches"],
+        path + "/branch_morph4.svg",
+        lab_sc=1,
+    )
+    morph_image(
+        [inputs.yshaped_morph, inputs.yshaped_morph],
+        ["segments", "branches"],
+        path + "/yshaped_morph.svg",
+        lab_sc=1.5,
+    )
+    morph_image(
+        [inputs.ysoma_morph1, inputs.ysoma_morph1],
+        ["segments", "branches"],
+        path + "/ysoma_morph1.svg",
+    )
+    morph_image(
+        [inputs.ysoma_morph2, inputs.ysoma_morph2],
+        ["segments", "branches"],
+        path + "/ysoma_morph2.svg",
+    )
+    morph_image(
+        [inputs.ysoma_morph3, inputs.ysoma_morph3],
+        ["segments", "branches"],
+        path + "/ysoma_morph3.svg",
+    )
 
     ####################### locsets
 
-    label_image(inputs.label_morph, [inputs.ls_term, inputs.ls_rand_dend], path+'/locset_label_examples.svg')
-    label_image(inputs.label_morph, [inputs.reg_dend, inputs.reg_radlt5], path+'/region_label_examples.svg')
-    label_image(inputs.label_morph, [inputs.ls_root], path+'/root_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_term], path+'/term_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_loc15], path+'/location_15_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_loc05], path+'/location_05_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_rad36, inputs.ls_distal], path+'/distal_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_rad36, inputs.ls_proximal], path+'/proximal_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_uniform0, inputs.ls_uniform1], path+'/uniform_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_branchmid], path+'/on_branches_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_term, inputs.reg_tag3, inputs.ls_restrict], path+'/restrict_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_term, inputs.ls_proximal_translate], path+'/proximal_translate_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_loc05, inputs.ls_distal_translate_single, inputs.ls_distal_translate_multi], path+'/distal_translate_label.svg')
+    label_image(
+        inputs.label_morph,
+        [inputs.ls_term, inputs.ls_rand_dend],
+        path + "/locset_label_examples.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.reg_dend, inputs.reg_radlt5],
+        path + "/region_label_examples.svg",
+    )
+    label_image(inputs.label_morph, [inputs.ls_root], path + "/root_label.svg")
+    label_image(inputs.label_morph, [inputs.ls_term], path + "/term_label.svg")
+    label_image(inputs.label_morph, [inputs.ls_loc15], path + "/location_15_label.svg")
+    label_image(inputs.label_morph, [inputs.ls_loc05], path + "/location_05_label.svg")
+    label_image(
+        inputs.label_morph,
+        [inputs.reg_rad36, inputs.ls_distal],
+        path + "/distal_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.reg_rad36, inputs.ls_proximal],
+        path + "/proximal_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.ls_uniform0, inputs.ls_uniform1],
+        path + "/uniform_label.svg",
+    )
+    label_image(
+        inputs.label_morph, [inputs.ls_branchmid], path + "/on_branches_label.svg"
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.ls_term, inputs.reg_tag3, inputs.ls_restrict],
+        path + "/restrict_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.ls_term, inputs.ls_proximal_translate],
+        path + "/proximal_translate_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [
+            inputs.ls_loc05,
+            inputs.ls_distal_translate_single,
+            inputs.ls_distal_translate_multi,
+        ],
+        path + "/distal_translate_label.svg",
+    )
 
     ####################### regions
 
-    label_image(inputs.label_morph, [inputs.reg_empty, inputs.reg_all], path+'/nil_all_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_tag1, inputs.reg_tag2, inputs.reg_tag3], path+'/tag_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_tag1, inputs.reg_tag3], path+'/tag_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_branch0, inputs.reg_branch3], path+'/branch_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_segment0, inputs.reg_segment3], path+'/segment_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_cable_1_01, inputs.reg_cable_1_31, inputs.reg_cable_1_37], path+'/cable_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_proxint_in, inputs.reg_proxint],    path+'/proxint_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_proxint_in, inputs.reg_proxintinf], path+'/proxintinf_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_distint_in, inputs.reg_distint],    path+'/distint_label.svg')
-    label_image(inputs.label_morph, [inputs.ls_distint_in, inputs.reg_distintinf], path+'/distintinf_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_lhs, inputs.reg_rhs, inputs.reg_or],  path+'/union_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_lhs, inputs.reg_rhs, inputs.reg_and], path+'/intersect_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_radlt5],  path+'/radiuslt_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_radle5],  path+'/radiusle_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_radgt5],  path+'/radiusgt_label.svg')
-    label_image(inputs.label_morph, [inputs.reg_radge5],  path+'/radiusge_label.svg')
+    label_image(
+        inputs.label_morph,
+        [inputs.reg_empty, inputs.reg_all],
+        path + "/nil_all_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.reg_tag1, inputs.reg_tag2, inputs.reg_tag3],
+        path + "/tag_label.svg",
+    )
+    label_image(
+        inputs.label_morph, [inputs.reg_tag1, inputs.reg_tag3], path + "/tag_label.svg"
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.reg_branch0, inputs.reg_branch3],
+        path + "/branch_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.reg_segment0, inputs.reg_segment3],
+        path + "/segment_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.reg_cable_1_01, inputs.reg_cable_1_31, inputs.reg_cable_1_37],
+        path + "/cable_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.ls_proxint_in, inputs.reg_proxint],
+        path + "/proxint_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.ls_proxint_in, inputs.reg_proxintinf],
+        path + "/proxintinf_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.ls_distint_in, inputs.reg_distint],
+        path + "/distint_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.ls_distint_in, inputs.reg_distintinf],
+        path + "/distintinf_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.reg_lhs, inputs.reg_rhs, inputs.reg_or],
+        path + "/union_label.svg",
+    )
+    label_image(
+        inputs.label_morph,
+        [inputs.reg_lhs, inputs.reg_rhs, inputs.reg_and],
+        path + "/intersect_label.svg",
+    )
+    label_image(inputs.label_morph, [inputs.reg_radlt5], path + "/radiuslt_label.svg")
+    label_image(inputs.label_morph, [inputs.reg_radle5], path + "/radiusle_label.svg")
+    label_image(inputs.label_morph, [inputs.reg_radgt5], path + "/radiusgt_label.svg")
+    label_image(inputs.label_morph, [inputs.reg_radge5], path + "/radiusge_label.svg")
 
     ####################### Tutorial examples
 
-    morph_image([inputs.tutorial_morph], ['segments'], path+'/tutorial_morph.svg')
-    morph_image([inputs.tutorial_morph], ['segments'], path+'/tutorial_morph_nolabels_nocolors.svg', colors=False, draw_labels=False)
-    morph_image([inputs.tutorial_network_ring_morph], ['segments'], path+'/tutorial_network_ring_morph.svg',lab_sc=6)
+    morph_image([inputs.tutorial_morph], ["segments"], path + "/tutorial_morph.svg")
+    morph_image(
+        [inputs.tutorial_morph],
+        ["segments"],
+        path + "/tutorial_morph_nolabels_nocolors.svg",
+        colors=False,
+        draw_labels=False,
+    )
+    morph_image(
+        [inputs.tutorial_network_ring_morph],
+        ["segments"],
+        path + "/tutorial_network_ring_morph.svg",
+        lab_sc=6,
+    )
 
     ####################### locsets
 
-    label_image(inputs.tutorial_morph, [inputs.tut_ls_root, inputs.tut_ls_terminal], path+'/tutorial_root_term.svg')
-    label_image(inputs.tutorial_morph, [inputs.tut_ls_custom_terminal, inputs.tut_ls_axon_terminal], path+'/tutorial_custom_axon_term.svg')
-    label_image(inputs.tutorial_network_ring_morph, [inputs.tut_network_ring_ls_synapse_site], path+'/tutorial_network_ring_synapse_site.svg', loc_sc=6)
+    label_image(
+        inputs.tutorial_morph,
+        [inputs.tut_ls_root, inputs.tut_ls_terminal],
+        path + "/tutorial_root_term.svg",
+    )
+    label_image(
+        inputs.tutorial_morph,
+        [inputs.tut_ls_custom_terminal, inputs.tut_ls_axon_terminal],
+        path + "/tutorial_custom_axon_term.svg",
+    )
+    label_image(
+        inputs.tutorial_network_ring_morph,
+        [inputs.tut_network_ring_ls_synapse_site],
+        path + "/tutorial_network_ring_synapse_site.svg",
+        loc_sc=6,
+    )
 
     ####################### regions
-    label_image(inputs.tutorial_morph, [inputs.tut_reg_soma, inputs.tut_reg_axon, inputs.tut_reg_dend, inputs.tut_reg_last], path+'/tutorial_tag.svg')
-    label_image(inputs.tutorial_morph, [inputs.tut_reg_all, inputs.tut_reg_rad_gt], path+'/tutorial_all_gt.svg')
-    label_image(inputs.tutorial_morph, [inputs.tut_reg_custom], path+'/tutorial_custom.svg')
-
-if __name__ == '__main__':
-    generate('.')
+    label_image(
+        inputs.tutorial_morph,
+        [
+            inputs.tut_reg_soma,
+            inputs.tut_reg_axon,
+            inputs.tut_reg_dend,
+            inputs.tut_reg_last,
+        ],
+        path + "/tutorial_tag.svg",
+    )
+    label_image(
+        inputs.tutorial_morph,
+        [inputs.tut_reg_all, inputs.tut_reg_rad_gt],
+        path + "/tutorial_all_gt.svg",
+    )
+    label_image(
+        inputs.tutorial_morph, [inputs.tut_reg_custom], path + "/tutorial_custom.svg"
+    )
+
+
+if __name__ == "__main__":
+    generate(".")
diff --git a/doc/scripts/representation.py b/doc/scripts/representation.py
index e29e9de5..869d7069 100644
--- a/doc/scripts/representation.py
+++ b/doc/scripts/representation.py
@@ -1,26 +1,34 @@
 import math
 
+
 def add_vec(u, v):
-    return (u[0]+v[0], u[1]+v[1])
+    return (u[0] + v[0], u[1] + v[1])
+
 
 def norm_vec(v):
-    return math.sqrt(v[0]*v[0] + v[1]*v[1])
+    return math.sqrt(v[0] * v[0] + v[1] * v[1])
+
 
 def sub_vec(u, v):
-    return (u[0]-v[0], u[1]-v[1])
+    return (u[0] - v[0], u[1] - v[1])
+
 
 def rot90_vec(v):
     return (v[1], -v[0])
 
+
 def scal_vec(alpha, v):
-    return (alpha*v[0], alpha*v[1])
+    return (alpha * v[0], alpha * v[1])
+
 
 def unit_vec(v):
     L = norm_vec(v)
-    return (v[0]/L, v[1]/L)
+    return (v[0] / L, v[1] / L)
+
 
 def is_collocated(x, y):
-    return x[0]==y[0] and x[1]==y[1]
+    return x[0] == y[0] and x[1] == y[1]
+
 
 class Segment:
     def __init__(self, prox, dist, tag):
@@ -32,7 +40,11 @@ class Segment:
     def location(self, pos):
         b = self.prox
         e = self.dist
-        return (b[0]+pos*(e[0]-b[0]), b[1]+pos*(e[1]-b[1]), b[2]+pos*(e[2]-b[2]))
+        return (
+            b[0] + pos * (e[0] - b[0]),
+            b[1] + pos * (e[1] - b[1]),
+            b[2] + pos * (e[2] - b[2]),
+        )
 
     def corners(self, prox=0, dist=1):
         b = self.location(prox)
@@ -45,13 +57,13 @@ class Segment:
         p2 = add_vec(e, scal_vec(re, o))
         p3 = sub_vec(e, scal_vec(re, o))
         p4 = sub_vec(b, scal_vec(rb, o))
-        return [p1,p2,p3,p4]
+        return [p1, p2, p3, p4]
 
     def __str__(self):
-        return 'seg({}, {})'.format(self.prox, self.dist)
+        return "seg({}, {})".format(self.prox, self.dist)
 
     def __repr__(self):
-        return 'seg({}, {})'.format(self.prox, self.dist)
+        return "seg({}, {})".format(self.prox, self.dist)
 
 
 # Represent and query a cable cell branch for rendering.
@@ -62,8 +74,8 @@ class Segment:
 # The section representation makes things a bit messy, but it is required
 # to be able to draw morphologies with gaps.
 
-class Branch:
 
+class Branch:
     def __init__(self, sections):
         self.sections = sections
         length = 0
@@ -81,10 +93,10 @@ class Branch:
             for seg in sec:
                 px, py, pr = seg.prox
                 dx, dy, dr = seg.dist
-                minx = min(minx, px-pr, dx-dr)
-                miny = min(miny, py-pr, dy-dr)
-                maxx = max(maxx, px+pr, dx+dr)
-                maxy = max(maxy, py+pr, dy+dr)
+                minx = min(minx, px - pr, dx - dr)
+                miny = min(miny, py - pr, dy - dr)
+                maxx = max(maxx, px + pr, dx + dr)
+                maxy = max(maxy, py + pr, dy + dr)
 
         return (minx, maxx, miny, maxy)
 
@@ -96,11 +108,11 @@ class Branch:
     #       seg: index of the segment in the section
     #       pos: relative position of the location inside the segment
     def segment_id(self, pos):
-        assert(pos>=0 and pos<=1)
-        if pos==0:
+        assert pos >= 0 and pos <= 1
+        if pos == 0:
             return (0, 0, 0.0)
-        if pos==1:
-            return (len(self.sections)-1, len(self.sections[-1])-1, 1.0)
+        if pos == 1:
+            return (len(self.sections) - 1, len(self.sections[-1]) - 1, 1.0)
         l = pos * self.length
 
         part = 0
@@ -108,14 +120,14 @@ class Branch:
             sec = self.sections[secid]
             for segid in range(len(sec)):
                 seg = sec[segid]
-                if part+seg.length >= l:
-                    segpos = (l-part)/seg.length
+                if part + seg.length >= l:
+                    segpos = (l - part) / seg.length
                     return (secid, segid, segpos)
 
                 part += seg.length
 
     def location(self, pos):
-        assert(pos>=0 and pos<=1)
+        assert pos >= 0 and pos <= 1
 
         secid, segid, segpos = self.segment_id(pos)
         return self.sections[secid][segid].location(segpos)
@@ -124,8 +136,8 @@ class Branch:
         sec = self.sections[secid]
 
         # Handle the case where the cable is in one segment
-        if pseg==dseg:
-            assert(ppos<=dpos)
+        if pseg == dseg:
+            assert ppos <= dpos
             return sec[pseg].corners(ppos, dpos)
 
         left = []
@@ -137,7 +149,7 @@ class Branch:
         right += [p4, p3]
 
         # Handle the full segments in the middle
-        for segid in range(pseg+1, dseg):
+        for segid in range(pseg + 1, dseg):
             p1, p2, p3, p4 = sec[segid].corners()
             left += [p1, p2]
             right += [p4, p3]
@@ -150,35 +162,36 @@ class Branch:
         right.reverse()
         return left + right
 
-
-
     # Return outline of all (sub)sections in the branch between the relative
     # locations: 0 ≤ prox ≤ dist ≤ 1
     def outline(self, prox=0, dist=1):
         psec, pseg, ppos = self.segment_id(prox)
         dsec, dseg, dpos = self.segment_id(dist)
 
-        if psec==dsec and pseg==dseg:
+        if psec == dsec and pseg == dseg:
             return [self.sections[psec][pseg].corners(ppos, dpos)]
-        if psec==dsec:
+        if psec == dsec:
             return [self.sec_outline(psec, pseg, ppos, dseg, dpos)]
 
-        outlines = [self.sec_outline(psec, pseg, ppos, len(self.sections[psec])-1, 1)]
-        for secid in range(psec+1,dsec):
-            outlines.append(self.sec_outline(secid, 0, 0, len(self.sections[secid])-1, 1))
+        outlines = [self.sec_outline(psec, pseg, ppos, len(self.sections[psec]) - 1, 1)]
+        for secid in range(psec + 1, dsec):
+            outlines.append(
+                self.sec_outline(secid, 0, 0, len(self.sections[secid]) - 1, 1)
+            )
         outlines.append(self.sec_outline(dsec, 0, 0, dseg, dpos))
 
         return outlines
 
+
 # A morphology for rendering is a flat list of branches, with no
 # parent-child information for the branches.
 # Each branch is itself a list of sections, where each section
 # represents a sequence of segments with no gaps.
 
+
 def make_morph(branches):
     m = []
     for branch_sections in branches:
         m.append(Branch(branch_sections))
 
     return m
-
diff --git a/example/lfp/neuron_lfp_example.py b/example/lfp/neuron_lfp_example.py
index c3db1fda..d0953e8a 100755
--- a/example/lfp/neuron_lfp_example.py
+++ b/example/lfp/neuron_lfp_example.py
@@ -1,11 +1,11 @@
 #!/usr/env/bin python
 # -*- coding: utf-8 -*-
 # Author: Torbjørn Ness <torbjorn.ness@nmbu.no>
-'''
+"""
 NEURON and Python - Creating a multi-compartment model with synaptic input
 with randomized activation times
-'''
-# Import modules for plotting and NEURON itself 
+"""
+# Import modules for plotting and NEURON itself
 import matplotlib.pyplot as plt
 import neuron
 import numpy as np
@@ -21,8 +21,8 @@ class Cell:
     def __init__(self):
         cvode = neuron.h.CVode()
         cvode.use_fast_imem(1)
-        self.tstop = 100.        # simulation duration in ms
-        self.v_init = -65        # membrane voltage(s) at t = 0
+        self.tstop = 100.0  # simulation duration in ms
+        self.v_init = -65  # membrane voltage(s) at t = 0
 
         neuron.h.dt = 0.1
 
@@ -37,13 +37,13 @@ class Cell:
         self.totnsegs = counter  # Total number of compartments in cell model
         self.collect_geometry()
 
-
         self.insert_synapse(self.seclist[0])
 
         self.initiate_recorders()
 
     def make_cell(self):
-        neuron.h("""
+        neuron.h(
+            """
         create soma[1]
         create apic[1]
         objref all
@@ -67,7 +67,8 @@ class Cell:
         apic[0] { insert pas }
         soma[0] { insert hh }
 
-        """)
+        """
+        )
 
     def initiate_recorders(self):
         self.imem = []  # Record membrane currents
@@ -96,16 +97,16 @@ class Cell:
         """
         print(syn_sec.diam)
         syn = neuron.h.ExpSyn(0.5, sec=syn_sec)
-        syn.e = 0.         # reversal potential of synapse conductance in mV
-        syn.tau = 2.       # time constant of synapse conductance in ms
+        syn.e = 0.0  # reversal potential of synapse conductance in mV
+        syn.tau = 2.0  # time constant of synapse conductance in ms
 
-        ns = neuron.h.NetStim(0.5)     # spike time generator object (~presynaptic)
-        ns.noise = 1.                  # Fractional randomness (intervals from exp dist)
-        ns.start = 0.                  # approximate time of first spike
-        ns.number = 1000               # number of spikes
-        ns.interval = 10.              # average interspike interval
-        nc = neuron.h.NetCon(ns, syn)    # Connect generator to synapse
-        nc.weight[0] = .005                  # Set synapse weight
+        ns = neuron.h.NetStim(0.5)  # spike time generator object (~presynaptic)
+        ns.noise = 1.0  # Fractional randomness (intervals from exp dist)
+        ns.start = 0.0  # approximate time of first spike
+        ns.number = 1000  # number of spikes
+        ns.interval = 10.0  # average interspike interval
+        nc = neuron.h.NetCon(ns, syn)  # Connect generator to synapse
+        nc.weight[0] = 0.005  # Set synapse weight
 
         # Everything must be stored or NEURON will forget they ever existed
         self.ns = ns
@@ -130,13 +131,13 @@ class Cell:
 
         counter = 0
 
-        #loop over all segments
+        # loop over all segments
         for sec in neuron.h.allsec():
             n3d = int(neuron.h.n3d())
             nseg = sec.nseg
-            gsen2 = 1./2/nseg
+            gsen2 = 1.0 / 2 / nseg
             if n3d > 0:
-                #create interpolation objects for the xyz pt3d info:
+                # create interpolation objects for the xyz pt3d info:
                 L = np.zeros(n3d)
                 x = np.zeros(n3d)
                 y = np.zeros(n3d)
@@ -147,33 +148,33 @@ class Cell:
                     y[i] = neuron.h.y3d(i)
                     z[i] = neuron.h.z3d(i)
 
-                #normalize as seg.x [0, 1]
+                # normalize as seg.x [0, 1]
                 L /= sec.L
 
-                #temporary store position of segment midpoints
+                # temporary store position of segment midpoints
                 segx = np.zeros(nseg)
                 for i, seg in enumerate(sec):
                     segx[i] = seg.x
 
-                #can't be >0 which may happen due to NEURON->Python float transfer:
+                # can't be >0 which may happen due to NEURON->Python float transfer:
                 segx0 = (segx - gsen2).round(decimals=6)
                 segx1 = (segx + gsen2).round(decimals=6)
 
-                #fill vectors with interpolated coordinates of start and end points
-                xstartvec[counter:counter+nseg] = np.interp(segx0, L, x)
-                xendvec[counter:counter+nseg] = np.interp(segx1, L, x)
+                # fill vectors with interpolated coordinates of start and end points
+                xstartvec[counter : counter + nseg] = np.interp(segx0, L, x)
+                xendvec[counter : counter + nseg] = np.interp(segx1, L, x)
 
-                ystartvec[counter:counter+nseg] = np.interp(segx0, L, y)
-                yendvec[counter:counter+nseg] = np.interp(segx1, L, y)
+                ystartvec[counter : counter + nseg] = np.interp(segx0, L, y)
+                yendvec[counter : counter + nseg] = np.interp(segx1, L, y)
 
-                zstartvec[counter:counter+nseg] = np.interp(segx0, L, z)
-                zendvec[counter:counter+nseg] = np.interp(segx1, L, z)
+                zstartvec[counter : counter + nseg] = np.interp(segx0, L, z)
+                zendvec[counter : counter + nseg] = np.interp(segx1, L, z)
 
-                #fill in values area, diam, length
+                # fill in values area, diam, length
                 for i, seg in enumerate(sec):
                     areavec[counter] = neuron.h.area(seg.x)
                     diamvec[counter] = seg.diam
-                    lengthvec[counter] = sec.L/nseg
+                    lengthvec[counter] = sec.L / nseg
                     counter += 1
 
         # starting position of each compartment (segment)
@@ -204,7 +205,7 @@ class Cell:
         self.vmem = np.array(self.vmem)
         self.imem = np.array(self.imem)
         self.syn_i = np.array(self.syn_i)
-        self.tvec = np.array(self.tvec)[:self.vmem.shape[1]]
+        self.tvec = np.array(self.tvec)[: self.vmem.shape[1]]
 
 
 class ExtElectrode:
@@ -238,9 +239,11 @@ class ExtElectrode:
         """
         self.mapping = np.zeros((self.num_elecs, cell.totnsegs))
         for e_idx in range(self.num_elecs):
-            r2 = ((cell.xmid - self.elec_x[e_idx])**2 +
-                  (cell.ymid - self.elec_y[e_idx])**2 +
-                  (cell.zmid - self.elec_z[e_idx])**2)
+            r2 = (
+                (cell.xmid - self.elec_x[e_idx]) ** 2
+                + (cell.ymid - self.elec_y[e_idx]) ** 2
+                + (cell.zmid - self.elec_z[e_idx]) ** 2
+            )
 
             self.mapping[e_idx] = 1 / (4 * np.pi * self.sigma * np.sqrt(r2))
 
@@ -251,22 +254,43 @@ def plot_results(cell, electrode):
     ################################################################################
     fig = plt.figure(figsize=(9, 5))
     fig.subplots_adjust(wspace=0.5, hspace=0.9)
-    ax_morph = fig.add_subplot(131, aspect=1, xlim=[-150, 150], ylim=[-100, 600],
-                               title="morphology", xlabel="x ($\mu$m)", ylabel="y ($\mu$m)")
-    ax_syn = fig.add_subplot(332, ylabel='nA', title="synaptic current", xlabel='time (ms)')
-    ax_vmem = fig.add_subplot(335, ylabel='mV', xlabel='time (ms)', title="membrane potential")
-    ax_imem = fig.add_subplot(338, ylabel='nA', xlabel='time (ms)', title="membrane current")
-    ax_ep = fig.add_subplot(133, ylabel='$\mu$V', xlabel="time (ms)", title="Extracellular potential")
+    ax_morph = fig.add_subplot(
+        131,
+        aspect=1,
+        xlim=[-150, 150],
+        ylim=[-100, 600],
+        title="morphology",
+        xlabel="x ($\mu$m)",
+        ylabel="y ($\mu$m)",
+    )
+    ax_syn = fig.add_subplot(
+        332, ylabel="nA", title="synaptic current", xlabel="time (ms)"
+    )
+    ax_vmem = fig.add_subplot(
+        335, ylabel="mV", xlabel="time (ms)", title="membrane potential"
+    )
+    ax_imem = fig.add_subplot(
+        338, ylabel="nA", xlabel="time (ms)", title="membrane current"
+    )
+    ax_ep = fig.add_subplot(
+        133, ylabel="$\mu$V", xlabel="time (ms)", title="Extracellular potential"
+    )
 
     plot_comp_idx = 0
-    plot_comp_clr = 'r'
+    plot_comp_clr = "r"
 
     for idx in range(cell.totnsegs):
-        ax_morph.plot([cell.xstart[idx], cell.xend[idx]],
-                      [cell.zstart[idx], cell.zend[idx]], lw=cell.diam[idx] / 2, c='k')
-    ax_morph.plot(cell.xmid[plot_comp_idx], cell.zmid[plot_comp_idx], marker='*', c=plot_comp_clr)
-
-    ax_syn.plot(cell.tvec, cell.syn_i, c='k', lw=2)
+        ax_morph.plot(
+            [cell.xstart[idx], cell.xend[idx]],
+            [cell.zstart[idx], cell.zend[idx]],
+            lw=cell.diam[idx] / 2,
+            c="k",
+        )
+    ax_morph.plot(
+        cell.xmid[plot_comp_idx], cell.zmid[plot_comp_idx], marker="*", c=plot_comp_clr
+    )
+
+    ax_syn.plot(cell.tvec, cell.syn_i, c="k", lw=2)
     ax_vmem.plot(cell.tvec, cell.vmem[0, :], c=plot_comp_clr, lw=2)
     ax_imem.plot(cell.tvec, cell.imem[0, :], c=plot_comp_clr, lw=2)
 
@@ -274,13 +298,16 @@ def plot_results(cell, electrode):
         e_clr = electrode.elec_clr(e_idx)
         sig = 1000 * electrode.extracellular_potential[e_idx]  # convert to uV
         ax_ep.plot(cell.tvec, sig, c=e_clr)
-        ax_morph.plot(electrode.elec_x[e_idx], electrode.elec_z[e_idx], marker='o', c=e_clr)
+        ax_morph.plot(
+            electrode.elec_x[e_idx], electrode.elec_z[e_idx], marker="o", c=e_clr
+        )
 
-    fig.savefig('example_nrn_EP.png')
+    fig.savefig("example_nrn_EP.png")
 
     plt.close(fig)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     cell = Cell()
     cell.simulate()
 
@@ -293,4 +320,3 @@ if __name__ == '__main__':
     electrode.calc_extracellular_potential(cell)
 
     plot_results(cell, electrode)
-
diff --git a/example/lfp/plot-lfp.py b/example/lfp/plot-lfp.py
index bbbeff05..ed210ef3 100755
--- a/example/lfp/plot-lfp.py
+++ b/example/lfp/plot-lfp.py
@@ -18,38 +18,60 @@ import sys
 #   "morphology": { "unit": <string>, "samples":  [[[<number> <number> <number>] ...] ...]
 #                   "probe": [<number> <number>], "electrodes": [[<number> <number>] ...] }
 
+
 def subplot_timeseries(fig, index, jdict, key):
     data = jdict[key]
-    sub = fig.add_subplot(index, ylabel=data['unit'], title=key, xlabel='time (ms)')
-    ts = data['time']
-    vss = data['values'] if 'values' in data else [data['value']]
+    sub = fig.add_subplot(index, ylabel=data["unit"], title=key, xlabel="time (ms)")
+    ts = data["time"]
+    vss = data["values"] if "values" in data else [data["value"]]
+
+    for vs in vss:
+        sub.plot(ts, vs)
 
-    for vs in vss: sub.plot(ts, vs)
 
 def subplot_morphology(fig, index, jdict, key, xlim, ylim):
     data = jdict[key]
-    unit = data['unit']
-    sub = fig.add_subplot(index, xlabel='x ('+unit+')', ylabel='y ('+unit+')', title=key, xlim=xlim, ylim=ylim)
-
-    for samples in data['samples']:
-        polys = [([x0-s0*dy, x0+s0*dy, x1+s1*dy, x1-s1*dy], [y0+s0*dx, y0-s0*dx, y1-s1*dx, y1+s1*dx])
-                for ((x0, y0, r0), (x1, y1, r1)) in zip(samples, samples[1:])
-                for dx, dy in [(x1-x0, y1-y0)]
-                for d in [math.sqrt(dx*dx+dy*dy)]
-                if d>0
-                for s0, s1 in [(r0/d, r1/d)]]
-
-        for xs, ys in polys: sub.fill(xs, ys, 'k')
-    sub.plot(*[u for x, y in data['electrodes'] for u in [[x], [y], 'o']])
-    sub.plot(*[u for x, y in [data['probe']] for u in [[x], [y], 'r*']])
-
-P = argparse.ArgumentParser(description='Plot results of LFP demo.')
+    unit = data["unit"]
+    sub = fig.add_subplot(
+        index,
+        xlabel="x (" + unit + ")",
+        ylabel="y (" + unit + ")",
+        title=key,
+        xlim=xlim,
+        ylim=ylim,
+    )
+
+    for samples in data["samples"]:
+        polys = [
+            (
+                [x0 - s0 * dy, x0 + s0 * dy, x1 + s1 * dy, x1 - s1 * dy],
+                [y0 + s0 * dx, y0 - s0 * dx, y1 - s1 * dx, y1 + s1 * dx],
+            )
+            for ((x0, y0, r0), (x1, y1, r1)) in zip(samples, samples[1:])
+            for dx, dy in [(x1 - x0, y1 - y0)]
+            for d in [math.sqrt(dx * dx + dy * dy)]
+            if d > 0
+            for s0, s1 in [(r0 / d, r1 / d)]
+        ]
+
+        for xs, ys in polys:
+            sub.fill(xs, ys, "k")
+    sub.plot(*[u for x, y in data["electrodes"] for u in [[x], [y], "o"]])
+    sub.plot(*[u for x, y in [data["probe"]] for u in [[x], [y], "r*"]])
+
+
+P = argparse.ArgumentParser(description="Plot results of LFP demo.")
 P.add_argument(
-    'input', metavar='FILE', nargs='?', type=argparse.FileType('r'), default=sys.stdin,
-    help='LFP example output in JSON')
+    "input",
+    metavar="FILE",
+    nargs="?",
+    type=argparse.FileType("r"),
+    default=sys.stdin,
+    help="LFP example output in JSON",
+)
 P.add_argument(
-    '-o', '--output', metavar='FILE', dest='outfile',
-    help='save plot to file FILE')
+    "-o", "--output", metavar="FILE", dest="outfile", help="save plot to file FILE"
+)
 
 args = P.parse_args()
 j = json.load(args.input)
@@ -57,11 +79,11 @@ j = json.load(args.input)
 fig = plt.figure(figsize=(9, 5))
 fig.subplots_adjust(wspace=0.6, hspace=0.9)
 
-subplot_morphology(fig, 131, j, 'morphology', xlim=[-100, 100], ylim=[-100, 600])
-subplot_timeseries(fig, 332, j, 'synaptic current')
-subplot_timeseries(fig, 335, j, 'membrane potential')
-subplot_timeseries(fig, 338, j, 'ionic current density')
-subplot_timeseries(fig, 133, j, 'extracellular potential')
+subplot_morphology(fig, 131, j, "morphology", xlim=[-100, 100], ylim=[-100, 600])
+subplot_timeseries(fig, 332, j, "synaptic current")
+subplot_timeseries(fig, 335, j, "membrane potential")
+subplot_timeseries(fig, 338, j, "ionic current density")
+subplot_timeseries(fig, 133, j, "extracellular potential")
 
 if args.outfile:
     fig.savefig(args.outfile)
diff --git a/python/__init__.py b/python/__init__.py
index 0a4ac0bb..17ccfc65 100644
--- a/python/__init__.py
+++ b/python/__init__.py
@@ -9,12 +9,14 @@ from ._arbor import *
 # Parse VERSION file for the Arbor version string.
 def get_version():
     import os
+
     here = os.path.abspath(os.path.dirname(__file__))
-    with open(os.path.join(here, 'VERSION')) as version_file:
+    with open(os.path.join(here, "VERSION")) as version_file:
         return version_file.read().strip()
 
+
 __version__ = get_version()
-__config__  = config()
+__config__ = config()
 
 # Remove get_version from arbor module.
 del get_version
diff --git a/python/example/brunel.py b/python/example/brunel.py
index 944916ed..6a01f411 100755
--- a/python/example/brunel.py
+++ b/python/example/brunel.py
@@ -5,7 +5,7 @@ import argparse
 import numpy as np
 from numpy.random import RandomState
 
-'''
+"""
 A Brunel network consists of nexc excitatory LIF neurons and ninh inhibitory LIF neurons.
 Each neuron in the network receives in_degree_prop * nexc excitatory connections
 chosen randomly, in_degree_prop * ninh inhibitory connections and next (external) Poisson connections.
@@ -18,7 +18,7 @@ recurrent connections have a small effect.
 Call with parameters, for example:
 ./brunel.py -n 400 -m 100 -e 20 -p 0.1 -w 1.2 -d 1 -g 0.5 -l 5 -t 100 -s 1 -G 50 -S 123 -f spikes.txt
 
-'''
+"""
 
 # Samples m unique values in interval [start, end) - gid.
 # We exclude gid because we don't want self-loops.
@@ -29,14 +29,28 @@ def sample_subset(gen, gid, start, end, m):
     gen.shuffle(idx)
     return idx[:m]
 
-class brunel_recipe (arbor.recipe):
-    def __init__(self, nexc, ninh, next, in_degree_prop, weight, delay, rel_inh_strength, poiss_lambda, seed = 42):
+
+class brunel_recipe(arbor.recipe):
+    def __init__(
+        self,
+        nexc,
+        ninh,
+        next,
+        in_degree_prop,
+        weight,
+        delay,
+        rel_inh_strength,
+        poiss_lambda,
+        seed=42,
+    ):
 
         arbor.recipe.__init__(self)
 
         # Make sure that in_degree_prop in the interval (0, 1]
-        if not 0.0<in_degree_prop<=1.0:
-            print("The proportion of incoming connections should be in the interval (0, 1].")
+        if not 0.0 < in_degree_prop <= 1.0:
+            print(
+                "The proportion of incoming connections should be in the interval (0, 1]."
+            )
             quit()
 
         self.ncells_exc_ = nexc
@@ -47,7 +61,7 @@ class brunel_recipe (arbor.recipe):
         # Set up the parameters.
         self.weight_exc_ = weight
         self.weight_inh_ = -rel_inh_strength * weight
-        self.weight_ext_ =  weight
+        self.weight_ext_ = weight
         self.in_degree_exc_ = round(in_degree_prop * nexc)
         self.in_degree_inh_ = round(in_degree_prop * ninh)
         # each cell receives next incoming Poisson sources with mean rate poiss_lambda, which is equivalent
@@ -62,16 +76,22 @@ class brunel_recipe (arbor.recipe):
 
     def connections_on(self, gid):
         gen = RandomState(gid + self.seed_)
-        connections=[]
+        connections = []
         # Add incoming excitatory connections.
         connections = [
-            arbor.connection((i,"src"), "tgt", self.weight_exc_, self.delay_)
+            arbor.connection((i, "src"), "tgt", self.weight_exc_, self.delay_)
             for i in sample_subset(gen, gid, 0, self.ncells_exc_, self.in_degree_exc_)
         ]
         # Add incoming inhibitory connections.
         connections += [
-            arbor.connection((i,"src"), "tgt", self.weight_inh_, self.delay_)
-            for i in sample_subset(gen, gid, self.ncells_exc_, self.ncells_exc_ + self.ncells_inh_, self.in_degree_inh_)
+            arbor.connection((i, "src"), "tgt", self.weight_inh_, self.delay_)
+            for i in sample_subset(
+                gen,
+                gid,
+                self.ncells_exc_,
+                self.ncells_exc_ + self.ncells_inh_,
+                self.in_degree_inh_,
+            )
         ]
 
         return connections
@@ -92,29 +112,121 @@ class brunel_recipe (arbor.recipe):
         sched = arbor.poisson_schedule(t0, self.lambda_, gid + self.seed_)
         return [arbor.event_generator("tgt", self.weight_ext_, sched)]
 
+
 if __name__ == "__main__":
 
-    parser = argparse.ArgumentParser(description='Brunel model miniapp.')
-    parser.add_argument('-n', '--n-excitatory', dest='nexc', type=int, default=400, help='Number of cells in the excitatory population')
-    parser.add_argument('-m', '--n-inhibitory', dest='ninh', type=int, default=100, help='Number of cells in the inhibitory population')
-    parser.add_argument('-e', '--n-external', dest='next', type=int, default=40, help='Number of incoming Poisson (external) connections per cell')
-    parser.add_argument('-p', '--in-degree-prop', dest='syn_per_cell_prop', type=float, default=0.05, help='Proportion of the connections received per cell')
-    parser.add_argument('-w', '--weight', dest='weight', type=float, default=1.2, help='Weight of excitatory connections')
-    parser.add_argument('-d', '--delay', dest='delay', type=float, default=0.1, help='Delay of all connections')
-    parser.add_argument('-g', '--rel-inh-w', dest='rel_inh_strength', type=float, default=1, help='Relative strength of inhibitory synapses with respect to the excitatory ones')
-    parser.add_argument('-l', '--lambda', dest='poiss_lambda', type=float, default=1, help='Mean firing rate from a single poisson cell (kHz)')
-    parser.add_argument('-t', '--tfinal', dest='tfinal', type=float, default=100, help='Length of the simulation period (ms)')
-    parser.add_argument('-s', '--dt', dest='dt', type=float, default=1, help='Simulation time step (ms)')
-    parser.add_argument('-G', '--group-size', dest='group_size', type=int, default=10, help='Number of cells per cell group')
-    parser.add_argument('-S', '--seed', dest='seed', type=int, default=42, help='Seed for poisson spike generators')
-    parser.add_argument('-f', '--write-spikes', dest='spike_file_output', type=str, help='Save spikes to file')
+    parser = argparse.ArgumentParser(description="Brunel model miniapp.")
+    parser.add_argument(
+        "-n",
+        "--n-excitatory",
+        dest="nexc",
+        type=int,
+        default=400,
+        help="Number of cells in the excitatory population",
+    )
+    parser.add_argument(
+        "-m",
+        "--n-inhibitory",
+        dest="ninh",
+        type=int,
+        default=100,
+        help="Number of cells in the inhibitory population",
+    )
+    parser.add_argument(
+        "-e",
+        "--n-external",
+        dest="next",
+        type=int,
+        default=40,
+        help="Number of incoming Poisson (external) connections per cell",
+    )
+    parser.add_argument(
+        "-p",
+        "--in-degree-prop",
+        dest="syn_per_cell_prop",
+        type=float,
+        default=0.05,
+        help="Proportion of the connections received per cell",
+    )
+    parser.add_argument(
+        "-w",
+        "--weight",
+        dest="weight",
+        type=float,
+        default=1.2,
+        help="Weight of excitatory connections",
+    )
+    parser.add_argument(
+        "-d",
+        "--delay",
+        dest="delay",
+        type=float,
+        default=0.1,
+        help="Delay of all connections",
+    )
+    parser.add_argument(
+        "-g",
+        "--rel-inh-w",
+        dest="rel_inh_strength",
+        type=float,
+        default=1,
+        help="Relative strength of inhibitory synapses with respect to the excitatory ones",
+    )
+    parser.add_argument(
+        "-l",
+        "--lambda",
+        dest="poiss_lambda",
+        type=float,
+        default=1,
+        help="Mean firing rate from a single poisson cell (kHz)",
+    )
+    parser.add_argument(
+        "-t",
+        "--tfinal",
+        dest="tfinal",
+        type=float,
+        default=100,
+        help="Length of the simulation period (ms)",
+    )
+    parser.add_argument(
+        "-s", "--dt", dest="dt", type=float, default=1, help="Simulation time step (ms)"
+    )
+    parser.add_argument(
+        "-G",
+        "--group-size",
+        dest="group_size",
+        type=int,
+        default=10,
+        help="Number of cells per cell group",
+    )
+    parser.add_argument(
+        "-S",
+        "--seed",
+        dest="seed",
+        type=int,
+        default=42,
+        help="Seed for poisson spike generators",
+    )
+    parser.add_argument(
+        "-f",
+        "--write-spikes",
+        dest="spike_file_output",
+        type=str,
+        help="Save spikes to file",
+    )
     # parser.add_argument('-z', '--profile-rank-zero', dest='profile_only_zero', action='store_true', help='Only output profile information for rank 0')
-    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='Print more verbose information to stdout')
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        dest="verbose",
+        action="store_true",
+        help="Print more verbose information to stdout",
+    )
 
     opt = parser.parse_args()
     if opt.verbose:
         print("Running brunel.py with the following settings:")
-        for k,v in vars(opt).items():
+        for k, v in vars(opt).items():
             print(f"{k} = {v}")
 
     context = arbor.context("avail_threads")
@@ -123,9 +235,19 @@ if __name__ == "__main__":
     meters = arbor.meter_manager()
     meters.start(context)
 
-    recipe = brunel_recipe(opt.nexc, opt.ninh, opt.next, opt.syn_per_cell_prop, opt.weight, opt.delay, opt.rel_inh_strength, opt.poiss_lambda, opt.seed)
+    recipe = brunel_recipe(
+        opt.nexc,
+        opt.ninh,
+        opt.next,
+        opt.syn_per_cell_prop,
+        opt.weight,
+        opt.delay,
+        opt.rel_inh_strength,
+        opt.poiss_lambda,
+        opt.seed,
+    )
 
-    meters.checkpoint('recipe-create', context)
+    meters.checkpoint("recipe-create", context)
 
     hint = arbor.partition_hint()
     hint.cpu_group_size = opt.group_size
@@ -133,24 +255,24 @@ if __name__ == "__main__":
     decomp = arbor.partition_load_balance(recipe, context, hints)
     print(decomp)
 
-    meters.checkpoint('load-balance', context)
+    meters.checkpoint("load-balance", context)
 
     sim = arbor.simulation(recipe, decomp, context)
     sim.record(arbor.spike_recording.all)
 
-    meters.checkpoint('simulation-init', context)
+    meters.checkpoint("simulation-init", context)
 
-    sim.run(opt.tfinal,opt.dt)
+    sim.run(opt.tfinal, opt.dt)
 
-    meters.checkpoint('simulation-run', context)
+    meters.checkpoint("simulation-run", context)
 
     # Print profiling information
-    print(f'{arbor.meter_report(meters, context)}')
+    print(f"{arbor.meter_report(meters, context)}")
 
     # Print spike times
     print(f"{len(sim.spikes())} spikes generated.")
 
     if opt.spike_file_output:
-        with open(opt.spike_file_output, 'w') as the_file:
+        with open(opt.spike_file_output, "w") as the_file:
             for meta, data in sim.spikes():
                 the_file.write(f"{meta[0]} {data:3.3f}\n")
diff --git a/python/example/dynamic-catalogue.py b/python/example/dynamic-catalogue.py
index bedd86de..6fc5e8fc 100644
--- a/python/example/dynamic-catalogue.py
+++ b/python/example/dynamic-catalogue.py
@@ -4,7 +4,8 @@ from pathlib import Path
 
 import arbor as arb
 
-cat = Path('cat-catalogue.so').resolve()
+cat = Path("cat-catalogue.so").resolve()
+
 
 class recipe(arb.recipe):
     def __init__(self):
@@ -14,7 +15,7 @@ class recipe(arb.recipe):
         self.props = arb.neuron_cable_properties()
         self.props.catalogue = arb.load_catalogue(cat)
         d = arb.decor()
-        d.paint('(all)', arb.density('dummy'))
+        d.paint("(all)", arb.density("dummy"))
         d.set_property(Vm=0.0)
         self.cell = arb.cable_cell(self.tree, arb.label_dict(), d)
 
@@ -30,11 +31,14 @@ class recipe(arb.recipe):
     def cell_description(self, gid):
         return self.cell
 
+
 if not cat.is_file():
-    print("""Catalogue not found in this directory.
+    print(
+        """Catalogue not found in this directory.
 Please ensure it has been compiled by calling
   <arbor>/scripts/build-catalogue cat <arbor>/python/example/cat
-where <arbor> is the location of the arbor source tree.""")
+where <arbor> is the location of the arbor source tree."""
+    )
     exit(1)
 
 rcp = recipe()
diff --git a/python/example/gap_junctions.py b/python/example/gap_junctions.py
index 8985b3f3..2b6b3221 100644
--- a/python/example/gap_junctions.py
+++ b/python/example/gap_junctions.py
@@ -18,29 +18,31 @@ import matplotlib.pyplot as plt
 
 
 def make_cable_cell(gid):
-    
+
     # Build a segment tree
     tree = arbor.segment_tree()
 
     # Soma with radius 5 μm and length 2 * radius = 10 μm, (tag = 1)
-    s = tree.append(arbor.mnpos, arbor.mpoint(-10, 0, 0, 5), arbor.mpoint(0, 0, 0, 5), tag=1)
+    s = tree.append(
+        arbor.mnpos, arbor.mpoint(-10, 0, 0, 5), arbor.mpoint(0, 0, 0, 5), tag=1
+    )
 
     # Single dendrite with radius 2 μm and length 40 μm, (tag = 2)
     b = tree.append(s, arbor.mpoint(0, 0, 0, 2), arbor.mpoint(40, 0, 0, 2), tag=2)
 
     # Label dictionary for cell components
     labels = arbor.label_dict()
-    labels['soma'] = '(tag 1)'
-    labels['dend'] = '(tag 2)'
+    labels["soma"] = "(tag 1)"
+    labels["dend"] = "(tag 2)"
 
     # Mark location for synapse site at midpoint of dendrite (branch 0 = soma + dendrite)
-    labels['synapse_site'] = '(location 0 0.6)'
+    labels["synapse_site"] = "(location 0 0.6)"
 
     # Gap junction site at connection point of soma and dendrite
-    labels['gj_site'] = '(location 0 0.2)'
+    labels["gj_site"] = "(location 0 0.2)"
 
     # Label root of the tree
-    labels['root'] = '(root)'
+    labels["root"] = "(root)"
 
     # Paint dynamics onto the cell, hh on soma and passive properties on dendrite
     decor = arbor.decor()
@@ -48,19 +50,19 @@ def make_cable_cell(gid):
     decor.paint('"dend"', arbor.density("pas"))
 
     # Attach one synapse and gap junction each on their labeled sites
-    decor.place('"synapse_site"', arbor.synapse('expsyn'), 'syn')
-    decor.place('"gj_site"', arbor.junction('gj'), 'gj')
+    decor.place('"synapse_site"', arbor.synapse("expsyn"), "syn")
+    decor.place('"gj_site"', arbor.junction("gj"), "gj")
 
     # Attach spike detector to cell root
-    decor.place('"root"', arbor.spike_detector(-10), 'detector')
+    decor.place('"root"', arbor.spike_detector(-10), "detector")
 
     cell = arbor.cable_cell(tree, labels, decor)
 
     return cell
 
+
 # Create a recipe that generates connected chains of cells
 class chain_recipe(arbor.recipe):
-
     def __init__(self, ncells_per_chain, nchains):
         arbor.recipe.__init__(self)
         self.nchains = nchains
@@ -81,34 +83,34 @@ class chain_recipe(arbor.recipe):
         if (gid == 0) or (gid % self.ncells_per_chain > 0):
             return []
         else:
-            src = gid-1
-            w   = 0.05
-            d   = 10
-            return [arbor.connection((src,'detector'), 'syn', w, d)]
-    
+            src = gid - 1
+            w = 0.05
+            d = 10
+            return [arbor.connection((src, "detector"), "syn", w, d)]
+
     # Create gap junction connections between a cell within a chain and its neighbor(s)
     def gap_junctions_on(self, gid):
         conns = []
 
-        chain_begin = int(gid/self.ncells_per_chain) * self.ncells_per_chain
-        chain_end   = chain_begin + self.ncells_per_chain
+        chain_begin = int(gid / self.ncells_per_chain) * self.ncells_per_chain
+        chain_end = chain_begin + self.ncells_per_chain
 
         next_cell = gid + 1
         prev_cell = gid - 1
 
         if next_cell < chain_end:
-            conns.append(arbor.gap_junction_connection((gid+1, 'gj'), 'gj', 0.015))
+            conns.append(arbor.gap_junction_connection((gid + 1, "gj"), "gj", 0.015))
         if prev_cell >= chain_begin:
-            conns.append(arbor.gap_junction_connection((gid-1, 'gj'), 'gj', 0.015))
-        
+            conns.append(arbor.gap_junction_connection((gid - 1, "gj"), "gj", 0.015))
+
         return conns
 
     # Event generator at first cell
     def event_generators(self, gid):
-        if gid==0:
+        if gid == 0:
             sched = arbor.explicit_schedule([1])
             weight = 0.1
-            return [arbor.event_generator('syn', weight, sched)]
+            return [arbor.event_generator("syn", weight, sched)]
         return []
 
     # Place a probe at the root of each cell
@@ -118,6 +120,7 @@ class chain_recipe(arbor.recipe):
     def global_properties(self, kind):
         return self.props
 
+
 # Number of cells per chain
 ncells_per_chain = 5
 
@@ -127,7 +130,7 @@ nchains = 3
 # Total number of cells
 ncells = nchains * ncells_per_chain
 
-#Instantiate recipe
+# Instantiate recipe
 recipe = chain_recipe(ncells_per_chain, nchains)
 
 # Create a default execution context, domain decomposition and simulation
@@ -143,20 +146,24 @@ handles = [sim.sample((gid, 0), arbor.regular_schedule(0.1)) for gid in range(nc
 
 # Run simulation for 100 ms
 sim.run(100)
-print('Simulation finished')
+print("Simulation finished")
 
 # Print spike times
-print('spikes:')
+print("spikes:")
 for sp in sim.spikes():
-    print(' ', sp)
+    print(" ", sp)
 
 # Plot the results
 print("Plotting results ...")
 df_list = []
 for gid in range(ncells):
     samples, meta = sim.samples(handles[gid])[0]
-    df_list.append(pandas.DataFrame({'t/ms': samples[:, 0], 'U/mV': samples[:, 1], 'Cell': f"cell {gid}"}))
-
-df = pandas.concat(df_list,ignore_index=True)
-seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV",hue="Cell",ci=None)
+    df_list.append(
+        pandas.DataFrame(
+            {"t/ms": samples[:, 0], "U/mV": samples[:, 1], "Cell": f"cell {gid}"}
+        )
+    )
+
+df = pandas.concat(df_list, ignore_index=True)
+seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV", hue="Cell", ci=None)
 plt.show()
diff --git a/python/example/network_ring.py b/python/example/network_ring.py
index 0ae49f5d..9eec3cbd 100755
--- a/python/example/network_ring.py
+++ b/python/example/network_ring.py
@@ -15,52 +15,65 @@ from math import sqrt
 #        \
 #         b2
 
+
 def make_cable_cell(gid):
     # (1) Build a segment tree
     tree = arbor.segment_tree()
 
     # Soma (tag=1) with radius 6 μm, modelled as cylinder of length 2*radius
-    s = tree.append(arbor.mnpos, arbor.mpoint(-12, 0, 0, 6), arbor.mpoint(0, 0, 0, 6), tag=1)
+    s = tree.append(
+        arbor.mnpos, arbor.mpoint(-12, 0, 0, 6), arbor.mpoint(0, 0, 0, 6), tag=1
+    )
 
     # Single dendrite (tag=3) of length 50 μm and radius 2 μm attached to soma.
     b0 = tree.append(s, arbor.mpoint(0, 0, 0, 2), arbor.mpoint(50, 0, 0, 2), tag=3)
 
     # Attach two dendrites (tag=3) of length 50 μm to the end of the first dendrite.
     # Radius tapers from 2 to 0.5 μm over the length of the dendrite.
-    b1 = tree.append(b0, arbor.mpoint(50, 0, 0, 2), arbor.mpoint(50+50/sqrt(2), 50/sqrt(2), 0, 0.5), tag=3)
+    b1 = tree.append(
+        b0,
+        arbor.mpoint(50, 0, 0, 2),
+        arbor.mpoint(50 + 50 / sqrt(2), 50 / sqrt(2), 0, 0.5),
+        tag=3,
+    )
     # Constant radius of 1 μm over the length of the dendrite.
-    b2 = tree.append(b0, arbor.mpoint(50, 0, 0, 1), arbor.mpoint(50+50/sqrt(2), -50/sqrt(2), 0, 1), tag=3)
+    b2 = tree.append(
+        b0,
+        arbor.mpoint(50, 0, 0, 1),
+        arbor.mpoint(50 + 50 / sqrt(2), -50 / sqrt(2), 0, 1),
+        tag=3,
+    )
 
     # Associate labels to tags
     labels = arbor.label_dict()
-    labels['soma'] = '(tag 1)'
-    labels['dend'] = '(tag 3)'
+    labels["soma"] = "(tag 1)"
+    labels["dend"] = "(tag 3)"
 
     # (2) Mark location for synapse at the midpoint of branch 1 (the first dendrite).
-    labels['synapse_site'] = '(location 1 0.5)'
+    labels["synapse_site"] = "(location 1 0.5)"
     # Mark the root of the tree.
-    labels['root'] = '(root)'
+    labels["root"] = "(root)"
 
     # (3) Create a decor and a cable_cell
     decor = arbor.decor()
 
     # Put hh dynamics on soma, and passive properties on the dendrites.
-    decor.paint('"soma"', arbor.density('hh'))
-    decor.paint('"dend"', arbor.density('pas'))
+    decor.paint('"soma"', arbor.density("hh"))
+    decor.paint('"dend"', arbor.density("pas"))
 
     # (4) Attach a single synapse.
-    decor.place('"synapse_site"', arbor.synapse('expsyn'), 'syn')
+    decor.place('"synapse_site"', arbor.synapse("expsyn"), "syn")
 
     # Attach a spike detector with threshold of -10 mV.
-    decor.place('"root"', arbor.spike_detector(-10), 'detector')
+    decor.place('"root"', arbor.spike_detector(-10), "detector")
 
     cell = arbor.cable_cell(tree, labels, decor)
 
     return cell
 
-# (5) Create a recipe that generates a network of connected cells.
-class ring_recipe (arbor.recipe):
 
+# (5) Create a recipe that generates a network of connected cells.
+class ring_recipe(arbor.recipe):
     def __init__(self, ncells):
         # The base C++ class constructor must be called first, to ensure that
         # all memory in the C++ class is initialized correctly.
@@ -84,17 +97,17 @@ class ring_recipe (arbor.recipe):
 
     # (8) Make a ring network. For each gid, provide a list of incoming connections.
     def connections_on(self, gid):
-        src = (gid-1)%self.ncells
-        w = 0.01 # 0.01 μS on expsyn
-        d = 5 # ms delay
-        return [arbor.connection((src,'detector'), 'syn', w, d)]
+        src = (gid - 1) % self.ncells
+        w = 0.01  # 0.01 μS on expsyn
+        d = 5  # ms delay
+        return [arbor.connection((src, "detector"), "syn", w, d)]
 
     # (9) Attach a generator to the first cell in the ring.
     def event_generators(self, gid):
-        if gid==0:
-            sched = arbor.explicit_schedule([1]) # one event at 1 ms
-            weight = 0.1 # 0.1 μS on expsyn
-            return [arbor.event_generator('syn', weight, sched)]
+        if gid == 0:
+            sched = arbor.explicit_schedule([1])  # one event at 1 ms
+            weight = 0.1  # 0.1 μS on expsyn
+            return [arbor.event_generator("syn", weight, sched)]
         return []
 
     # (10) Place a probe at the root of each cell.
@@ -104,6 +117,7 @@ class ring_recipe (arbor.recipe):
     def global_properties(self, kind):
         return self.props
 
+
 # (11) Instantiate recipe
 ncells = 4
 recipe = ring_recipe(ncells)
@@ -121,19 +135,25 @@ handles = [sim.sample((gid, 0), arbor.regular_schedule(0.1)) for gid in range(nc
 
 # (15) Run simulation for 100 ms
 sim.run(100)
-print('Simulation finished')
+print("Simulation finished")
 
 # (16) Print spike times
-print('spikes:')
+print("spikes:")
 for sp in sim.spikes():
-    print(' ', sp)
+    print(" ", sp)
 
 # (17) Plot the recorded voltages over time.
 print("Plotting results ...")
 df_list = []
 for gid in range(ncells):
     samples, meta = sim.samples(handles[gid])[0]
-    df_list.append(pandas.DataFrame({'t/ms': samples[:, 0], 'U/mV': samples[:, 1], 'Cell': f"cell {gid}"}))
-
-df = pandas.concat(df_list,ignore_index=True)
-seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV",hue="Cell",ci=None).savefig('network_ring_result.svg')
+    df_list.append(
+        pandas.DataFrame(
+            {"t/ms": samples[:, 0], "U/mV": samples[:, 1], "Cell": f"cell {gid}"}
+        )
+    )
+
+df = pandas.concat(df_list, ignore_index=True)
+seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV", hue="Cell", ci=None).savefig(
+    "network_ring_result.svg"
+)
diff --git a/python/example/network_ring_mpi.py b/python/example/network_ring_mpi.py
index a403d948..84aa7daa 100644
--- a/python/example/network_ring_mpi.py
+++ b/python/example/network_ring_mpi.py
@@ -17,52 +17,65 @@ from math import sqrt
 #        \
 #         b2
 
+
 def make_cable_cell(gid):
     # (1) Build a segment tree
     tree = arbor.segment_tree()
 
     # Soma (tag=1) with radius 6 μm, modelled as cylinder of length 2*radius
-    s = tree.append(arbor.mnpos, arbor.mpoint(-12, 0, 0, 6), arbor.mpoint(0, 0, 0, 6), tag=1)
+    s = tree.append(
+        arbor.mnpos, arbor.mpoint(-12, 0, 0, 6), arbor.mpoint(0, 0, 0, 6), tag=1
+    )
 
     # Single dendrite (tag=3) of length 50 μm and radius 2 μm attached to soma.
     b0 = tree.append(s, arbor.mpoint(0, 0, 0, 2), arbor.mpoint(50, 0, 0, 2), tag=3)
 
     # Attach two dendrites (tag=3) of length 50 μm to the end of the first dendrite.
     # Radius tapers from 2 to 0.5 μm over the length of the dendrite.
-    b1 = tree.append(b0, arbor.mpoint(50, 0, 0, 2), arbor.mpoint(50+50/sqrt(2), 50/sqrt(2), 0, 0.5), tag=3)
+    b1 = tree.append(
+        b0,
+        arbor.mpoint(50, 0, 0, 2),
+        arbor.mpoint(50 + 50 / sqrt(2), 50 / sqrt(2), 0, 0.5),
+        tag=3,
+    )
     # Constant radius of 1 μm over the length of the dendrite.
-    b2 = tree.append(b0, arbor.mpoint(50, 0, 0, 1), arbor.mpoint(50+50/sqrt(2), -50/sqrt(2), 0, 1), tag=3)
+    b2 = tree.append(
+        b0,
+        arbor.mpoint(50, 0, 0, 1),
+        arbor.mpoint(50 + 50 / sqrt(2), -50 / sqrt(2), 0, 1),
+        tag=3,
+    )
 
     # Associate labels to tags
     labels = arbor.label_dict()
-    labels['soma'] = '(tag 1)'
-    labels['dend'] = '(tag 3)'
+    labels["soma"] = "(tag 1)"
+    labels["dend"] = "(tag 3)"
 
     # (2) Mark location for synapse at the midpoint of branch 1 (the first dendrite).
-    labels['synapse_site'] = '(location 1 0.5)'
+    labels["synapse_site"] = "(location 1 0.5)"
     # Mark the root of the tree.
-    labels['root'] = '(root)'
+    labels["root"] = "(root)"
 
     # (3) Create a decor and a cable_cell
     decor = arbor.decor()
 
     # Put hh dynamics on soma, and passive properties on the dendrites.
-    decor.paint('"soma"', arbor.density('hh'))
-    decor.paint('"dend"', arbor.density('pas'))
+    decor.paint('"soma"', arbor.density("hh"))
+    decor.paint('"dend"', arbor.density("pas"))
 
     # (4) Attach a single synapse.
-    decor.place('"synapse_site"', arbor.synapse('expsyn'), 'syn')
+    decor.place('"synapse_site"', arbor.synapse("expsyn"), "syn")
 
     # Attach a spike detector with threshold of -10 mV.
-    decor.place('"root"', arbor.spike_detector(-10), 'detector')
+    decor.place('"root"', arbor.spike_detector(-10), "detector")
 
     cell = arbor.cable_cell(tree, labels, decor)
 
     return cell
 
-# (5) Create a recipe that generates a network of connected cells.
-class ring_recipe (arbor.recipe):
 
+# (5) Create a recipe that generates a network of connected cells.
+class ring_recipe(arbor.recipe):
     def __init__(self, ncells):
         # The base C++ class constructor must be called first, to ensure that
         # all memory in the C++ class is initialized correctly.
@@ -86,17 +99,17 @@ class ring_recipe (arbor.recipe):
 
     # (8) Make a ring network. For each gid, provide a list of incoming connections.
     def connections_on(self, gid):
-        src = (gid-1)%self.ncells
-        w = 0.01 # 0.01 μS on expsyn
-        d = 5 # ms delay
-        return [arbor.connection((src,'detector'), 'syn', w, d)]
+        src = (gid - 1) % self.ncells
+        w = 0.01  # 0.01 μS on expsyn
+        d = 5  # ms delay
+        return [arbor.connection((src, "detector"), "syn", w, d)]
 
     # (9) Attach a generator to the first cell in the ring.
     def event_generators(self, gid):
-        if gid==0:
-            sched = arbor.explicit_schedule([1]) # one event at 1 ms
-            weight = 0.1 # 0.1 μS on expsyn
-            return [arbor.event_generator('syn', weight, sched)]
+        if gid == 0:
+            sched = arbor.explicit_schedule([1])  # one event at 1 ms
+            weight = 0.1  # 0.1 μS on expsyn
+            return [arbor.event_generator("syn", weight, sched)]
         return []
 
     # (10) Place a probe at the root of each cell.
@@ -106,6 +119,7 @@ class ring_recipe (arbor.recipe):
     def global_properties(self, kind):
         return self.props
 
+
 # (11) Instantiate recipe
 ncells = 500
 recipe = ring_recipe(ncells)
@@ -129,17 +143,21 @@ sim.record(arbor.spike_recording.all)
 handles = [sim.sample((gid, 0), arbor.regular_schedule(1)) for gid in range(ncells)]
 
 # (16) Run simulation
-sim.run(ncells*5)
-print('Simulation finished')
+sim.run(ncells * 5)
+print("Simulation finished")
 
 # (17) Plot the recorded voltages over time.
-print('Storing results ...')
+print("Storing results ...")
 df_list = []
 for gid in range(ncells):
     if len(sim.samples(handles[gid])):
         samples, meta = sim.samples(handles[gid])[0]
-        df_list.append(pandas.DataFrame({'t/ms': samples[:, 0], 'U/mV': samples[:, 1], 'Cell': f"cell {gid}"}))
+        df_list.append(
+            pandas.DataFrame(
+                {"t/ms": samples[:, 0], "U/mV": samples[:, 1], "Cell": f"cell {gid}"}
+            )
+        )
 
 if len(df_list):
-    df = pandas.concat(df_list,ignore_index=True)
-    df.to_csv(f"result_mpi_{context.rank}.csv", float_format='%g')
+    df = pandas.concat(df_list, ignore_index=True)
+    df.to_csv(f"result_mpi_{context.rank}.csv", float_format="%g")
diff --git a/python/example/network_ring_mpi_plot.py b/python/example/network_ring_mpi_plot.py
index 5f54baef..594cbd65 100644
--- a/python/example/network_ring_mpi_plot.py
+++ b/python/example/network_ring_mpi_plot.py
@@ -10,5 +10,7 @@ df_list = []
 for result in results:
     df_list.append(pandas.read_csv(result))
 
-df = pandas.concat(df_list,ignore_index=True)
-seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV",hue="Cell",ci=None).savefig('mpi_result.svg')
+df = pandas.concat(df_list, ignore_index=True)
+seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV", hue="Cell", ci=None).savefig(
+    "mpi_result.svg"
+)
diff --git a/python/example/single_cell_allen.py b/python/example/single_cell_allen.py
index 24df8844..511fd33b 100644
--- a/python/example/single_cell_allen.py
+++ b/python/example/single_cell_allen.py
@@ -24,23 +24,23 @@ def load_allen_fit(fit):
 
     param = defaultdict(parameters)
     mechs = defaultdict(dict)
-    for block in fit['genome']:
-        mech   = block['mechanism'] or 'pas'
-        region = block['section']
-        name   = block['name']
-        value  = float(block['value'])
-        if name.endswith('_' + mech):
-            name = name[:-(len(mech) + 1)]
+    for block in fit["genome"]:
+        mech = block["mechanism"] or "pas"
+        region = block["section"]
+        name = block["name"]
+        value = float(block["value"])
+        if name.endswith("_" + mech):
+            name = name[: -(len(mech) + 1)]
         elif mech == "pas":
             # transform names and values
-            if name == 'cm':
+            if name == "cm":
                 # scaling factor NEURON -> Arbor
-                param[region].cm = value/100.0
-            elif name == 'Ra':
+                param[region].cm = value / 100.0
+            elif name == "Ra":
                 param[region].rL = value
-            elif name == 'Vm':
+            elif name == "Vm":
                 param[region].Vm = value
-            elif name == 'celsius':
+            elif name == "celsius":
                 param[region].tempK = value + 273.15
             else:
                 raise Exception(f"Unknown key: {name}")
@@ -53,31 +53,36 @@ def load_allen_fit(fit):
     mechs = [(r, m, vs) for (r, m), vs in mechs.items()]
 
     default = parameters(
-        tempK=float(fit['conditions'][0]['celsius']) + 273.15,
-        Vm=float(fit['conditions'][0]['v_init']),
-        rL=float(fit['passive'][0]['ra'])
+        tempK=float(fit["conditions"][0]["celsius"]) + 273.15,
+        Vm=float(fit["conditions"][0]["v_init"]),
+        rL=float(fit["passive"][0]["ra"]),
     )
 
     ions = []
-    for kv in fit['conditions'][0]['erev']:
-        region = kv['section']
+    for kv in fit["conditions"][0]["erev"]:
+        region = kv["section"]
         for k, v in kv.items():
-            if k == 'section':
+            if k == "section":
                 continue
             ion = k[1:]
             ions.append((region, ion, float(v)))
 
-    return default, regs, ions, mechs, fit['fitting'][0]['junction_potential']
+    return default, regs, ions, mechs, fit["fitting"][0]["junction_potential"]
+
 
 def make_cell(swc, fit):
     morphology = arbor.load_swc_neuron(swc)
     # (2) Label the region tags found in the swc with the names used in the parameter fit file.
     # In addition, label the midpoint of the somarbor.
-    labels = arbor.label_dict({'soma': '(tag 1)',
-                                'axon': '(tag 2)',
-                                'dend': '(tag 3)',
-                                'apic': '(tag 4)',
-                                'midpoint': '(location 0 0.5)'})
+    labels = arbor.label_dict(
+        {
+            "soma": "(tag 1)",
+            "axon": "(tag 2)",
+            "dend": "(tag 3)",
+            "apic": "(tag 4)",
+            "midpoint": "(location 0 0.5)",
+        }
+    )
 
     # (3) A function that parses the Allen parameter fit file into components for an arbor.decor
     dflt, regions, ions, mechanisms, offset = load_allen_fit(fit)
@@ -92,34 +97,37 @@ def make_cell(swc, fit):
     # (7) set reversal potentials
     for region, ion, e in ions:
         decor.paint(f'"{region}"', ion_name=ion, rev_pot=e)
-    decor.set_ion('ca', int_con=5e-5, ext_con=2.0, method=arbor.mechanism('nernst/x=ca'))
+    decor.set_ion(
+        "ca", int_con=5e-5, ext_con=2.0, method=arbor.mechanism("nernst/x=ca")
+    )
     # (8) assign ion dynamics
     for region, mech, values in mechanisms:
         nm = mech
         vs = {}
-        sp = '/'
+        sp = "/"
         for k, v in values.items():
-            if mech == 'pas' and k == 'e':
-                nm = f'{nm}{sp}{k}={v}'
-                sp = ','
+            if mech == "pas" and k == "e":
+                nm = f"{nm}{sp}{k}={v}"
+                sp = ","
             else:
                 vs[k] = v
         decor.paint(f'"{region}"', arbor.density(arbor.mechanism(nm, vs)))
     # (9) attach stimulus and spike detector
-    decor.place('"midpoint"', arbor.iclamp(200, 1000, 0.15), 'ic')
-    decor.place('"midpoint"', arbor.spike_detector(-40), 'sd')
+    decor.place('"midpoint"', arbor.iclamp(200, 1000, 0.15), "ic")
+    decor.place('"midpoint"', arbor.spike_detector(-40), "sd")
     # (10) discretisation strategy: max compartment length
     decor.discretization(arbor.cv_policy_max_extent(20))
 
     # (11) Create cell
     return arbor.cable_cell(morphology, labels, decor), offset
 
+
 # (12) Create cell, model
-cell, offset = make_cell('single_cell_allen.swc', 'single_cell_allen_fit.json')
+cell, offset = make_cell("single_cell_allen.swc", "single_cell_allen_fit.json")
 model = arbor.single_cell_model(cell)
 
 # (13) Set the probe
-model.probe('voltage', '"midpoint"', frequency=200)
+model.probe("voltage", '"midpoint"', frequency=200)
 
 # (14) Install the Allen mechanism catalogue.
 model.properties.catalogue.extend(arbor.allen_catalogue(), "")
@@ -128,14 +136,39 @@ model.properties.catalogue.extend(arbor.allen_catalogue(), "")
 model.run(tfinal=1400, dt=0.005)
 
 # (16) Load and scale reference
-reference = 1000.0*pandas.read_csv('single_cell_allen_neuron_ref.csv')['U/mV'].values[:-1] + offset
+reference = (
+    1000.0 * pandas.read_csv("single_cell_allen_neuron_ref.csv")["U/mV"].values[:-1]
+    + offset
+)
 
 # (17) Plot
 df_list = []
-df_list.append(pandas.DataFrame({'t/ms': model.traces[0].time, 'U/mV':model.traces[0].value, 'Simulator': "Arbor"}))
-df_list.append(pandas.DataFrame({'t/ms': model.traces[0].time, 'U/mV':reference, 'Simulator': "Neuron"}))
-df = pandas.concat(df_list,ignore_index=True)
-seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV",hue="Simulator",ci=None)
-plt.scatter(model.spikes, [-40]*len(model.spikes), color=seaborn.color_palette()[2], zorder=20)
-plt.bar(200, max(reference)-min(reference), 1000, min(reference), align='edge', label='Stimulus', color='0.9')
-plt.savefig('single_cell_allen_result.svg')
+df_list.append(
+    pandas.DataFrame(
+        {
+            "t/ms": model.traces[0].time,
+            "U/mV": model.traces[0].value,
+            "Simulator": "Arbor",
+        }
+    )
+)
+df_list.append(
+    pandas.DataFrame(
+        {"t/ms": model.traces[0].time, "U/mV": reference, "Simulator": "Neuron"}
+    )
+)
+df = pandas.concat(df_list, ignore_index=True)
+seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV", hue="Simulator", ci=None)
+plt.scatter(
+    model.spikes, [-40] * len(model.spikes), color=seaborn.color_palette()[2], zorder=20
+)
+plt.bar(
+    200,
+    max(reference) - min(reference),
+    1000,
+    min(reference),
+    align="edge",
+    label="Stimulus",
+    color="0.9",
+)
+plt.savefig("single_cell_allen_result.svg")
diff --git a/python/example/single_cell_cable.py b/python/example/single_cell_cable.py
index 1043dba3..c7116fc9 100755
--- a/python/example/single_cell_cable.py
+++ b/python/example/single_cell_cable.py
@@ -9,10 +9,20 @@ import seaborn  # You may have to pip install these.
 
 
 class Cable(arbor.recipe):
-    def __init__(self, probes,
-                 Vm, length, radius, cm, rL, g,
-                 stimulus_start, stimulus_duration, stimulus_amplitude,
-                 cv_policy_max_extent):
+    def __init__(
+        self,
+        probes,
+        Vm,
+        length,
+        radius,
+        cm,
+        rL,
+        g,
+        stimulus_start,
+        stimulus_duration,
+        stimulus_amplitude,
+        cv_policy_max_extent,
+    ):
         """
         probes -- list of probes
 
@@ -72,23 +82,29 @@ class Cable(arbor.recipe):
 
         tree = arbor.segment_tree()
 
-        tree.append(arbor.mnpos,
-                    arbor.mpoint(0, 0, 0, self.radius),
-                    arbor.mpoint(self.length, 0, 0, self.radius),
-                    tag=1)
+        tree.append(
+            arbor.mnpos,
+            arbor.mpoint(0, 0, 0, self.radius),
+            arbor.mpoint(self.length, 0, 0, self.radius),
+            tag=1,
+        )
 
-        labels = arbor.label_dict({'cable': '(tag 1)',
-                                   'start': '(location 0 0)'})
+        labels = arbor.label_dict({"cable": "(tag 1)", "start": "(location 0 0)"})
 
         decor = arbor.decor()
         decor.set_property(Vm=self.Vm)
         decor.set_property(cm=self.cm)
         decor.set_property(rL=self.rL)
 
-        decor.paint('"cable"',
-                    arbor.density(f'pas/e={self.Vm}', {'g': self.g}))
+        decor.paint('"cable"', arbor.density(f"pas/e={self.Vm}", {"g": self.g}))
 
-        decor.place('"start"', arbor.iclamp(self.stimulus_start, self.stimulus_duration, self.stimulus_amplitude), "iclamp")
+        decor.place(
+            '"start"',
+            arbor.iclamp(
+                self.stimulus_start, self.stimulus_duration, self.stimulus_amplitude
+            ),
+            "iclamp",
+        )
 
         policy = arbor.cv_policy_max_extent(self.cv_policy_max_extent)
         decor.discretization(policy)
@@ -107,7 +123,7 @@ def get_rm(g):
     """Return membrane resistivity in Ohm*m^2
     g -- membrane conductivity in S/m^2
     """
-    return 1/g
+    return 1 / g
 
 
 def get_taum(cm, rm):
@@ -115,7 +131,7 @@ def get_taum(cm, rm):
     cm -- membrane capacitance in F/m^2
     rm -- membrane resistivity in Ohm*m^2
     """
-    return cm*rm
+    return cm * rm
 
 
 def get_lambdam(a, rm, rL):
@@ -124,7 +140,7 @@ def get_lambdam(a, rm, rL):
     rm -- membrane resistivity in Ohm*m^2
     rL -- axial resistivity in Ohm*m
     """
-    return np.sqrt(a*rm/(2*rL))
+    return np.sqrt(a * rm / (2 * rL))
 
 
 def get_vcond(lambdam, taum):
@@ -132,7 +148,7 @@ def get_vcond(lambdam, taum):
     lambda -- electronic length in m
     taum -- membrane time constant
     """
-    return 2*lambdam/taum
+    return 2 * lambdam / taum
 
 
 def get_tmax(data):
@@ -142,38 +158,54 @@ def get_tmax(data):
 
 if __name__ == "__main__":
 
-    parser = argparse.ArgumentParser(description='Cable')
+    parser = argparse.ArgumentParser(description="Cable")
 
     parser.add_argument(
-        '--Vm', help="membrane leak potential in mV", type=float, default=-65)
+        "--Vm", help="membrane leak potential in mV", type=float, default=-65
+    )
+    parser.add_argument("--length", help="cable length in μm", type=float, default=1000)
+    parser.add_argument("--radius", help="cable radius in μm", type=float, default=1)
     parser.add_argument(
-        '--length', help="cable length in μm", type=float, default=1000)
+        "--cm", help="membrane capacitance in F/m^2", type=float, default=0.01
+    )
     parser.add_argument(
-        '--radius', help="cable radius in μm", type=float, default=1)
+        "--rL", help="axial resistivity in Ω·cm", type=float, default=90
+    )
     parser.add_argument(
-        '--cm', help="membrane capacitance in F/m^2", type=float, default=0.01)
+        "--g", help="membrane conductivity in S/cm^2", type=float, default=0.001
+    )
+
     parser.add_argument(
-        '--rL', help="axial resistivity in Ω·cm", type=float, default=90)
+        "--stimulus_start", help="start of stimulus in ms", type=float, default=10
+    )
     parser.add_argument(
-        '--g', help="membrane conductivity in S/cm^2", type=float, default=0.001)
-
-    parser.add_argument('--stimulus_start',
-                        help="start of stimulus in ms", type=float, default=10)
-    parser.add_argument('--stimulus_duration',
-                        help="duration of stimulus in ms", type=float, default=0.1)
-    parser.add_argument('--stimulus_amplitude',
-                        help="amplitude of stimulus in nA", type=float, default=1)
+        "--stimulus_duration",
+        help="duration of stimulus in ms",
+        type=float,
+        default=0.1,
+    )
+    parser.add_argument(
+        "--stimulus_amplitude",
+        help="amplitude of stimulus in nA",
+        type=float,
+        default=1,
+    )
 
-    parser.add_argument('--cv_policy_max_extent',
-                        help="maximum extent of control volume in μm", type=float,
-                        default=10)
+    parser.add_argument(
+        "--cv_policy_max_extent",
+        help="maximum extent of control volume in μm",
+        type=float,
+        default=10,
+    )
 
     # parse the command line arguments
     args = parser.parse_args()
 
     # set up membrane voltage probes equidistantly along the dendrites
-    probes = [arbor.cable_probe_membrane_voltage(
-        f'(location 0 {r})') for r in np.linspace(0, 1, 11)]
+    probes = [
+        arbor.cable_probe_membrane_voltage(f"(location 0 {r})")
+        for r in np.linspace(0, 1, 11)
+    ]
     recipe = Cable(probes, **vars(args))
 
     # create a default execution context and a default domain decomposition
@@ -183,8 +215,9 @@ if __name__ == "__main__":
     # configure the simulation and handles for the probes
     sim = arbor.simulation(recipe, domains, context)
     dt = 0.001
-    handles = [sim.sample((0, i), arbor.regular_schedule(dt))
-               for i in range(len(probes))]
+    handles = [
+        sim.sample((0, i), arbor.regular_schedule(dt)) for i in range(len(probes))
+    ]
 
     # run the simulation for 30 ms
     sim.run(tfinal=30, dt=dt)
@@ -194,24 +227,30 @@ if __name__ == "__main__":
     df_list = []
     for probe in range(len(handles)):
         samples, meta = sim.samples(handles[probe])[0]
-        df_list.append(pandas.DataFrame({'t/ms': samples[:, 0], 'U/mV': samples[:, 1], 'Probe': f"{probe}"}))
+        df_list.append(
+            pandas.DataFrame(
+                {"t/ms": samples[:, 0], "U/mV": samples[:, 1], "Probe": f"{probe}"}
+            )
+        )
 
-    df = pandas.concat(df_list,ignore_index=True)
-    seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV",hue="Probe",ci=None).set(xlim=(9,14)).savefig('single_cell_cable_result.svg')
+    df = pandas.concat(df_list, ignore_index=True)
+    seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV", hue="Probe", ci=None).set(
+        xlim=(9, 14)
+    ).savefig("single_cell_cable_result.svg")
 
     # calculcate the idealized conduction velocity, cf. cable equation
     data = [sim.samples(handle)[0][0] for handle in handles]
-    rm = get_rm(args.g*1/(0.01*0.01))
+    rm = get_rm(args.g * 1 / (0.01 * 0.01))
     taum = get_taum(args.cm, rm)
-    lambdam = get_lambdam(args.radius*1e-6, rm, args.rL*0.01)
+    lambdam = get_lambdam(args.radius * 1e-6, rm, args.rL * 0.01)
     vcond_ideal = get_vcond(lambdam, taum)
 
     # take the last and second probe
-    delta_t = (get_tmax(data[-1]) - get_tmax(data[1]))
+    delta_t = get_tmax(data[-1]) - get_tmax(data[1])
 
     # 90% because we took the second probe
-    delta_x = args.length*0.9
-    vcond = delta_x*1e-6/(delta_t*1e-3)
+    delta_x = args.length * 0.9
+    vcond = delta_x * 1e-6 / (delta_t * 1e-3)
 
     print(f"calculated conduction velocity: {vcond_ideal:.2f} m/s")
     print(f"simulated conduction velocity:  {vcond:.2f} m/s")
diff --git a/python/example/single_cell_detailed.py b/python/example/single_cell_detailed.py
index 7f53efb0..30f081b6 100755
--- a/python/example/single_cell_detailed.py
+++ b/python/example/single_cell_detailed.py
@@ -26,50 +26,50 @@ labels = arbor.label_dict()
 # Regions:
 
 # Add labels for tag 1, 2, 3, 4
-labels['soma'] = '(tag 1)'
-labels['axon'] = '(tag 2)'
-labels['dend'] = '(tag 3)'
-labels['last'] = '(tag 4)'
+labels["soma"] = "(tag 1)"
+labels["axon"] = "(tag 2)"
+labels["dend"] = "(tag 3)"
+labels["last"] = "(tag 4)"
 # Add a label for a region that includes the whole morphology
-labels['all'] = '(all)'
+labels["all"] = "(all)"
 # Add a label for the parts of the morphology with radius greater than 1.5 μm.
-labels['gt_1.5'] = '(radius-ge (region "all") 1.5)'
+labels["gt_1.5"] = '(radius-ge (region "all") 1.5)'
 # Join regions "last" and "gt_1.5"
-labels['custom'] = '(join (region "last") (region "gt_1.5"))'
+labels["custom"] = '(join (region "last") (region "gt_1.5"))'
 
 # Locsets:
 
 # Add a labels for the root of the morphology and all the terminal points
-labels['root']     = '(root)'
-labels['terminal'] = '(terminal)'
+labels["root"] = "(root)"
+labels["terminal"] = "(terminal)"
 # Add a label for the terminal locations in the "custom" region:
-labels['custom_terminal'] = '(restrict (locset "terminal") (region "custom"))'
+labels["custom_terminal"] = '(restrict (locset "terminal") (region "custom"))'
 # Add a label for the terminal locations in the "axon" region:
-labels['axon_terminal'] = '(restrict (locset "terminal") (region "axon"))'
+labels["axon_terminal"] = '(restrict (locset "terminal") (region "axon"))'
 
 # (3) Create and populate the decor.
 
 decor = arbor.decor()
 
 # Set the default properties of the cell (this overrides the model defaults).
-decor.set_property(Vm =-55)
-decor.set_ion('na', int_con=10,   ext_con=140, rev_pot=50, method='nernst/na')
-decor.set_ion('k',  int_con=54.4, ext_con=2.5, rev_pot=-77)
+decor.set_property(Vm=-55)
+decor.set_ion("na", int_con=10, ext_con=140, rev_pot=50, method="nernst/na")
+decor.set_ion("k", int_con=54.4, ext_con=2.5, rev_pot=-77)
 
 # Override the cell defaults.
 decor.paint('"custom"', tempK=270)
-decor.paint('"soma"',   Vm=-50)
+decor.paint('"soma"', Vm=-50)
 
 # Paint density mechanisms.
-decor.paint('"all"', density('pas'))
-decor.paint('"custom"', density('hh'))
-decor.paint('"dend"',  density('Ih', {'gbar': 0.001}))
+decor.paint('"all"', density("pas"))
+decor.paint('"custom"', density("hh"))
+decor.paint('"dend"', density("Ih", {"gbar": 0.001}))
 
 # Place stimuli and spike detectors.
-decor.place('"root"', arbor.iclamp(10, 1, current=2), 'iclamp0')
-decor.place('"root"', arbor.iclamp(30, 1, current=2), 'iclamp1')
-decor.place('"root"', arbor.iclamp(50, 1, current=2), 'iclamp2')
-decor.place('"axon_terminal"', arbor.spike_detector(-10), 'detector')
+decor.place('"root"', arbor.iclamp(10, 1, current=2), "iclamp0")
+decor.place('"root"', arbor.iclamp(30, 1, current=2), "iclamp1")
+decor.place('"root"', arbor.iclamp(50, 1, current=2), "iclamp2")
+decor.place('"axon_terminal"', arbor.spike_detector(-10), "detector")
 
 # Single CV for the "soma" region
 soma_policy = arbor.cv_policy_single('"soma"')
@@ -90,9 +90,9 @@ model = arbor.single_cell_model(cell)
 
 # (6) Set the model default properties
 
-model.properties.set_property(Vm =-65, tempK=300, rL=35.4, cm=0.01)
-model.properties.set_ion('na', int_con=10,   ext_con=140, rev_pot=50, method='nernst/na')
-model.properties.set_ion('k',  int_con=54.4, ext_con=2.5, rev_pot=-77)
+model.properties.set_property(Vm=-65, tempK=300, rL=35.4, cm=0.01)
+model.properties.set_ion("na", int_con=10, ext_con=140, rev_pot=50, method="nernst/na")
+model.properties.set_ion("k", int_con=54.4, ext_con=2.5, rev_pot=-77)
 
 # Extend the default catalogue with the Allen catalogue.
 # The function takes a second string parameter that can prefix
@@ -104,7 +104,7 @@ model.properties.catalogue.extend(arbor.allen_catalogue(), "")
 
 # Add voltage probes on the "custom_terminal" locset
 # which sample the voltage at 50 kHz
-model.probe('voltage', where='"custom_terminal"',  frequency=50)
+model.probe("voltage", where='"custom_terminal"', frequency=50)
 
 # (8) Run the simulation for 100 ms, with a dt of 0.025 ms
 
@@ -112,7 +112,7 @@ model.run(tfinal=100, dt=0.025)
 
 # (9) Print the spikes.
 
-print(len(model.spikes), 'spikes recorded:')
+print(len(model.spikes), "spikes recorded:")
 for s in model.spikes:
     print(s)
 
@@ -120,6 +120,17 @@ for s in model.spikes:
 
 df_list = []
 for t in model.traces:
-    df_list.append(pandas.DataFrame({'t/ms': t.time, 'U/mV': t.value, 'Location': str(t.location), 'Variable': t.variable}))
-df = pandas.concat(df_list,ignore_index=True)
-seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV",hue="Location",col="Variable",ci=None).savefig('single_cell_detailed_result.svg')
+    df_list.append(
+        pandas.DataFrame(
+            {
+                "t/ms": t.time,
+                "U/mV": t.value,
+                "Location": str(t.location),
+                "Variable": t.variable,
+            }
+        )
+    )
+df = pandas.concat(df_list, ignore_index=True)
+seaborn.relplot(
+    data=df, kind="line", x="t/ms", y="U/mV", hue="Location", col="Variable", ci=None
+).savefig("single_cell_detailed_result.svg")
diff --git a/python/example/single_cell_detailed_recipe.py b/python/example/single_cell_detailed_recipe.py
index 000b6bb3..25da4580 100644
--- a/python/example/single_cell_detailed_recipe.py
+++ b/python/example/single_cell_detailed_recipe.py
@@ -26,50 +26,50 @@ labels = arbor.label_dict()
 # Regions:
 
 # Add labels for tag 1, 2, 3, 4
-labels['soma'] = '(tag 1)'
-labels['axon'] = '(tag 2)'
-labels['dend'] = '(tag 3)'
-labels['last'] = '(tag 4)'
+labels["soma"] = "(tag 1)"
+labels["axon"] = "(tag 2)"
+labels["dend"] = "(tag 3)"
+labels["last"] = "(tag 4)"
 # Add a label for a region that includes the whole morphology
-labels['all'] = '(all)'
+labels["all"] = "(all)"
 # Add a label for the parts of the morphology with radius greater than 1.5 μm.
-labels['gt_1.5'] = '(radius-ge (region "all") 1.5)'
+labels["gt_1.5"] = '(radius-ge (region "all") 1.5)'
 # Join regions "last" and "gt_1.5"
-labels['custom'] = '(join (region "last") (region "gt_1.5"))'
+labels["custom"] = '(join (region "last") (region "gt_1.5"))'
 
 # Locsets:
 
 # Add a labels for the root of the morphology and all the terminal points
-labels['root']     = '(root)'
-labels['terminal'] = '(terminal)'
+labels["root"] = "(root)"
+labels["terminal"] = "(terminal)"
 # Add a label for the terminal locations in the "custom" region:
-labels['custom_terminal'] = '(restrict (locset "terminal") (region "custom"))'
+labels["custom_terminal"] = '(restrict (locset "terminal") (region "custom"))'
 # Add a label for the terminal locations in the "axon" region:
-labels['axon_terminal'] = '(restrict (locset "terminal") (region "axon"))'
+labels["axon_terminal"] = '(restrict (locset "terminal") (region "axon"))'
 
 # (3) Create and populate the decor.
 
 decor = arbor.decor()
 
 # Set the default properties of the cell (this overrides the model defaults).
-decor.set_property(Vm =-55)
-decor.set_ion('na', int_con=10,   ext_con=140, rev_pot=50, method='nernst/na')
-decor.set_ion('k',  int_con=54.4, ext_con=2.5, rev_pot=-77)
+decor.set_property(Vm=-55)
+decor.set_ion("na", int_con=10, ext_con=140, rev_pot=50, method="nernst/na")
+decor.set_ion("k", int_con=54.4, ext_con=2.5, rev_pot=-77)
 
 # Override the cell defaults.
 decor.paint('"custom"', tempK=270)
-decor.paint('"soma"',   Vm=-50)
+decor.paint('"soma"', Vm=-50)
 
 # Paint density mechanisms.
-decor.paint('"all"', density('pas'))
-decor.paint('"custom"', density('hh'))
-decor.paint('"dend"', density('Ih', {'gbar': 0.001}))
+decor.paint('"all"', density("pas"))
+decor.paint('"custom"', density("hh"))
+decor.paint('"dend"', density("Ih", {"gbar": 0.001}))
 
 # Place stimuli and spike detectors.
-decor.place('"root"', arbor.iclamp(10, 1, current=2), 'iclamp0')
-decor.place('"root"', arbor.iclamp(30, 1, current=2), 'iclamp1')
-decor.place('"root"', arbor.iclamp(50, 1, current=2), 'iclamp2')
-decor.place('"axon_terminal"', arbor.spike_detector(-10), 'detector')
+decor.place('"root"', arbor.iclamp(10, 1, current=2), "iclamp0")
+decor.place('"root"', arbor.iclamp(30, 1, current=2), "iclamp1")
+decor.place('"root"', arbor.iclamp(50, 1, current=2), "iclamp2")
+decor.place('"axon_terminal"', arbor.spike_detector(-10), "detector")
 
 # Single CV for the "soma" region
 soma_policy = arbor.cv_policy_single('"soma"')
@@ -89,7 +89,7 @@ cell = arbor.cable_cell(morph, labels, decor)
 probe = arbor.cable_probe_membrane_voltage('"custom_terminal"')
 
 # (6) Create a class that inherits from arbor.recipe
-class single_recipe (arbor.recipe):
+class single_recipe(arbor.recipe):
 
     # (6.1) Define the class constructor
     def __init__(self, cell, probes):
@@ -101,9 +101,11 @@ class single_recipe (arbor.recipe):
 
         self.the_props = arbor.cable_global_properties()
         self.the_props.set_property(Vm=-65, tempK=300, rL=35.4, cm=0.01)
-        self.the_props.set_ion(ion='na', int_con=10,   ext_con=140, rev_pot=50, method='nernst/na')
-        self.the_props.set_ion(ion='k',  int_con=54.4, ext_con=2.5, rev_pot=-77)
-        self.the_props.set_ion(ion='ca', int_con=5e-5, ext_con=2, rev_pot=132.5)
+        self.the_props.set_ion(
+            ion="na", int_con=10, ext_con=140, rev_pot=50, method="nernst/na"
+        )
+        self.the_props.set_ion(ion="k", int_con=54.4, ext_con=2.5, rev_pot=-77)
+        self.the_props.set_ion(ion="ca", int_con=5e-5, ext_con=2, rev_pot=132.5)
         self.the_props.catalogue.extend(arbor.allen_catalogue(), "")
 
     # (6.2) Override the num_cells method
@@ -138,6 +140,7 @@ class single_recipe (arbor.recipe):
     def global_properties(self, gid):
         return self.the_props
 
+
 # Instantiate recipe
 # Pass the probe in a list because that it what single_recipe expects.
 recipe = single_recipe(cell, [probe])
@@ -154,7 +157,7 @@ sim = arbor.simulation(recipe, domains, context)
 # Instruct the simulation to record the spikes and sample the probe
 sim.record(arbor.spike_recording.all)
 
-probe_id = arbor.cell_member(0,0)
+probe_id = arbor.cell_member(0, 0)
 handle = sim.sample(probe_id, arbor.regular_schedule(0.02))
 
 # (7) Run the simulation
@@ -162,7 +165,7 @@ sim.run(tfinal=100, dt=0.025)
 
 # (8) Print or display the results
 spikes = sim.spikes()
-print(len(spikes), 'spikes recorded:')
+print(len(spikes), "spikes recorded:")
 for s in spikes:
     print(s)
 
@@ -174,6 +177,17 @@ for d, m in sim.samples(handle):
 
 df_list = []
 for i in range(len(data)):
-    df_list.append(pandas.DataFrame({'t/ms': data[i][:, 0], 'U/mV': data[i][:, 1], 'Location': str(meta[i]), 'Variable':'voltage'}))
-df = pandas.concat(df_list,ignore_index=True)
-seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV",hue="Location",col="Variable",ci=None).savefig('single_cell_recipe_result.svg')
+    df_list.append(
+        pandas.DataFrame(
+            {
+                "t/ms": data[i][:, 0],
+                "U/mV": data[i][:, 1],
+                "Location": str(meta[i]),
+                "Variable": "voltage",
+            }
+        )
+    )
+df = pandas.concat(df_list, ignore_index=True)
+seaborn.relplot(
+    data=df, kind="line", x="t/ms", y="U/mV", hue="Location", col="Variable", ci=None
+).savefig("single_cell_recipe_result.svg")
diff --git a/python/example/single_cell_extracellular_potentials.py b/python/example/single_cell_extracellular_potentials.py
index 0a0e6539..a5c13334 100644
--- a/python/example/single_cell_extracellular_potentials.py
+++ b/python/example/single_cell_extracellular_potentials.py
@@ -50,7 +50,7 @@ class Recipe(arbor.recipe):
         return [
             arbor.cable_probe_membrane_voltage_cell(),
             arbor.cable_probe_total_current_cell(),
-            arbor.cable_probe_stimulus_current_cell()
+            arbor.cable_probe_stimulus_current_cell(),
         ]
 
 
@@ -66,7 +66,7 @@ filename = sys.argv[1]
 morphology = arbor.load_swc_arbor(filename)
 
 # define a location on morphology for current clamp
-clamp_location = arbor.location(4, 1/6)
+clamp_location = arbor.location(4, 1 / 6)
 
 
 def make_cable_cell(morphology, clamp_location):
@@ -90,20 +90,22 @@ def make_cable_cell(morphology, clamp_location):
 
     # set passive mechanism all over
     # passive mech w. leak reversal potential (mV)
-    pas = arbor.mechanism('pas/e=-65')
-    pas.set('g', 0.0001)  # leak conductivity (S/cm2)
-    decor.paint('(all)', arbor.density(pas))
+    pas = arbor.mechanism("pas/e=-65")
+    pas.set("g", 0.0001)  # leak conductivity (S/cm2)
+    decor.paint("(all)", arbor.density(pas))
 
     # set number of CVs per branch
     policy = arbor.cv_policy_fixed_per_branch(cvs_per_branch)
     decor.discretization(policy)
 
     # place sinusoid input current
-    iclamp = arbor.iclamp(5,  # stimulation onset (ms)
-                          1E8,  # stimulation duration (ms)
-                          -0.001,  # stimulation amplitude (nA)
-                          frequency=0.1,  # stimulation frequency (kHz)
-                          phase=0)  # stimulation phase)
+    iclamp = arbor.iclamp(
+        5,  # stimulation onset (ms)
+        1e8,  # stimulation duration (ms)
+        -0.001,  # stimulation amplitude (nA)
+        frequency=0.1,  # stimulation frequency (kHz)
+        phase=0,
+    )  # stimulation phase
     decor.place(str(clamp_location), iclamp, '"iclamp"')
 
     # create ``arbor.place_pwlin`` object
@@ -127,7 +129,7 @@ domains = arbor.partition_load_balance(recipe, context)
 sim = arbor.simulation(recipe, domains, context)
 
 # set up sampling on probes with sampling every 1 ms
-schedule = arbor.regular_schedule(1.)
+schedule = arbor.regular_schedule(1.0)
 v_handle = sim.sample(recipe.vprobe_id, schedule, arbor.sampling_policy.exact)
 i_handle = sim.sample(recipe.iprobe_id, schedule, arbor.sampling_policy.exact)
 c_handle = sim.sample(recipe.cprobe_id, schedule, arbor.sampling_policy.exact)
@@ -169,7 +171,7 @@ I_m = I_c_samples[:, 1:] + I_m_samples[:, 1:]  # (nA)
 # First we define a couple of classes to interface the LFPykit
 # library (https://LFPykit.readthedocs.io, https://github.com/LFPy/LFPykit):
 class ArborCellGeometry(lfpykit.CellGeometry):
-    '''
+    """
     Class inherited from  ``lfpykit.CellGeometry`` for easier forward-model
     predictions in Arbor that keeps track of arbor.segment information
     for each CV.
@@ -185,7 +187,7 @@ class ArborCellGeometry(lfpykit.CellGeometry):
     See also
     --------
     lfpykit.CellGeometry
-    '''
+    """
 
     def __init__(self, p, cables):
         x, y, z, d = [np.array([], dtype=float).reshape((0, 2))] * 4
@@ -196,8 +198,7 @@ class ArborCellGeometry(lfpykit.CellGeometry):
                 x = np.row_stack([x, [seg.prox.x, seg.dist.x]])
                 y = np.row_stack([y, [seg.prox.y, seg.dist.y]])
                 z = np.row_stack([z, [seg.prox.z, seg.dist.z]])
-                d = np.row_stack(
-                    [d, [seg.prox.radius * 2, seg.dist.radius * 2]])
+                d = np.row_stack([d, [seg.prox.radius * 2, seg.dist.radius * 2]])
                 CV_ind = np.r_[CV_ind, i]
 
         super().__init__(x=x, y=y, z=z, d=d)
@@ -205,7 +206,7 @@ class ArborCellGeometry(lfpykit.CellGeometry):
 
 
 class ArborLineSourcePotential(lfpykit.LineSourcePotential):
-    '''subclass of ``lfpykit.LineSourcePotential`` modified for
+    """subclass of ``lfpykit.LineSourcePotential`` modified for
     instances of ``ArborCellGeometry``.
     Each CV may consist of several segments , and this implementation
     accounts for their contributions normalized by surface area, that is,
@@ -229,27 +230,28 @@ class ArborLineSourcePotential(lfpykit.LineSourcePotential):
     See also
     --------
     lfpykit.LineSourcePotential
-    '''
+    """
 
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
         self._get_transformation_matrix = super().get_transformation_matrix
 
     def get_transformation_matrix(self):
-        '''Get linear response matrix
+        """Get linear response matrix
 
         Returns
         -------
         response_matrix: ndarray
             shape (n_coords, n_CVs) ndarray
-        '''
+        """
         M_tmp = self._get_transformation_matrix()
         n_CVs = np.unique(self.cell._CV_ind).size
         M = np.zeros((self.x.size, n_CVs))
         for i in range(n_CVs):
             inds = self.cell._CV_ind == i
-            M[:, i] = M_tmp[:, inds] @ (self.cell.area[inds] /
-                                        self.cell.area[inds].sum())
+            M[:, i] = M_tmp[:, inds] @ (
+                self.cell.area[inds] / self.cell.area[inds].sum()
+            )
 
         return M
 
@@ -263,17 +265,16 @@ cell_geometry = ArborCellGeometry(p, I_m_meta)
 axis = np.array([-110, 370, -80, 70])
 dx = 2  # spatial resolution along x-axis (µm)
 dz = 2  # spatial resolution along y-axis (µm)
-X, Y = np.meshgrid(np.linspace(axis[0], axis[1],
-                               int(np.diff(axis[:2]) // dx) + 1),
-                   np.linspace(axis[2], axis[3],
-                               int(np.diff(axis[2:]) // dz) + 1))
+X, Y = np.meshgrid(
+    np.linspace(axis[0], axis[1], int(np.diff(axis[:2]) // dx) + 1),
+    np.linspace(axis[2], axis[3], int(np.diff(axis[2:]) // dz) + 1),
+)
 Z = np.zeros_like(X)
 
 # ``ArborLineSourcePotential`` instance, get mapping for all segments per CV
-lsp = ArborLineSourcePotential(cell=cell_geometry,
-                               x=X.flatten(),
-                               y=Y.flatten(),
-                               z=Z.flatten())
+lsp = ArborLineSourcePotential(
+    cell=cell_geometry, x=X.flatten(), y=Y.flatten(), z=Z.flatten()
+)
 M = lsp.get_transformation_matrix()
 
 # Extracellular potential in x,y-plane (mV)
@@ -303,17 +304,20 @@ def create_polygon(x, y, d):
     y_grad = np.gradient(y)
     theta = np.arctan2(y_grad, x_grad)
 
-    xp = np.r_[(x + 0.5 * d * np.sin(theta)).ravel(),
-               (x - 0.5 * d * np.sin(theta)).ravel()[::-1]]
-    yp = np.r_[(y - 0.5 * d * np.cos(theta)).ravel(),
-               (y + 0.5 * d * np.cos(theta)).ravel()[::-1]]
+    xp = np.r_[
+        (x + 0.5 * d * np.sin(theta)).ravel(),
+        (x - 0.5 * d * np.sin(theta)).ravel()[::-1],
+    ]
+    yp = np.r_[
+        (y - 0.5 * d * np.cos(theta)).ravel(),
+        (y + 0.5 * d * np.cos(theta)).ravel()[::-1],
+    ]
 
     return list(zip(xp, yp))
 
 
-def get_cv_polycollection(
-        cell_geometry, V_m, vlims=[-66, -64], cmap='viridis'):
-    '''
+def get_cv_polycollection(cell_geometry, V_m, vlims=[-66, -64], cmap="viridis"):
+    """
     Parameters
     ----------
     cell_geometry: ``ArborCellGeometry`` object
@@ -327,24 +331,31 @@ def get_cv_polycollection(
     Returns
     -------
     PolyCollection
-    '''
+    """
     norm = plt.Normalize(vmin=vlims[0], vmax=vlims[1], clip=True)
     colors = [plt.get_cmap(cmap)(norm(v)) for v in V_m]
     zips = []
     for i in range(V_m.size):
         inds = cell_geometry._CV_ind == i
-        zips.append(create_polygon(cell_geometry.x[inds, ].flatten(),
-                                   cell_geometry.y[inds, ].flatten(),
-                                   cell_geometry.d[inds, ].flatten()))
-    polycol = PolyCollection(zips,
-                             edgecolors=colors,
-                             facecolors=colors,
-                             linewidths=0.)
+        zips.append(
+            create_polygon(
+                cell_geometry.x[
+                    inds,
+                ].flatten(),
+                cell_geometry.y[
+                    inds,
+                ].flatten(),
+                cell_geometry.d[
+                    inds,
+                ].flatten(),
+            )
+        )
+    polycol = PolyCollection(zips, edgecolors=colors, facecolors=colors, linewidths=0.0)
     return polycol
 
 
 def get_segment_outlines(cell_geometry):
-    '''
+    """
     Parameters
     ----------
     cell_geometry: ``ArborCellGeometry`` object
@@ -354,26 +365,27 @@ def get_segment_outlines(cell_geometry):
     Returns
     -------
     PolyCollection
-    '''
+    """
     zips = []
     for x_, y_, d_ in zip(cell_geometry.x, cell_geometry.y, cell_geometry.d):
         zips.append(create_polygon(x_, y_, d_))
-    polycol = PolyCollection(zips,
-                             edgecolors='k',
-                             facecolors='none',
-                             linewidths=0.5)
+    polycol = PolyCollection(zips, edgecolors="k", facecolors="none", linewidths=0.5)
     return polycol
 
 
-def colorbar(fig, ax, im,
-             width=0.01,
-             height=1.0,
-             hoffset=0.01,
-             voffset=0.0,
-             orientation='vertical'):
-    '''
+def colorbar(
+    fig,
+    ax,
+    im,
+    width=0.01,
+    height=1.0,
+    hoffset=0.01,
+    voffset=0.0,
+    orientation="vertical",
+):
+    """
     draw matplotlib colorbar without resizing the parent axes object
-    '''
+    """
     rect = np.array(ax.get_position().bounds)
     rect = np.array(ax.get_position().bounds)
     caxrect = [0] * 4
@@ -391,26 +403,25 @@ def colorbar(fig, ax, im,
 time_index = -1
 
 # use seaborn style
-plt.style.use('seaborn')
-plt.rcParams['image.cmap'] = 'viridis'
+plt.style.use("seaborn")
+plt.rcParams["image.cmap"] = "viridis"
 
 # create figure and axis
 fig, ax = plt.subplots(1, 1, figsize=(16, 6), dpi=100)
 
 # plot contours of V_e
-lim = float(f'{abs(V_e).max() / 3:.1e}')
+lim = float(f"{abs(V_e).max() / 3:.1e}")
 levels = np.linspace(-lim, lim, 25)
-im_V_e = ax.contourf(X, Y, V_e[:, time_index].reshape(X.shape),
-                     cmap='RdBu',
-                     levels=levels,
-                     extend='both')
+im_V_e = ax.contourf(
+    X, Y, V_e[:, time_index].reshape(X.shape), cmap="RdBu", levels=levels, extend="both"
+)
 
 # V_e colorbar:
 cb = colorbar(fig, ax, im_V_e, height=0.45, voffset=0.55)
-cb.set_label('$V_e$ (mV)')
+cb.set_label("$V_e$ (mV)")
 
 # add outline of each CV with color coding according to membrane voltage
-vlims = [-66., -64.]
+vlims = [-66.0, -64.0]
 polycol = get_cv_polycollection(cell_geometry, V_m[time_index, :], vlims=vlims)
 im_V_m = ax.add_collection(polycol)
 
@@ -418,25 +429,25 @@ im_V_m = ax.add_collection(polycol)
 cb2 = colorbar(fig, ax, im_V_m, height=0.45)
 cb2.set_ticks([0, 0.5, 1])
 cb2.set_ticklabels([vlims[0], np.mean(vlims), vlims[1]])
-cb2.set_label(r'$V_m$ (mV)')
+cb2.set_label(r"$V_m$ (mV)")
 
 # draw segment outlines
 ax.add_collection(get_segment_outlines(cell_geometry))
 
 # add marker denoting clamp location
 point = p.at(clamp_location)
-ax.plot(point.x, point.y, 'ko', ms=10, label='stimulus')
+ax.plot(point.x, point.y, "ko", ms=10, label="stimulus")
 
 ax.legend()
 
 # axis annotations
 ax.axis(axis)
-ax.set_xlabel(r'$x$ ($\mu$m)', labelpad=0)
-ax.set_ylabel(r'$y$ ($\mu$m)', labelpad=0)
-ax.set_title(f'$V_e$ and $V_m$ at $t$={time[time_index]} ms')
+ax.set_xlabel(r"$x$ ($\mu$m)", labelpad=0)
+ax.set_ylabel(r"$y$ ($\mu$m)", labelpad=0)
+ax.set_title(f"$V_e$ and $V_m$ at $t$={time[time_index]} ms")
 
 # save file
-fig.savefig('single_cell_extracellular_potentials.svg', bbox_inches='tight')
+fig.savefig("single_cell_extracellular_potentials.svg", bbox_inches="tight")
 
 # ## Notes on output:
 # The spatial discretization is here deliberately coarse with only 3 CVs
diff --git a/python/example/single_cell_model.py b/python/example/single_cell_model.py
index 2410971b..74df6778 100755
--- a/python/example/single_cell_model.py
+++ b/python/example/single_cell_model.py
@@ -2,21 +2,20 @@
 # This script is included in documentation. Adapt line numbers if touched.
 
 import arbor
-import pandas, seaborn # You may have to pip install these.
+import pandas, seaborn  # You may have to pip install these.
 
 # (1) Create a morphology with a single (cylindrical) segment of length=diameter=6 μm
 tree = arbor.segment_tree()
 tree.append(arbor.mnpos, arbor.mpoint(-3, 0, 0, 3), arbor.mpoint(3, 0, 0, 3), tag=1)
 
 # (2) Define the soma and its midpoint
-labels = arbor.label_dict({'soma':   '(tag 1)',
-                           'midpoint': '(location 0 0.5)'})
+labels = arbor.label_dict({"soma": "(tag 1)", "midpoint": "(location 0 0.5)"})
 
 # (3) Create and set up a decor object
 decor = arbor.decor()
 decor.set_property(Vm=-40)
-decor.paint('"soma"', arbor.density('hh'))
-decor.place('"midpoint"', arbor.iclamp( 10, 2, 0.8), "iclamp")
+decor.paint('"soma"', arbor.density("hh"))
+decor.place('"midpoint"', arbor.iclamp(10, 2, 0.8), "iclamp")
 decor.place('"midpoint"', arbor.spike_detector(-10), "detector")
 
 # (4) Create cell and the single cell model based on it
@@ -26,24 +25,26 @@ cell = arbor.cable_cell(tree, labels, decor)
 m = arbor.single_cell_model(cell)
 
 # (6) Attach voltage probe sampling at 10 kHz (every 0.1 ms).
-m.probe('voltage', '"midpoint"', frequency=10)
+m.probe("voltage", '"midpoint"', frequency=10)
 
 # (7) Run simulation for 30 ms of simulated activity.
 m.run(tfinal=30)
 
 # (8) Print spike times.
-if len(m.spikes)>0:
-    print('{} spikes:'.format(len(m.spikes)))
+if len(m.spikes) > 0:
+    print("{} spikes:".format(len(m.spikes)))
     for s in m.spikes:
-        print('{:3.3f}'.format(s))
+        print("{:3.3f}".format(s))
 else:
-    print('no spikes')
+    print("no spikes")
 
 # (9) Plot the recorded voltages over time.
 print("Plotting results ...")
-seaborn.set_theme() # Apply some styling to the plot
-df = pandas.DataFrame({'t/ms': m.traces[0].time, 'U/mV': m.traces[0].value})
-seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV",ci=None).savefig('single_cell_model_result.svg')
+seaborn.set_theme()  # Apply some styling to the plot
+df = pandas.DataFrame({"t/ms": m.traces[0].time, "U/mV": m.traces[0].value})
+seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV", ci=None).savefig(
+    "single_cell_model_result.svg"
+)
 
 # (10) Optionally, you can store your results for later processing.
-df.to_csv('single_cell_model_result.csv', float_format='%g')
+df.to_csv("single_cell_model_result.csv", float_format="%g")
diff --git a/python/example/single_cell_nml.py b/python/example/single_cell_nml.py
index d4f57313..395f1f73 100755
--- a/python/example/single_cell_nml.py
+++ b/python/example/single_cell_nml.py
@@ -33,10 +33,14 @@ labels.append(morpho_segments)
 labels.append(morpho_named)
 labels.append(morpho_groups)
 
-# Add locsets to the label dictionary. 
-labels['stim_site'] = '(location 1 0.5)' # site for the stimulus, in the middle of branch 1.
-labels['axon_end']  = '(restrict (terminal) (region "axon"))' # end of the axon.
-labels['root']      = '(root)' # the start of the soma in this morphology is at the root of the cell.
+# Add locsets to the label dictionary.
+labels[
+    "stim_site"
+] = "(location 1 0.5)"  # site for the stimulus, in the middle of branch 1.
+labels["axon_end"] = '(restrict (terminal) (region "axon"))'  # end of the axon.
+labels[
+    "root"
+] = "(root)"  # the start of the soma in this morphology is at the root of the cell.
 
 # Optional: print out the regions and locsets available in the label dictionary.
 print("Label dictionary regions: ", labels.regions, "\n")
@@ -47,13 +51,13 @@ decor = arbor.decor()
 # Set initial membrane potential to -55 mV
 decor.set_property(Vm=-55)
 # Use Nernst to calculate reversal potential for calcium.
-decor.set_ion('ca', method=mech('nernst/x=ca'))
-#decor.set_ion('ca', method='nernst/x=ca')
+decor.set_ion("ca", method=mech("nernst/x=ca"))
+# decor.set_ion('ca', method='nernst/x=ca')
 # hh mechanism on the soma and axon.
-decor.paint('"soma"', arbor.density('hh'))
-decor.paint('"axon"', arbor.density('hh'))
+decor.paint('"soma"', arbor.density("hh"))
+decor.paint('"axon"', arbor.density("hh"))
 # pas mechanism the dendrites.
-decor.paint('"dend"', arbor.density('pas'))
+decor.paint('"dend"', arbor.density("pas"))
 # Increase resistivity on dendrites.
 decor.paint('"dend"', rL=500)
 # Attach stimuli that inject 4 nA current for 1 ms, starting at 3 and 8 ms.
@@ -80,29 +84,40 @@ print(cell.locations('"axon_end"'))
 m = arbor.single_cell_model(cell)
 
 # Attach voltage probes that sample at 50 kHz.
-m.probe('voltage', where='"root"',  frequency=50)
-m.probe('voltage', where='"stim_site"',  frequency=50)
-m.probe('voltage', where='"axon_end"', frequency=50)
+m.probe("voltage", where='"root"', frequency=50)
+m.probe("voltage", where='"stim_site"', frequency=50)
+m.probe("voltage", where='"axon_end"', frequency=50)
 
 # Simulate the cell for 15 ms.
-tfinal=15
+tfinal = 15
 m.run(tfinal)
 print("Simulation done.")
 
 # Print spike times.
-if len(m.spikes)>0:
-    print('{} spikes:'.format(len(m.spikes)))
+if len(m.spikes) > 0:
+    print("{} spikes:".format(len(m.spikes)))
     for s in m.spikes:
-        print('  {:7.4f}'.format(s))
+        print("  {:7.4f}".format(s))
 else:
-    print('no spikes')
+    print("no spikes")
 
 # Plot the recorded voltages over time.
 print("Plotting results ...")
 df_list = []
 for t in m.traces:
-    df_list.append(pandas.DataFrame({'t/ms': t.time, 'U/mV': t.value, 'Location': str(t.location), "Variable": t.variable}))
-
-df = pandas.concat(df_list,ignore_index=True)
-
-seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV",hue="Location",col="Variable",ci=None).savefig('single_cell_nml.svg')
+    df_list.append(
+        pandas.DataFrame(
+            {
+                "t/ms": t.time,
+                "U/mV": t.value,
+                "Location": str(t.location),
+                "Variable": t.variable,
+            }
+        )
+    )
+
+df = pandas.concat(df_list, ignore_index=True)
+
+seaborn.relplot(
+    data=df, kind="line", x="t/ms", y="U/mV", hue="Location", col="Variable", ci=None
+).savefig("single_cell_nml.svg")
diff --git a/python/example/single_cell_recipe.py b/python/example/single_cell_recipe.py
index ade3cea4..eae1e436 100644
--- a/python/example/single_cell_recipe.py
+++ b/python/example/single_cell_recipe.py
@@ -2,7 +2,7 @@
 # This script is included in documentation. Adapt line numbers if touched.
 
 import arbor
-import pandas, seaborn # You may have to pip install these.
+import pandas, seaborn  # You may have to pip install these.
 
 # The corresponding generic recipe version of `single_cell_model.py`.
 
@@ -11,14 +11,13 @@ tree = arbor.segment_tree()
 tree.append(arbor.mnpos, arbor.mpoint(-3, 0, 0, 3), arbor.mpoint(3, 0, 0, 3), tag=1)
 
 # (2) Define the soma and its midpoint
-labels = arbor.label_dict({'soma':   '(tag 1)',
-                           'midpoint': '(location 0 0.5)'})
+labels = arbor.label_dict({"soma": "(tag 1)", "midpoint": "(location 0 0.5)"})
 
 # (3) Create cell and set properties
 decor = arbor.decor()
 decor.set_property(Vm=-40)
-decor.paint('"soma"', arbor.density('hh'))
-decor.place('"midpoint"', arbor.iclamp( 10, 2, 0.8), "iclamp")
+decor.paint('"soma"', arbor.density("hh"))
+decor.place('"midpoint"', arbor.iclamp(10, 2, 0.8), "iclamp")
 decor.place('"midpoint"', arbor.spike_detector(-10), "detector")
 cell = arbor.cable_cell(tree, labels, decor)
 
@@ -26,7 +25,8 @@ cell = arbor.cable_cell(tree, labels, decor)
 # This constitutes the corresponding generic recipe version of
 # `single_cell_model.py`.
 
-class single_recipe (arbor.recipe):
+
+class single_recipe(arbor.recipe):
     def __init__(self, cell, probes):
         # (4.1) The base C++ class constructor must be called first, to ensure that
         # all memory in the C++ class is initialized correctly.
@@ -55,6 +55,7 @@ class single_recipe (arbor.recipe):
         # (4.6) Override the global_properties method
         return self.the_props
 
+
 # (5) Instantiate recipe with a voltage probe located on "midpoint".
 
 recipe = single_recipe(cell, [arbor.cable_probe_membrane_voltage('"midpoint"')])
@@ -77,16 +78,18 @@ sim.run(tfinal=30)
 spikes = sim.spikes()
 data, meta = sim.samples(handle)[0]
 
-if len(spikes)>0:
-    print('{} spikes:'.format(len(spikes)))
-    for t in spikes['time']:
-        print('{:3.3f}'.format(t))
+if len(spikes) > 0:
+    print("{} spikes:".format(len(spikes)))
+    for t in spikes["time"]:
+        print("{:3.3f}".format(t))
 else:
-    print('no spikes')
+    print("no spikes")
 
 print("Plotting results ...")
 
-df = pandas.DataFrame({'t/ms': data[:, 0], 'U/mV': data[:, 1]})
-seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV", ci=None).savefig('single_cell_recipe_result.svg')
+df = pandas.DataFrame({"t/ms": data[:, 0], "U/mV": data[:, 1]})
+seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV", ci=None).savefig(
+    "single_cell_recipe_result.svg"
+)
 
-df.to_csv('single_cell_recipe_result.csv', float_format='%g')
+df.to_csv("single_cell_recipe_result.csv", float_format="%g")
diff --git a/python/example/single_cell_stdp.py b/python/example/single_cell_stdp.py
index 54f4846f..2f0d75bc 100755
--- a/python/example/single_cell_stdp.py
+++ b/python/example/single_cell_stdp.py
@@ -22,21 +22,21 @@ class single_recipe(arbor.recipe):
 
     def cell_description(self, gid):
         tree = arbor.segment_tree()
-        tree.append(arbor.mnpos, arbor.mpoint(-3, 0, 0, 3),
-                    arbor.mpoint(3, 0, 0, 3), tag=1)
+        tree.append(
+            arbor.mnpos, arbor.mpoint(-3, 0, 0, 3), arbor.mpoint(3, 0, 0, 3), tag=1
+        )
 
-        labels = arbor.label_dict({'soma':   '(tag 1)',
-                                   'center': '(location 0 0.5)'})
+        labels = arbor.label_dict({"soma": "(tag 1)", "center": "(location 0 0.5)"})
 
         decor = arbor.decor()
         decor.set_property(Vm=-40)
-        decor.paint('(all)', arbor.density('hh'))
+        decor.paint("(all)", arbor.density("hh"))
 
         decor.place('"center"', arbor.spike_detector(-10), "detector")
-        decor.place('"center"', arbor.synapse('expsyn'), "synapse")
+        decor.place('"center"', arbor.synapse("expsyn"), "synapse")
 
-        mech = arbor.mechanism('expsyn_stdp')
-        mech.set("max_weight", 1.)
+        mech = arbor.mechanism("expsyn_stdp")
+        mech.set("max_weight", 1.0)
         syn = arbor.synapse(mech)
 
         decor.place('"center"', syn, "stpd_synapse")
@@ -46,27 +46,30 @@ class single_recipe(arbor.recipe):
         return cell
 
     def event_generators(self, gid):
-        """two stimuli: one that makes the cell spike, the other to monitor STDP
-        """
+        """two stimuli: one that makes the cell spike, the other to monitor STDP"""
 
         stimulus_times = numpy.linspace(50, 500, self.n_pairs)
 
         # strong enough stimulus
-        spike = arbor.event_generator("synapse", 1., arbor.explicit_schedule(stimulus_times))
+        spike = arbor.event_generator(
+            "synapse", 1.0, arbor.explicit_schedule(stimulus_times)
+        )
 
         # zero weight -> just modify synaptic weight via stdp
-        stdp = arbor.event_generator("stpd_synapse", 0., arbor.explicit_schedule(stimulus_times - self.dT))
+        stdp = arbor.event_generator(
+            "stpd_synapse", 0.0, arbor.explicit_schedule(stimulus_times - self.dT)
+        )
 
         return [spike, stdp]
 
     def probes(self, gid):
-        return [arbor.cable_probe_membrane_voltage('"center"'),
-                arbor.cable_probe_point_state(1, "expsyn_stdp", "g"),
-                arbor.cable_probe_point_state(1, "expsyn_stdp", "apost"),
-                arbor.cable_probe_point_state(1, "expsyn_stdp", "apre"),
-                arbor.cable_probe_point_state(
-                    1, "expsyn_stdp", "weight_plastic")
-                ]
+        return [
+            arbor.cable_probe_membrane_voltage('"center"'),
+            arbor.cable_probe_point_state(1, "expsyn_stdp", "g"),
+            arbor.cable_probe_point_state(1, "expsyn_stdp", "apost"),
+            arbor.cable_probe_point_state(1, "expsyn_stdp", "apre"),
+            arbor.cable_probe_point_state(1, "expsyn_stdp", "weight_plastic"),
+        ]
 
     def global_properties(self, kind):
         return self.the_props
@@ -93,17 +96,20 @@ def run(dT, n_pairs=1, do_plots=False):
     if do_plots:
         print("Plotting detailed results ...")
 
-        for (handle, var) in [(handle_mem, 'U'),
-                              (handle_g, "g"),
-                              (handle_apost, "apost"),
-                              (handle_apre, "apre"),
-                              (handle_weight_plastic, "weight_plastic")]:
+        for (handle, var) in [
+            (handle_mem, "U"),
+            (handle_g, "g"),
+            (handle_apost, "apost"),
+            (handle_apre, "apre"),
+            (handle_weight_plastic, "weight_plastic"),
+        ]:
 
             data, meta = sim.samples(handle)[0]
 
-            df = pandas.DataFrame({'t/ms': data[:, 0], var: data[:, 1]})
-            seaborn.relplot(data=df, kind="line", x="t/ms", y=var,
-                            ci=None).savefig('single_cell_stdp_result_{}.svg'.format(var))
+            df = pandas.DataFrame({"t/ms": data[:, 0], var: data[:, 1]})
+            seaborn.relplot(data=df, kind="line", x="t/ms", y=var, ci=None).savefig(
+                "single_cell_stdp_result_{}.svg".format(var)
+            )
 
     weight_plastic, meta = sim.samples(handle_weight_plastic)[0]
 
@@ -111,7 +117,8 @@ def run(dT, n_pairs=1, do_plots=False):
 
 
 data = numpy.array([(dT, run(dT)) for dT in numpy.arange(-20, 20, 0.5)])
-df = pandas.DataFrame({'t/ms': data[:, 0], 'dw': data[:, 1]})
+df = pandas.DataFrame({"t/ms": data[:, 0], "dw": data[:, 1]})
 print("Plotting results ...")
-seaborn.relplot(data=df, x="t/ms", y="dw", kind="line",
-                ci=None).savefig('single_cell_stdp.svg')
+seaborn.relplot(data=df, x="t/ms", y="dw", kind="line", ci=None).savefig(
+    "single_cell_stdp.svg"
+)
diff --git a/python/example/single_cell_swc.py b/python/example/single_cell_swc.py
index dbeacbfa..0a808b85 100755
--- a/python/example/single_cell_swc.py
+++ b/python/example/single_cell_swc.py
@@ -26,12 +26,14 @@ filename = sys.argv[1]
 morpho = arbor.load_swc_arbor(filename)
 
 # Define the regions and locsets in the model.
-defs = {'soma': '(tag 1)',  # soma has tag 1 in swc files.
-        'axon': '(tag 2)',  # axon has tag 2 in swc files.
-        'dend': '(tag 3)',  # dendrites have tag 3 in swc files.
-        'root': '(root)',   # the start of the soma in this morphology is at the root of the cell.
-        'stim_site': '(location 0 0.5)', # site for the stimulus, in the middle of branch 0.
-        'axon_end': '(restrict (terminal) (region "axon"))'} # end of the axon.
+defs = {
+    "soma": "(tag 1)",  # soma has tag 1 in swc files.
+    "axon": "(tag 2)",  # axon has tag 2 in swc files.
+    "dend": "(tag 3)",  # dendrites have tag 3 in swc files.
+    "root": "(root)",  # the start of the soma in this morphology is at the root of the cell.
+    "stim_site": "(location 0 0.5)",  # site for the stimulus, in the middle of branch 0.
+    "axon_end": '(restrict (terminal) (region "axon"))',  # end of the axon.
+}
 labels = arbor.label_dict(defs)
 
 decor = arbor.decor()
@@ -39,13 +41,13 @@ decor = arbor.decor()
 # Set initial membrane potential to -55 mV
 decor.set_property(Vm=-55)
 # Use Nernst to calculate reversal potential for calcium.
-decor.set_ion('ca', method=mech('nernst/x=ca'))
-#decor.set_ion('ca', method='nernst/x=ca')
+decor.set_ion("ca", method=mech("nernst/x=ca"))
+# decor.set_ion('ca', method='nernst/x=ca')
 # hh mechanism on the soma and axon.
-decor.paint('"soma"', arbor.density('hh'))
-decor.paint('"axon"', arbor.density('hh'))
+decor.paint('"soma"', arbor.density("hh"))
+decor.paint('"axon"', arbor.density("hh"))
 # pas mechanism the dendrites.
-decor.paint('"dend"', arbor.density('pas'))
+decor.paint('"dend"', arbor.density("pas"))
 # Increase resistivity on dendrites.
 decor.paint('"dend"', rL=500)
 # Attach stimuli that inject 4 nA current for 1 ms, starting at 3 and 8 ms.
@@ -72,29 +74,40 @@ print(cell.locations('"axon_end"'))
 m = arbor.single_cell_model(cell)
 
 # Attach voltage probes that sample at 50 kHz.
-m.probe('voltage', where='"root"',  frequency=50)
-m.probe('voltage', where='"stim_site"',  frequency=50)
-m.probe('voltage', where='"axon_end"', frequency=50)
+m.probe("voltage", where='"root"', frequency=50)
+m.probe("voltage", where='"stim_site"', frequency=50)
+m.probe("voltage", where='"axon_end"', frequency=50)
 
 # Simulate the cell for 15 ms.
-tfinal=15
+tfinal = 15
 m.run(tfinal)
 print("Simulation done.")
 
 # Print spike times.
-if len(m.spikes)>0:
-    print('{} spikes:'.format(len(m.spikes)))
+if len(m.spikes) > 0:
+    print("{} spikes:".format(len(m.spikes)))
     for s in m.spikes:
-        print('  {:7.4f}'.format(s))
+        print("  {:7.4f}".format(s))
 else:
-    print('no spikes')
+    print("no spikes")
 
 # Plot the recorded voltages over time.
 print("Plotting results ...")
 df_list = []
 for t in m.traces:
-    df_list.append(pandas.DataFrame({'t/ms': t.time, 'U/mV': t.value, 'Location': str(t.location), "Variable": t.variable}))
-
-df = pandas.concat(df_list,ignore_index=True)
-
-seaborn.relplot(data=df, kind="line", x="t/ms", y="U/mV",hue="Location",col="Variable",ci=None).savefig('single_cell_swc.svg')
+    df_list.append(
+        pandas.DataFrame(
+            {
+                "t/ms": t.time,
+                "U/mV": t.value,
+                "Location": str(t.location),
+                "Variable": t.variable,
+            }
+        )
+    )
+
+df = pandas.concat(df_list, ignore_index=True)
+
+seaborn.relplot(
+    data=df, kind="line", x="t/ms", y="U/mV", hue="Location", col="Variable", ci=None
+).savefig("single_cell_swc.svg")
diff --git a/python/example/two_cell_gap_junctions.py b/python/example/two_cell_gap_junctions.py
index 844b2f90..add860b9 100755
--- a/python/example/two_cell_gap_junctions.py
+++ b/python/example/two_cell_gap_junctions.py
@@ -10,9 +10,9 @@ import matplotlib.pyplot as plt
 
 
 class TwoCellsWithGapJunction(arbor.recipe):
-    def __init__(self, probes,
-                 Vms, length, radius, cm, rL, g, gj_g,
-                 cv_policy_max_extent):
+    def __init__(
+        self, probes, Vms, length, radius, cm, rL, g, gj_g, cv_policy_max_extent
+    ):
         """
         probes -- list of probes
 
@@ -73,12 +73,14 @@ class TwoCellsWithGapJunction(arbor.recipe):
 
         tree = arbor.segment_tree()
 
-        tree.append(arbor.mnpos,
-                    arbor.mpoint(0, 0, 0, self.radius),
-                    arbor.mpoint(self.length, 0, 0, self.radius),
-                    tag=1)
+        tree.append(
+            arbor.mnpos,
+            arbor.mpoint(0, 0, 0, self.radius),
+            arbor.mpoint(self.length, 0, 0, self.radius),
+            tag=1,
+        )
 
-        labels = arbor.label_dict({'cell' : '(tag 1)', 'gj_site': '(location 0 0.5)'})
+        labels = arbor.label_dict({"cell": "(tag 1)", "gj_site": "(location 0 0.5)"})
 
         decor = arbor.decor()
         decor.set_property(Vm=self.Vms[gid])
@@ -86,9 +88,9 @@ class TwoCellsWithGapJunction(arbor.recipe):
         decor.set_property(rL=self.rL)
 
         # add a gap junction mechanism at the "gj_site" location and label that specific mechanism on that location "gj_label"
-        junction_mech = arbor.junction('gj', {"g" : self.gj_g})
-        decor.place('"gj_site"', junction_mech, 'gj_label')
-        decor.paint('"cell"', arbor.density(f'pas/e={self.Vms[gid]}', {'g': self.g}))
+        junction_mech = arbor.junction("gj", {"g": self.gj_g})
+        decor.place('"gj_site"', junction_mech, "gj_label")
+        decor.paint('"cell"', arbor.density(f"pas/e={self.Vms[gid]}", {"g": self.g}))
 
         if self.cv_policy_max_extent is not None:
             policy = arbor.cv_policy_max_extent(self.cv_policy_max_extent)
@@ -102,31 +104,47 @@ class TwoCellsWithGapJunction(arbor.recipe):
         assert gid in [0, 1]
 
         # create a bidirectional gap junction from cell 0 at label "gj_label" to cell 1 at label "gj_label" and back.
-        return [arbor.gap_junction_connection((1 if gid == 0 else 0, 'gj_label'), 'gj_label', 1)]
+        return [
+            arbor.gap_junction_connection(
+                (1 if gid == 0 else 0, "gj_label"), "gj_label", 1
+            )
+        ]
 
 
 if __name__ == "__main__":
 
-    parser = argparse.ArgumentParser(description='Two cells connected via a gap junction')
+    parser = argparse.ArgumentParser(
+        description="Two cells connected via a gap junction"
+    )
 
     parser.add_argument(
-        '--Vms', help="membrane leak potentials in mV", type=float, default=[-100, -60], nargs=2)
+        "--Vms",
+        help="membrane leak potentials in mV",
+        type=float,
+        default=[-100, -60],
+        nargs=2,
+    )
+    parser.add_argument("--length", help="cell length in μm", type=float, default=100)
+    parser.add_argument("--radius", help="cell radius in μm", type=float, default=3)
     parser.add_argument(
-        '--length', help="cell length in μm", type=float, default=100)
+        "--cm", help="membrane capacitance in F/m^2", type=float, default=0.005
+    )
     parser.add_argument(
-        '--radius', help="cell radius in μm", type=float, default=3)
+        "--rL", help="axial resistivity in Ω·cm", type=float, default=90
+    )
     parser.add_argument(
-        '--cm', help="membrane capacitance in F/m^2", type=float, default=0.005)
-    parser.add_argument(
-        '--rL', help="axial resistivity in Ω·cm", type=float, default=90)
-    parser.add_argument(
-        '--g', help="membrane conductivity in S/cm^2", type=float, default=0.001)
+        "--g", help="membrane conductivity in S/cm^2", type=float, default=0.001
+    )
 
     parser.add_argument(
-        '--gj_g', help="gap junction conductivity in μS", type=float, default=0.01)
+        "--gj_g", help="gap junction conductivity in μS", type=float, default=0.01
+    )
 
-    parser.add_argument('--cv_policy_max_extent',
-                        help="maximum extent of control volume in μm", type=float)
+    parser.add_argument(
+        "--cv_policy_max_extent",
+        help="maximum extent of control volume in μm",
+        type=float,
+    )
 
     # parse the command line arguments
     args = parser.parse_args()
@@ -145,7 +163,9 @@ if __name__ == "__main__":
     dt = 0.01
     handles = []
     for gid in [0, 1]:
-        handles += [sim.sample((gid, i), arbor.regular_schedule(dt)) for i in range(len(probes))]
+        handles += [
+            sim.sample((gid, i), arbor.regular_schedule(dt)) for i in range(len(probes))
+        ]
 
     # run the simulation for 5 ms
     sim.run(tfinal=5, dt=dt)
@@ -155,33 +175,39 @@ if __name__ == "__main__":
     df_list = []
     for probe in range(len(handles)):
         samples, meta = sim.samples(handles[probe])[0]
-        df_list.append(pandas.DataFrame({'t/ms': samples[:, 0], 'U/mV': samples[:, 1], 'Cell': f"{probe}"}))
+        df_list.append(
+            pandas.DataFrame(
+                {"t/ms": samples[:, 0], "U/mV": samples[:, 1], "Cell": f"{probe}"}
+            )
+        )
 
-    df = pandas.concat(df_list,ignore_index=True)
+    df = pandas.concat(df_list, ignore_index=True)
 
     fig, ax = plt.subplots()
 
     # plot the membrane potentials of the two cells as function of time
-    seaborn.lineplot(ax=ax,data=df, x="t/ms", y="U/mV",hue="Cell",ci=None)
+    seaborn.lineplot(ax=ax, data=df, x="t/ms", y="U/mV", hue="Cell", ci=None)
 
     # area of cells
-    area = args.length*1e-6 * 2*np.pi*args.radius*1e-6
+    area = args.length * 1e-6 * 2 * np.pi * args.radius * 1e-6
 
     # total conductance and resistance
-    cell_g = args.g/1e-4 * area
-    cell_R = 1/cell_g
+    cell_g = args.g / 1e-4 * area
+    cell_R = 1 / cell_g
 
     # gap junction conductance and resistance in base units
-    si_gj_g = args.gj_g*1e-6
-    si_gj_R = 1/si_gj_g
+    si_gj_g = args.gj_g * 1e-6
+    si_gj_R = 1 / si_gj_g
 
     # indicate the expected equilibrium potentials
-    for (i, j) in [[0,1], [1,0]]:
-        weighted_potential = args.Vms[i] + ((args.Vms[j] - args.Vms[i])*(si_gj_R + cell_R))/(2*cell_R + si_gj_R)
-        ax.axhline(weighted_potential, linestyle='dashed', color='black', alpha=0.5)
+    for (i, j) in [[0, 1], [1, 0]]:
+        weighted_potential = args.Vms[i] + (
+            (args.Vms[j] - args.Vms[i]) * (si_gj_R + cell_R)
+        ) / (2 * cell_R + si_gj_R)
+        ax.axhline(weighted_potential, linestyle="dashed", color="black", alpha=0.5)
 
     # plot the initial/nominal resting potentials
     for gid, Vm in enumerate(args.Vms):
-        ax.axhline(Vm, linestyle='dashed', color='black', alpha=0.5)
+        ax.axhline(Vm, linestyle="dashed", color="black", alpha=0.5)
 
-    fig.savefig('two_cell_gap_junctions_result.svg')
+    fig.savefig("two_cell_gap_junctions_result.svg")
diff --git a/python/setup.py b/python/setup.py
index 70b4386a..915f3ca1 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -2,38 +2,37 @@ import setuptools
 import os
 
 here = os.path.abspath(os.path.dirname(__file__))
-with open(os.path.join(here, 'arbor/VERSION')) as version_file:
+with open(os.path.join(here, "arbor/VERSION")) as version_file:
     version_ = version_file.read().strip()
 
 setuptools.setup(
-    name='arbor',
-    packages=['arbor'],
+    name="arbor",
+    packages=["arbor"],
     version=version_,
-    author='CSCS and FZJ',
-    url='https://github.com/arbor-sim/arbor',
-    description='High performance simulation of networks of multicompartment neurons.',
-    long_description='',
+    author="CSCS and FZJ",
+    url="https://github.com/arbor-sim/arbor",
+    description="High performance simulation of networks of multicompartment neurons.",
+    long_description="",
     classifiers=[
-        'Development Status :: 4 - Beta', # Upgrade to "5 - Production/Stable" on release.
-        'Intended Audience :: Science/Research',
-        'Topic :: Scientific/Engineering :: Build Tools',
-        'License :: OSI Approved :: BSD License'
-        'Programming Language :: Python :: 3.6',
-        'Programming Language :: Python :: 3.7',
-        'Programming Language :: Python :: 3.8',
-        'Programming Language :: Python :: 3.9',
+        "Development Status :: 4 - Beta",  # Upgrade to "5 - Production/Stable" on release.
+        "Intended Audience :: Science/Research",
+        "Topic :: Scientific/Engineering :: Build Tools",
+        "License :: OSI Approved :: BSD License",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
     ],
     project_urls={
-        'Source': 'https://github.com/arbor-sim/arbor',
-        'Documentation': 'https://docs.arbor-sim.org',
-        'Bug Reports': 'https://github.com/arbor-sim/arbor/issues',
+        "Source": "https://github.com/arbor-sim/arbor",
+        "Documentation": "https://docs.arbor-sim.org",
+        "Bug Reports": "https://github.com/arbor-sim/arbor/issues",
     },
     package_data={
-        'arbor': ['VERSION', '_arbor.*.so'],
+        "arbor": ["VERSION", "_arbor.*.so"],
     },
-    python_requires='>=3.6',
+    python_requires=">=3.6",
     install_requires=[],
     setup_requires=[],
     zip_safe=False,
 )
-
diff --git a/python/test/cases.py b/python/test/cases.py
index 4c21056a..657bd428 100644
--- a/python/test/cases.py
+++ b/python/test/cases.py
@@ -4,10 +4,16 @@ from . import fixtures
 
 _mpi_enabled = arbor.__config__
 
+
 @fixtures.context
 def skipIfNotDistributed(context):
-    skipSingleNode = unittest.skipIf(context.ranks < 2, "Skipping distributed test on single node.")
-    skipNotEnabled = unittest.skipIf(not _mpi_enabled, "Skipping distributed test, no MPI support in arbor.")
+    skipSingleNode = unittest.skipIf(
+        context.ranks < 2, "Skipping distributed test on single node."
+    )
+    skipNotEnabled = unittest.skipIf(
+        not _mpi_enabled, "Skipping distributed test, no MPI support in arbor."
+    )
+
     def skipper(f):
         return skipSingleNode(skipNotEnabled(f))
 
@@ -16,4 +22,6 @@ def skipIfNotDistributed(context):
 
 @fixtures.context
 def skipIfDistributed(context):
-    return unittest.skipIf(context.ranks > 1, "Skipping single node test on multiple nodes.")
+    return unittest.skipIf(
+        context.ranks > 1, "Skipping single node test on multiple nodes."
+    )
diff --git a/python/test/fixtures.py b/python/test/fixtures.py
index f0468656..9f44c516 100644
--- a/python/test/fixtures.py
+++ b/python/test/fixtures.py
@@ -20,10 +20,12 @@ except TypeError:
     # without max size.
     cache = cache(None)
 
+
 def _fix(param_name, fixture, func):
     """
     Decorates `func` to inject the `fixture` callable result as `param_name`.
     """
+
     @functools.wraps(func)
     def wrapper(*args, **kwargs):
         kwargs[param_name] = fixture()
@@ -31,6 +33,7 @@ def _fix(param_name, fixture, func):
 
     return wrapper
 
+
 def _fixture(decorator):
     @functools.wraps(decorator)
     def fixture_decorator(func):
@@ -38,6 +41,7 @@ def _fixture(decorator):
 
     return fixture_decorator
 
+
 def _singleton_fixture(f):
     return _fixture(cache(f))
 
@@ -74,14 +78,21 @@ def context():
     return arbor.context(*args)
 
 
-class _BuildCatError(Exception): pass
+class _BuildCatError(Exception):
+    pass
 
 
 def _build_cat_local(name, path):
     try:
-        subprocess.run(["arbor-build-catalogue", name, str(path)], check=True, stderr=subprocess.PIPE)
+        subprocess.run(
+            ["arbor-build-catalogue", name, str(path)],
+            check=True,
+            stderr=subprocess.PIPE,
+        )
     except subprocess.CalledProcessError as e:
-        raise _BuildCatError("Tests can't build catalogues:\n" + e.stderr.decode()) from None
+        raise _BuildCatError(
+            "Tests can't build catalogues:\n" + e.stderr.decode()
+        ) from None
 
 
 def _build_cat_distributed(comm, name, path):
@@ -105,6 +116,7 @@ def _build_cat_distributed(comm, name, path):
     if build_err:
         raise build_err
 
+
 @context
 def _build_cat(name, path, context):
     if context.has_mpi:
@@ -133,25 +145,26 @@ def dummy_catalogue(repo_path):
     cat_path = _build_cat("dummy", path)
     return arbor.load_catalogue(str(cat_path))
 
+
 @_fixture
 class empty_recipe(arbor.recipe):
     """
     Blank recipe fixture.
     """
+
     pass
 
+
 @_fixture
 class art_spiker_recipe(arbor.recipe):
     """
     Recipe fixture with 3 artificial spiking cells and one cable cell.
     """
+
     def __init__(self):
         super().__init__()
         self.the_props = arbor.neuron_cable_properties()
-        self.trains = [
-                [0.8, 2, 2.1, 3],
-                [0.4, 2, 2.2, 3.1, 4.5],
-                [0.2, 2, 2.8, 3]]
+        self.trains = [[0.8, 2, 2.1, 3], [0.4, 2, 2.2, 3.1, 4.5], [0.2, 2, 2.8, 3]]
 
     def num_cells(self):
         return 4
@@ -188,14 +201,13 @@ class art_spiker_recipe(arbor.recipe):
         )
 
         # (2) Define the soma and its midpoint
-        labels = arbor.label_dict({'soma':   '(tag 1)',
-                                   'midpoint': '(location 0 0.5)'})
+        labels = arbor.label_dict({"soma": "(tag 1)", "midpoint": "(location 0 0.5)"})
 
         # (3) Create cell and set properties
         decor = arbor.decor()
         decor.set_property(Vm=-40)
-        decor.paint('"soma"', arbor.density('hh'))
-        decor.place('"midpoint"', arbor.iclamp( 10, 2, 0.8), "iclamp")
+        decor.paint('"soma"', arbor.density("hh"))
+        decor.place('"midpoint"', arbor.iclamp(10, 2, 0.8), "iclamp")
         decor.place('"midpoint"', arbor.spike_detector(-10), "detector")
 
         # return tuple of tree, labels, and decor for creating a cable cell (can still be modified before calling arbor.cable_cell())
@@ -203,26 +215,31 @@ class art_spiker_recipe(arbor.recipe):
 
     def cell_description(self, gid):
         if gid < 3:
-            return arbor.spike_source_cell("src", arbor.explicit_schedule(self.trains[gid]))
+            return arbor.spike_source_cell(
+                "src", arbor.explicit_schedule(self.trains[gid])
+            )
         else:
             tree, labels, decor = self._cable_cell_elements()
             return arbor.cable_cell(tree, labels, decor)
 
+
 @_fixture
 def sum_weight_hh_spike():
-	""" 
+    """
     Fixture returning connection weight for 'expsyn_stdp' mechanism which is just enough to evoke an immediate spike
     at t=1ms in the 'hh' neuron in 'art_spiker_recipe'
     """
-	return 0.4
+    return 0.4
+
 
 @_fixture
 def sum_weight_hh_spike_2():
-	""" 
+    """
     Fixture returning connection weight for 'expsyn_stdp' mechanism which is just enough to evoke an immediate spike
     at t=1.8ms in the 'hh' neuron in 'art_spiker_recipe'
     """
-	return 0.36
+    return 0.36
+
 
 @_fixture
 @context
diff --git a/python/test/unit/test_cable_probes.py b/python/test/unit/test_cable_probes.py
index c9f0d15a..59e8e610 100644
--- a/python/test/unit/test_cable_probes.py
+++ b/python/test/unit/test_cable_probes.py
@@ -11,6 +11,7 @@ tests for cable probe wrappers
 # Test recipe cc comprises one simple cable cell and mechanisms on it
 # sufficient to test cable cell probe wrappers wrap correctly.
 
+
 class cc_recipe(A.recipe):
     def __init__(self):
         A.recipe.__init__(self)
@@ -19,10 +20,10 @@ class cc_recipe(A.recipe):
 
         dec = A.decor()
 
-        dec.place('(location 0 0.08)', A.synapse("expsyn"), "syn0")
-        dec.place('(location 0 0.09)', A.synapse("exp2syn"), "syn1")
-        dec.place('(location 0 0.1)', A.iclamp(20.), "iclamp")
-        dec.paint('(all)', A.density("hh"))
+        dec.place("(location 0 0.08)", A.synapse("expsyn"), "syn0")
+        dec.place("(location 0 0.09)", A.synapse("exp2syn"), "syn1")
+        dec.place("(location 0 0.1)", A.iclamp(20.0), "iclamp")
+        dec.paint("(all)", A.density("hh"))
 
         self.cell = A.cable_cell(st, A.label_dict(), dec)
 
@@ -44,44 +45,47 @@ class cc_recipe(A.recipe):
         # the returned list.
         return [
             # probe id (0, 0)
-            A.cable_probe_membrane_voltage(where='(location 0 0.00)'),
+            A.cable_probe_membrane_voltage(where="(location 0 0.00)"),
             # probe id (0, 1)
             A.cable_probe_membrane_voltage_cell(),
             # probe id (0, 2)
-            A.cable_probe_axial_current(where='(location 0 0.02)'),
+            A.cable_probe_axial_current(where="(location 0 0.02)"),
             # probe id (0, 3)
-            A.cable_probe_total_ion_current_density(where='(location 0 0.03)'),
+            A.cable_probe_total_ion_current_density(where="(location 0 0.03)"),
             # probe id (0, 4)
             A.cable_probe_total_ion_current_cell(),
             # probe id (0, 5)
             A.cable_probe_total_current_cell(),
             # probe id (0, 6)
-            A.cable_probe_density_state(where='(location 0 0.06)', mechanism='hh', state='m'),
+            A.cable_probe_density_state(
+                where="(location 0 0.06)", mechanism="hh", state="m"
+            ),
             # probe id (0, 7)
-            A.cable_probe_density_state_cell(mechanism='hh', state='n'),
+            A.cable_probe_density_state_cell(mechanism="hh", state="n"),
             # probe id (0, 8)
-            A.cable_probe_point_state(target=0, mechanism='expsyn', state='g'),
+            A.cable_probe_point_state(target=0, mechanism="expsyn", state="g"),
             # probe id (0, 9)
-            A.cable_probe_point_state_cell(mechanism='exp2syn', state='B'),
+            A.cable_probe_point_state_cell(mechanism="exp2syn", state="B"),
             # probe id (0, 10)
-            A.cable_probe_ion_current_density(where='(location 0 0.10)', ion='na'),
+            A.cable_probe_ion_current_density(where="(location 0 0.10)", ion="na"),
             # probe id (0, 11)
-            A.cable_probe_ion_current_cell(ion='na'),
+            A.cable_probe_ion_current_cell(ion="na"),
             # probe id (0, 12)
-            A.cable_probe_ion_int_concentration(where='(location 0 0.12)', ion='na'),
+            A.cable_probe_ion_int_concentration(where="(location 0 0.12)", ion="na"),
             # probe id (0, 13)
-            A.cable_probe_ion_int_concentration_cell(ion='na'),
+            A.cable_probe_ion_int_concentration_cell(ion="na"),
             # probe id (0, 14)
-            A.cable_probe_ion_ext_concentration(where='(location 0 0.14)', ion='na'),
+            A.cable_probe_ion_ext_concentration(where="(location 0 0.14)", ion="na"),
             # probe id (0, 15)
-            A.cable_probe_ion_ext_concentration_cell(ion='na'),
+            A.cable_probe_ion_ext_concentration_cell(ion="na"),
             # probe id (0, 15)
-            A.cable_probe_stimulus_current_cell()
+            A.cable_probe_stimulus_current_cell(),
         ]
 
     def cell_description(self, gid):
         return self.cell
 
+
 class TestCableProbes(unittest.TestCase):
     def test_probe_addr_metadata(self):
         recipe = cc_recipe()
diff --git a/python/test/unit/test_catalogues.py b/python/test/unit/test_catalogues.py
index 4e469186..e4bd1e63 100644
--- a/python/test/unit/test_catalogues.py
+++ b/python/test/unit/test_catalogues.py
@@ -6,6 +6,7 @@ import arbor as arb
 tests for (dynamically loaded) catalogues
 """
 
+
 class recipe(arb.recipe):
     def __init__(self):
         arb.recipe.__init__(self)
@@ -13,14 +14,14 @@ class recipe(arb.recipe):
         self.tree.append(arb.mnpos, (0, 0, 0, 10), (1, 0, 0, 10), 1)
         self.props = arb.neuron_cable_properties()
         try:
-            self.props.catalogue = arb.load_catalogue('dummy-catalogue.so')
+            self.props.catalogue = arb.load_catalogue("dummy-catalogue.so")
         except:
             print("Catalogue not found. Are you running from build directory?")
             raise
         self.props.catalogue = arb.default_catalogue()
 
         d = arb.decor()
-        d.paint('(all)', arb.density('pas'))
+        d.paint("(all)", arb.density("pas"))
         d.set_property(Vm=0.0)
         self.cell = arb.cable_cell(self.tree, arb.label_dict(), d)
 
@@ -46,10 +47,14 @@ class TestCatalogues(unittest.TestCase):
     def test_shared_catalogue(self, dummy_catalogue):
         cat = dummy_catalogue
         nms = [m for m in cat]
-        self.assertEqual(nms, ['dummy'], "Expected equal names.")
+        self.assertEqual(nms, ["dummy"], "Expected equal names.")
         for nm in nms:
             prm = list(cat[nm].parameters.keys())
-            self.assertEqual(prm, ['gImbar'], "Expected equal parameters on mechanism '{}'.".format(nm))
+            self.assertEqual(
+                prm,
+                ["gImbar"],
+                "Expected equal parameters on mechanism '{}'.".format(nm),
+            )
 
     def test_simulation(self):
         rcp = recipe()
@@ -72,13 +77,31 @@ class TestCatalogues(unittest.TestCase):
         self.assertEqual(0, len(cat), "Expected no mechanisms in `arbor.catalogue()`.")
         # Test empty extend
         other.extend(cat, "")
-        self.assertEqual(hash_(ref), hash_(other), "Extending cat with empty should not change cat.")
-        self.assertEqual(0, len(cat), "Extending cat with empty should not change empty.")
+        self.assertEqual(
+            hash_(ref), hash_(other), "Extending cat with empty should not change cat."
+        )
+        self.assertEqual(
+            0, len(cat), "Extending cat with empty should not change empty."
+        )
         other.extend(cat, "prefix/")
-        self.assertEqual(hash_(ref), hash_(other), "Extending cat with prefixed empty should not change cat.")
-        self.assertEqual(0, len(cat), "Extending cat with prefixed empty should not change empty.")
+        self.assertEqual(
+            hash_(ref),
+            hash_(other),
+            "Extending cat with prefixed empty should not change cat.",
+        )
+        self.assertEqual(
+            0, len(cat), "Extending cat with prefixed empty should not change empty."
+        )
         cat.extend(other, "")
-        self.assertEqual(hash_(other), hash_(cat), "Extending empty with cat should turn empty into cat.")
+        self.assertEqual(
+            hash_(other),
+            hash_(cat),
+            "Extending empty with cat should turn empty into cat.",
+        )
         cat = arb.catalogue()
         cat.extend(other, "prefix/")
-        self.assertNotEqual(hash_(other), hash_(cat), "Extending empty with prefixed cat should not yield cat")
+        self.assertNotEqual(
+            hash_(other),
+            hash_(cat),
+            "Extending empty with prefixed cat should not yield cat",
+        )
diff --git a/python/test/unit/test_clear_samplers.py b/python/test/unit/test_clear_samplers.py
index bb7c2018..062affb7 100644
--- a/python/test/unit/test_clear_samplers.py
+++ b/python/test/unit/test_clear_samplers.py
@@ -14,6 +14,7 @@ from .. import fixtures, cases
 all tests for the simulator wrapper
 """
 
+
 @cases.skipIfDistributed()
 class TestClearSamplers(unittest.TestCase):
     # test that all spikes are sorted by time then by gid
@@ -34,10 +35,10 @@ class TestClearSamplers(unittest.TestCase):
         sim.reset()
 
         # simulated with clearing the memory inbetween the steppings
-        sim.run(3,0.01)
+        sim.run(3, 0.01)
         spikes = sim.spikes()
-        times_t  = spikes["time"].tolist()
-        gids_t   = spikes["source"]["gid"].tolist()
+        times_t = spikes["time"].tolist()
+        gids_t = spikes["source"]["gid"].tolist()
         data_t, meta_t = sim.samples(handle)[0]
 
         # clear the samplers memory
@@ -48,7 +49,7 @@ class TestClearSamplers(unittest.TestCase):
         self.assertEqual(0, len(spikes["time"].tolist()))
         self.assertEqual(0, len(spikes["source"]["gid"].tolist()))
         data_test, meta_test = sim.samples(handle)[0]
-        self.assertEqual(0,data_test.size)
+        self.assertEqual(0, data_test.size)
 
         # run the next part of the simulation
         sim.run(5, 0.01)
@@ -58,7 +59,6 @@ class TestClearSamplers(unittest.TestCase):
         data_temp, meta_temp = sim.samples(handle)[0]
         data_t = np.concatenate((data_t, data_temp), 0)
 
-
         # check if results are the same
         self.assertEqual(gids, gids_t)
         self.assertEqual(times_t, times)
diff --git a/python/test/unit/test_contexts.py b/python/test/unit/test_contexts.py
index 7ed3a7d3..c5aecc78 100644
--- a/python/test/unit/test_contexts.py
+++ b/python/test/unit/test_contexts.py
@@ -11,6 +11,7 @@ from .. import fixtures
 all tests for non-distributed arb.context
 """
 
+
 class TestContexts(unittest.TestCase):
     def test_default_allocation(self):
         alloc = arb.proc_allocation()
@@ -33,22 +34,24 @@ class TestContexts(unittest.TestCase):
         self.assertFalse(alloc.has_gpu)
 
     def test_exceptions_allocation(self):
-        with self.assertRaisesRegex(RuntimeError,
-            "gpu_id must be None, or a non-negative integer"):
-            arb.proc_allocation(gpu_id = 1.)
-        with self.assertRaisesRegex(RuntimeError,
-            "gpu_id must be None, or a non-negative integer"):
-            arb.proc_allocation(gpu_id = -1)
-        with self.assertRaisesRegex(RuntimeError,
-            "gpu_id must be None, or a non-negative integer"):
-            arb.proc_allocation(gpu_id = 'gpu_id')
+        with self.assertRaisesRegex(
+            RuntimeError, "gpu_id must be None, or a non-negative integer"
+        ):
+            arb.proc_allocation(gpu_id=1.0)
+        with self.assertRaisesRegex(
+            RuntimeError, "gpu_id must be None, or a non-negative integer"
+        ):
+            arb.proc_allocation(gpu_id=-1)
+        with self.assertRaisesRegex(
+            RuntimeError, "gpu_id must be None, or a non-negative integer"
+        ):
+            arb.proc_allocation(gpu_id="gpu_id")
         with self.assertRaises(TypeError):
-            arb.proc_allocation(threads = 1.)
-        with self.assertRaisesRegex(ValueError,
-            "threads must be a positive integer"):
-             arb.proc_allocation(threads = 0)
+            arb.proc_allocation(threads=1.0)
+        with self.assertRaisesRegex(ValueError, "threads must be a positive integer"):
+            arb.proc_allocation(threads=0)
         with self.assertRaises(TypeError):
-            arb.proc_allocation(threads = None)
+            arb.proc_allocation(threads=None)
 
     def test_default_context(self):
         ctx = arb.context()
@@ -61,7 +64,7 @@ class TestContexts(unittest.TestCase):
         self.assertEqual(ctx.rank, 0)
 
     def test_context(self):
-        ctx = arb.context(threads = 42, gpu_id = None)
+        ctx = arb.context(threads=42, gpu_id=None)
 
         self.assertFalse(ctx.has_mpi)
         self.assertFalse(ctx.has_gpu)
@@ -71,7 +74,7 @@ class TestContexts(unittest.TestCase):
 
     def test_context_avail_threads(self):
         # test that 'avail_threads' returns at least 1.
-        ctx = arb.context(threads = 'avail_threads', gpu_id = None)
+        ctx = arb.context(threads="avail_threads", gpu_id=None)
 
         self.assertFalse(ctx.has_mpi)
         self.assertFalse(ctx.has_gpu)
diff --git a/python/test/unit/test_decor.py b/python/test/unit/test_decor.py
index cb835673..e51aec51 100644
--- a/python/test/unit/test_decor.py
+++ b/python/test/unit/test_decor.py
@@ -10,32 +10,33 @@ Tests for decor and decoration wrappers.
 TODO: Coverage for more than just iclamp.
 """
 
+
 class TestDecorClasses(unittest.TestCase):
     def test_iclamp(self):
         # Constant amplitude iclamp:
-        clamp = A.iclamp(10);
+        clamp = A.iclamp(10)
         self.assertEqual(0, clamp.frequency)
         self.assertEqual([(0, 10)], clamp.envelope)
 
-        clamp = A.iclamp(10, frequency=20);
+        clamp = A.iclamp(10, frequency=20)
         self.assertEqual(20, clamp.frequency)
         self.assertEqual([(0, 10)], clamp.envelope)
 
         # Square pulse:
-        clamp = A.iclamp(100, 20, 3);
+        clamp = A.iclamp(100, 20, 3)
         self.assertEqual(0, clamp.frequency)
         self.assertEqual([(100, 3), (120, 3), (120, 0)], clamp.envelope)
 
-        clamp = A.iclamp(100, 20, 3, frequency=7);
+        clamp = A.iclamp(100, 20, 3, frequency=7)
         self.assertEqual(7, clamp.frequency)
         self.assertEqual([(100, 3), (120, 3), (120, 0)], clamp.envelope)
 
         # Explicit envelope:
         envelope = [(1, 10), (3, 30), (5, 50), (7, 0)]
-        clamp = A.iclamp(envelope);
+        clamp = A.iclamp(envelope)
         self.assertEqual(0, clamp.frequency)
         self.assertEqual(envelope, clamp.envelope)
 
-        clamp = A.iclamp(envelope, frequency=7);
+        clamp = A.iclamp(envelope, frequency=7)
         self.assertEqual(7, clamp.frequency)
         self.assertEqual(envelope, clamp.envelope)
diff --git a/python/test/unit/test_domain_decompositions.py b/python/test/unit/test_domain_decompositions.py
index 9dd86ae8..a7aa303e 100644
--- a/python/test/unit/test_domain_decompositions.py
+++ b/python/test/unit/test_domain_decompositions.py
@@ -15,7 +15,7 @@ all tests for non-distributed arb.domain_decomposition
 """
 
 # Dummy recipe
-class homo_recipe (arb.recipe):
+class homo_recipe(arb.recipe):
     def __init__(self, n=4):
         arb.recipe.__init__(self)
         self.ncells = n
@@ -27,11 +27,12 @@ class homo_recipe (arb.recipe):
         return []
 
     def cell_kind(self, gid):
-            return arb.cell_kind.cable
+        return arb.cell_kind.cable
+
 
 # Heterogenous cell population of cable and rss cells.
 # Interleaved so that cells with even gid are cable cells, and even gid are spike source cells.
-class hetero_recipe (arb.recipe):
+class hetero_recipe(arb.recipe):
     def __init__(self, n=4):
         arb.recipe.__init__(self)
         self.ncells = n
@@ -43,11 +44,12 @@ class hetero_recipe (arb.recipe):
         return []
 
     def cell_kind(self, gid):
-        if (gid%2):
+        if gid % 2:
             return arb.cell_kind.spike_source
         else:
             return arb.cell_kind.cable
 
+
 class TestDomain_Decompositions(unittest.TestCase):
     # 1 cpu core, no gpus; assumes all cells will be put into cell groups of size 1
     def test_domain_decomposition_homogenous_CPU(self):
@@ -96,7 +98,7 @@ class TestDomain_Decompositions(unittest.TestCase):
 
         self.assertEqual(len(grp.gids), n_cells)
         self.assertEqual(grp.gids[0], 0)
-        self.assertEqual(grp.gids[-1], n_cells-1)
+        self.assertEqual(grp.gids[-1], n_cells - 1)
         self.assertEqual(grp.backend, arb.backend.gpu)
         self.assertEqual(grp.kind, arb.cell_kind.cable)
 
@@ -132,7 +134,7 @@ class TestDomain_Decompositions(unittest.TestCase):
         kinds = [arb.cell_kind.cable, arb.cell_kind.spike_source]
         for k in kinds:
             gids = kind_lists[k]
-            self.assertEqual(len(gids), int(n_cells/2))
+            self.assertEqual(len(gids), int(n_cells / 2))
             for gid in gids:
                 self.assertEqual(k, recipe.cell_kind(gid))
 
@@ -148,7 +150,7 @@ class TestDomain_Decompositions(unittest.TestCase):
         self.assertEqual(decomp.num_global_cells, n_cells)
 
         # one cell group with n_cells/2 on gpu, and n_cells/2 groups on cpu
-        expected_groups = int(n_cells/2) + 1
+        expected_groups = int(n_cells / 2) + 1
         self.assertEqual(len(decomp.groups), expected_groups)
 
         grps = range(expected_groups)
@@ -157,16 +159,16 @@ class TestDomain_Decompositions(unittest.TestCase):
         for i in grps:
             grp = decomp.groups[i]
             k = grp.kind
-            if (k == arb.cell_kind.cable):
+            if k == arb.cell_kind.cable:
                 self.assertEqual(grp.backend, arb.backend.gpu)
-                self.assertEqual(len(grp.gids), int(n_cells/2))
+                self.assertEqual(len(grp.gids), int(n_cells / 2))
                 for gid in grp.gids:
-                    self.assertTrue(gid%2==0)
+                    self.assertTrue(gid % 2 == 0)
                     n += 1
-            elif (k == arb.cell_kind.spike_source):
+            elif k == arb.cell_kind.spike_source:
                 self.assertEqual(grp.backend, arb.backend.multicore)
                 self.assertEqual(len(grp.gids), 1)
-                self.assertTrue(grp.gids[0]%2)
+                self.assertTrue(grp.gids[0] % 2)
                 n += 1
         self.assertEqual(n_cells, n)
 
@@ -183,7 +185,12 @@ class TestDomain_Decompositions(unittest.TestCase):
         spike_hint = arb.partition_hint()
         spike_hint.prefer_gpu = False
         spike_hint.cpu_group_size = 4
-        hints = dict([(arb.cell_kind.cable, cable_hint), (arb.cell_kind.spike_source, spike_hint)])
+        hints = dict(
+            [
+                (arb.cell_kind.cable, cable_hint),
+                (arb.cell_kind.spike_source, spike_hint),
+            ]
+        )
 
         decomp = arb.partition_load_balance(recipe, context, hints)
 
@@ -194,11 +201,13 @@ class TestDomain_Decompositions(unittest.TestCase):
         spike_groups = []
 
         for g in decomp.groups:
-            self.assertTrue(g.kind == arb.cell_kind.cable or g.kind == arb.cell_kind.spike_source)
+            self.assertTrue(
+                g.kind == arb.cell_kind.cable or g.kind == arb.cell_kind.spike_source
+            )
 
-            if (g.kind == arb.cell_kind.cable):
+            if g.kind == arb.cell_kind.cable:
                 cable_groups.append(g.gids)
-            elif (g.kind == arb.cell_kind.spike_source):
+            elif g.kind == arb.cell_kind.spike_source:
                 spike_groups.append(g.gids)
 
         self.assertEqual(exp_cable_groups, cable_groups)
@@ -217,10 +226,17 @@ class TestDomain_Decompositions(unittest.TestCase):
         spike_hint = arb.partition_hint()
         spike_hint.prefer_gpu = False
         spike_hint.gpu_group_size = 1
-        hints = dict([(arb.cell_kind.cable, cable_hint), (arb.cell_kind.spike_source, spike_hint)])
-
-        with self.assertRaisesRegex(RuntimeError,
-            "unable to perform load balancing because cell_kind::cable has invalid suggested cpu_cell_group size of 0"):
+        hints = dict(
+            [
+                (arb.cell_kind.cable, cable_hint),
+                (arb.cell_kind.spike_source, spike_hint),
+            ]
+        )
+
+        with self.assertRaisesRegex(
+            RuntimeError,
+            "unable to perform load balancing because cell_kind::cable has invalid suggested cpu_cell_group size of 0",
+        ):
             decomp = arb.partition_load_balance(recipe, context, hints)
 
         cable_hint = arb.partition_hint()
@@ -229,8 +245,15 @@ class TestDomain_Decompositions(unittest.TestCase):
         spike_hint = arb.partition_hint()
         spike_hint.prefer_gpu = True
         spike_hint.gpu_group_size = 0
-        hints = dict([(arb.cell_kind.cable, cable_hint), (arb.cell_kind.spike_source, spike_hint)])
-
-        with self.assertRaisesRegex(RuntimeError,
-            "unable to perform load balancing because cell_kind::spike_source has invalid suggested gpu_cell_group size of 0"):
+        hints = dict(
+            [
+                (arb.cell_kind.cable, cable_hint),
+                (arb.cell_kind.spike_source, spike_hint),
+            ]
+        )
+
+        with self.assertRaisesRegex(
+            RuntimeError,
+            "unable to perform load balancing because cell_kind::spike_source has invalid suggested gpu_cell_group size of 0",
+        ):
             decomp = arb.partition_load_balance(recipe, context, hints)
diff --git a/python/test/unit/test_event_generators.py b/python/test/unit/test_event_generators.py
index 2ea56490..b992e9e8 100644
--- a/python/test/unit/test_event_generators.py
+++ b/python/test/unit/test_event_generators.py
@@ -11,11 +11,11 @@ from .. import fixtures
 all tests for event generators (regular, explicit, poisson)
 """
 
-class TestEventGenerator(unittest.TestCase):
 
+class TestEventGenerator(unittest.TestCase):
     def test_event_generator_regular_schedule(self):
         cm = arb.cell_local_label("tgt0")
-        rs = arb.regular_schedule(2.0, 1., 100.)
+        rs = arb.regular_schedule(2.0, 1.0, 100.0)
         rg = arb.event_generator(cm, 3.14, rs)
         self.assertEqual(rg.target.label, "tgt0")
         self.assertEqual(rg.target.policy, arb.selection_policy.univalent)
@@ -23,15 +23,15 @@ class TestEventGenerator(unittest.TestCase):
 
     def test_event_generator_explicit_schedule(self):
         cm = arb.cell_local_label("tgt1", arb.selection_policy.round_robin)
-        es = arb.explicit_schedule([0,1,2,3,4.4])
+        es = arb.explicit_schedule([0, 1, 2, 3, 4.4])
         eg = arb.event_generator(cm, -0.01, es)
         self.assertEqual(eg.target.label, "tgt1")
         self.assertEqual(eg.target.policy, arb.selection_policy.round_robin)
         self.assertAlmostEqual(eg.weight, -0.01)
 
     def test_event_generator_poisson_schedule(self):
-        ps = arb.poisson_schedule(0., 10., 0)
-        pg = arb.event_generator("tgt2", 42., ps)
+        ps = arb.poisson_schedule(0.0, 10.0, 0)
+        pg = arb.event_generator("tgt2", 42.0, ps)
         self.assertEqual(pg.target.label, "tgt2")
         self.assertEqual(pg.target.policy, arb.selection_policy.univalent)
-        self.assertEqual(pg.weight, 42.)
+        self.assertEqual(pg.weight, 42.0)
diff --git a/python/test/unit/test_identifiers.py b/python/test/unit/test_identifiers.py
index 61469cb3..f682e569 100644
--- a/python/test/unit/test_identifiers.py
+++ b/python/test/unit/test_identifiers.py
@@ -11,15 +11,15 @@ from .. import fixtures
 all tests for identifiers, indexes, kinds
 """
 
-class TestCellMembers(unittest.TestCase):
 
+class TestCellMembers(unittest.TestCase):
     def test_gid_index_ctor_cell_member(self):
-        cm = arb.cell_member(17,42)
+        cm = arb.cell_member(17, 42)
         self.assertEqual(cm.gid, 17)
         self.assertEqual(cm.index, 42)
 
     def test_set_gid_index_cell_member(self):
-        cm = arb.cell_member(0,0)
+        cm = arb.cell_member(0, 0)
         cm.gid = 13
         cm.index = 23
         self.assertEqual(cm.gid, 13)
diff --git a/python/test/unit/test_morphology.py b/python/test/unit/test_morphology.py
index 4c271f2d..90454ada 100644
--- a/python/test/unit/test_morphology.py
+++ b/python/test/unit/test_morphology.py
@@ -12,49 +12,57 @@ from .. import fixtures
 tests for morphology-related classes
 """
 
+
 def as_matrix(iso):
     trans = N.array(iso((0, 0, 0)))
-    return N.c_[N.array([iso(v) for v in [(1,0,0),(0,1,0),(0,0,1)]]).transpose()-N.c_[trans, trans, trans], trans]
+    return N.c_[
+        N.array([iso(v) for v in [(1, 0, 0), (0, 1, 0), (0, 0, 1)]]).transpose()
+        - N.c_[trans, trans, trans],
+        trans,
+    ]
+
 
 class TestPlacePwlin(unittest.TestCase):
     def test_identity(self):
         self.assertTrue(N.isclose(as_matrix(A.isometry()), N.eye(3, 4)).all())
 
     def test_translation(self):
-        displacement=(4, 5, 6)
+        displacement = (4, 5, 6)
         iso = A.isometry.translate(displacement)
         expected = N.c_[N.eye(3), displacement]
         self.assertTrue(N.isclose(as_matrix(iso), expected).all())
 
     def test_rotation(self):
         # 90 degrees about y axis.
-        iso = A.isometry.rotate(theta=math.pi/2, axis=(0, 1, 0))
-        expected = N.array([[0, 0, 1, 0],[0, 1, 0, 0],[-1, 0, 0, 0]])
+        iso = A.isometry.rotate(theta=math.pi / 2, axis=(0, 1, 0))
+        expected = N.array([[0, 0, 1, 0], [0, 1, 0, 0], [-1, 0, 0, 0]])
         self.assertTrue(N.isclose(as_matrix(iso), expected).all())
 
     def test_compose(self):
         # Translations are always extrinsic, rotations are intrinsic.
-        y90 = A.isometry.rotate(theta=math.pi/2, axis=(0, 1, 0))
-        z90 = A.isometry.rotate(theta=math.pi/2, axis=(0, 0, 1))
+        y90 = A.isometry.rotate(theta=math.pi / 2, axis=(0, 1, 0))
+        z90 = A.isometry.rotate(theta=math.pi / 2, axis=(0, 0, 1))
         t123 = A.isometry.translate(1, 2, 3)
         t456 = A.isometry.translate(4, 5, 6)
-        iso = t123*z90*t456*y90   # rot about y, then translate, then rot about z, then translate.
-        expected = N.array([[0, 0, 1, 5],[1, 0, 0, 7],[0, 1, 0, 9]])
+        iso = (
+            t123 * z90 * t456 * y90
+        )  # rot about y, then translate, then rot about z, then translate.
+        expected = N.array([[0, 0, 1, 5], [1, 0, 0, 7], [0, 1, 0, 9]])
         self.assertTrue(N.isclose(as_matrix(iso), expected).all())
 
     def test_mpoint(self):
         # Translations can be built from mpoints, and isometry can act upon mpoints. Radius is ignored.
-        y90 = A.isometry.rotate(theta=math.pi/2, axis=(0, 1, 0))
-        z90 = A.isometry.rotate(theta=math.pi/2, axis=(0, 0, 1))
+        y90 = A.isometry.rotate(theta=math.pi / 2, axis=(0, 1, 0))
+        z90 = A.isometry.rotate(theta=math.pi / 2, axis=(0, 0, 1))
         t123 = A.isometry.translate(A.mpoint(1, 2, 3, 20))
         t456 = A.isometry.translate(A.mpoint(4, 5, 6, 30))
-        iso = t123*z90*t456*y90
-        expected = N.array([[0, 0, 1, 5],[1, 0, 0, 7],[0, 1, 0, 9]])
+        iso = t123 * z90 * t456 * y90
+        expected = N.array([[0, 0, 1, 5], [1, 0, 0, 7], [0, 1, 0, 9]])
         self.assertTrue(N.isclose(as_matrix(iso), expected).all())
 
         q = iso(A.mpoint(2, 3, 4, 10))
         q_arr = N.array((q.x, q.y, q.z, q.radius))
-        q_expected = N.array([4+5, 2+7, 3+9, 10])
+        q_expected = N.array([4 + 5, 2 + 7, 3 + 9, 10])
         self.assertTrue(N.isclose(q_arr, q_expected).all())
 
     def test_place_pwlin_id(self):
@@ -78,13 +86,15 @@ class TestPlacePwlin(unittest.TestCase):
 
         Lhalf = place.at(A.location(0, 0.5))
         Lhalfs = place.all_at(A.location(0, 0.5))
-        self.assertTrue(s0d==Lhalf or s1p==Lhalf)
-        self.assertTrue([s0d, s1p]==Lhalfs)
+        self.assertTrue(s0d == Lhalf or s1p == Lhalf)
+        self.assertTrue([s0d, s1p] == Lhalfs)
 
-        Chalf = [(s.prox, s.dist) for s in place.segments([A.cable(0, 0., 0.5)])]
+        Chalf = [(s.prox, s.dist) for s in place.segments([A.cable(0, 0.0, 0.5)])]
         self.assertEqual([(s0p, s0d)], Chalf)
 
-        Chalf_all = [(s.prox, s.dist) for s in place.all_segments([A.cable(0, 0., 0.5)])]
+        Chalf_all = [
+            (s.prox, s.dist) for s in place.all_segments([A.cable(0, 0.0, 0.5)])
+        ]
         self.assertEqual([(s0p, s0d), (s1p, s1p)], Chalf_all)
 
     def test_place_pwlin_isometry(self):
@@ -114,11 +124,13 @@ class TestPlacePwlin(unittest.TestCase):
 
         Lhalf = place.at(A.location(0, 0.5))
         Lhalfs = place.all_at(A.location(0, 0.5))
-        self.assertTrue(x0d==Lhalf or x1p==Lhalf)
-        self.assertTrue([x0d, x1p]==Lhalfs)
+        self.assertTrue(x0d == Lhalf or x1p == Lhalf)
+        self.assertTrue([x0d, x1p] == Lhalfs)
 
-        Chalf = [(s.prox, s.dist) for s in place.segments([A.cable(0, 0., 0.5)])]
+        Chalf = [(s.prox, s.dist) for s in place.segments([A.cable(0, 0.0, 0.5)])]
         self.assertEqual([(x0p, x0d)], Chalf)
 
-        Chalf_all = [(s.prox, s.dist) for s in place.all_segments([A.cable(0, 0., 0.5)])]
+        Chalf_all = [
+            (s.prox, s.dist) for s in place.all_segments([A.cable(0, 0.0, 0.5)])
+        ]
         self.assertEqual([(x0p, x0d), (x1p, x1p)], Chalf_all)
diff --git a/python/test/unit/test_multiple_connections.py b/python/test/unit/test_multiple_connections.py
index 3f065e83..89cc32d7 100644
--- a/python/test/unit/test_multiple_connections.py
+++ b/python/test/unit/test_multiple_connections.py
@@ -19,269 +19,364 @@ NOTE: In principle, a plasticity (STDP) mechanism is employed here to test if a
       potentiation in the second synapse mechanism is only enhanced by spikes of presynaptic neuron 1.
 """
 
-class TestMultipleConnections(unittest.TestCase):
 
-	# Constructor (overridden)
-	def __init__(self, args):
-		super(TestMultipleConnections, self).__init__(args)
-
-		self.runtime = 2 # ms
-		self.dt = 0.01 # ms
-
-	# Method creating a new mechanism for a synapse with STDP
-	def create_syn_mechanism(self, scale_contrib = 1):
-		# create new synapse mechanism
-		syn_mechanism = arb.mechanism("expsyn_stdp")
-
-		# set pre- and postsynaptic contributions for STDP
-		syn_mechanism.set("Apre",  0.01 * scale_contrib) 
-		syn_mechanism.set("Apost",  -0.01 * scale_contrib)
-
-		# set minimal decay time
-		syn_mechanism.set("tau", self.dt)
-
-		return syn_mechanism
-
-	# Method that does the final evaluation for all tests
-	def evaluate_outcome(self, sim, handle_mem):
-		# membrane potential should temporarily be above the spiking threshold at around 1.0 ms (only testing this if the current node keeps the data, cf. GitHub issue #1892)
-		if len(sim.samples(handle_mem)) > 0:
-			data_mem, _ = sim.samples(handle_mem)[0]
-			#print(data_mem[(data_mem[:, 0] >= 1.0), 1])
-			self.assertGreater(data_mem[(np.round(data_mem[:, 0], 2) == 1.02), 1], -10)
-			self.assertLess(data_mem[(np.round(data_mem[:, 0], 2) == 1.05), 1], -10)
-
-		# neuron 3 should spike at around 1.0 ms, when the added input from all connections will cause threshold crossing
-		spike_times = sim.spikes()["time"]
-		spike_gids = sim.spikes()["source"]["gid"]
-		#print(list(zip(*[spike_times, spike_gids])))
-		self.assertGreater(sum(spike_gids == 3), 0)
-		self.assertAlmostEqual(spike_times[(spike_gids == 3)][0], 1.00, delta=0.04)
-
-	# Method that does additional evaluation for Test #1
-	def evaluate_additional_outcome_1(self, sim, handle_mem):
-		# order of spiking neurons (also cf. 'test_spikes.py')
-		spike_gids = sim.spikes()["source"]["gid"]
-		self.assertEqual([2, 1, 0, 3, 3], spike_gids.tolist())
-
-		# neuron 3 should spike again at around 1.8 ms, when the added input from all connections will cause threshold crossing
-		spike_times = sim.spikes()["time"]
-		self.assertAlmostEqual(spike_times[(spike_gids == 3)][1], 1.80, delta=0.04)
-
-	# Method that does additional evaluation for Test #2 and Test #3
-	def evaluate_additional_outcome_2_3(self, sim, handle_mem):
-		# order of spiking neurons (also cf. 'test_spikes.py')
-		spike_gids = sim.spikes()["source"]["gid"]
-		self.assertEqual([2, 1, 0, 3], spike_gids.tolist())
-
-	# Method that runs the main part of Test #1 and Test #2
-	def rr_main(self, context, art_spiker_recipe, weight, weight2):
-		# define new method 'cell_description()' and overwrite the original one in the 'art_spiker_recipe' object
-		create_syn_mechanism = self.create_syn_mechanism
-		def cell_description(self, gid):
-			# spike source neuron
-			if gid < 3:
-				return arb.spike_source_cell("spike_source", arb.explicit_schedule(self.trains[gid]))
-
-			# spike-receiving cable neuron
-			elif gid == 3:
-				tree, labels, decor = self._cable_cell_elements()
-
-				scale_stdp = 0.5 # use only half of the original magnitude for STDP because two connections will come into play
-
-				decor.place('"midpoint"', arb.synapse(create_syn_mechanism(scale_stdp)), "postsyn_target") # place synapse for input from one presynaptic neuron at the center of the soma
-				decor.place('"midpoint"', arb.synapse(create_syn_mechanism(scale_stdp)), "postsyn_target") # place synapse for input from another presynaptic neuron at the center of the soma
-				                                                                                           # (using the same label as above!)
-				return arb.cable_cell(tree, labels, decor)
-		art_spiker_recipe.cell_description = types.MethodType(cell_description, art_spiker_recipe)
-
-		# read connections from recipe for testing
-		connections_from_recipe = art_spiker_recipe.connections_on(3)
-
-		# connection #1 from neuron 0 to 3
-		self.assertEqual(connections_from_recipe[0].dest.label, "postsyn_target")
-		self.assertAlmostEqual(connections_from_recipe[0].weight, weight)
-		self.assertAlmostEqual(connections_from_recipe[0].delay, 0.2)
-
-		# connection #2 from neuron 0 to 3
-		self.assertEqual(connections_from_recipe[1].dest.label, "postsyn_target")
-		self.assertAlmostEqual(connections_from_recipe[1].weight, weight)
-		self.assertAlmostEqual(connections_from_recipe[1].delay, 0.2)
-
-		# connection #1 from neuron 1 to 3
-		self.assertEqual(connections_from_recipe[2].dest.label, "postsyn_target")
-		self.assertAlmostEqual(connections_from_recipe[2].weight, weight2)
-		self.assertAlmostEqual(connections_from_recipe[2].delay, 1.4)
-
-		# connection #2 from neuron 1 to 3
-		self.assertEqual(connections_from_recipe[3].dest.label, "postsyn_target")
-		self.assertAlmostEqual(connections_from_recipe[3].weight, weight2)
-		self.assertAlmostEqual(connections_from_recipe[3].delay, 1.4)
-
-		# construct domain_decomposition and simulation object
-		dd = arb.partition_load_balance(art_spiker_recipe, context) 
-		sim = arb.simulation(art_spiker_recipe, dd, context)
-		sim.record(arb.spike_recording.all)
-
-		# create schedule and handle to record the membrane potential of neuron 3
-		reg_sched = arb.regular_schedule(0, self.dt, self.runtime)
-		handle_mem = sim.sample((3, 0), reg_sched)
-
-		# run the simulation
-		sim.run(self.runtime, self.dt)
-
-		return sim, handle_mem
-
-	# Test #1 (for 'round_robin')
-	@fixtures.context
-	@fixtures.art_spiker_recipe
-	@fixtures.sum_weight_hh_spike
-	@fixtures.sum_weight_hh_spike_2
-	def test_multiple_connections_rr_no_halt(self, context, art_spiker_recipe, sum_weight_hh_spike, sum_weight_hh_spike_2):
-		weight = sum_weight_hh_spike/2 # connection strength which is, summed over two connections, just enough to evoke an immediate spike at t=1ms
-		weight2 = 0.97*sum_weight_hh_spike_2/2 # connection strength which is, summed over two connections, just NOT enough to evoke an immediate spike at t=1.8ms
-
-		# define new method 'connections_on()' and overwrite the original one in the 'art_spiker_recipe' object
-		def connections_on(self, gid):
-			# incoming to neurons 0--2
-			if gid < 3:
-				return []
-			
-			# incoming to neuron 3
-			elif gid == 3:
-				source_label_0 = arb.cell_global_label(0, "spike_source") # referring to the "spike_source" label of neuron 0
-				source_label_1 = arb.cell_global_label(1, "spike_source") # referring to the "spike_source" label of neuron 1
-
-				target_label_rr = arb.cell_local_label("postsyn_target", arb.selection_policy.round_robin) # referring to the current item in the "postsyn_target" label group of neuron 3, moving to the next item afterwards
-
-				conn_0_3_n1 = arb.connection(source_label_0, target_label_rr, weight, 0.2) # first connection from neuron 0 to 3
-				conn_0_3_n2 = arb.connection(source_label_0, target_label_rr, weight, 0.2) # second connection from neuron 0 to 3
-				                                                                           # NOTE: this is not connecting to the same target label item as 'conn_0_3_n1' because 'round_robin' has been used before!
-				conn_1_3_n1 = arb.connection(source_label_1, target_label_rr, weight2, 1.4) # first connection from neuron 1 to 3
-				conn_1_3_n2 = arb.connection(source_label_1, target_label_rr, weight2, 1.4) # second connection from neuron 1 to 3
-				                                                                            # NOTE: this is not connecting to the same target label item as 'conn_1_3_n1' because 'round_robin' has been used before!
-
-				return [conn_0_3_n1, conn_0_3_n2, conn_1_3_n1, conn_1_3_n2]
-		art_spiker_recipe.connections_on = types.MethodType(connections_on, art_spiker_recipe)
-
-		# run the main part of this test
-		sim, handle_mem = self.rr_main(context, art_spiker_recipe, weight, weight2)
-	
-		# evaluate the outcome
-		self.evaluate_outcome(sim, handle_mem)
-		self.evaluate_additional_outcome_1(sim, handle_mem)
-
-	# Test #2 (for the combination of 'round_robin_halt' and 'round_robin')
-	@fixtures.context
-	@fixtures.art_spiker_recipe
-	@fixtures.sum_weight_hh_spike
-	@fixtures.sum_weight_hh_spike_2
-	def test_multiple_connections_rr_halt(self, context, art_spiker_recipe, sum_weight_hh_spike, sum_weight_hh_spike_2):
-		weight = sum_weight_hh_spike/2 # connection strength which is, summed over two connections, just enough to evoke an immediate spike at t=1ms
-		weight2 = 0.97*sum_weight_hh_spike_2/2 # connection strength which is, summed over two connections, just NOT enough to evoke an immediate spike at t=1.8ms
-
-		# define new method 'connections_on()' and overwrite the original one in the 'art_spiker_recipe' object
-		def connections_on(self, gid):
-			# incoming to neurons 0--2
-			if gid < 3:
-				return []
-			
-			# incoming to neuron 3
-			elif gid == 3:
-				source_label_0 = arb.cell_global_label(0, "spike_source") # referring to the "spike_source" label of neuron 0
-				source_label_1 = arb.cell_global_label(1, "spike_source") # referring to the "spike_source" label of neuron 1
-
-				target_label_rr_halt = arb.cell_local_label("postsyn_target", arb.selection_policy.round_robin_halt) # referring to the current item in the "postsyn_target" label group of neuron 3
-				target_label_rr = arb.cell_local_label("postsyn_target", arb.selection_policy.round_robin) # referring to the current item in the "postsyn_target" label group of neuron 3, moving to the next item afterwards
-
-				conn_0_3_n1 = arb.connection(source_label_0, target_label_rr_halt, weight, 0.2) # first connection from neuron 0 to 3
-				conn_0_3_n2 = arb.connection(source_label_0, target_label_rr, weight, 0.2) # second connection from neuron 0 to 3
-				conn_1_3_n1 = arb.connection(source_label_1, target_label_rr_halt, weight2, 1.4) # first connection from neuron 1 to 3
-				conn_1_3_n2 = arb.connection(source_label_1, target_label_rr, weight2, 1.4) # second connection from neuron 1 to 3
-
-				return [conn_0_3_n1, conn_0_3_n2, conn_1_3_n1, conn_1_3_n2]
-		art_spiker_recipe.connections_on = types.MethodType(connections_on, art_spiker_recipe)
-
-		# run the main part of this test
-		sim, handle_mem = self.rr_main(context, art_spiker_recipe, weight, weight2)
-	
-		# evaluate the outcome
-		self.evaluate_outcome(sim, handle_mem)
-		self.evaluate_additional_outcome_2_3(sim, handle_mem)
-
-	# Test #3 (for 'univalent')
-	@fixtures.context
-	@fixtures.art_spiker_recipe
-	@fixtures.sum_weight_hh_spike
-	@fixtures.sum_weight_hh_spike_2
-	def test_multiple_connections_uni(self, context, art_spiker_recipe, sum_weight_hh_spike, sum_weight_hh_spike_2):
-		weight = sum_weight_hh_spike # connection strength which is just enough to evoke an immediate spike at t=1ms (equaling the sum of two connections in Test #2)
-		weight2 = 0.97*sum_weight_hh_spike_2 # connection strength which is just NOT enough to evoke an immediate spike at t=1.8ms (equaling the sum of two connections in Test #2)
-
-		# define new method 'connections_on()' and overwrite the original one in the 'art_spiker_recipe' object
-		def connections_on(self, gid):
-			# incoming to neurons 0--2
-			if gid < 3:
-				return []
-			
-			# incoming to neuron 3
-			elif gid == 3:
-				source_label_0 = arb.cell_global_label(0, "spike_source") # referring to the "spike_source" label of neuron 0
-				source_label_1 = arb.cell_global_label(1, "spike_source") # referring to the "spike_source" label of neuron 1
-
-				target_label_uni_n1 = arb.cell_local_label("postsyn_target_1", arb.selection_policy.univalent) # referring to an only item in the "postsyn_target_1" label group of neuron 3
-				target_label_uni_n2 = arb.cell_local_label("postsyn_target_2", arb.selection_policy.univalent) # referring to an only item in the "postsyn_target_2" label group of neuron 3
-
-				conn_0_3 = arb.connection(source_label_0, target_label_uni_n1, weight, 0.2) # connection from neuron 0 to 3
-				conn_1_3 = arb.connection(source_label_1, target_label_uni_n2, weight2, 1.4) # connection from neuron 1 to 3
-
-				return [conn_0_3, conn_1_3]
-		art_spiker_recipe.connections_on = types.MethodType(connections_on, art_spiker_recipe)
-
-		# define new method 'cell_description()' and overwrite the original one in the 'art_spiker_recipe' object
-		create_syn_mechanism = self.create_syn_mechanism
-		def cell_description(self, gid):
-			# spike source neuron
-			if gid < 3:
-				return arb.spike_source_cell("spike_source", arb.explicit_schedule(self.trains[gid]))
-
-			# spike-receiving cable neuron
-			elif gid == 3:
-				tree, labels, decor = self._cable_cell_elements()
-
-				decor.place('"midpoint"', arb.synapse(create_syn_mechanism()), "postsyn_target_1") # place synapse for input from one presynaptic neuron at the center of the soma
-				decor.place('"midpoint"', arb.synapse(create_syn_mechanism()), "postsyn_target_2") # place synapse for input from another presynaptic neuron at the center of the soma
-				                                                                                   # (using another label as above!)
-
-				return arb.cable_cell(tree, labels, decor)
-		art_spiker_recipe.cell_description = types.MethodType(cell_description, art_spiker_recipe)
-
-		# read connections from recipe for testing
-		connections_from_recipe = art_spiker_recipe.connections_on(3)
-
-		# connection from neuron 0 to 3
-		self.assertEqual(connections_from_recipe[0].dest.label, "postsyn_target_1")
-		self.assertAlmostEqual(connections_from_recipe[0].weight, weight)
-		self.assertAlmostEqual(connections_from_recipe[0].delay, 0.2)
-
-		# connection from neuron 1 to 3
-		self.assertEqual(connections_from_recipe[1].dest.label, "postsyn_target_2")
-		self.assertAlmostEqual(connections_from_recipe[1].weight, weight2)
-		self.assertAlmostEqual(connections_from_recipe[1].delay, 1.4)
-
-		# construct domain_decomposition and simulation object
-		dd = arb.partition_load_balance(art_spiker_recipe, context) 
-		sim = arb.simulation(art_spiker_recipe, dd, context)
-		sim.record(arb.spike_recording.all)
-
-		# create schedule and handle to record the membrane potential of neuron 3
-		reg_sched = arb.regular_schedule(0, self.dt, self.runtime)
-		handle_mem = sim.sample((3, 0), reg_sched)
-
-		# run the simulation
-		sim.run(self.runtime, self.dt)
-	
-		# evaluate the outcome
-		self.evaluate_outcome(sim, handle_mem)
-		self.evaluate_additional_outcome_2_3(sim, handle_mem)
+class TestMultipleConnections(unittest.TestCase):
 
+    # Constructor (overridden)
+    def __init__(self, args):
+        super(TestMultipleConnections, self).__init__(args)
+
+        self.runtime = 2  # ms
+        self.dt = 0.01  # ms
+
+    # Method creating a new mechanism for a synapse with STDP
+    def create_syn_mechanism(self, scale_contrib=1):
+        # create new synapse mechanism
+        syn_mechanism = arb.mechanism("expsyn_stdp")
+
+        # set pre- and postsynaptic contributions for STDP
+        syn_mechanism.set("Apre", 0.01 * scale_contrib)
+        syn_mechanism.set("Apost", -0.01 * scale_contrib)
+
+        # set minimal decay time
+        syn_mechanism.set("tau", self.dt)
+
+        return syn_mechanism
+
+    # Method that does the final evaluation for all tests
+    def evaluate_outcome(self, sim, handle_mem):
+        # membrane potential should temporarily be above the spiking threshold at around 1.0 ms (only testing this if the current node keeps the data, cf. GitHub issue #1892)
+        if len(sim.samples(handle_mem)) > 0:
+            data_mem, _ = sim.samples(handle_mem)[0]
+            # print(data_mem[(data_mem[:, 0] >= 1.0), 1])
+            self.assertGreater(data_mem[(np.round(data_mem[:, 0], 2) == 1.02), 1], -10)
+            self.assertLess(data_mem[(np.round(data_mem[:, 0], 2) == 1.05), 1], -10)
+
+        # neuron 3 should spike at around 1.0 ms, when the added input from all connections will cause threshold crossing
+        spike_times = sim.spikes()["time"]
+        spike_gids = sim.spikes()["source"]["gid"]
+        # print(list(zip(*[spike_times, spike_gids])))
+        self.assertGreater(sum(spike_gids == 3), 0)
+        self.assertAlmostEqual(spike_times[(spike_gids == 3)][0], 1.00, delta=0.04)
+
+    # Method that does additional evaluation for Test #1
+    def evaluate_additional_outcome_1(self, sim, handle_mem):
+        # order of spiking neurons (also cf. 'test_spikes.py')
+        spike_gids = sim.spikes()["source"]["gid"]
+        self.assertEqual([2, 1, 0, 3, 3], spike_gids.tolist())
+
+        # neuron 3 should spike again at around 1.8 ms, when the added input from all connections will cause threshold crossing
+        spike_times = sim.spikes()["time"]
+        self.assertAlmostEqual(spike_times[(spike_gids == 3)][1], 1.80, delta=0.04)
+
+    # Method that does additional evaluation for Test #2 and Test #3
+    def evaluate_additional_outcome_2_3(self, sim, handle_mem):
+        # order of spiking neurons (also cf. 'test_spikes.py')
+        spike_gids = sim.spikes()["source"]["gid"]
+        self.assertEqual([2, 1, 0, 3], spike_gids.tolist())
+
+    # Method that runs the main part of Test #1 and Test #2
+    def rr_main(self, context, art_spiker_recipe, weight, weight2):
+        # define new method 'cell_description()' and overwrite the original one in the 'art_spiker_recipe' object
+        create_syn_mechanism = self.create_syn_mechanism
+
+        def cell_description(self, gid):
+            # spike source neuron
+            if gid < 3:
+                return arb.spike_source_cell(
+                    "spike_source", arb.explicit_schedule(self.trains[gid])
+                )
+
+            # spike-receiving cable neuron
+            elif gid == 3:
+                tree, labels, decor = self._cable_cell_elements()
+
+                scale_stdp = 0.5  # use only half of the original magnitude for STDP because two connections will come into play
+
+                decor.place(
+                    '"midpoint"',
+                    arb.synapse(create_syn_mechanism(scale_stdp)),
+                    "postsyn_target",
+                )  # place synapse for input from one presynaptic neuron at the center of the soma
+                decor.place(
+                    '"midpoint"',
+                    arb.synapse(create_syn_mechanism(scale_stdp)),
+                    "postsyn_target",
+                )  # place synapse for input from another presynaptic neuron at the center of the soma
+                # (using the same label as above!)
+                return arb.cable_cell(tree, labels, decor)
+
+        art_spiker_recipe.cell_description = types.MethodType(
+            cell_description, art_spiker_recipe
+        )
+
+        # read connections from recipe for testing
+        connections_from_recipe = art_spiker_recipe.connections_on(3)
+
+        # connection #1 from neuron 0 to 3
+        self.assertEqual(connections_from_recipe[0].dest.label, "postsyn_target")
+        self.assertAlmostEqual(connections_from_recipe[0].weight, weight)
+        self.assertAlmostEqual(connections_from_recipe[0].delay, 0.2)
+
+        # connection #2 from neuron 0 to 3
+        self.assertEqual(connections_from_recipe[1].dest.label, "postsyn_target")
+        self.assertAlmostEqual(connections_from_recipe[1].weight, weight)
+        self.assertAlmostEqual(connections_from_recipe[1].delay, 0.2)
+
+        # connection #1 from neuron 1 to 3
+        self.assertEqual(connections_from_recipe[2].dest.label, "postsyn_target")
+        self.assertAlmostEqual(connections_from_recipe[2].weight, weight2)
+        self.assertAlmostEqual(connections_from_recipe[2].delay, 1.4)
+
+        # connection #2 from neuron 1 to 3
+        self.assertEqual(connections_from_recipe[3].dest.label, "postsyn_target")
+        self.assertAlmostEqual(connections_from_recipe[3].weight, weight2)
+        self.assertAlmostEqual(connections_from_recipe[3].delay, 1.4)
+
+        # construct domain_decomposition and simulation object
+        dd = arb.partition_load_balance(art_spiker_recipe, context)
+        sim = arb.simulation(art_spiker_recipe, dd, context)
+        sim.record(arb.spike_recording.all)
+
+        # create schedule and handle to record the membrane potential of neuron 3
+        reg_sched = arb.regular_schedule(0, self.dt, self.runtime)
+        handle_mem = sim.sample((3, 0), reg_sched)
+
+        # run the simulation
+        sim.run(self.runtime, self.dt)
+
+        return sim, handle_mem
+
+    # Test #1 (for 'round_robin')
+    @fixtures.context
+    @fixtures.art_spiker_recipe
+    @fixtures.sum_weight_hh_spike
+    @fixtures.sum_weight_hh_spike_2
+    def test_multiple_connections_rr_no_halt(
+        self, context, art_spiker_recipe, sum_weight_hh_spike, sum_weight_hh_spike_2
+    ):
+        weight = (
+            sum_weight_hh_spike / 2
+        )  # connection strength which is, summed over two connections, just enough to evoke an immediate spike at t=1ms
+        weight2 = (
+            0.97 * sum_weight_hh_spike_2 / 2
+        )  # connection strength which is, summed over two connections, just NOT enough to evoke an immediate spike at t=1.8ms
+
+        # define new method 'connections_on()' and overwrite the original one in the 'art_spiker_recipe' object
+        def connections_on(self, gid):
+            # incoming to neurons 0--2
+            if gid < 3:
+                return []
+
+            # incoming to neuron 3
+            elif gid == 3:
+                source_label_0 = arb.cell_global_label(
+                    0, "spike_source"
+                )  # referring to the "spike_source" label of neuron 0
+                source_label_1 = arb.cell_global_label(
+                    1, "spike_source"
+                )  # referring to the "spike_source" label of neuron 1
+
+                target_label_rr = arb.cell_local_label(
+                    "postsyn_target", arb.selection_policy.round_robin
+                )  # referring to the current item in the "postsyn_target" label group of neuron 3, moving to the next item afterwards
+
+                conn_0_3_n1 = arb.connection(
+                    source_label_0, target_label_rr, weight, 0.2
+                )  # first connection from neuron 0 to 3
+                conn_0_3_n2 = arb.connection(
+                    source_label_0, target_label_rr, weight, 0.2
+                )  # second connection from neuron 0 to 3
+                # NOTE: this is not connecting to the same target label item as 'conn_0_3_n1' because 'round_robin' has been used before!
+                conn_1_3_n1 = arb.connection(
+                    source_label_1, target_label_rr, weight2, 1.4
+                )  # first connection from neuron 1 to 3
+                conn_1_3_n2 = arb.connection(
+                    source_label_1, target_label_rr, weight2, 1.4
+                )  # second connection from neuron 1 to 3
+                # NOTE: this is not connecting to the same target label item as 'conn_1_3_n1' because 'round_robin' has been used before!
+
+                return [conn_0_3_n1, conn_0_3_n2, conn_1_3_n1, conn_1_3_n2]
+
+        art_spiker_recipe.connections_on = types.MethodType(
+            connections_on, art_spiker_recipe
+        )
+
+        # run the main part of this test
+        sim, handle_mem = self.rr_main(context, art_spiker_recipe, weight, weight2)
+
+        # evaluate the outcome
+        self.evaluate_outcome(sim, handle_mem)
+        self.evaluate_additional_outcome_1(sim, handle_mem)
+
+    # Test #2 (for the combination of 'round_robin_halt' and 'round_robin')
+    @fixtures.context
+    @fixtures.art_spiker_recipe
+    @fixtures.sum_weight_hh_spike
+    @fixtures.sum_weight_hh_spike_2
+    def test_multiple_connections_rr_halt(
+        self, context, art_spiker_recipe, sum_weight_hh_spike, sum_weight_hh_spike_2
+    ):
+        weight = (
+            sum_weight_hh_spike / 2
+        )  # connection strength which is, summed over two connections, just enough to evoke an immediate spike at t=1ms
+        weight2 = (
+            0.97 * sum_weight_hh_spike_2 / 2
+        )  # connection strength which is, summed over two connections, just NOT enough to evoke an immediate spike at t=1.8ms
+
+        # define new method 'connections_on()' and overwrite the original one in the 'art_spiker_recipe' object
+        def connections_on(self, gid):
+            # incoming to neurons 0--2
+            if gid < 3:
+                return []
+
+            # incoming to neuron 3
+            elif gid == 3:
+                source_label_0 = arb.cell_global_label(
+                    0, "spike_source"
+                )  # referring to the "spike_source" label of neuron 0
+                source_label_1 = arb.cell_global_label(
+                    1, "spike_source"
+                )  # referring to the "spike_source" label of neuron 1
+
+                target_label_rr_halt = arb.cell_local_label(
+                    "postsyn_target", arb.selection_policy.round_robin_halt
+                )  # referring to the current item in the "postsyn_target" label group of neuron 3
+                target_label_rr = arb.cell_local_label(
+                    "postsyn_target", arb.selection_policy.round_robin
+                )  # referring to the current item in the "postsyn_target" label group of neuron 3, moving to the next item afterwards
+
+                conn_0_3_n1 = arb.connection(
+                    source_label_0, target_label_rr_halt, weight, 0.2
+                )  # first connection from neuron 0 to 3
+                conn_0_3_n2 = arb.connection(
+                    source_label_0, target_label_rr, weight, 0.2
+                )  # second connection from neuron 0 to 3
+                conn_1_3_n1 = arb.connection(
+                    source_label_1, target_label_rr_halt, weight2, 1.4
+                )  # first connection from neuron 1 to 3
+                conn_1_3_n2 = arb.connection(
+                    source_label_1, target_label_rr, weight2, 1.4
+                )  # second connection from neuron 1 to 3
+
+                return [conn_0_3_n1, conn_0_3_n2, conn_1_3_n1, conn_1_3_n2]
+
+        art_spiker_recipe.connections_on = types.MethodType(
+            connections_on, art_spiker_recipe
+        )
+
+        # run the main part of this test
+        sim, handle_mem = self.rr_main(context, art_spiker_recipe, weight, weight2)
+
+        # evaluate the outcome
+        self.evaluate_outcome(sim, handle_mem)
+        self.evaluate_additional_outcome_2_3(sim, handle_mem)
+
+    # Test #3 (for 'univalent')
+    @fixtures.context
+    @fixtures.art_spiker_recipe
+    @fixtures.sum_weight_hh_spike
+    @fixtures.sum_weight_hh_spike_2
+    def test_multiple_connections_uni(
+        self, context, art_spiker_recipe, sum_weight_hh_spike, sum_weight_hh_spike_2
+    ):
+        weight = sum_weight_hh_spike  # connection strength which is just enough to evoke an immediate spike at t=1ms (equaling the sum of two connections in Test #2)
+        weight2 = (
+            0.97 * sum_weight_hh_spike_2
+        )  # connection strength which is just NOT enough to evoke an immediate spike at t=1.8ms (equaling the sum of two connections in Test #2)
+
+        # define new method 'connections_on()' and overwrite the original one in the 'art_spiker_recipe' object
+        def connections_on(self, gid):
+            # incoming to neurons 0--2
+            if gid < 3:
+                return []
+
+            # incoming to neuron 3
+            elif gid == 3:
+                source_label_0 = arb.cell_global_label(
+                    0, "spike_source"
+                )  # referring to the "spike_source" label of neuron 0
+                source_label_1 = arb.cell_global_label(
+                    1, "spike_source"
+                )  # referring to the "spike_source" label of neuron 1
+
+                target_label_uni_n1 = arb.cell_local_label(
+                    "postsyn_target_1", arb.selection_policy.univalent
+                )  # referring to the only item in the "postsyn_target_1" label group of neuron 3
+                target_label_uni_n2 = arb.cell_local_label(
+                    "postsyn_target_2", arb.selection_policy.univalent
+                )  # referring to the only item in the "postsyn_target_2" label group of neuron 3
+
+                conn_0_3 = arb.connection(
+                    source_label_0, target_label_uni_n1, weight, 0.2
+                )  # connection from neuron 0 to 3
+                conn_1_3 = arb.connection(
+                    source_label_1, target_label_uni_n2, weight2, 1.4
+                )  # connection from neuron 1 to 3
+
+                return [conn_0_3, conn_1_3]
+
+        art_spiker_recipe.connections_on = types.MethodType(
+            connections_on, art_spiker_recipe
+        )
+
+        # define new method 'cell_description()' and overwrite the original one in the 'art_spiker_recipe' object
+        create_syn_mechanism = self.create_syn_mechanism
+
+        def cell_description(self, gid):
+            # spike source neuron
+            if gid < 3:
+                return arb.spike_source_cell(
+                    "spike_source", arb.explicit_schedule(self.trains[gid])
+                )
+
+            # spike-receiving cable neuron
+            elif gid == 3:
+                tree, labels, decor = self._cable_cell_elements()
+
+                decor.place(
+                    '"midpoint"',
+                    arb.synapse(create_syn_mechanism()),
+                    "postsyn_target_1",
+                )  # place synapse for input from one presynaptic neuron at the center of the soma
+                decor.place(
+                    '"midpoint"',
+                    arb.synapse(create_syn_mechanism()),
+                    "postsyn_target_2",
+                )  # place synapse for input from another presynaptic neuron at the center of the soma
+                # (using a different label than above!)
+
+                return arb.cable_cell(tree, labels, decor)
+
+        art_spiker_recipe.cell_description = types.MethodType(
+            cell_description, art_spiker_recipe
+        )
+
+        # read connections from recipe for testing
+        connections_from_recipe = art_spiker_recipe.connections_on(3)
+
+        # connection from neuron 0 to 3
+        self.assertEqual(connections_from_recipe[0].dest.label, "postsyn_target_1")
+        self.assertAlmostEqual(connections_from_recipe[0].weight, weight)
+        self.assertAlmostEqual(connections_from_recipe[0].delay, 0.2)
+
+        # connection from neuron 1 to 3
+        self.assertEqual(connections_from_recipe[1].dest.label, "postsyn_target_2")
+        self.assertAlmostEqual(connections_from_recipe[1].weight, weight2)
+        self.assertAlmostEqual(connections_from_recipe[1].delay, 1.4)
+
+        # construct domain_decomposition and simulation object
+        dd = arb.partition_load_balance(art_spiker_recipe, context)
+        sim = arb.simulation(art_spiker_recipe, dd, context)
+        sim.record(arb.spike_recording.all)
+
+        # create schedule and handle to record the membrane potential of neuron 3
+        reg_sched = arb.regular_schedule(0, self.dt, self.runtime)
+        handle_mem = sim.sample((3, 0), reg_sched)
+
+        # run the simulation
+        sim.run(self.runtime, self.dt)
+
+        # evaluate the outcome
+        self.evaluate_outcome(sim, handle_mem)
+        self.evaluate_additional_outcome_2_3(sim, handle_mem)
diff --git a/python/test/unit/test_profiling.py b/python/test/unit/test_profiling.py
index 41f406c0..92a17e23 100644
--- a/python/test/unit/test_profiling.py
+++ b/python/test/unit/test_profiling.py
@@ -12,10 +12,12 @@ from .. import fixtures
 all tests for profiling
 """
 
+
 def lazy_skipIf(condition, reason):
     """
     Postpone skip evaluation until test is ran by evaluating callable `condition`
     """
+
     def inner_decorator(f):
         @functools.wraps(f)
         def wrapped(*args, **kwargs):
@@ -28,14 +30,12 @@ def lazy_skipIf(condition, reason):
 
     return inner_decorator
 
+
 class a_recipe(arb.recipe):
     def __init__(self):
         arb.recipe.__init__(self)
         self.props = arb.neuron_cable_properties()
-        self.trains = [
-                [0.8, 2, 2.1, 3],
-                [0.4, 2, 2.2, 3.1, 4.5],
-                [0.2, 2, 2.8, 3]]
+        self.trains = [[0.8, 2, 2.1, 3], [0.4, 2, 2.2, 3.1, 4.5], [0.2, 2, 2.8, 3]]
 
     def num_cells(self):
         return 3
@@ -58,20 +58,34 @@ class a_recipe(arb.recipe):
     def cell_description(self, gid):
         return arb.spike_source_cell("src", arb.explicit_schedule(self.trains[gid]))
 
+
 def skipWithoutSupport():
     return not bool(arb.config().get("profiling", False))
 
+
 class TestProfiling(unittest.TestCase):
     def test_support(self):
-        self.assertTrue("profiling" in arb.config(), 'profiling key not in config')
+        self.assertTrue("profiling" in arb.config(), "profiling key not in config")
         profiling_support = arb.config()["profiling"]
-        self.assertEqual(bool, type(profiling_support), 'profiling flag should be bool')
+        self.assertEqual(bool, type(profiling_support), "profiling flag should be bool")
         if profiling_support:
-            self.assertTrue(hasattr(arb, "profiler_initialize"), 'missing profiling interface with profiling support')
-            self.assertTrue(hasattr(arb, "profiler_summary"), 'missing profiling interface with profiling support')
+            self.assertTrue(
+                hasattr(arb, "profiler_initialize"),
+                "missing profiling interface with profiling support",
+            )
+            self.assertTrue(
+                hasattr(arb, "profiler_summary"),
+                "missing profiling interface with profiling support",
+            )
         else:
-            self.assertFalse(hasattr(arb, "profiler_initialize"), 'profiling interface without profiling support')
-            self.assertFalse(hasattr(arb, "profiler_summary"), 'profiling interface without profiling support')
+            self.assertFalse(
+                hasattr(arb, "profiler_initialize"),
+                "profiling interface without profiling support",
+            )
+            self.assertFalse(
+                hasattr(arb, "profiler_summary"),
+                "profiling interface without profiling support",
+            )
 
     @lazy_skipIf(skipWithoutSupport, "run test only with profiling support")
     def test_summary(self):
@@ -81,5 +95,5 @@ class TestProfiling(unittest.TestCase):
         dd = arb.partition_load_balance(recipe, context)
         arb.simulation(recipe, dd, context).run(1)
         summary = arb.profiler_summary()
-        self.assertEqual(str, type(summary), 'profiler summary must be str')
-        self.assertTrue(summary, 'empty summary')
+        self.assertEqual(str, type(summary), "profiler summary must be str")
+        self.assertTrue(summary, "empty summary")
diff --git a/python/test/unit/test_schedules.py b/python/test/unit/test_schedules.py
index 3742fe20..c3394bf4 100644
--- a/python/test/unit/test_schedules.py
+++ b/python/test/unit/test_schedules.py
@@ -11,60 +11,60 @@ from .. import fixtures
 all tests for schedules (regular, explicit, poisson)
 """
 
+
 class TestRegularSchedule(unittest.TestCase):
     def test_none_ctor_regular_schedule(self):
         rs = arb.regular_schedule(tstart=0, dt=0.1, tstop=None)
         self.assertEqual(rs.dt, 0.1)
 
     def test_tstart_dt_tstop_ctor_regular_schedule(self):
-        rs = arb.regular_schedule(10., 1., 20.)
-        self.assertEqual(rs.tstart, 10.)
-        self.assertEqual(rs.dt, 1.)
-        self.assertEqual(rs.tstop, 20.)
+        rs = arb.regular_schedule(10.0, 1.0, 20.0)
+        self.assertEqual(rs.tstart, 10.0)
+        self.assertEqual(rs.dt, 1.0)
+        self.assertEqual(rs.tstop, 20.0)
 
     def test_set_tstart_dt_tstop_regular_schedule(self):
         rs = arb.regular_schedule(0.1)
         self.assertAlmostEqual(rs.dt, 0.1, places=1)
-        rs.tstart = 17.
+        rs.tstart = 17.0
         rs.dt = 0.5
-        rs.tstop = 42.
-        self.assertEqual(rs.tstart, 17.)
+        rs.tstop = 42.0
+        self.assertEqual(rs.tstart, 17.0)
         self.assertAlmostEqual(rs.dt, 0.5, places=1)
-        self.assertEqual(rs.tstop, 42.)
+        self.assertEqual(rs.tstop, 42.0)
 
     def test_events_regular_schedule(self):
         expected = [0, 0.25, 0.5, 0.75, 1.0]
-        rs = arb.regular_schedule(tstart=0., dt=0.25, tstop=1.25)
-        self.assertEqual(expected, rs.events(0., 1.25))
-        self.assertEqual(expected, rs.events(0., 5.))
-        self.assertEqual([], rs.events(5., 10.))
+        rs = arb.regular_schedule(tstart=0.0, dt=0.25, tstop=1.25)
+        self.assertEqual(expected, rs.events(0.0, 1.25))
+        self.assertEqual(expected, rs.events(0.0, 5.0))
+        self.assertEqual([], rs.events(5.0, 10.0))
 
     def test_exceptions_regular_schedule(self):
-        with self.assertRaisesRegex(RuntimeError,
-            "tstart must be a non-negative number"):
-            arb.regular_schedule(tstart=-1., dt=0.1)
-        with self.assertRaisesRegex(RuntimeError,
-            "dt must be a positive number"):
+        with self.assertRaisesRegex(
+            RuntimeError, "tstart must be a non-negative number"
+        ):
+            arb.regular_schedule(tstart=-1.0, dt=0.1)
+        with self.assertRaisesRegex(RuntimeError, "dt must be a positive number"):
             arb.regular_schedule(dt=-0.1)
-        with self.assertRaisesRegex(RuntimeError,
-            "dt must be a positive number"):
+        with self.assertRaisesRegex(RuntimeError, "dt must be a positive number"):
             arb.regular_schedule(dt=0)
         with self.assertRaises(TypeError):
             arb.regular_schedule(dt=None)
         with self.assertRaises(TypeError):
-            arb.regular_schedule(dt='dt')
-        with self.assertRaisesRegex(RuntimeError,
-            "tstop must be a non-negative number, or None"):
-            arb.regular_schedule(tstart=0, dt=0.1, tstop='tstop')
-        with self.assertRaisesRegex(RuntimeError,
-            "t0 must be a non-negative number"):
-            rs = arb.regular_schedule(0., 1., 10.)
+            arb.regular_schedule(dt="dt")
+        with self.assertRaisesRegex(
+            RuntimeError, "tstop must be a non-negative number, or None"
+        ):
+            arb.regular_schedule(tstart=0, dt=0.1, tstop="tstop")
+        with self.assertRaisesRegex(RuntimeError, "t0 must be a non-negative number"):
+            rs = arb.regular_schedule(0.0, 1.0, 10.0)
             rs.events(-1, 0)
-        with self.assertRaisesRegex(RuntimeError,
-            "t1 must be a non-negative number"):
-            rs = arb.regular_schedule(0., 1., 10.)
+        with self.assertRaisesRegex(RuntimeError, "t1 must be a non-negative number"):
+            rs = arb.regular_schedule(0.0, 1.0, 10.0)
             rs.events(0, -10)
 
+
 class TestExplicitSchedule(unittest.TestCase):
     def test_times_contor_explicit_schedule(self):
         es = arb.explicit_schedule([1, 2, 3, 4.5])
@@ -80,97 +80,111 @@ class TestExplicitSchedule(unittest.TestCase):
         expected = [0.1, 0.3, 1.0]
         es = arb.explicit_schedule(times)
         for i in range(len(expected)):
-            self.assertAlmostEqual(expected[i], es.events(0., 1.25)[i], places = 2)
+            self.assertAlmostEqual(expected[i], es.events(0.0, 1.25)[i], places=2)
         expected = [0.3, 1.0, 1.25, 1.7]
         for i in range(len(expected)):
-            self.assertAlmostEqual(expected[i], es.events(0.3, 1.71)[i], places = 2)
+            self.assertAlmostEqual(expected[i], es.events(0.3, 1.71)[i], places=2)
 
     def test_exceptions_explicit_schedule(self):
-        with self.assertRaisesRegex(RuntimeError,
-            "explicit time schedule cannot contain negative values"):
+        with self.assertRaisesRegex(
+            RuntimeError, "explicit time schedule cannot contain negative values"
+        ):
             arb.explicit_schedule([-1])
         with self.assertRaises(TypeError):
-            arb.explicit_schedule(['times'])
+            arb.explicit_schedule(["times"])
         with self.assertRaises(TypeError):
             arb.explicit_schedule([None])
         with self.assertRaises(TypeError):
-            arb.explicit_schedule([[1,2,3]])
-        with self.assertRaisesRegex(RuntimeError,
-            "t1 must be a non-negative number"):
+            arb.explicit_schedule([[1, 2, 3]])
+        with self.assertRaisesRegex(RuntimeError, "t1 must be a non-negative number"):
             rs = arb.regular_schedule(0.1)
-            rs.events(1., -1.)
+            rs.events(1.0, -1.0)
+
 
 class TestPoissonSchedule(unittest.TestCase):
     def test_freq_poisson_schedule(self):
-        ps = arb.poisson_schedule(42.)
-        self.assertEqual(ps.freq, 42.)
+        ps = arb.poisson_schedule(42.0)
+        self.assertEqual(ps.freq, 42.0)
 
     def test_freq_tstart_contor_poisson_schedule(self):
-        ps = arb.poisson_schedule(freq = 5., tstart = 4.3)
-        self.assertEqual(ps.freq, 5.)
+        ps = arb.poisson_schedule(freq=5.0, tstart=4.3)
+        self.assertEqual(ps.freq, 5.0)
         self.assertEqual(ps.tstart, 4.3)
 
     def test_freq_seed_contor_poisson_schedule(self):
-        ps = arb.poisson_schedule(freq = 5., seed = 42)
-        self.assertEqual(ps.freq, 5.)
+        ps = arb.poisson_schedule(freq=5.0, seed=42)
+        self.assertEqual(ps.freq, 5.0)
         self.assertEqual(ps.seed, 42)
 
     def test_tstart_freq_seed_contor_poisson_schedule(self):
-        ps = arb.poisson_schedule(10., 100., 1000)
-        self.assertEqual(ps.tstart, 10.)
-        self.assertEqual(ps.freq, 100.)
+        ps = arb.poisson_schedule(10.0, 100.0, 1000)
+        self.assertEqual(ps.tstart, 10.0)
+        self.assertEqual(ps.freq, 100.0)
         self.assertEqual(ps.seed, 1000)
 
     def test_events_poisson_schedule(self):
         expected = [17.4107, 502.074, 506.111, 597.116]
-        ps = arb.poisson_schedule(0., 0.01, 0)
+        ps = arb.poisson_schedule(0.0, 0.01, 0)
         for i in range(len(expected)):
-            self.assertAlmostEqual(expected[i], ps.events(0., 600.)[i], places = 3)
-        expected = [5030.22, 5045.75, 5069.84, 5091.56, 5182.17, 5367.3, 5566.73, 5642.13, 5719.85, 5796, 5808.33]
+            self.assertAlmostEqual(expected[i], ps.events(0.0, 600.0)[i], places=3)
+        expected = [
+            5030.22,
+            5045.75,
+            5069.84,
+            5091.56,
+            5182.17,
+            5367.3,
+            5566.73,
+            5642.13,
+            5719.85,
+            5796,
+            5808.33,
+        ]
         for i in range(len(expected)):
-            self.assertAlmostEqual(expected[i], ps.events(5000., 6000.)[i], places = 2)
+            self.assertAlmostEqual(expected[i], ps.events(5000.0, 6000.0)[i], places=2)
 
     def test_exceptions_poisson_schedule(self):
         with self.assertRaises(TypeError):
             arb.poisson_schedule()
         with self.assertRaises(TypeError):
-            arb.poisson_schedule(tstart = 10.)
+            arb.poisson_schedule(tstart=10.0)
         with self.assertRaises(TypeError):
-            arb.poisson_schedule(seed = 1432)
-        with self.assertRaisesRegex(RuntimeError,
-            "tstart must be a non-negative number"):
-            arb.poisson_schedule(freq=34., tstart = -10.)
+            arb.poisson_schedule(seed=1432)
+        with self.assertRaisesRegex(
+            RuntimeError, "tstart must be a non-negative number"
+        ):
+            arb.poisson_schedule(freq=34.0, tstart=-10.0)
         with self.assertRaises(TypeError):
-            arb.poisson_schedule(freq=34., tstart = None)
+            arb.poisson_schedule(freq=34.0, tstart=None)
         with self.assertRaises(TypeError):
-            arb.poisson_schedule(freq=34., tstart = 'tstart')
-        with self.assertRaisesRegex(RuntimeError,
-            "frequency must be a non-negative number"):
-            arb.poisson_schedule(freq = -100.)
+            arb.poisson_schedule(freq=34.0, tstart="tstart")
+        with self.assertRaisesRegex(
+            RuntimeError, "frequency must be a non-negative number"
+        ):
+            arb.poisson_schedule(freq=-100.0)
         with self.assertRaises(TypeError):
-            arb.poisson_schedule(freq = 'freq')
+            arb.poisson_schedule(freq="freq")
         with self.assertRaises(TypeError):
-            arb.poisson_schedule(freq=34., seed = -1)
+            arb.poisson_schedule(freq=34.0, seed=-1)
         with self.assertRaises(TypeError):
-            arb.poisson_schedule(freq=34., seed = 10.)
+            arb.poisson_schedule(freq=34.0, seed=10.0)
         with self.assertRaises(TypeError):
-            arb.poisson_schedule(freq=34., seed = 'seed')
+            arb.poisson_schedule(freq=34.0, seed="seed")
         with self.assertRaises(TypeError):
-            arb.poisson_schedule(freq=34., seed = None)
-        with self.assertRaisesRegex(RuntimeError,
-            "t0 must be a non-negative number"):
-            ps = arb.poisson_schedule(0,0.01)
-            ps.events(-1., 1.)
-        with self.assertRaisesRegex(RuntimeError,
-            "t1 must be a non-negative number"):
-            ps = arb.poisson_schedule(0,0.01)
-            ps.events(1., -1.)
-        with self.assertRaisesRegex(RuntimeError,
-            "tstop must be a non-negative number, or None"):
-            arb.poisson_schedule(0, 0.1, tstop='tstop')
-            ps.events(1., -1.)
+            arb.poisson_schedule(freq=34.0, seed=None)
+        with self.assertRaisesRegex(RuntimeError, "t0 must be a non-negative number"):
+            ps = arb.poisson_schedule(0, 0.01)
+            ps.events(-1.0, 1.0)
+        with self.assertRaisesRegex(RuntimeError, "t1 must be a non-negative number"):
+            ps = arb.poisson_schedule(0, 0.01)
+            ps.events(1.0, -1.0)
+        with self.assertRaisesRegex(
+            RuntimeError, "tstop must be a non-negative number, or None"
+        ):
+            arb.poisson_schedule(0, 0.1, tstop="tstop")
+            ps.events(1.0, -1.0)
 
     def test_tstop_poisson_schedule(self):
         tstop = 50
-        events = arb.poisson_schedule(0., 1, 0, tstop).events(0, 100)
-        self.assertTrue(max(events) < tstop)
\ No newline at end of file
+        events = arb.poisson_schedule(0.0, 1, 0, tstop).events(0, 100)
+        self.assertTrue(max(events) < tstop)
diff --git a/python/test/unit/test_spikes.py b/python/test/unit/test_spikes.py
index d7b32d34..f0e67fe0 100644
--- a/python/test/unit/test_spikes.py
+++ b/python/test/unit/test_spikes.py
@@ -10,6 +10,7 @@ from .. import fixtures
 all tests for the simulator wrapper
 """
 
+
 class TestSpikes(unittest.TestCase):
     # test that all spikes are sorted by time then by gid
     @fixtures.art_spiking_sim
@@ -28,4 +29,6 @@ class TestSpikes(unittest.TestCase):
         gids = spikes["source"]["gid"].tolist()
 
         self.assertEqual([2, 1, 0, 0, 1, 2, 0, 1, 2, 0, 2, 1, 1], gids)
-        self.assertEqual([0.2, 0.4, 0.8, 2., 2., 2., 2.1, 2.2, 2.8, 3., 3., 3.1, 4.5], times)
+        self.assertEqual(
+            [0.2, 0.4, 0.8, 2.0, 2.0, 2.0, 2.1, 2.2, 2.8, 3.0, 3.0, 3.1, 4.5], times
+        )
diff --git a/python/test/unit_distributed/test_contexts_arbmpi.py b/python/test/unit_distributed/test_contexts_arbmpi.py
index d7165a90..d1b98005 100644
--- a/python/test/unit_distributed/test_contexts_arbmpi.py
+++ b/python/test/unit_distributed/test_contexts_arbmpi.py
@@ -10,6 +10,8 @@ from .. import fixtures, cases
 """
 all tests for distributed arb.context using arbor mpi wrappers
 """
+
+
 @cases.skipIfNotDistributed()
 class TestContexts_arbmpi(unittest.TestCase):
     # Initialize mpi only once in this class (when adding classes move initialization to setUpModule()
@@ -19,6 +21,7 @@ class TestContexts_arbmpi(unittest.TestCase):
         if not arb.mpi_is_initialized():
             arb.mpi_init()
             self.local_mpi = True
+
     # Finalize mpi only once in this class (when adding classes move finalization to setUpModule()
     @classmethod
     def tearDownClass(self):
@@ -32,7 +35,7 @@ class TestContexts_arbmpi(unittest.TestCase):
         comm = arb.mpi_comm()
 
         # test that by default communicator is MPI_COMM_WORLD
-        self.assertEqual(str(comm), '<arbor.mpi_comm: MPI_COMM_WORLD>')
+        self.assertEqual(str(comm), "<arbor.mpi_comm: MPI_COMM_WORLD>")
 
     def test_context_arbmpi(self):
         comm = arb.mpi_comm()
@@ -54,11 +57,13 @@ class TestContexts_arbmpi(unittest.TestCase):
     def test_exceptions_context_arbmpi(self):
         alloc = arb.proc_allocation()
 
-        with self.assertRaisesRegex(RuntimeError,
-            "mpi must be None, or an MPI communicator"):
-            arb.context(mpi='MPI_COMM_WORLD')
-        with self.assertRaisesRegex(RuntimeError,
-            "mpi must be None, or an MPI communicator"):
+        with self.assertRaisesRegex(
+            RuntimeError, "mpi must be None, or an MPI communicator"
+        ):
+            arb.context(mpi="MPI_COMM_WORLD")
+        with self.assertRaisesRegex(
+            RuntimeError, "mpi must be None, or an MPI communicator"
+        ):
             arb.context(alloc, mpi=0)
 
     def test_finalized_arbmpi(self):
diff --git a/python/test/unit_distributed/test_contexts_mpi4py.py b/python/test/unit_distributed/test_contexts_mpi4py.py
index a22d065c..0d7a3554 100644
--- a/python/test/unit_distributed/test_contexts_mpi4py.py
+++ b/python/test/unit_distributed/test_contexts_mpi4py.py
@@ -8,10 +8,10 @@ import arbor as arb
 from .. import fixtures, cases
 
 # check Arbor's configuration of mpi
-mpi_enabled    = arb.__config__["mpi"]
+mpi_enabled = arb.__config__["mpi"]
 mpi4py_enabled = arb.__config__["mpi4py"]
 
-if (mpi_enabled and mpi4py_enabled):
+if mpi_enabled and mpi4py_enabled:
     import mpi4py.MPI as mpi
 
 """
@@ -28,7 +28,7 @@ class TestContexts_mpi4py(unittest.TestCase):
         comm = arb.mpi_comm(mpi.COMM_WORLD)
 
         # test that set communicator is MPI_COMM_WORLD
-        self.assertEqual(str(comm), '<arbor.mpi_comm: MPI_COMM_WORLD>')
+        self.assertEqual(str(comm), "<arbor.mpi_comm: MPI_COMM_WORLD>")
 
     def test_context_mpi4py(self):
         comm = arb.mpi_comm(mpi.COMM_WORLD)
@@ -50,11 +50,13 @@ class TestContexts_mpi4py(unittest.TestCase):
     def test_exceptions_context_arbmpi(self):
         alloc = arb.proc_allocation()
 
-        with self.assertRaisesRegex(RuntimeError,
-            "mpi must be None, or an MPI communicator"):
-            arb.context(mpi='MPI_COMM_WORLD')
-        with self.assertRaisesRegex(RuntimeError,
-            "mpi must be None, or an MPI communicator"):
+        with self.assertRaisesRegex(
+            RuntimeError, "mpi must be None, or an MPI communicator"
+        ):
+            arb.context(mpi="MPI_COMM_WORLD")
+        with self.assertRaisesRegex(
+            RuntimeError, "mpi must be None, or an MPI communicator"
+        ):
             arb.context(alloc, mpi=0)
 
     def test_finalized_mpi4py(self):
diff --git a/python/test/unit_distributed/test_domain_decompositions.py b/python/test/unit_distributed/test_domain_decompositions.py
index 3c125c75..29df3496 100644
--- a/python/test/unit_distributed/test_domain_decompositions.py
+++ b/python/test/unit_distributed/test_domain_decompositions.py
@@ -16,7 +16,7 @@ all tests for distributed arb.domain_decomposition
 """
 
 # Dummy recipe
-class homo_recipe (arb.recipe):
+class homo_recipe(arb.recipe):
     def __init__(self, n=4):
         arb.recipe.__init__(self)
         self.ncells = n
@@ -28,11 +28,12 @@ class homo_recipe (arb.recipe):
         return []
 
     def cell_kind(self, gid):
-            return arb.cell_kind.cable
+        return arb.cell_kind.cable
+
 
 # Heterogenous cell population of cable and rss cells.
 # Interleaved so that cells with even gid are cable cells, and even gid are spike source cells.
-class hetero_recipe (arb.recipe):
+class hetero_recipe(arb.recipe):
     def __init__(self, n=4):
         arb.recipe.__init__(self)
         self.ncells = n
@@ -44,11 +45,11 @@ class hetero_recipe (arb.recipe):
         tree = arb.segment_tree()
         tree.append(arb.mnpos, arb.mpoint(-3, 0, 0, 3), arb.mpoint(3, 0, 0, 3), tag=1)
         decor = arb.decor()
-        decor.place('(location 0 0.5)', arb.gap_junction_site(), "gj")
+        decor.place("(location 0 0.5)", arb.gap_junction_site(), "gj")
         return arb.cable_cell(tree, arb.label_dict(), decor)
 
     def cell_kind(self, gid):
-        if (gid%2):
+        if gid % 2:
             return arb.cell_kind.spike_source
         else:
             return arb.cell_kind.cable
@@ -59,6 +60,7 @@ class hetero_recipe (arb.recipe):
     def event_generators(self, gid):
         return []
 
+
 class gj_switch:
     def __init__(self, gid, shift):
         self.gid_ = gid
@@ -66,34 +68,41 @@ class gj_switch:
 
     def switch(self, arg):
         default = []
-        return getattr(self, 'case_' + str(arg), lambda: default)()
+        return getattr(self, "case_" + str(arg), lambda: default)()
 
     def case_1(self):
         return [arb.gap_junction_connection((7 + self.shift_, "gj"), "gj", 0.1)]
 
     def case_2(self):
-        return [arb.gap_junction_connection((6 + self.shift_, "gj"), "gj", 0.1),
-                arb.gap_junction_connection((9 + self.shift_, "gj"), "gj", 0.1)]
+        return [
+            arb.gap_junction_connection((6 + self.shift_, "gj"), "gj", 0.1),
+            arb.gap_junction_connection((9 + self.shift_, "gj"), "gj", 0.1),
+        ]
 
     def case_6(self):
-        return [arb.gap_junction_connection((2 + self.shift_, "gj"), "gj", 0.1),
-                arb.gap_junction_connection((7 + self.shift_, "gj"), "gj", 0.1)]
+        return [
+            arb.gap_junction_connection((2 + self.shift_, "gj"), "gj", 0.1),
+            arb.gap_junction_connection((7 + self.shift_, "gj"), "gj", 0.1),
+        ]
 
     def case_7(self):
-        return [arb.gap_junction_connection((6 + self.shift_, "gj"), "gj", 0.1),
-                arb.gap_junction_connection((1 + self.shift_, "gj"), "gj", 0.1)]
+        return [
+            arb.gap_junction_connection((6 + self.shift_, "gj"), "gj", 0.1),
+            arb.gap_junction_connection((1 + self.shift_, "gj"), "gj", 0.1),
+        ]
 
     def case_9(self):
         return [arb.gap_junction_connection((2 + self.shift_, "gj"), "gj", 0.1)]
 
-class gj_symmetric (arb.recipe):
+
+class gj_symmetric(arb.recipe):
     def __init__(self, num_ranks):
         arb.recipe.__init__(self)
         self.ncopies = num_ranks
-        self.size    = 10
+        self.size = 10
 
     def num_cells(self):
-        return self.size*self.ncopies
+        return self.size * self.ncopies
 
     def cell_description(self, gid):
         return []
@@ -102,41 +111,43 @@ class gj_symmetric (arb.recipe):
         return arb.cell_kind.cable
 
     def gap_junctions_on(self, gid):
-        shift = int((gid/self.size))
+        shift = int((gid / self.size))
         shift *= self.size
         s = gj_switch(gid, shift)
-        return s.switch(gid%self.size)
+        return s.switch(gid % self.size)
+
 
-class gj_non_symmetric (arb.recipe):
+class gj_non_symmetric(arb.recipe):
     def __init__(self, num_ranks):
         arb.recipe.__init__(self)
         self.groups = num_ranks
-        self.size   = num_ranks
+        self.size = num_ranks
 
     def num_cells(self):
-        return self.size*self.groups
+        return self.size * self.groups
 
     def cell_description(self, gid):
         tree = arb.segment_tree()
         tree.append(arb.mnpos, arb.mpoint(-3, 0, 0, 3), arb.mpoint(3, 0, 0, 3), tag=1)
         decor = arb.decor()
-        decor.place('(location 0 0.5)', arb.gap_junction_site(), "gj")
+        decor.place("(location 0 0.5)", arb.gap_junction_site(), "gj")
         return arb.cable_cell(tree, arb.label_dict(), decor)
 
     def cell_kind(self, gid):
         return arb.cell_kind.cable
 
     def gap_junctions_on(self, gid):
-        group = int(gid/self.groups)
-        id = gid%self.size
+        group = int(gid / self.groups)
+        id = gid % self.size
 
-        if (id == group and group != (self.groups - 1)):
+        if id == group and group != (self.groups - 1):
             return [arb.gap_junction_connection((gid + self.size, "gj"), "gj", 0.1)]
-        elif (id == group - 1):
+        elif id == group - 1:
             return [arb.gap_junction_connection((gid - self.size, "gj"), "gj", 0.1)]
         else:
             return []
 
+
 @cases.skipIfNotDistributed()
 class TestDomain_Decompositions_Distributed(unittest.TestCase):
     # Initialize mpi only once in this class (when adding classes move initialization to setUpModule()
@@ -146,6 +157,7 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
         if not arb.mpi_is_initialized():
             arb.mpi_init()
             self.local_mpi = True
+
     # Finalize mpi only once in this class (when adding classes move finalization to setUpModule()
     @classmethod
     def tearDownClass(self):
@@ -154,7 +166,7 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
 
     # 1 node with 1 cpu core, no gpus; assumes all cells will be put into cell groups of size 1
     def test_domain_decomposition_homogenous_MC(self):
-        if (mpi_enabled):
+        if mpi_enabled:
             comm = arb.mpi_comm()
             context = arb.context(threads=1, gpu_id=None, mpi=comm)
         else:
@@ -176,7 +188,7 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
 
         b = I * n_local
         e = (I + 1) * n_local
-        gids = list(range(b,e))
+        gids = list(range(b, e))
 
         for gid in gids:
             self.assertEqual(I, decomp.gid_domain(gid))
@@ -197,7 +209,7 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
     @unittest.skipIf(gpu_enabled == False, "GPU not enabled")
     def test_domain_decomposition_homogenous_GPU(self):
 
-        if (mpi_enabled):
+        if mpi_enabled:
             comm = arb.mpi_comm()
             context = arb.context(threads=1, gpu_id=0, mpi=comm)
         else:
@@ -219,7 +231,7 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
 
         b = I * n_local
         e = (I + 1) * n_local
-        gids = list(range(b,e))
+        gids = list(range(b, e))
 
         for gid in gids:
             self.assertEqual(I, decomp.gid_domain(gid))
@@ -231,13 +243,13 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
 
         self.assertEqual(len(grp.gids), n_local)
         self.assertEqual(grp.gids[0], b)
-        self.assertEqual(grp.gids[-1], e-1)
+        self.assertEqual(grp.gids[-1], e - 1)
         self.assertEqual(grp.backend, arb.backend.gpu)
         self.assertEqual(grp.kind, arb.cell_kind.cable)
 
     # 1 node with 1 cpu core, no gpus; assumes all cells will be put into cell groups of size 1
     def test_domain_decomposition_heterogenous_MC(self):
-        if (mpi_enabled):
+        if mpi_enabled:
             comm = arb.mpi_comm()
             context = arb.context(threads=1, gpu_id=None, mpi=comm)
         else:
@@ -249,7 +261,7 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
         # 10 cells per domain
         n_local = 10
         n_global = n_local * N
-        n_local_groups = n_local # 1 cell per group
+        n_local_groups = n_local  # 1 cell per group
 
         recipe = hetero_recipe(n_global)
         decomp = arb.partition_load_balance(recipe, context)
@@ -260,7 +272,7 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
 
         b = I * n_local
         e = (I + 1) * n_local
-        gids = list(range(b,e))
+        gids = list(range(b, e))
 
         for gid in gids:
             self.assertEqual(I, decomp.gid_domain(gid))
@@ -282,14 +294,14 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
         kinds = [arb.cell_kind.cable, arb.cell_kind.spike_source]
         for k in kinds:
             gids = kind_lists[k]
-            self.assertEqual(len(gids), int(n_local/2))
+            self.assertEqual(len(gids), int(n_local / 2))
             for gid in gids:
                 self.assertEqual(k, recipe.cell_kind(gid))
 
     def test_domain_decomposition_symmetric(self):
         nranks = 1
         rank = 0
-        if (mpi_enabled):
+        if mpi_enabled:
             comm = arb.mpi_comm()
             context = arb.context(threads=1, gpu_id=None, mpi=comm)
             nranks = context.ranks
@@ -302,22 +314,24 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
 
         self.assertEqual(6, len(decomp0.groups))
 
-        shift = int((rank * recipe.num_cells())/nranks)
+        shift = int((rank * recipe.num_cells()) / nranks)
 
-        exp_groups0 = [ [0 + shift],
-                        [3 + shift],
-                        [4 + shift],
-                        [5 + shift],
-                        [8 + shift],
-                        [1 + shift, 2 + shift, 6 + shift, 7 + shift, 9 + shift]]
+        exp_groups0 = [
+            [0 + shift],
+            [3 + shift],
+            [4 + shift],
+            [5 + shift],
+            [8 + shift],
+            [1 + shift, 2 + shift, 6 + shift, 7 + shift, 9 + shift],
+        ]
 
         for i in range(6):
             self.assertEqual(exp_groups0[i], decomp0.groups[i].gids)
 
-        cells_per_rank = int(recipe.num_cells()/nranks)
+        cells_per_rank = int(recipe.num_cells() / nranks)
 
         for i in range(recipe.num_cells()):
-            self.assertEqual(int(i/cells_per_rank), decomp0.gid_domain(i))
+            self.assertEqual(int(i / cells_per_rank), decomp0.gid_domain(i))
 
         # Test different group_hints
         hint1 = arb.partition_hint()
@@ -328,35 +342,47 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
         decomp1 = arb.partition_load_balance(recipe, context, hints1)
         self.assertEqual(1, len(decomp1.groups))
 
-        exp_groups1 = [0 + shift, 3 + shift, 4 + shift, 5 + shift, 8 + shift,
-                        1 + shift, 2 + shift, 6 + shift, 7 + shift, 9 + shift]
+        exp_groups1 = [
+            0 + shift,
+            3 + shift,
+            4 + shift,
+            5 + shift,
+            8 + shift,
+            1 + shift,
+            2 + shift,
+            6 + shift,
+            7 + shift,
+            9 + shift,
+        ]
 
         self.assertEqual(exp_groups1, decomp1.groups[0].gids)
 
         for i in range(recipe.num_cells()):
-            self.assertEqual(int(i/cells_per_rank), decomp1.gid_domain(i))
+            self.assertEqual(int(i / cells_per_rank), decomp1.gid_domain(i))
 
         hint2 = arb.partition_hint()
         hint2.prefer_gpu = False
-        hint2.cpu_group_size = int(cells_per_rank/2)
+        hint2.cpu_group_size = int(cells_per_rank / 2)
         hints2 = dict([(arb.cell_kind.cable, hint2)])
 
         decomp2 = arb.partition_load_balance(recipe, context, hints2)
         self.assertEqual(2, len(decomp2.groups))
 
-        exp_groups2 = [ [0 + shift, 3 + shift, 4 + shift, 5 + shift, 8 + shift],
-                        [1 + shift, 2 + shift, 6 + shift, 7 + shift, 9 + shift] ]
+        exp_groups2 = [
+            [0 + shift, 3 + shift, 4 + shift, 5 + shift, 8 + shift],
+            [1 + shift, 2 + shift, 6 + shift, 7 + shift, 9 + shift],
+        ]
 
         for i in range(2):
             self.assertEqual(exp_groups2[i], decomp2.groups[i].gids)
 
         for i in range(recipe.num_cells()):
-            self.assertEqual(int(i/cells_per_rank), decomp2.gid_domain(i))
+            self.assertEqual(int(i / cells_per_rank), decomp2.gid_domain(i))
 
     def test_domain_decomposition_nonsymmetric(self):
         nranks = 1
         rank = 0
-        if (mpi_enabled):
+        if mpi_enabled:
             comm = arb.mpi_comm()
             context = arb.context(threads=1, gpu_id=None, mpi=comm)
             nranks = context.ranks
@@ -371,12 +397,12 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
 
         # check groups
         i = 0
-        for gid in range(rank*cells_per_rank, (rank + 1)*cells_per_rank):
-            if (gid%nranks == rank - 1):
+        for gid in range(rank * cells_per_rank, (rank + 1) * cells_per_rank):
+            if gid % nranks == rank - 1:
                 continue
-            elif (gid%nranks == rank and rank != nranks - 1):
+            elif gid % nranks == rank and rank != nranks - 1:
                 cg = [gid, gid + cells_per_rank]
-                self.assertEqual(cg, decomp.groups[len(decomp.groups)-1].gids)
+                self.assertEqual(cg, decomp.groups[len(decomp.groups) - 1].gids)
             else:
                 cg = [gid]
                 self.assertEqual(cg, decomp.groups[i].gids)
@@ -384,12 +410,12 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
 
         # check gid_domains
         for gid in range(recipe.num_cells()):
-            group = int(gid/cells_per_rank)
-            idx = gid%cells_per_rank
+            group = int(gid / cells_per_rank)
+            idx = gid % cells_per_rank
             ngroups = nranks
-            if (idx == group - 1):
+            if idx == group - 1:
                 self.assertEqual(group - 1, decomp.gid_domain(gid))
-            elif (idx == group and group != ngroups - 1):
+            elif idx == group and group != ngroups - 1:
                 self.assertEqual(group, decomp.gid_domain(gid))
             else:
                 self.assertEqual(group, decomp.gid_domain(gid))
@@ -397,7 +423,7 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
     def test_domain_decomposition_exceptions(self):
         nranks = 1
         rank = 0
-        if (mpi_enabled):
+        if mpi_enabled:
             comm = arb.mpi_comm()
             context = arb.context(threads=1, gpu_id=None, mpi=comm)
             nranks = context.ranks
@@ -412,8 +438,10 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
         hint1.cpu_group_size = 0
         hints1 = dict([(arb.cell_kind.cable, hint1)])
 
-        with self.assertRaisesRegex(RuntimeError,
-            "unable to perform load balancing because cell_kind::cable has invalid suggested cpu_cell_group size of 0"):
+        with self.assertRaisesRegex(
+            RuntimeError,
+            "unable to perform load balancing because cell_kind::cable has invalid suggested cpu_cell_group size of 0",
+        ):
             decomp1 = arb.partition_load_balance(recipe, context, hints1)
 
         hint2 = arb.partition_hint()
@@ -421,6 +449,8 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
         hint2.gpu_group_size = 0
         hints2 = dict([(arb.cell_kind.cable, hint2)])
 
-        with self.assertRaisesRegex(RuntimeError,
-            "unable to perform load balancing because cell_kind::cable has invalid suggested gpu_cell_group size of 0"):
+        with self.assertRaisesRegex(
+            RuntimeError,
+            "unable to perform load balancing because cell_kind::cable has invalid suggested gpu_cell_group size of 0",
+        ):
             decomp2 = arb.partition_load_balance(recipe, context, hints2)
diff --git a/python/test/unit_distributed/test_simulator.py b/python/test/unit_distributed/test_simulator.py
index 5c23498f..8ac2a983 100644
--- a/python/test/unit_distributed/test_simulator.py
+++ b/python/test/unit_distributed/test_simulator.py
@@ -13,6 +13,7 @@ mpi_enabled = A.__config__["mpi"]
 test for MPI distribution of spike recording
 """
 
+
 class lifN_recipe(A.recipe):
     def __init__(self, n_cell):
         A.recipe.__init__(self)
@@ -31,22 +32,26 @@ class lifN_recipe(A.recipe):
     def event_generators(self, gid):
         sched_dt = 0.25
         weight = 400
-        return [A.event_generator("tgt", weight, A.regular_schedule(sched_dt)) for gid in range(0, self.num_cells())]
+        return [
+            A.event_generator("tgt", weight, A.regular_schedule(sched_dt))
+            for gid in range(0, self.num_cells())
+        ]
 
     def probes(self, gid):
         return []
 
-    def global_properties(self,kind):
+    def global_properties(self, kind):
         return self.props
 
     def cell_description(self, gid):
         c = A.lif_cell("src", "tgt")
-        if gid%2==0:
+        if gid % 2 == 0:
             c.t_ref = 2
         else:
             c.t_ref = 4
         return c
 
+
 @cases.skipIfNotDistributed()
 class TestSimulator(unittest.TestCase):
     def init_sim(self):
@@ -76,7 +81,7 @@ class TestSimulator(unittest.TestCase):
         self.assertEqual({(self.rank, 0)}, {s for s, t in spikes})
 
         times = sorted([t for s, t in spikes])
-        if self.rank%2==0:
+        if self.rank % 2 == 0:
             self.assertEqual([0, 2, 4, 6, 8], times)
         else:
             self.assertEqual([0, 4, 8], times)
@@ -87,5 +92,9 @@ class TestSimulator(unittest.TestCase):
         sim.run(9, 0.01)
         spikes = sim.spikes().tolist()
 
-        expected = [((s, 0), t) for s in range(0, self.ranks) for t in ([0, 2, 4, 6, 8] if s%2==0 else [0, 4, 8])]
+        expected = [
+            ((s, 0), t)
+            for s in range(0, self.ranks)
+            for t in ([0, 2, 4, 6, 8] if s % 2 == 0 else [0, 4, 8])
+        ]
         self.assertEqual(expected, sorted(spikes))
diff --git a/scripts/build-catalogue.in b/scripts/build-catalogue.in
index ed32c066..9e3384eb 100755
--- a/scripts/build-catalogue.in
+++ b/scripts/build-catalogue.in
@@ -10,9 +10,10 @@ import string
 import argparse
 import re
 
+
 def parse_arguments():
     def append_slash(s):
-        return s+'/' if s and not s.endswith('/') else s
+        return s + "/" if s and not s.endswith("/") else s
 
     class ConciseHelpFormatter(argparse.HelpFormatter):
         def __init__(self, **kwargs):
@@ -20,87 +21,92 @@ def parse_arguments():
 
         def _format_action_invocation(self, action):
             if not action.option_strings:
-                return super(ConciseHelpFormatter, self)._format_action_invocation(action)
+                return super(ConciseHelpFormatter, self)._format_action_invocation(
+                    action
+                )
             else:
-                optstr = ', '.join(action.option_strings)
-                if action.nargs==0:
+                optstr = ", ".join(action.option_strings)
+                if action.nargs == 0:
                     return optstr
                 else:
-                    return optstr+' '+self._format_args(action, action.dest.upper())
+                    return optstr + " " + self._format_args(action, action.dest.upper())
 
     parser = argparse.ArgumentParser(
-        description = 'Generate dynamic catalogue and build it into a shared object.',
-        usage = '%(prog)s catalogue_name mod_source_dir',
-        add_help = False,
-        formatter_class = ConciseHelpFormatter)
-
-    parser.add_argument('name',
-                        metavar='name',
-                        type=str,
-                        help='Catalogue name.')
-
-    parser.add_argument('--raw',
-                        metavar='raw',
-                        nargs='+',
-                        default=[],
-                        type=str,
-                        help='''Advanced: Raw mechanisms as C++ files. Per <name> the
+        description="Generate dynamic catalogue and build it into a shared object.",
+        usage="%(prog)s catalogue_name mod_source_dir",
+        add_help=False,
+        formatter_class=ConciseHelpFormatter,
+    )
+
+    parser.add_argument("name", metavar="name", type=str, help="Catalogue name.")
+
+    parser.add_argument(
+        "--raw",
+        metavar="raw",
+        nargs="+",
+        default=[],
+        type=str,
+        help="""Advanced: Raw mechanisms as C++ files. Per <name> the
 files <name>.hpp, <name>_cpu.cpp must be present
 in the target directory and with GPU support
-also <name>_gpu.cpp and <name>_gpu.cu (if not given -C).''')
-
-    parser.add_argument('modpfx',
-                        metavar='modpfx',
-                        type=str,
-                        help='Directory name where *.mod files live.')
-
-    parser.add_argument('-v', '--verbose',
-                        action='store_true',
-                        help='Verbose.')
-
-    parser.add_argument('-q', '--quiet',
-                        action='store_true',
-                        help='Less output.')
-
-    parser.add_argument('-g', '--gpu',
-                        metavar='gpu',
-                        help='Enable GPU support, valid options: cuda|hip|cuda-clang.')
+also <name>_gpu.cpp and <name>_gpu.cu (if not given -C).""",
+    )
+
+    parser.add_argument(
+        "modpfx",
+        metavar="modpfx",
+        type=str,
+        help="Directory name where *.mod files live.",
+    )
+
+    parser.add_argument("-v", "--verbose", action="store_true", help="Verbose.")
+
+    parser.add_argument("-q", "--quiet", action="store_true", help="Less output.")
+
+    parser.add_argument(
+        "-g",
+        "--gpu",
+        metavar="gpu",
+        help="Enable GPU support, valid options: cuda|hip|cuda-clang.",
+    )
+
+    parser.add_argument(
+        "-C", "--no-cpu", action="store_true", help="Disable CPU support."
+    )
+
+    parser.add_argument(
+        "-d",
+        "--debug",
+        nargs="?",
+        metavar="path",
+        const=True,
+        default=False,
+        help="Don't clean up the generated temp cpp code."
+        + " Can be a target path for the generated code.",
+    )
+
+    parser.add_argument(
+        "-h", "--help", action="help", help="Display this help and exit."
+    )
 
-    parser.add_argument('-C', '--no-cpu',
-                        action='store_true',
-                        help='Disable CPU support.')
-
-    parser.add_argument('-d', '--debug',
-                        nargs="?",
-                        metavar="path",
-                        const=True,
-                        default=False,
-                        help='Don\'t clean up the generated temp cpp code.'
-                        + ' Can be a target path for the generated code.')
+    return vars(parser.parse_args())
 
-    parser.add_argument('-h', '--help',
-                        action='help',
-                        help='Display this help and exit.')
 
-    return vars(parser.parse_args())
+args = parse_arguments()
+pwd = Path.cwd()
+name = re.sub(r"_+", r"_", re.sub(r"[^a-zA-Z0-9_]", r"_", args["name"]))
 
-args    = parse_arguments()
-pwd     = Path.cwd()
-name    = re.sub(r'_+', r'_',
-                 re.sub(r'[^a-zA-Z0-9_]', r'_',
-                        args['name']))
-
-mod_dir = pwd / Path(args['modpfx'])
-mods    = [f[:-4] for f in os.listdir(mod_dir) if f.endswith('.mod')]
-quiet   = args['quiet']
-verbose = args['verbose'] and not quiet
-debug   = args['debug']
-raw     = args['raw']
-gpu     = args['gpu']
-cpu     = not args['no_cpu']
+mod_dir = pwd / Path(args["modpfx"])
+mods = [f[:-4] for f in os.listdir(mod_dir) if f.endswith(".mod")]
+quiet = args["quiet"]
+verbose = args["verbose"] and not quiet
+debug = args["debug"]
+raw = args["raw"]
+gpu = args["gpu"]
+cpu = not args["no_cpu"]
 
 if gpu:
-    if gpu == 'cuda':
+    if gpu == "cuda":
         gpu_support = """
 add_compile_definitions(ARB_CUDA)
 add_compile_definitions(ARB_HAVE_GPU)
@@ -110,7 +116,9 @@ set(CMAKE_CUDA_HOST_COMPILER @CMAKE_CXX_COMPILER@)
 set(CMAKE_CUDA_ARCHITECTURES @CMAKE_CUDA_ARCHITECTURES@)
 """
     else:
-        print(f"Unsupported GPU target: {gpu}. If you need support for HIP or Clang-CUDA, please check here: https://github.com/arbor-sim/arbor/issues/1783")
+        print(
+            f"Unsupported GPU target: {gpu}. If you need support for HIP or Clang-CUDA, please check here: https://github.com/arbor-sim/arbor/issues/1783"
+        )
         exit(-1)
 else:
     gpu_support = """
@@ -122,12 +130,14 @@ data_path = (this_path / "@ARB_REL_DATADIR@").resolve()
 pack_path = (this_path / "@ARB_REL_PACKAGEDIR@").resolve()
 exec_path = this_path.resolve()
 
-for path in [exec_path / 'modcc',
-             data_path / 'generate_catalogue',
-             data_path / 'BuildModules.cmake',
-             pack_path / 'arbor-config.cmake',]:
+for path in [
+    exec_path / "modcc",
+    data_path / "generate_catalogue",
+    data_path / "BuildModules.cmake",
+    pack_path / "arbor-config.cmake",
+]:
     if not path.exists():
-        print(f'Could not find required tool: {path}. Please check your installation.')
+        print(f"Could not find required tool: {path}. Please check your installation.")
         exit(-1)
 
 cmake = f"""
@@ -182,7 +192,9 @@ if debug:
                 try:
                     os.makedirs(path, exist_ok=False)
                 except FileExistsError:
-                    sys.stderr.write(f"Error: Debug destination '{path}' already exists.\n")
+                    sys.stderr.write(
+                        f"Error: Debug destination '{path}' already exists.\n"
+                    )
                     sys.stderr.flush()
                     exit(1)
             else:
@@ -193,17 +205,18 @@ if debug:
         def __exit__(*args, **kwargs):
             pass
 
+
 with TemporaryDirectory() as tmp:
     tmp = Path(tmp)
-    shutil.copytree(mod_dir, tmp / 'mod')
-    os.mkdir(tmp / 'build')
-    os.chdir(tmp / 'build')
-    with open(tmp / 'CMakeLists.txt', 'w') as fd:
+    shutil.copytree(mod_dir, tmp / "mod")
+    os.mkdir(tmp / "build")
+    os.chdir(tmp / "build")
+    with open(tmp / "CMakeLists.txt", "w") as fd:
         fd.write(cmake)
-    shutil.copy2(f'{data_path}/BuildModules.cmake', tmp)
-    shutil.copy2(f'{data_path}/generate_catalogue', tmp)
+    shutil.copy2(f"{data_path}/BuildModules.cmake", tmp)
+    shutil.copy2(f"{data_path}/generate_catalogue", tmp)
 
-    out = tmp / 'build' / 'generated' / name
+    out = tmp / "build" / "generated" / name
     os.makedirs(out, exist_ok=True)
     sfx = [".hpp"]
     if cpu:
@@ -214,22 +227,24 @@ with TemporaryDirectory() as tmp:
         for s in sfx:
             fn = mod_dir / (e + s)
             if not fn.exists():
-                print(f'Could not find required file: {fn}. Please check your C++ mechanisms.')
+                print(
+                    f"Could not find required file: {fn}. Please check your C++ mechanisms."
+                )
                 exit(-1)
             else:
                 shutil.copy2(fn, out / (e + s))
 
-    cmake_cmd = 'cmake ..'
-    make_cmd = 'make'
+    cmake_cmd = "cmake .."
+    make_cmd = "make"
     if verbose:
         out, err = (None, None)
-        make_cmd += ' VERBOSE=1'
+        make_cmd += " VERBOSE=1"
     else:
         out, err = (sp.PIPE, sp.PIPE)
     try:
         sp.run(cmake_cmd, shell=True, check=True, stdout=out, stderr=err)
-        sp.run(make_cmd,  shell=True, check=True, stdout=out, stderr=err)
-        shutil.copy2(f'{name}-catalogue.so', pwd)
+        sp.run(make_cmd, shell=True, check=True, stdout=out, stderr=err)
+        shutil.copy2(f"{name}-catalogue.so", pwd)
     except sp.CalledProcessError as e:
         import sys, traceback as tb
 
@@ -253,4 +268,4 @@ with TemporaryDirectory() as tmp:
         exit(e.returncode)
 
     if not quiet:
-        print(f'Catalogue has been built and copied to {pwd}/{name}-catalogue.so')
+        print(f"Catalogue has been built and copied to {pwd}/{name}-catalogue.so")
diff --git a/scripts/patchwheel.py b/scripts/patchwheel.py
index 4a45d375..ef7262c4 100644
--- a/scripts/patchwheel.py
+++ b/scripts/patchwheel.py
@@ -1,13 +1,26 @@
-import shutil,subprocess,argparse
+import shutil, subprocess, argparse
 from pathlib import Path
 
+
 def parse_arguments():
-    parser = argparse.ArgumentParser(description='Patch Arbor wheels built with scikit-build and corrected by auditwheel. Linux only.')
-    parser.add_argument('path', type=dir_path, help='The path where your wheels are located. They will be patched in place.')
-    parser.add_argument('-ko','--keepold', action='store_true', help='If you want to keep the old wheels in /old')
+    parser = argparse.ArgumentParser(
+        description="Patch Arbor wheels built with scikit-build and corrected by auditwheel. Linux only."
+    )
+    parser.add_argument(
+        "path",
+        type=dir_path,
+        help="The path where your wheels are located. They will be patched in place.",
+    )
+    parser.add_argument(
+        "-ko",
+        "--keepold",
+        action="store_true",
+        help="If you want to keep the old wheels in /old",
+    )
 
     return parser.parse_args()
 
+
 def dir_path(path):
     path = Path(path)
     if Path.is_dir(path):
@@ -15,26 +28,29 @@ def dir_path(path):
     else:
         raise argparse.ArgumentTypeError(f"{path} is not a valid path")
 
+
 parsed_args = parse_arguments()
-Path.mkdir(parsed_args.path / 'old', exist_ok=True)
+Path.mkdir(parsed_args.path / "old", exist_ok=True)
 
 for inwheel in parsed_args.path.glob("*.whl"):
     zipdir = Path(f"{inwheel}.unzip")
     # shutil.unpack_archive(inwheel,zipdir,'zip') # Disabled, because shutil (and ZipFile) don't preserve filemodes
-    subprocess.check_call(f"unzip {inwheel} -d {zipdir}",shell=True)
+    subprocess.check_call(f"unzip {inwheel} -d {zipdir}", shell=True)
 
     arborn = list(zipdir.glob("**/_arbor.cpython*.so"))[0]
     libxml2n = list(zipdir.glob("**/libxml2*.so*"))[0]
-    subprocess.check_call(f"patchelf --set-rpath '$ORIGIN/../arbor.libs' {arborn}",shell=True)
-    subprocess.check_call(f"patchelf --set-rpath '$ORIGIN' {libxml2n}",shell=True)
+    subprocess.check_call(
+        f"patchelf --set-rpath '$ORIGIN/../arbor.libs' {arborn}", shell=True
+    )
+    subprocess.check_call(f"patchelf --set-rpath '$ORIGIN' {libxml2n}", shell=True)
 
     # TODO? correct checksum/bytecounts in *.dist-info/RECORD.
     # So far, Python does not report mismatches
 
-    outwheel = Path(shutil.make_archive(inwheel, 'zip', zipdir))
-    Path.rename(inwheel, parsed_args.path / 'old' / inwheel.name)
+    outwheel = Path(shutil.make_archive(inwheel, "zip", zipdir))
+    Path.rename(inwheel, parsed_args.path / "old" / inwheel.name)
     Path.rename(outwheel, parsed_args.path / inwheel.name)
     shutil.rmtree(zipdir)
 
 if not parsed_args.keepold:
-    shutil.rmtree(parsed_args.path / 'old')
+    shutil.rmtree(parsed_args.path / "old")
diff --git a/scripts/test-catalogue.py b/scripts/test-catalogue.py
index 7c4716d0..a543998d 100755
--- a/scripts/test-catalogue.py
+++ b/scripts/test-catalogue.py
@@ -6,8 +6,10 @@ import sys
 
 import arbor
 
-P = argparse.ArgumentParser(description='Verify that a mechanism catalogue can be loaded through Python interface.')
-P.add_argument('catname', metavar='FILE', help='path of the catalogue to test.')
+P = argparse.ArgumentParser(
+    description="Verify that a mechanism catalogue can be loaded through Python interface."
+)
+P.add_argument("catname", metavar="FILE", help="path of the catalogue to test.")
 
 args = P.parse_args()
 catname = args.catname
@@ -15,7 +17,7 @@ catname = args.catname
 print(catname)
 
 if not os.path.isfile(catname):
-    print('ERROR: unable to open catalogue file')
+    print("ERROR: unable to open catalogue file")
     sys.exit(1)
 
 print([n for n in arbor.load_catalogue(catname).keys()])
diff --git a/scripts/where.py b/scripts/where.py
index 1abf399e..08307a66 100644
--- a/scripts/where.py
+++ b/scripts/where.py
@@ -1,12 +1,22 @@
-import sys,sysconfig
+import sys, sysconfig
 
-pfx=sys.stdin.read()
+pfx = sys.stdin.read()
 try:
-    #override scheme on debian/ubuntu py3.10, where 'posix_local' is set and malfunctioning.
-    if sysconfig.get_default_scheme()=='posix_local':
-        print(sysconfig.get_path('platlib',vars={} if pfx=='' else {'base':pfx,'platbase':pfx},scheme='posix_prefix'))
+    # override scheme on debian/ubuntu py3.10, where 'posix_local' is set and malfunctioning.
+    if sysconfig.get_default_scheme() == "posix_local":
+        print(
+            sysconfig.get_path(
+                "platlib",
+                vars={} if pfx == "" else {"base": pfx, "platbase": pfx},
+                scheme="posix_prefix",
+            )
+        )
         sys.exit()
 except AttributeError:
-    #we're on Python <= 3.9, no scheme setting required and get_default_scheme does not exist.
+    # we're on Python <= 3.9, no scheme setting required and get_default_scheme does not exist.
     pass
-print(sysconfig.get_path('platlib',vars={} if pfx=='' else {'base':pfx,'platbase':pfx}))
+print(
+    sysconfig.get_path(
+        "platlib", vars={} if pfx == "" else {"base": pfx, "platbase": pfx}
+    )
+)
diff --git a/setup.py b/setup.py
index 30cf7453..ecf8dbcc 100644
--- a/setup.py
+++ b/setup.py
@@ -1,58 +1,66 @@
 from pathlib import Path
 from sys import executable as python
 from skbuild import setup
-import os,platform
+import os, platform
 
 # Hard coded options, because scikit-build does not do build options.
 # Override by instructing CMAKE, e.g.:
 # pip install . -- -DARB_USE_BUNDLED_LIBS=ON -DARB_WITH_MPI=ON -DARB_GPU=cuda
-with_mpi   = False
-with_gpu   = 'none'
-with_vec   = False
-arch       = 'none'
-with_nml   = True
-use_libs   = True
-build_type = 'Release' # this is ok even for debugging, as we always produce info
+with_mpi = False
+with_gpu = "none"
+with_vec = False
+arch = "none"
+with_nml = True
+use_libs = True
+build_type = "Release"  # this is ok even for debugging, as we always produce info
 
 # Find our dir; *should* be the arbor checkout
 here = Path(__file__).resolve().parent
 # Read version file
-with open(here / 'VERSION') as fd:
+with open(here / "VERSION") as fd:
     arbor_version = fd.read().strip()
 # Get the contents of the readme
-with open(here / 'python' / 'readme.md', encoding='utf-8') as fd:
+with open(here / "python" / "readme.md", encoding="utf-8") as fd:
     long_description = fd.read()
 
-setup(name='arbor',
-      version=arbor_version,
-      python_requires='>=3.6',
-      install_requires=['numpy'],
-      setup_requires=[],
-      zip_safe=False,
-      packages=['arbor'],
-      cmake_args=['-DARB_WITH_PYTHON=on',
-                  f'-DPYTHON_EXECUTABLE={python}',
-                  f'-DARB_WITH_MPI={with_mpi}',
-                  f'-DARB_VECTORIZE={with_vec}',
-                  f'-DARB_ARCH={arch}',
-                  f'-DARB_GPU={with_gpu}',
-                  f'-DARB_WITH_NEUROML={with_nml}',
-                  f'-DARB_USE_BUNDLED_LIBS={use_libs}',
-                  f'-DCMAKE_BUILD_TYPE={build_type}',],
-      author='The Arbor dev team.',
-      url='https://arbor-sim.org',
-      description='High performance simulation of networks of multicompartment neurons.',
-      long_description=long_description,
-      long_description_content_type='text/markdown',
-      classifiers=['Development Status :: 5 - Production/Stable',
-                   'Intended Audience :: Science/Research',
-                   'License :: OSI Approved :: BSD License',
-                   'Programming Language :: Python :: 3.6',
-                   'Programming Language :: Python :: 3.7',
-                   'Programming Language :: Python :: 3.8',
-                   'Programming Language :: Python :: 3.9',
-                   'Programming Language :: Python :: 3.10',
-                   'Programming Language :: C++',],
-      project_urls={'Source': 'https://github.com/arbor-sim/arbor',
-                    'Documentation': 'https://docs.arbor-sim.org',
-                    'Bug Reports': 'https://github.com/arbor-sim/arbor/issues',},)
+setup(
+    name="arbor",
+    version=arbor_version,
+    python_requires=">=3.6",
+    install_requires=["numpy"],
+    setup_requires=[],
+    zip_safe=False,
+    packages=["arbor"],
+    cmake_args=[
+        "-DARB_WITH_PYTHON=on",
+        f"-DPYTHON_EXECUTABLE={python}",
+        f"-DARB_WITH_MPI={with_mpi}",
+        f"-DARB_VECTORIZE={with_vec}",
+        f"-DARB_ARCH={arch}",
+        f"-DARB_GPU={with_gpu}",
+        f"-DARB_WITH_NEUROML={with_nml}",
+        f"-DARB_USE_BUNDLED_LIBS={use_libs}",
+        f"-DCMAKE_BUILD_TYPE={build_type}",
+    ],
+    author="The Arbor dev team.",
+    url="https://arbor-sim.org",
+    description="High performance simulation of networks of multicompartment neurons.",
+    long_description=long_description,
+    long_description_content_type="text/markdown",
+    classifiers=[
+        "Development Status :: 5 - Production/Stable",
+        "Intended Audience :: Science/Research",
+        "License :: OSI Approved :: BSD License",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: C++",
+    ],
+    project_urls={
+        "Source": "https://github.com/arbor-sim/arbor",
+        "Documentation": "https://docs.arbor-sim.org",
+        "Bug Reports": "https://github.com/arbor-sim/arbor/issues",
+    },
+)
diff --git a/spack/package.py b/spack/package.py
index 3a04b47a..01626504 100644
--- a/spack/package.py
+++ b/spack/package.py
@@ -10,77 +10,97 @@ class Arbor(CMakePackage, CudaPackage):
     """Arbor is a high-performance library for computational neuroscience
     simulations."""
 
-    homepage = 'https://arbor-sim.org'
-    git      = 'https://github.com/arbor-sim/arbor.git'
-    url      = 'https://github.com/arbor-sim/arbor/releases/download/v0.6/arbor-v0.6-full.tar.gz'
-    maintainers = ['bcumming', 'brenthuisman', 'haampie', 'schmitts']
-
-    version('master', branch='master', submodules=True)
-    version('0.6', sha256='4cd333b18effc8833428ddc0b99e7dc976804771bc85da90034c272c7019e1e8', url='https://github.com/arbor-sim/arbor/releases/download/v0.6/arbor-v0.6-full.tar.gz')
-    version('0.5.2', sha256='290e2ad8ca8050db1791cabb6b431e7c0409c305af31b559e397e26b300a115d', url='https://github.com/arbor-sim/arbor/releases/download/v0.5.2/arbor-v0.5.2-full.tar.gz')
-    version('0.5', sha256='d0c8a4c7f97565d7c30493c66249be794d1dc424de266fc79cecbbf0e313df59', url='https://github.com/arbor-sim/arbor/releases/download/v0.5/arbor-v0.5-full.tar.gz')
-
-    variant('assertions', default=False, description='Enable arb_assert() assertions in code.')
-    variant('doc', default=False, description='Build documentation.')
-    variant('mpi', default=False, description='Enable MPI support')
-    variant('neuroml', default=True, description='Build NeuroML support library.')
-    variant('python', default=True, description='Enable Python frontend support')
-    variant('vectorize', default=False, description='Enable vectorization of computational kernels')
+    homepage = "https://arbor-sim.org"
+    git = "https://github.com/arbor-sim/arbor.git"
+    url = "https://github.com/arbor-sim/arbor/releases/download/v0.6/arbor-v0.6-full.tar.gz"
+    maintainers = ["bcumming", "brenthuisman", "haampie", "schmitts"]
+
+    version("master", branch="master", submodules=True)
+    version(
+        "0.6",
+        sha256="4cd333b18effc8833428ddc0b99e7dc976804771bc85da90034c272c7019e1e8",
+        url="https://github.com/arbor-sim/arbor/releases/download/v0.6/arbor-v0.6-full.tar.gz",
+    )
+    version(
+        "0.5.2",
+        sha256="290e2ad8ca8050db1791cabb6b431e7c0409c305af31b559e397e26b300a115d",
+        url="https://github.com/arbor-sim/arbor/releases/download/v0.5.2/arbor-v0.5.2-full.tar.gz",
+    )
+    version(
+        "0.5",
+        sha256="d0c8a4c7f97565d7c30493c66249be794d1dc424de266fc79cecbbf0e313df59",
+        url="https://github.com/arbor-sim/arbor/releases/download/v0.5/arbor-v0.5-full.tar.gz",
+    )
+
+    variant(
+        "assertions",
+        default=False,
+        description="Enable arb_assert() assertions in code.",
+    )
+    variant("doc", default=False, description="Build documentation.")
+    variant("mpi", default=False, description="Enable MPI support")
+    variant("neuroml", default=True, description="Build NeuroML support library.")
+    variant("python", default=True, description="Enable Python frontend support")
+    variant(
+        "vectorize",
+        default=False,
+        description="Enable vectorization of computational kernels",
+    )
 
     # https://docs.arbor-sim.org/en/latest/install/build_install.html#compilers
-    conflicts('%gcc@:8.3')
-    conflicts('%clang@:7')
+    conflicts("%gcc@:8.3")
+    conflicts("%clang@:7")
     # Cray compiler v9.2 and later is Clang-based.
-    conflicts('%cce@:9.1')
-    conflicts('%intel')
+    conflicts("%cce@:9.1")
+    conflicts("%intel")
 
-    depends_on('cmake@3.12:', type='build')
+    depends_on("cmake@3.12:", type="build")
 
     # misc dependencies
-    depends_on('fmt@7.1:', when='@0.5.3:')  # required by the modcc compiler
-    depends_on('nlohmann-json')
-    depends_on('random123')
-    depends_on('cuda@10:', when='+cuda')
-    depends_on('libxml2', when='+neuroml')
+    depends_on("fmt@7.1:", when="@0.5.3:")  # required by the modcc compiler
+    depends_on("nlohmann-json")
+    depends_on("random123")
+    depends_on("cuda@10:", when="+cuda")
+    depends_on("libxml2", when="+neuroml")
 
     # mpi
-    depends_on('mpi', when='+mpi')
-    depends_on('py-mpi4py', when='+mpi+python', type=('build', 'run'))
+    depends_on("mpi", when="+mpi")
+    depends_on("py-mpi4py", when="+mpi+python", type=("build", "run"))
 
     # python (bindings)
-    extends('python', when='+python')
-    depends_on('python@3.6:', when="+python", type=('build', 'run'))
-    depends_on('py-numpy', when='+python', type=('build', 'run'))
-    with when('+python'):
-        depends_on('py-pybind11@2.6:', type=('build', 'run'))
-        depends_on('py-pybind11@2.8.1:', when='@0.5.3:', type=('build', 'run'))
+    extends("python", when="+python")
+    depends_on("python@3.6:", when="+python", type=("build", "run"))
+    depends_on("py-numpy", when="+python", type=("build", "run"))
+    with when("+python"):
+        depends_on("py-pybind11@2.6:", type=("build", "run"))
+        depends_on("py-pybind11@2.8.1:", when="@0.5.3:", type=("build", "run"))
 
     # sphinx based documentation
-    depends_on('python@3.6:', when="+doc", type='build')
-    depends_on('py-sphinx', when="+doc", type='build')
-    depends_on('py-svgwrite', when='+doc', type='build')
+    depends_on("python@3.6:", when="+doc", type="build")
+    depends_on("py-sphinx", when="+doc", type="build")
+    depends_on("py-svgwrite", when="+doc", type="build")
 
     @property
     def build_targets(self):
-        return ['all', 'html'] if '+doc' in self.spec else ['all']
+        return ["all", "html"] if "+doc" in self.spec else ["all"]
 
     def cmake_args(self):
         args = [
-            self.define_from_variant('ARB_WITH_ASSERTIONS', 'assertions'),
-            self.define_from_variant('ARB_WITH_MPI', 'mpi'),
-            self.define_from_variant('ARB_WITH_NEUROML', 'neuroml'),
-            self.define_from_variant('ARB_WITH_PYTHON', 'python'),
-            self.define_from_variant('ARB_VECTORIZE', 'vectorize'),
+            self.define_from_variant("ARB_WITH_ASSERTIONS", "assertions"),
+            self.define_from_variant("ARB_WITH_MPI", "mpi"),
+            self.define_from_variant("ARB_WITH_NEUROML", "neuroml"),
+            self.define_from_variant("ARB_WITH_PYTHON", "python"),
+            self.define_from_variant("ARB_VECTORIZE", "vectorize"),
         ]
 
-        if '+cuda' in self.spec:
-            args.append('-DARB_GPU=cuda')
+        if "+cuda" in self.spec:
+            args.append("-DARB_GPU=cuda")
 
         # query spack for the architecture-specific compiler flags set by its wrapper
-        args.append('-DARB_ARCH=none')
+        args.append("-DARB_ARCH=none")
         opt_flags = self.spec.target.optimization_flags(
-            self.spec.compiler.name,
-            self.spec.compiler.version)
-        args.append('-DARB_CXX_FLAGS_TARGET=' + opt_flags)
+            self.spec.compiler.name, self.spec.compiler.version
+        )
+        args.append("-DARB_CXX_FLAGS_TARGET=" + opt_flags)
 
         return args
diff --git a/validation/ref/neuron/ball_and_3stick.py b/validation/ref/neuron/ball_and_3stick.py
index a14ae073..996d86f6 100644
--- a/validation/ref/neuron/ball_and_3stick.py
+++ b/validation/ref/neuron/ball_and_3stick.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-#coding: utf-8
+# coding: utf-8
 
 import json
 import nrn_validation as V
@@ -7,21 +7,20 @@ import nrn_validation as V
 V.override_defaults_from_args()
 
 # dendrite geometry: all 100 µm long, 1 µm diameter.
-geom = [(0,1), (100, 1)]
+geom = [(0, 1), (100, 1)]
 
 model = V.VModel()
 model.add_soma(12.6157)
-model.add_dendrite('dend1', geom)
-model.add_dendrite('dend2', geom, to='dend1')
-model.add_dendrite('dend3', geom, to='dend1')
+model.add_dendrite("dend1", geom)
+model.add_dendrite("dend2", geom, to="dend1")
+model.add_dendrite("dend3", geom, to="dend1")
 
-model.add_iclamp(5, 80, 0.45, to='dend2')
-model.add_iclamp(40, 10, -0.2, to='dend3')
+model.add_iclamp(5, 80, 0.45, to="dend2")
+model.add_iclamp(40, 10, -0.2, to="dend3")
 
 simdur = 100.0
 
-data = V.run_nrn_sim(simdur, report_dt=10, model='ball_and_3stick')
+data = V.run_nrn_sim(simdur, report_dt=10, model="ball_and_3stick")
 print(json.dumps(data))
 
 V.nrn_stop()
-
diff --git a/validation/ref/neuron/ball_and_squiggle.py b/validation/ref/neuron/ball_and_squiggle.py
index b69bc2ec..63d01589 100644
--- a/validation/ref/neuron/ball_and_squiggle.py
+++ b/validation/ref/neuron/ball_and_squiggle.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-#coding: utf-8
+# coding: utf-8
 
 import json
 import math
@@ -10,21 +10,19 @@ V.override_defaults_from_args()
 # dendrite geometry: 100 µm long, varying diameter.
 length = 100.0
 npoints = 200
-radius = lambda x: math.exp(-x)*(math.sin(40*x)*0.05+0.1)+0.1
+radius = lambda x: math.exp(-x) * (math.sin(40 * x) * 0.05 + 0.1) + 0.1
 
-xs = [float(i)/(npoints-1) for i in range(npoints)]
-geom = [(length*x, 2.0*radius(x)) for x in xs]
+xs = [float(i) / (npoints - 1) for i in range(npoints)]
+geom = [(length * x, 2.0 * radius(x)) for x in xs]
 
 model = V.VModel()
 model.add_soma(12.6157)
-model.add_dendrite('dend', geom)
-model.add_iclamp(5, 80, 0.3, to='dend')
+model.add_dendrite("dend", geom)
+model.add_iclamp(5, 80, 0.3, to="dend")
 
 simdur = 100.0
 
-data = V.run_nrn_sim(simdur, report_dt=10, model='ball_and_squiggle')
+data = V.run_nrn_sim(simdur, report_dt=10, model="ball_and_squiggle")
 print(json.dumps(data))
 
 V.nrn_stop()
-
-
diff --git a/validation/ref/neuron/ball_and_stick.py b/validation/ref/neuron/ball_and_stick.py
index 93cc097e..0c5fd163 100644
--- a/validation/ref/neuron/ball_and_stick.py
+++ b/validation/ref/neuron/ball_and_stick.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-#coding: utf-8
+# coding: utf-8
 
 import json
 import nrn_validation as V
@@ -7,14 +7,13 @@ import nrn_validation as V
 V.override_defaults_from_args()
 
 # dendrite geometry: all 100 µm long, 1 µm diameter.
-geom = [(0,1), (200, 1)]
+geom = [(0, 1), (200, 1)]
 
 model = V.VModel()
 model.add_soma(12.6157)
-model.add_dendrite('dend', geom)
-model.add_iclamp(5, 80, 0.3, to='dend')
+model.add_dendrite("dend", geom)
+model.add_iclamp(5, 80, 0.3, to="dend")
 
-data = V.run_nrn_sim(100, report_dt=10, model='ball_and_stick')
+data = V.run_nrn_sim(100, report_dt=10, model="ball_and_stick")
 print(json.dumps(data))
 V.nrn_stop()
-
diff --git a/validation/ref/neuron/ball_and_taper.py b/validation/ref/neuron/ball_and_taper.py
index 7c351021..c2fbe6f9 100644
--- a/validation/ref/neuron/ball_and_taper.py
+++ b/validation/ref/neuron/ball_and_taper.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-#coding: utf-8
+# coding: utf-8
 
 import json
 import nrn_validation as V
@@ -7,14 +7,13 @@ import nrn_validation as V
 V.override_defaults_from_args()
 
 # dendrite geometry: 200 µm long, diameter 1 µm to 0.4 µm.
-geom = [(0,1.0), (200, 0.4)]
+geom = [(0, 1.0), (200, 0.4)]
 
 model = V.VModel()
 model.add_soma(12.6157)
-model.add_dendrite('taper', geom)
-model.add_iclamp(5, 80, 0.3, to='taper')
+model.add_dendrite("taper", geom)
+model.add_iclamp(5, 80, 0.3, to="taper")
 
-data = V.run_nrn_sim(100, report_dt=10, model='ball_and_taper')
+data = V.run_nrn_sim(100, report_dt=10, model="ball_and_taper")
 print(json.dumps(data))
 V.nrn_stop()
-
diff --git a/validation/ref/neuron/nrn_validation.py b/validation/ref/neuron/nrn_validation.py
index 5370a077..5952b978 100644
--- a/validation/ref/neuron/nrn_validation.py
+++ b/validation/ref/neuron/nrn_validation.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-#coding: utf-8
+# coding: utf-8
 
 import sys
 import os
@@ -13,8 +13,9 @@ from neuron import h
 # but this is chatty on stdout, which means we get
 # junk in our data if capturing output.
 
+
 def hoc_execute_quiet(arg):
-    with open(os.devnull, 'wb') as null:
+    with open(os.devnull, "wb") as null:
         fd = sys.stdout.fileno()
         keep = os.dup(fd)
         sys.stdout.flush()
@@ -23,37 +24,42 @@ def hoc_execute_quiet(arg):
         sys.stdout.flush()
         os.dup2(keep, fd)
 
+
 def hoc_setup():
     hoc_execute_quiet('load_file("stdrun.hoc")')
 
+
 def hoc_quit():
-    hoc_execute_quiet('quit()')
-    #h('quit()')
+    hoc_execute_quiet("quit()")
+    # h('quit()')
+
 
 default_model_parameters = {
-    'gnabar_hh':  0.12,   # H-H sodium conductance in S/cm^2
-    'gkbar_hh':   0.036,  # H-H potassium conductance in S/cm^2
-    'gl_hh':      0.0003, # H-H leak conductance in S/cm^2
-    'el_hh':    -54.3,    # H-H reversal potential in mV
-    'g_pas':      0.001,  # Passive conductance in S/cm^2
-    'e_pas':    -65.0,    # Leak reversal potential in mV
-    'Ra':       100.0,    # Intracellular resistivity in Ω·cm
-    'cm':         1.0,    # Membrane areal capacitance in µF/cm^2
-    'tau':        2.0,    # Exponential synapse time constant
-    'tau1':       0.5,    # Exp2 synapse tau1
-    'tau2':       2.0,    # Exp2 synapse tau2
-    'ncomp':   1001,      # Number of compartments (nseg) in dendrites
-    'dt':         0.0,    # (Simulation parameter) default dt, 0 => use cvode adaptive
-    'abstol':     1e-6    # (Simulation parameter) abstol for cvode if used
+    "gnabar_hh": 0.12,  # H-H sodium conductance in S/cm^2
+    "gkbar_hh": 0.036,  # H-H potassium conductance in S/cm^2
+    "gl_hh": 0.0003,  # H-H leak conductance in S/cm^2
+    "el_hh": -54.3,  # H-H reversal potential in mV
+    "g_pas": 0.001,  # Passive conductance in S/cm^2
+    "e_pas": -65.0,  # Leak reversal potential in mV
+    "Ra": 100.0,  # Intracellular resistivity in Ω·cm
+    "cm": 1.0,  # Membrane areal capacitance in µF/cm^2
+    "tau": 2.0,  # Exponential synapse time constant
+    "tau1": 0.5,  # Exp2 synapse tau1
+    "tau2": 2.0,  # Exp2 synapse tau2
+    "ncomp": 1001,  # Number of compartments (nseg) in dendrites
+    "dt": 0.0,  # (Simulation parameter) default dt, 0 => use cvode adaptive
+    "abstol": 1e-6,  # (Simulation parameter) abstol for cvode if used
 }
 
+
 def override_defaults_from_args(args=sys.argv):
     global default_model_parameters
     keys = list(default_model_parameters.keys())
-    r = re.compile('('+'|'.join(keys)+')=(.*)')
+    r = re.compile("(" + "|".join(keys) + ")=(.*)")
     for m in [r.match(a) for a in args]:
         if m:
-            default_model_parameters[m.group(1)]=float(m.group(2))
+            default_model_parameters[m.group(1)] = float(m.group(2))
+
 
 def combine(*dicts, **kw):
     r = {}
@@ -62,6 +68,7 @@ def combine(*dicts, **kw):
     r.update(kw)
     return r
 
+
 class VModel:
     def __init__(self):
         self.soma = None
@@ -92,23 +99,23 @@ class VModel:
         p = combine(default_model_parameters, kw)
 
         syn = h.ExpSyn(self.sections[secname](pos))
-        syn.tau = p['tau']
+        syn.tau = p["tau"]
         self.synapses.append(syn)
-        return len(self.synapses)-1
+        return len(self.synapses) - 1
 
     def add_exp2_syn(self, secname, pos=0.5, **kw):
         p = combine(default_model_parameters, kw)
 
         syn = h.Exp2Syn(self.sections[secname](pos))
-        syn.tau1 = p['tau1']
-        syn.tau2 = p['tau2']
+        syn.tau1 = p["tau1"]
+        syn.tau2 = p["tau2"]
         self.synapses.append(syn)
-        return len(self.synapses)-1
+        return len(self.synapses) - 1
 
     def add_spike(self, t, weight, target=0):
         stim = h.NetStim()
         stim.number = 1
-        stim.start  = 0
+        stim.start = 0
 
         nc = h.NetCon(stim, self.synapses[target])
         nc.delay = t
@@ -120,19 +127,19 @@ class VModel:
     def add_soma(self, diam, **kw):
         p = combine(default_model_parameters, kw)
 
-        soma = h.Section(name='soma')
+        soma = h.Section(name="soma")
         soma.diam = diam
         soma.L = diam
 
-        soma.Ra = p['Ra']
-        soma.cm = p['cm']
+        soma.Ra = p["Ra"]
+        soma.cm = p["cm"]
 
         # Insert active Hodgkin-Huxley channels in the soma.
-        soma.insert('hh')
-        soma.gnabar_hh = p['gnabar_hh']
-        soma.gkbar_hh = p['gkbar_hh']
-        soma.gl_hh = p['gl_hh']
-        soma.el_hh = p['el_hh']
+        soma.insert("hh")
+        soma.gnabar_hh = p["gnabar_hh"]
+        soma.gkbar_hh = p["gkbar_hh"]
+        soma.gl_hh = p["gl_hh"]
+        soma.el_hh = p["el_hh"]
 
         # For reversal potentials we use those computed using
         # the Nernst equation with the following values:
@@ -146,13 +153,13 @@ class VModel:
         # from the HH paper:
         #   ena    = 115.0mV + -65.0mV,
         #   ek     = -12.0mV + -65.0mV,
-        soma.ena =  63.55148117386
-        soma.ek  = -74.17164678272
+        soma.ena = 63.55148117386
+        soma.ek = -74.17164678272
 
         # This is how we would get NEURON to use Nernst equation, when they
         # correct the Nernst equation implementation.
-        #h.ion_style('k_ion', 3, 2, 1, 1, 1)
-        #h.ion_style('na_ion', 3, 2, 1, 1, 1)
+        # h.ion_style('k_ion', 3, 2, 1, 1, 1)
+        # h.ion_style('na_ion', 3, 2, 1, 1, 1)
 
         self.soma = soma
 
@@ -165,15 +172,15 @@ class VModel:
             h.pt3dadd(x, 0, 0, d)
         h.pop_section()
 
-        dend.Ra = p['Ra']
-        dend.cm = p['cm']
+        dend.Ra = p["Ra"]
+        dend.cm = p["cm"]
 
         # Add passive membrane properties to dendrite.
-        dend.insert('pas')
-        dend.g_pas = p['g_pas']
-        dend.e_pas = p['e_pas']
+        dend.insert("pas")
+        dend.g_pas = p["g_pas"]
+        dend.e_pas = p["e_pas"]
 
-        dend.nseg = int(p['ncomp'])
+        dend.nseg = int(p["ncomp"])
 
         if to is None:
             if self.soma is not None:
@@ -183,39 +190,41 @@ class VModel:
 
         self.sections[name] = dend
 
+
 # Run 'current' model, return list of traces.
 # Samples at cable mid- and end-points taken every `sample_dt`;
 # Voltage on all compartments per section reported every `report_dt`.
 
+
 def run_nrn_sim(tend, sample_dt=0.025, report_t=None, report_dt=None, dt=None, **meta):
     if dt is None:
-        dt = default_model_parameters['dt']
+        dt = default_model_parameters["dt"]
 
     # Instrument mid-point and ends of each section for traces.
     vtraces = []
     vtrace_t_hoc = h.Vector()
 
-    ncomps = set([s.nseg for s in h.allsec() if s.name()!='soma'])
-    if len(ncomps)==1:
-        common_ncomp = { 'ncomp': ncomps.pop() }
+    ncomps = set([s.nseg for s in h.allsec() if s.name() != "soma"])
+    if len(ncomps) == 1:
+        common_ncomp = {"ncomp": ncomps.pop()}
     else:
         common_ncomp = {}
 
     for s in h.allsec():
         vend = h.Vector()
         vend.record(s(0.5)._ref_v, sample_dt)
-        vtraces.append((s.name()+".mid", vend))
-        if s.nseg!=1 or s.name()!='soma':
+        vtraces.append((s.name() + ".mid", vend))
+        if s.nseg != 1 or s.name() != "soma":
             vmid = h.Vector()
             vmid.record(s(1.0)._ref_v, sample_dt)
-            vtraces.append((s.name()+".end", vmid))
+            vtraces.append((s.name() + ".end", vmid))
 
     vtrace_t_hoc.record(h._ref_t, sample_dt)
 
     # Instrument every segment for section voltage reports.
     if report_t is None:
         if report_dt is not None:
-            report_t = [report_dt*(1+i) for i in range(int(tend/report_dt))]
+            report_t = [report_dt * (1 + i) for i in range(int(tend / report_dt))]
         else:
             report_t = []
     elif not isinstance(report_t, list):
@@ -226,24 +235,24 @@ def run_nrn_sim(tend, sample_dt=0.025, report_t=None, report_dt=None, dt=None, *
 
     if report_t:
         for s in h.allsec():
-            nseg = s.nseg;
-            ps = [0] + [(i+0.5)/nseg for i in range(nseg)] + [1]
+            nseg = s.nseg
+            ps = [0] + [(i + 0.5) / nseg for i in range(nseg)] + [1]
             vs = [h.Vector() for p in ps]
             for p, v in zip(ps, vs):
                 v.record(s(p)._ref_v, vreport_t_hoc)
             vreports.append((s.name(), s.L, s.nseg, ps, vs))
 
     # Run sim
-    if dt==0:
+    if dt == 0:
         # Use CVODE instead
         h.cvode.active(1)
-        abstol = default_model_parameters['abstol']
+        abstol = default_model_parameters["abstol"]
         h.cvode.atol(abstol)
-        common_meta = { 'dt': 0, 'cvode': True, 'abstol': abstol }
+        common_meta = {"dt": 0, "cvode": True, "abstol": abstol}
     else:
         h.dt = dt
-        h.steps_per_ms = 1/dt # or else NEURON might noisily fudge dt
-        common_meta = { 'dt': dt, 'cvode': False }
+        h.steps_per_ms = 1 / dt  # or else NEURON might noisily fudge dt
+        common_meta = {"dt": dt, "cvode": False}
 
     h.secondorder = 2
     h.tstop = tend
@@ -253,43 +262,55 @@ def run_nrn_sim(tend, sample_dt=0.025, report_t=None, report_dt=None, dt=None, *
     traces = []
 
     vtrace_t = list(vtrace_t_hoc)
-    traces.append(combine(common_meta, meta, common_ncomp, {
-        'name':  'membrane voltage',
-        'sim':   'neuron',
-        'units': 'mV',
-        'data':  combine({n: list(v) for n, v in vtraces}, time=vtrace_t)
-    }))
+    traces.append(
+        combine(
+            common_meta,
+            meta,
+            common_ncomp,
+            {
+                "name": "membrane voltage",
+                "sim": "neuron",
+                "units": "mV",
+                "data": combine({n: list(v) for n, v in vtraces}, time=vtrace_t),
+            },
+        )
+    )
 
     # and section reports too
     vreport_t = list(vreport_t_hoc)
     for name, length, nseg, ps, vs in vreports:
         obs = np.column_stack([np.array(v) for v in vs])
-        xs = [length*p for p in ps]
+        xs = [length * p for p in ps]
         for i, t in enumerate(report_t):
-            if i>=obs.shape[0]:
+            if i >= obs.shape[0]:
                 break
 
-            traces.append(combine(common_meta, meta, {
-                'name': 'membrane voltage',
-                'sim':  'neuron',
-                'units': {'x': 'µm', name: 'mV'},
-                'ncomp': nseg,
-                'time': t,
-                'data': {
-                    'x': xs,
-                    name: list(obs[i,:])
-                }
-            }))
+            traces.append(
+                combine(
+                    common_meta,
+                    meta,
+                    {
+                        "name": "membrane voltage",
+                        "sim": "neuron",
+                        "units": {"x": "µm", name: "mV"},
+                        "ncomp": nseg,
+                        "time": t,
+                        "data": {"x": xs, name: list(obs[i, :])},
+                    },
+                )
+            )
 
     return traces
 
+
 def nrn_assert_no_sections():
     for s in h.allsec():
-        assert False, 'a section exists'
+        assert False, "a section exists"
+
 
 def nrn_stop():
     hoc_quit()
 
+
 # Run hoc setup on load
 hoc_setup()
-
diff --git a/validation/ref/neuron/simple_exp2_synapse.py b/validation/ref/neuron/simple_exp2_synapse.py
index bbaaa068..4ce71c68 100644
--- a/validation/ref/neuron/simple_exp2_synapse.py
+++ b/validation/ref/neuron/simple_exp2_synapse.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-#coding: utf-8
+# coding: utf-8
 
 import json
 import nrn_validation as V
@@ -7,18 +7,17 @@ import nrn_validation as V
 V.override_defaults_from_args()
 
 # dendrite geometry: 200 µm long, 1 µm diameter.
-geom = [(0,1), (200, 1)]
+geom = [(0, 1), (200, 1)]
 
 model = V.VModel()
 model.add_soma(12.6157)
-model.add_dendrite('dend', geom)
-model.add_exp2_syn('dend')
+model.add_dendrite("dend", geom)
+model.add_exp2_syn("dend")
 
 model.add_spike(10, 0.04)
 model.add_spike(20, 0.04)
 model.add_spike(40, 0.04)
 
-data = V.run_nrn_sim(70, report_dt=10, model='exp2syn')
+data = V.run_nrn_sim(70, report_dt=10, model="exp2syn")
 print(json.dumps(data))
 V.nrn_stop()
-
diff --git a/validation/ref/neuron/simple_exp_synapse.py b/validation/ref/neuron/simple_exp_synapse.py
index e83a7051..2c8a3ab4 100644
--- a/validation/ref/neuron/simple_exp_synapse.py
+++ b/validation/ref/neuron/simple_exp_synapse.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-#coding: utf-8
+# coding: utf-8
 
 import json
 import nrn_validation as V
@@ -7,17 +7,17 @@ import nrn_validation as V
 V.override_defaults_from_args()
 
 # dendrite geometry: 200 µm long, 1 µm diameter.
-geom = [(0,1), (200, 1)]
+geom = [(0, 1), (200, 1)]
 
 model = V.VModel()
 model.add_soma(12.6157)
-model.add_dendrite('dend', geom)
-model.add_exp_syn('dend')
+model.add_dendrite("dend", geom)
+model.add_exp_syn("dend")
 
 model.add_spike(10, 0.04)
 model.add_spike(20, 0.04)
 model.add_spike(40, 0.04)
 
-data = V.run_nrn_sim(70, report_dt=10, model='expsyn')
+data = V.run_nrn_sim(70, report_dt=10, model="expsyn")
 print(json.dumps(data))
 V.nrn_stop()
diff --git a/validation/ref/neuron/soma.py b/validation/ref/neuron/soma.py
index 372f4207..9932fd9a 100644
--- a/validation/ref/neuron/soma.py
+++ b/validation/ref/neuron/soma.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-#coding: utf-8
+# coding: utf-8
 
 import json
 import nrn_validation as V
@@ -11,7 +11,6 @@ model = V.VModel()
 model.add_soma(18.8, Ra=100)
 model.add_iclamp(10, 100, 0.1)
 
-data = V.run_nrn_sim(100, report_dt=None, model='soma')
+data = V.run_nrn_sim(100, report_dt=None, model="soma")
 print(json.dumps(data))
 V.nrn_stop()
-
-- 
GitLab