diff --git a/CMakeLists.txt b/CMakeLists.txt
index 86aff17b95f532bbddc5beb17f02cce3ff363fff..62c167a698d58d3ed8470ef14d430eebc156e0f1 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -88,6 +88,16 @@ else()
     set(use_external_modcc ON BOOL)
 endif()
 
+# Whether to attempt to use nrniv to build validation data:
+# if nrniv is found, build the data sets; otherwise skip them.
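+# (NRNIV_BIN is a cached result of find_program, so it can also be set
+# explicitly, e.g. with -DNRNIV_BIN=/path/to/nrniv, if nrniv is not on the PATH.)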
+find_program(NRNIV_BIN nrniv)
+if(NRNIV_BIN STREQUAL "NRNIV_BIN-NOTFOUND")
+    message(STATUS "nrniv not found; will not automatically build validation data sets")
+    set(BUILD_VALIDATION_DATA FALSE)
+else()
+    set(BUILD_VALIDATION_DATA TRUE)
+endif()
+
 include_directories(${CMAKE_SOURCE_DIR}/tclap/include)
 include_directories(${CMAKE_SOURCE_DIR}/vector)
 include_directories(${CMAKE_SOURCE_DIR}/include)
@@ -104,6 +114,7 @@ if(use_external_modcc)
     add_subdirectory(modcc)
 endif()
 add_subdirectory(mechanisms)
+add_subdirectory(nrn)
 add_subdirectory(src)
 add_subdirectory(tests)
 add_subdirectory(miniapp)
diff --git a/data/validation/.keep b/data/validation/.keep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/data/validation/ball_and_3stick.json b/data/validation/ball_and_3stick.json
deleted file mode 100644
index e4121f4383e44dd7a7c15c36598b2a85fba728f2..0000000000000000000000000000000000000000
--- a/data/validation/ball_and_3stick.json
+++ /dev/null
@@ -1,158 +0,0 @@
-[
-  {
-    "nseg": 5, 
-    "dt": 0.001, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          6.943999999998871, 
-          20.48399999999761, 
-          33.862999999979664, 
-          52.52400000006877, 
-          65.9560000001329, 
-          79.3600000001969
-        ], 
-        "thresh": 0.0
-      }, 
-      "clamp": {
-        "spikes": [
-          7.1399999999987624, 
-          20.448999999997692, 
-          33.722999999978995, 
-          52.61200000006919, 
-          65.85800000013244, 
-          79.18600000019607
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          7.055999999998809, 
-          20.44199999999771, 
-          33.73499999997905, 
-          52.56900000006898, 
-          65.86400000013246, 
-          79.20200000019615
-        ], 
-        "thresh": -20.0
-      }
-    }
-  }, 
-  {
-    "nseg": 11, 
-    "dt": 0.001, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          6.948999999998868, 
-          20.506999999997557, 
-          33.91399999997991, 
-          52.528000000068786, 
-          65.98500000013304, 
-          79.42000000019719
-        ], 
-        "thresh": 0.0
-      }, 
-      "clamp": {
-        "spikes": [
-          7.147999999998758, 
-          20.478999999997622, 
-          33.77899999997926, 
-          52.62300000006924, 
-          65.8930000001326, 
-          79.24900000019638
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          7.061999999998806, 
-          20.467999999997648, 
-          33.785999999979296, 
-          52.576000000069016, 
-          65.8940000001326, 
-          79.26000000019643
-        ], 
-        "thresh": -20.0
-      }
-    }
-  }, 
-  {
-    "nseg": 51, 
-    "dt": 0.001, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          6.949999999998868, 
-          20.512999999997543, 
-          33.92699999997997, 
-          52.530000000068796, 
-          65.99200000013307, 
-          79.43500000019726
-        ], 
-        "thresh": 0.0
-      }, 
-      "clamp": {
-        "spikes": [
-          7.150999999998756, 
-          20.486999999997604, 
-          33.793999999979334, 
-          52.626000000069254, 
-          65.90200000013265, 
-          79.26500000019645
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          7.062999999998805, 
-          20.473999999997634, 
-          33.79899999997936, 
-          52.578000000069025, 
-          65.90200000013265, 
-          79.2740000001965
-        ], 
-        "thresh": -20.0
-      }
-    }
-  }, 
-  {
-    "nseg": 101, 
-    "dt": 0.001, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          6.949999999998868, 
-          20.512999999997543, 
-          33.927999999979974, 
-          52.530000000068796, 
-          65.99300000013308, 
-          79.43500000019726
-        ], 
-        "thresh": 0.0
-      }, 
-      "clamp": {
-        "spikes": [
-          7.150999999998756, 
-          20.486999999997604, 
-          33.793999999979334, 
-          52.626000000069254, 
-          65.90300000013265, 
-          79.26600000019646
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          7.062999999998805, 
-          20.47499999999763, 
-          33.79999999997936, 
-          52.578000000069025, 
-          65.90200000013265, 
-          79.2750000001965
-        ], 
-        "thresh": -20.0
-      }
-    }
-  }
-]
\ No newline at end of file
diff --git a/data/validation/ball_and_stick.json b/data/validation/ball_and_stick.json
deleted file mode 100644
index 5b459a3a287e012def1539866531efa458f66cc0..0000000000000000000000000000000000000000
--- a/data/validation/ball_and_stick.json
+++ /dev/null
@@ -1,158 +0,0 @@
-[
-  {
-    "nseg": 5, 
-    "dt": 0.001, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          6.980999999998851, 
-          20.98699999999644, 
-          34.792999999984104, 
-          48.61100000005008, 
-          62.43100000011607, 
-          76.25100000018206
-        ], 
-        "thresh": 0.0
-      }, 
-      "clamp": {
-        "spikes": [
-          7.2499999999987015, 
-          21.193999999995956, 
-          34.97599999998498, 
-          48.78900000005093, 
-          62.60700000011691, 
-          76.4270000001829
-        ], 
-        "thresh": 10.0
-      }, 
-      "dend": {
-        "spikes": [
-          7.169999999998746, 
-          21.177999999995993, 
-          34.97799999998499, 
-          48.79400000005096, 
-          62.614000000116945, 
-          76.43400000018293
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }, 
-  {
-    "nseg": 11, 
-    "dt": 0.001, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          6.998999999998841, 
-          21.078999999996224, 
-          34.983999999985016, 
-          48.9080000000515, 
-          62.835000000118, 
-          76.7630000001845
-        ], 
-        "thresh": 0.0
-      }, 
-      "clamp": {
-        "spikes": [
-          7.277999999998686, 
-          21.29999999999571, 
-          35.17999999998595, 
-          49.09800000005241, 
-          63.0240000001189, 
-          76.9510000001854
-        ], 
-        "thresh": 10.0
-      }, 
-      "dend": {
-        "spikes": [
-          7.181999999998739, 
-          21.2609999999958, 
-          35.15899999998585, 
-          49.08100000005233, 
-          63.00700000011882, 
-          76.93400000018532
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }, 
-  {
-    "nseg": 51, 
-    "dt": 0.001, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          7.003999999998838, 
-          21.10199999999617, 
-          35.03299999998525, 
-          48.98500000005187, 
-          62.9400000001185, 
-          76.89500000018514
-        ], 
-        "thresh": 0.0
-      }, 
-      "clamp": {
-        "spikes": [
-          7.284999999998682, 
-          21.32599999999565, 
-          35.232999999986205, 
-          49.17800000005279, 
-          63.13200000011942, 
-          77.08700000018605
-        ], 
-        "thresh": 10.0
-      }, 
-      "dend": {
-        "spikes": [
-          7.185999999998737, 
-          21.28199999999575, 
-          35.20499999998607, 
-          49.15500000005268, 
-          63.10900000011931, 
-          77.06400000018594
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }, 
-  {
-    "nseg": 101, 
-    "dt": 0.001, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          7.003999999998838, 
-          21.102999999996168, 
-          35.03499999998526, 
-          48.98800000005188, 
-          62.94400000011852, 
-          76.90000000018516
-        ], 
-        "thresh": 0.0
-      }, 
-      "clamp": {
-        "spikes": [
-          7.284999999998682, 
-          21.326999999995646, 
-          35.234999999986215, 
-          49.181000000052805, 
-          63.13500000011943, 
-          77.09100000018607
-        ], 
-        "thresh": 10.0
-      }, 
-      "dend": {
-        "spikes": [
-          7.185999999998737, 
-          21.28299999999575, 
-          35.20699999998608, 
-          49.15700000005269, 
-          63.11300000011933, 
-          77.06900000018597
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }
-]
\ No newline at end of file
diff --git a/data/validation/simple_exp2_synapse.json b/data/validation/simple_exp2_synapse.json
deleted file mode 100644
index 137d9e6f86916e945b9f8c87d02369912fa03df0..0000000000000000000000000000000000000000
--- a/data/validation/simple_exp2_synapse.json
+++ /dev/null
@@ -1,90 +0,0 @@
-[
-  {
-    "nseg": 5, 
-    "dt": 0.0125, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          11.23749999999963, 
-          21.72500000000066, 
-          41.2625000000051
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          11.087499999999638, 
-          21.487500000000605, 
-          41.11250000000506
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }, 
-  {
-    "nseg": 11, 
-    "dt": 0.0125, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          11.23749999999963, 
-          21.73750000000066, 
-          41.2750000000051
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          11.087499999999638, 
-          21.487500000000605, 
-          41.11250000000506
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }, 
-  {
-    "nseg": 51, 
-    "dt": 0.0125, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          11.23749999999963, 
-          21.73750000000066, 
-          41.2750000000051
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          11.087499999999638, 
-          21.487500000000605, 
-          41.11250000000506
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }, 
-  {
-    "nseg": 101, 
-    "dt": 0.0125, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          11.23749999999963, 
-          21.73750000000066, 
-          41.2750000000051
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          11.087499999999638, 
-          21.487500000000605, 
-          41.11250000000506
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }
-]
\ No newline at end of file
diff --git a/data/validation/simple_exp_synapse.json b/data/validation/simple_exp_synapse.json
deleted file mode 100644
index 675f8accd5753b720efe80020f3665df9ba47318..0000000000000000000000000000000000000000
--- a/data/validation/simple_exp_synapse.json
+++ /dev/null
@@ -1,90 +0,0 @@
-[
-  {
-    "nseg": 5, 
-    "dt": 0.0125, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          11.087499999999638, 
-          21.525000000000613, 
-          41.11250000000506
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          11.087499999999638, 
-          21.51250000000061, 
-          41.10000000000506
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }, 
-  {
-    "nseg": 11, 
-    "dt": 0.0125, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          11.099999999999637, 
-          21.537500000000616, 
-          41.125000000005066
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          11.087499999999638, 
-          21.525000000000613, 
-          41.11250000000506
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }, 
-  {
-    "nseg": 51, 
-    "dt": 0.0125, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          11.099999999999637, 
-          21.537500000000616, 
-          41.125000000005066
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          11.087499999999638, 
-          21.525000000000613, 
-          41.11250000000506
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }, 
-  {
-    "nseg": 101, 
-    "dt": 0.0125, 
-    "measurements": {
-      "soma": {
-        "spikes": [
-          11.099999999999637, 
-          21.537500000000616, 
-          41.125000000005066
-        ], 
-        "thresh": 20.0
-      }, 
-      "dend": {
-        "spikes": [
-          11.087499999999638, 
-          21.525000000000613, 
-          41.11250000000506
-        ], 
-        "thresh": -10.0
-      }
-    }
-  }
-]
\ No newline at end of file
diff --git a/data/validation/soma.json b/data/validation/soma.json
deleted file mode 100644
index b0de340ce5891e8cc95fdfb5cc92d956751f37da..0000000000000000000000000000000000000000
--- a/data/validation/soma.json
+++ /dev/null
@@ -1,62 +0,0 @@
-[
- {
-  "dt": 0.05, 
-  "spikes": [
-   12.04999999999985, 
-   27.57499999999897, 
-   42.85000000000118, 
-   58.10000000000465, 
-   73.37500000000811, 
-   88.62500000001158, 
-   103.90000000001505
-  ]
- }, 
- {
-  "dt": 0.02, 
-  "spikes": [
-   12.04999999999985, 
-   27.57499999999897, 
-   42.85000000000118, 
-   58.10000000000465, 
-   73.37500000000811, 
-   88.62500000001158, 
-   103.90000000001505
-  ]
- }, 
- {
-  "dt": 0.01, 
-  "spikes": [
-   12.037499999999584, 
-   27.525000000001977, 
-   42.76250000000544, 
-   57.9875000000089, 
-   73.21250000000188, 
-   88.43749999998803, 
-   103.66249999997419
-  ]
- }, 
- {
-  "dt": 0.001, 
-  "spikes": [
-   12.026000000003206, 
-   27.476999999981313, 
-   42.68400000002178, 
-   57.881000000094346, 
-   73.0770000001669, 
-   88.27300000023946, 
-   103.46900000031202
-  ]
- }, 
- {
-  "dt": 0.0001, 
-  "spikes": [
-   12.024499999978646, 
-   27.473100000350247, 
-   42.67780000085499, 
-   57.87230000135939, 
-   73.06600000186377, 
-   88.25970000236815, 
-   103.45330000287252
-  ]
- }
-]
\ No newline at end of file
diff --git a/miniapp/trace_sampler.hpp b/miniapp/trace_sampler.hpp
index 26db0a491444db32a49653f861b18dce464c103d..6bdc51a5bbf6f1c10bac2b8b5c5748faca888c18 100644
--- a/miniapp/trace_sampler.hpp
+++ b/miniapp/trace_sampler.hpp
@@ -1,5 +1,10 @@
 #pragma once
 
+/*
+ * Simple implementation of a recorder for scalar
+ * trace data from a cell probe, with some metadata.
+ */
+
 #include <cstdlib>
 #include <vector>
 
@@ -38,7 +43,7 @@ struct trace_sampler {
     using time_type = Time;
     using value_type = Value;
 
-    float next_sample_t() const { return t_next_sample_; }
+    time_type next_sample_t() const { return t_next_sample_; }
 
     util::optional<time_type> operator()(time_type t, value_type v) {
         if (t<t_next_sample_) {
diff --git a/nrn/CMakeLists.txt b/nrn/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..28325c091cea5aec8b0666ac46ba6503e82cdc9a
--- /dev/null
+++ b/nrn/CMakeLists.txt
@@ -0,0 +1,26 @@
+# The validation scripts to run (without .py extension)
+
+set(validations
+     ball_and_stick
+     ball_and_3stick
+     ball_and_taper
+     simple_exp_synapse
+     simple_exp2_synapse
+     soma)
+
+# Only try to build validation data sets if nrniv was found.
+if(BUILD_VALIDATION_DATA)
+    set(common "${CMAKE_CURRENT_SOURCE_DIR}/nrn_validation.py")
+    foreach(v ${validations})
+        set(out "${CMAKE_SOURCE_DIR}/data/validation/neuron_${v}.json")
+        set(src "${CMAKE_CURRENT_SOURCE_DIR}/${v}.py")
+        add_custom_command(
+            OUTPUT "${out}"
+            DEPENDS "${src}" "${common}"
+            WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
+            COMMAND ${NRNIV_BIN} -nobanner -python ${src} > ${out})
+        list(APPEND all_neuron_validation "${out}")
+    endforeach()
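+    # With a Makefile generator, the data sets can then be regenerated
+    # explicitly with `make validation_data`.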
+    add_custom_target(validation_data DEPENDS ${all_neuron_validation})
+endif()
+
diff --git a/nrn/ball_and_3stick.py b/nrn/ball_and_3stick.py
index 4333072216b56d203708430243e28ef008cdfa68..a1c31e7a6c8825479879e42aa8f35bb10e638345 100644
--- a/nrn/ball_and_3stick.py
+++ b/nrn/ball_and_3stick.py
@@ -1,164 +1,28 @@
-from timeit import default_timer as timer
-import os.path
-from matplotlib import pyplot
-import numpy as np
-import json
-import argparse
-import sys
-from neuron import gui, h
-
-parser = argparse.ArgumentParser(description='generate spike train ball and stick model with hh channels at soma and pas channels in dendrite')
-parser.add_argument('--plot', action='store_true', dest='plot')
-
-# hack to make things work with nrniv ... -python:
-# throw away args before -python foo.py if -python present.
-
-if '-python' in sys.argv:
-    argv = sys.argv[sys.argv.index('-python')+2:]
-else:
-    argv = sys.argv
-
-args = parser.parse_args(argv)
-
-soma = h.Section(name='soma')
-dend = [
-        h.Section(name='dend0'),
-        h.Section(name='dend1'),
-        h.Section(name='dend2'),
-    ]
+#!/usr/bin/env python
+#coding: utf-8
 
-dend[0].connect(soma(1))
-dend[1].connect(dend[0](1))
-dend[2].connect(dend[0](1))
-
-# Surface area of cylinder is 2*pi*r*h (sealed ends are implicit).
-# Here we make a square cylinder in that the diameter
-# is equal to the height, so diam = h. ==> Area = 4*pi*r^2
-# We want a soma of 500 microns squared:
-# r^2 = 500/(4*pi) ==> r = 6.2078, diam = 12.6157
-soma.L = soma.diam = 12.6157 # Makes a soma of 500 microns squared.
-for d in dend :
-    d.L = 100
-    d.diam = 1
-
-for sec in h.allsec():
-    sec.Ra = 100    # Axial resistance in Ohm * cm
-    sec.cm = 1      # Membrane capacitance in micro Farads / cm^2
-
-# Insert active Hodgkin-Huxley current in the soma
-soma.insert('hh')
-soma.gnabar_hh = 0.12  # Sodium conductance in S/cm2
-soma.gkbar_hh = 0.036  # Potassium conductance in S/cm2
-soma.gl_hh = 0.0003    # Leak conductance in S/cm2
-soma.el_hh = -54.3     # Reversal potential in mV
+import json
+import nrn_validation as V
 
-# Insert passive current in the dendrite
-for d in dend :
-    d.insert('pas')
-    d.g_pas = 0.001  # Passive conductance in S/cm2
-    d.e_pas = -65    # Leak reversal potential mV
+V.override_defaults_from_args()
 
-stim = [
-     h.IClamp(dend[1](1)),
-     h.IClamp(dend[2](1))
-   ]
-stim[0].delay = 5
-stim[0].dur = 80
-stim[0].amp = 4.5*0.1
+# dendrite geometry: all 100 µm long, 1 µm diameter.
+geom = [(0,1), (100, 1)]
 
-stim[1].delay = 40
-stim[1].dur = 10
-stim[1].amp = -2*0.1
+model = V.VModel()
+model.add_soma(12.6157)
+model.add_dendrite('dend1', geom)
+model.add_dendrite('dend2', geom, to='dend1')
+model.add_dendrite('dend3', geom, to='dend1')
 
-if args.plot :
-    pyplot.figure(figsize=(8,4)) # Default figsize is (8,6)
-    pyplot.grid()
+model.add_iclamp(5, 80, 0.45, to='dend2')
+model.add_iclamp(40, 10, -0.2, to='dend3')
 
 simdur = 100.0
-h.tstop = simdur
-h.dt = 0.001
-
-start = timer()
-results = []
-for nseg in [5, 11, 51, 101] :
-
-    print 'simulation with ', nseg, ' compartments in dendrite...'
-
-    for d in dend :
-        d.nseg=nseg
-
-    # record voltages
-    v_soma = h.Vector() # soma
-    v_dend = h.Vector() # at the dendrite branching point
-    v_clamp= h.Vector() # end of dendrite at clamp location
-
-    v_soma.record( soma(0.5)._ref_v)
-    v_dend.record( dend[0](1)._ref_v)
-    v_clamp.record(dend[1](1)._ref_v)
-
-    # record spikes
-    # this is a bit verbose, no?
-    spike_counter_soma = h.APCount(soma(0.5))
-    spike_counter_soma.thresh = 0
-    spike_counter_dend = h.APCount(dend[0](1))
-    spike_counter_dend.thresh = -20
-    spike_counter_clamp = h.APCount(dend[1](1.0))
-    spike_counter_clamp.thresh = 20
-
-    spikes_soma = h.Vector() # soma
-    spikes_dend = h.Vector() # middle of dendrite
-    spikes_clamp= h.Vector() # end of dendrite at clamp location
-
-    spike_counter_soma.record(spikes_soma)
-    spike_counter_dend.record(spikes_dend)
-    spike_counter_clamp.record(spikes_clamp)
-
-    # record time stamps
-    t_vec = h.Vector()
-    t_vec.record(h._ref_t)
-
-    # finally it's time to run the simulation
-    h.run()
-
-    results.append(
-        {
-            "nseg": nseg,
-            "dt" : h.dt,
-            "measurements": {
-               "soma" : {
-                   "thresh" :  spike_counter_soma.thresh,
-                   "spikes" :  spikes_soma.to_python()
-               },
-               "dend" : {
-                   "thresh" :  spike_counter_dend.thresh,
-                   "spikes" :  spikes_dend.to_python()
-               },
-               "clamp" : {
-                   "thresh" :  spike_counter_clamp.thresh,
-                   "spikes" :  spikes_clamp.to_python()
-               }
-           }
-        }
-    )
-
-    if args.plot :
-        pyplot.plot(t_vec, v_soma,  'k', linewidth=1, label='soma ' + str(nseg))
-        pyplot.plot(t_vec, v_dend,  'b', linewidth=1, label='dend ' + str(nseg))
-        pyplot.plot(t_vec, v_clamp, 'r', linewidth=1, label='clamp ' + str(nseg))
-
-# time the simulations
-end = timer()
-print "took ", end-start, " seconds"
-
-# save the spike info as in json format
-fp = open('ball_and_3stick.json', 'w')
-json.dump(results, fp, indent=2)
+dt = 0.001
 
-if args.plot :
-    pyplot.xlabel('time (ms)')
-    pyplot.ylabel('mV')
-    pyplot.xlim([0, simdur])
-    pyplot.legend()
+data = V.run_nrn_sim(simdur, report_dt=10, dt=dt, model='ball_and_3stick')
+print json.dumps(data)
 
-    pyplot.show()
+V.nrn_stop()
 
diff --git a/nrn/ball_and_stick.py b/nrn/ball_and_stick.py
index 10932d1adcda27e60e5c8781f491a665b6d8cd8c..ee4dc999f59552671230eb004891c8676a7bfd32 100644
--- a/nrn/ball_and_stick.py
+++ b/nrn/ball_and_stick.py
@@ -1,148 +1,20 @@
-from timeit import default_timer as timer
-import os.path
-from matplotlib import pyplot
-import numpy as np
-import json
-import argparse
-import sys
-from neuron import gui, h
-
-parser = argparse.ArgumentParser(description='generate spike train ball and stick model with hh channels at soma and pas channels in dendrite')
-parser.add_argument('--plot', action='store_true', dest='plot')
-
-# hack to make things work with nrniv ... -python:
-# throw away args before -python foo.py if -python present.
-
-if '-python' in sys.argv:
-    argv = sys.argv[sys.argv.index('-python')+2:]
-else:
-    argv = sys.argv
-
-args = parser.parse_args(argv)
-
-soma = h.Section(name='soma')
-dend = h.Section(name='dend')
-
-dend.connect(soma(1))
-
-# Surface area of cylinder is 2*pi*r*h (sealed ends are implicit).
-# Here we make a square cylinder in that the diameter
-# is equal to the height, so diam = h. ==> Area = 4*pi*r^2
-# We want a soma of 500 microns squared:
-# r^2 = 500/(4*pi) ==> r = 6.2078, diam = 12.6157
-soma.L = soma.diam = 12.6157 # Makes a soma of 500 microns squared.
-dend.L = 200 # microns
-dend.diam = 1 # microns
-
-for sec in h.allsec():
-    sec.Ra = 100    # Axial resistance in Ohm * cm
-    sec.cm = 1      # Membrane capacitance in micro Farads / cm^2
-
-# Insert active Hodgkin-Huxley current in the soma
-soma.insert('hh')
-soma.gnabar_hh = 0.12  # Sodium conductance in S/cm2
-soma.gkbar_hh = 0.036  # Potassium conductance in S/cm2
-soma.gl_hh = 0.0003    # Leak conductance in S/cm2
-soma.el_hh = -54.3     # Reversal potential in mV
-
-# Insert passive current in the dendrite
-dend.insert('pas')
-dend.g_pas = 0.001  # Passive conductance in S/cm2
-dend.e_pas = -65    # Leak reversal potential mV
-
-stim = h.IClamp(dend(1))
-stim.delay = 5
-stim.dur = 80
-stim.amp = 3*0.1
-
-if args.plot :
-    pyplot.figure(figsize=(8,4)) # Default figsize is (8,6)
-    pyplot.grid()
+#!/usr/bin/env python
+#coding: utf-8
 
-simdur = 100.0
-h.tstop = simdur
-h.dt = 0.001
-
-start = timer()
-results = []
-for nseg in [5, 11, 51, 101] :
-
-    print 'simulation with ', nseg, ' compartments in dendrite...'
-
-    dend.nseg=nseg
-
-    # record voltages
-    v_soma = h.Vector() # soma
-    v_dend = h.Vector() # middle of dendrite
-    v_clamp= h.Vector() # end of dendrite at clamp location
-
-    v_soma.record( soma(0.5)._ref_v)
-    v_dend.record( dend(0.5)._ref_v)
-    v_clamp.record(dend(1.0)._ref_v)
-
-    # record spikes
-    # this is a bit verbose, no?
-    spike_counter_soma = h.APCount(soma(0.5))
-    spike_counter_soma.thresh = 0
-    spike_counter_dend = h.APCount(dend(0.5))
-    spike_counter_dend.thresh = -10
-    spike_counter_clamp = h.APCount(dend(1.0))
-    spike_counter_clamp.thresh = 10
-
-    spikes_soma = h.Vector() # soma
-    spikes_dend = h.Vector() # middle of dendrite
-    spikes_clamp= h.Vector() # end of dendrite at clamp location
-
-    spike_counter_soma.record(spikes_soma)
-    spike_counter_dend.record(spikes_dend)
-    spike_counter_clamp.record(spikes_clamp)
-
-    # record time stamps
-    t_vec = h.Vector()
-    t_vec.record(h._ref_t)
-
-    # finally it's time to run the simulation
-    h.run()
-
-    results.append(
-        {
-            "nseg": nseg,
-            "dt" : h.dt,
-            "measurements": {
-               "soma" : {
-                   "thresh" :  spike_counter_soma.thresh,
-                   "spikes" :  spikes_soma.to_python()
-               },
-               "dend" : {
-                   "thresh" :  spike_counter_dend.thresh,
-                   "spikes" :  spikes_dend.to_python()
-               },
-               "clamp" : {
-                   "thresh" :  spike_counter_clamp.thresh,
-                   "spikes" :  spikes_clamp.to_python()
-               }
-           }
-        }
-    )
-
-    if args.plot :
-        pyplot.plot(t_vec, v_soma,  'k', linewidth=1, label='soma ' + str(nseg))
-        pyplot.plot(t_vec, v_dend,  'b', linewidth=1, label='dend ' + str(nseg))
-        pyplot.plot(t_vec, v_clamp, 'r', linewidth=1, label='clamp ' + str(nseg))
+import json
+import nrn_validation as V
 
-# time the simulations
-end = timer()
-print "took ", end-start, " seconds"
+V.override_defaults_from_args()
 
-# save the spike info as in json format
-fp = open('ball_and_stick.json', 'w')
-json.dump(results, fp, indent=2)
+# dendrite geometry: 200 µm long, 1 µm diameter.
+geom = [(0,1), (200, 1)]
 
-if args.plot :
-    pyplot.xlabel('time (ms)')
-    pyplot.ylabel('mV')
-    pyplot.xlim([0, simdur])
-    pyplot.legend()
+model = V.VModel()
+model.add_soma(12.6157)
+model.add_dendrite('dend', geom)
+model.add_iclamp(5, 80, 0.3, to='dend')
 
-    pyplot.show()
+data = V.run_nrn_sim(100, report_dt=10, model='ball_and_stick')
+print json.dumps(data)
+V.nrn_stop()
 
diff --git a/nrn/ball_and_taper.py b/nrn/ball_and_taper.py
new file mode 100644
index 0000000000000000000000000000000000000000..8b5d35dca92eec8953bd6157d1d711f668e45417
--- /dev/null
+++ b/nrn/ball_and_taper.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+#coding: utf-8
+
+import json
+import nrn_validation as V
+
+V.override_defaults_from_args()
+
+# dendrite geometry: 100 µm long, diameter 1 µm to 0.1 µm.
+geom = [(0,1), (100, 0.1)]
+
+model = V.VModel()
+model.add_soma(12.6157)
+model.add_dendrite('taper', geom)
+model.add_iclamp(5, 80, 0.3, to='taper')
+
+data = V.run_nrn_sim(100, report_dt=10, model='ball_and_taper')
+print json.dumps(data)
+V.nrn_stop()
+
diff --git a/nrn/generate_validation.sh b/nrn/generate_validation.sh
index 15149cddc022aa47d2f94149fc4efc6c833e9ecd..81c2ecfe19dc0c7ec04cc4063c140e02fef66a46 100755
--- a/nrn/generate_validation.sh
+++ b/nrn/generate_validation.sh
@@ -1,7 +1,31 @@
 #!/usr/bin/env bash
 
-python2 ./soma.py
-python2 ./ball_and_stick.py
-python2 ./ball_and_3stick.py
-python2 ./simple_synapse.py --synapse exp2
-python2 ./simple_synapse.py --synapse exp
+# NB: this is a convenience script; under normal circumstances
+# validation files will be built as required by CMake.
+
+cmd="${0##*/}"
+function usage {
+    echo "usage: $cmd SCRIPTDIR [DESTDIR]"
+    exit 1
+}
+
+if [ $# -gt 2 -o $# -lt 1 ]; then usage; fi
+
+scriptdir="$1"
+if [ ! -d "$scriptdir" ]; then
+    echo "$cmd: no such directory '$scriptdir'"
+    usage
+fi
+
+destdir="."
+if [ $# -eq 2 ]; then destdir="$2"; fi
+if [ ! -d "$destdir" ]; then
+    echo "$cmd: no such directory '$destdir'"
+    usage
+fi
+
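+# e.g., run from the nrn/ directory: ./generate_validation.sh . ../data/validation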
+for v in soma ball_and_stick ball_and_taper ball_and_3stick simple_exp_synapse simple_exp2_synapse; do
+    (cd "$scriptdir"; nrniv -nobanner -python ./$v.py) > "$destdir/neuron_$v.json" &
+done
+wait
+
diff --git a/nrn/nrn_validation.py b/nrn/nrn_validation.py
new file mode 100644
index 0000000000000000000000000000000000000000..66563885b05ba9212bd017ea1c90b514c93c6ff2
--- /dev/null
+++ b/nrn/nrn_validation.py
@@ -0,0 +1,251 @@
+#!/usr/bin/env python
+#coding: utf-8
+
+import json
+import sys
+import os
+import re
+import numpy as np
+import neuron
+from neuron import h
+
+# This is super annoying: without neuron.gui, we need to explicitly
+# load 'standard' hoc routines like 'run', but doing so is chatty on
+# stdout, which means we get junk in our data if we are capturing
+# output.
+
+def hoc_setup():
+    with open(os.devnull, 'wb') as null:
+        fd = sys.stdout.fileno()
+        keep = os.dup(fd)
+        sys.stdout.flush()
+        os.dup2(null.fileno(), fd)
+
+        h('load_file("stdrun.hoc")')
+        sys.stdout.flush()
+        os.dup2(keep, fd)
+
+default_model_parameters = {
+    'gnabar_hh':  0.12,   # H-H sodium conductance in S/cm^2
+    'gkbar_hh':   0.036,  # H-H potassium conductance in S/cm^2
+    'gl_hh':      0.0003, # H-H leak conductance in S/cm^2
+    'el_hh':    -54.3,    # H-H reversal potential in mV
+    'g_pas':      0.001,  # Passive conductance in S/cm^2
+    'e_pas':    -65.0,    # Leak reversal potential in mV
+    'Ra':       100.0,    # Intracellular resistivity in Ω·cm
+    'cm':         1.0,    # Membrane areal capacitance in µF/cm^2
+    'tau':        2.0,    # Exponential synapse time constant
+    'tau1':       0.5,    # Exp2 synapse tau1
+    'tau2':       2.0,    # Exp2 synapse tau2
+    'ncomp':   1001       # Number of compartments (nseg) in dendrites
+}
+
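+# Model parameters can be overridden from the command line with KEY=VALUE
+# arguments, e.g. `nrniv -nobanner -python soma.py ncomp=51 Ra=150`
+# (an illustrative invocation; any key in default_model_parameters is accepted).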
+def override_defaults_from_args(args=sys.argv):
+    global default_model_parameters
+    keys = default_model_parameters.keys()
+    r = re.compile('('+'|'.join(keys)+')=(.*)')
+    for m in [r.match(a) for a in args]:
+        if m:
+            default_model_parameters[m.group(1)]=float(m.group(2))
+
+def combine(*dicts, **kw):
+    r = {}
+    for d in dicts:
+        r.update(d)
+    r.update(kw)
+    return r
+
+class VModel:
+    def __init__(self):
+        self.soma = None
+        self.sections = {}
+        self.stims = []
+        self.synapses = []
+        self.netcons = []
+
+    def set_ncomp(self, n):
+        for s in self.sections.values():
+            s.nseg = int(n)
+
+    def add_iclamp(self, t0, dt, i, to=None, pos=1):
+        # If no section specified, attach to middle of soma
+        if to is None:
+            sec = self.soma
+            pos = 0.5
+        else:
+            sec = self.sections[to]
+
+        stim = h.IClamp(sec(pos))
+        stim.delay = t0
+        stim.dur = dt
+        stim.amp = i
+        self.stims.append(stim)
+
+    def add_exp_syn(self, secname, pos=0.5, **kw):
+        p = combine(default_model_parameters, kw)
+
+        syn = h.ExpSyn(self.sections[secname](pos))
+        syn.tau = p['tau']
+        self.synapses.append(syn)
+        return len(self.synapses)-1
+
+    def add_exp2_syn(self, secname, pos=0.5, **kw):
+        p = combine(default_model_parameters, kw)
+
+        syn = h.Exp2Syn(self.sections[secname](pos))
+        syn.tau1 = p['tau1']
+        syn.tau2 = p['tau2']
+        self.synapses.append(syn)
+        return len(self.synapses)-1
+
+    def add_spike(self, t, weight, target=0):
+        stim = h.NetStim()
+        stim.number = 1
+        stim.start  = 0
+
+        nc = h.NetCon(stim, self.synapses[target])
+        nc.delay = t
+        nc.weight[0] = weight
+
+        self.stims.append(stim)
+        self.netcons.append(nc)
+
+    def add_soma(self, diam, **kw):
+        p = combine(default_model_parameters, kw)
+
+        soma = h.Section(name='soma')
+        soma.diam = diam
+        soma.L = diam
+
+        soma.Ra = p['Ra']
+        soma.cm = p['cm']
+
+        # Insert active Hodgkin-Huxley channels in the soma.
+        soma.insert('hh')
+        soma.gnabar_hh = p['gnabar_hh']
+        soma.gkbar_hh = p['gkbar_hh']
+        soma.gl_hh = p['gl_hh']
+        soma.el_hh = p['el_hh']
+
+        self.soma = soma
+
+    def add_dendrite(self, name, geom, to=None, **kw):
+        p = combine(default_model_parameters, kw)
+
+        dend = h.Section(name=name)
+        dend.push()
+        for x, d in geom:
+            h.pt3dadd(x, 0, 0, d)
+        h.pop_section()
+
+        dend.Ra = p['Ra']
+        dend.cm = p['cm']
+
+        # Add passive membrane properties to dendrite.
+        dend.insert('pas')
+        dend.g_pas = p['g_pas']
+        dend.e_pas = p['e_pas']
+
+        dend.nseg = int(p['ncomp'])
+
+        if to is None:
+            if self.soma is not None:
+                dend.connect(self.soma(1))
+        else:
+            dend.connect(self.sections[to](1))
+
+        self.sections[name] = dend
+
+# Run 'current' model, return list of traces.
+# Samples at cable mid- and end-points taken every `sample_dt`;
+# Voltage on all compartments per section reported every `report_dt`.
+
+def run_nrn_sim(tend, sample_dt=0.025, report_t=None, report_dt=None, dt=0.001, **meta):
+    # Instrument mid-point and ends of each section for traces.
+    vtraces = []
+    vtrace_t_hoc = h.Vector()
+
+    ncomps = set([s.nseg for s in h.allsec() if s.name()!='soma'])
+    if len(ncomps)==1:
+        common_ncomp = { 'ncomp': ncomps.pop() }
+    else:
+        common_ncomp = {}
+
+    for s in h.allsec():
+        vmid = h.Vector()
+        vmid.record(s(0.5)._ref_v, sample_dt)
+        vtraces.append((s.name()+".mid", vmid))
+        if s.nseg!=1 or s.name()!='soma':
+            vend = h.Vector()
+            vend.record(s(1.0)._ref_v, sample_dt)
+            vtraces.append((s.name()+".end", vend))
+
+    vtrace_t_hoc.record(h._ref_t, sample_dt)
+
+    # Instrument every segment for section voltage reports.
+    if report_t is None:
+        if report_dt is not None:
+            report_t = [report_dt*(1+i) for i in xrange(int(tend/report_dt))]
+        else:
+            report_t = []
+    elif not isinstance(report_t, list):
+        report_t = [report_t]
+
+    vreports = []
+    vreport_t_hoc = h.Vector(report_t)
+
+    if report_t:
+        for s in h.allsec():
+            nseg = s.nseg
+            ps = [0] + [(i+0.5)/nseg for i in xrange(nseg)] + [1]
+            vs = [h.Vector() for p in ps]
+            for p, v in zip(ps, vs):
+                v.record(s(p)._ref_v, vreport_t_hoc)
+            vreports.append((s.name(), s.L, s.nseg, ps, vs))
+
+    # Run sim
+    h.dt = dt
+    h.tstop = tend
+    h.run()
+
+    # convert results to traces with metadata
+    traces = []
+
+    vtrace_t = list(vtrace_t_hoc)
+    traces.append(combine(meta, common_ncomp, {
+        'name':  'membrane voltage',
+        'sim':   'neuron',
+        'units': 'mV',
+        'data':  combine({n: list(v) for n, v in vtraces}, time=vtrace_t)
+    }))
+
+    # and section reports too
+    vreport_t = list(vreport_t_hoc)
+    for name, length, nseg, ps, vs in vreports:
+        obs = np.column_stack([np.array(v) for v in vs])
+        xs = [length*p for p in ps]
+        for i, t in enumerate(report_t):
+            traces.append(combine(meta, {
+                'name': 'membrane voltage',
+                'sim':  'neuron',
+                'units': {'x': 'µm', name: 'mV'},
+                'ncomp': nseg,
+                'time': t,
+                'data': {
+                    'x': xs,
+                    name: list(obs[i,:])
+                }
+            }))
+
+    return traces
+
+def nrn_assert_no_sections():
+    for s in h.allsec():
+        assert False, 'a section exists'
+
+def nrn_stop():
+    h.quit()
+
+# Run hoc setup on load
+hoc_setup()
+
diff --git a/nrn/simple_exp2_synapse.py b/nrn/simple_exp2_synapse.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf8377fffc4406cb7c74a3a44b6e99b4c996748e
--- /dev/null
+++ b/nrn/simple_exp2_synapse.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+#coding: utf-8
+
+import json
+import nrn_validation as V
+
+V.override_defaults_from_args()
+
+# dendrite geometry: 200 µm long, 1 µm diameter.
+geom = [(0,1), (200, 1)]
+
+model = V.VModel()
+model.add_soma(12.6157)
+model.add_dendrite('dend', geom)
+model.add_exp2_syn('dend')
+
+model.add_spike(10, 0.04)
+model.add_spike(20, 0.04)
+model.add_spike(40, 0.04)
+
+data = V.run_nrn_sim(70, report_dt=10, model='exp2syn')
+print json.dumps(data)
+V.nrn_stop()
+
diff --git a/nrn/simple_exp_synapse.py b/nrn/simple_exp_synapse.py
new file mode 100644
index 0000000000000000000000000000000000000000..b890dc03f9094790956b80d1d9ae7df2cb719431
--- /dev/null
+++ b/nrn/simple_exp_synapse.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+#coding: utf-8
+
+import json
+import nrn_validation as V
+
+V.override_defaults_from_args()
+
+# dendrite geometry: 200 µm long, 1 µm diameter.
+geom = [(0,1), (200, 1)]
+
+model = V.VModel()
+model.add_soma(12.6157)
+model.add_dendrite('dend', geom)
+model.add_exp_syn('dend')
+
+model.add_spike(10, 0.04)
+model.add_spike(20, 0.04)
+model.add_spike(40, 0.04)
+
+data = V.run_nrn_sim(70, report_dt=10, model='expsyn')
+print json.dumps(data)
+V.nrn_stop()
diff --git a/nrn/simple_synapse.py b/nrn/simple_synapse.py
deleted file mode 100644
index de7bf22d7c84f652148605bdb3581defc249d208..0000000000000000000000000000000000000000
--- a/nrn/simple_synapse.py
+++ /dev/null
@@ -1,161 +0,0 @@
-from timeit import default_timer as timer
-import os.path
-from matplotlib import pyplot
-import numpy as np
-import json
-import argparse
-from neuron import gui, h
-
-parser = argparse.ArgumentParser(description='generate spike train ball and stick model with hh channels at soma and pas channels in dendrite')
-parser.add_argument('--plot', action='store_true', dest='plot')
-parser.add_argument('--synapse', metavar='SYN', dest='synapse',
-                    choices=['exp', 'exp2'],
-                    default='exp',
-                    help='use synapse type SYN (exp or exp2)')
-
-# hack to make things work with nrniv ... -python:
-# throw away args before -python foo.py if -python present.
-
-if '-python' in sys.argv:
-    argv = sys.argv[sys.argv.index('-python')+2:]
-else:
-    argv = sys.argv
-
-args = parser.parse_args(argv)
-
-soma = h.Section(name='soma')
-dend = h.Section(name='dend')
-
-dend.connect(soma(1))
-
-# Surface area of cylinder is 2*pi*r*h (sealed ends are implicit).
-# Here we make a square cylinder in that the diameter
-# is equal to the height, so diam = h. ==> Area = 4*pi*r^2
-# We want a soma of 500 microns squared:
-# r^2 = 500/(4*pi) ==> r = 6.2078, diam = 12.6157
-soma.L = soma.diam = 12.6157 # Makes a soma of 500 microns squared.
-dend.L = 200 # microns
-dend.diam = 1 # microns
-
-for sec in h.allsec():
-    sec.Ra = 100    # Axial resistance in Ohm * cm
-    sec.cm = 1      # Membrane capacitance in micro Farads / cm^2
-
-# Insert active Hodgkin-Huxley current in the soma
-soma.insert('hh')
-soma.gnabar_hh = 0.12  # Sodium conductance in S/cm2
-soma.gkbar_hh = 0.036  # Potassium conductance in S/cm2
-soma.gl_hh = 0.0003    # Leak conductance in S/cm2
-soma.el_hh = -54.3     # Reversal potential in mV
-
-# Insert passive current in the dendrite
-dend.insert('pas')
-dend.g_pas = 0.001  # Passive conductance in S/cm2
-dend.e_pas = -65    # Leak reversal potential mV
-
-# create a stimulator that stimulates once at 9 ms
-stim = h.NetStim()
-stim.number = 1
-stim.start  = 9
-
-if args.plot :
-    pyplot.figure(figsize=(8,4)) # Default figsize is (8,6)
-    pyplot.grid()
-
-simdur = 50.0
-h.tstop = simdur
-h.dt = 0.01
-
-start = timer()
-results = []
-for nseg in [5, 11, 51, 101] :
-
-    print 'simulation with ', nseg, ' compartments in dendrite...'
-
-    dend.nseg=nseg
-
-    # add a synapse
-    if args.synapse == 'exp':
-        syn_ = h.ExpSyn(dend(0.5))
-        syn_.tau = 2
-    elif args.synapse == 'exp2':
-        syn_ = h.Exp2Syn(dend(0.5))
-        syn_.tau1 = 0.5 # artificially slow onset
-        syn_.tau2 = 2
-    else:
-        raise RuntimeError('unrecognized synapse type')
-
-    ncstim = h.NetCon(stim, syn_)
-    ncstim.delay = 1 # 1 ms delay (arrives at 10ms)
-    ncstim.weight[0] = 0.04 # NetCon weight is a vector
-
-    ncstim1 = h.NetCon(stim, syn_)
-    ncstim1.delay = 11 # 1 ms delay (arrives at 10ms)
-    ncstim1.weight[0] = 0.04 # NetCon weight is a vector
-    ncstim2 = h.NetCon(stim, syn_)
-    ncstim2.delay = 31 # 1 ms delay (arrives at 10ms)
-    ncstim2.weight[0] = 0.04 # NetCon weight is a vector
-
-    # record voltages
-    v_soma = h.Vector() # soma
-    v_dend = h.Vector() # middle of dendrite
-
-    v_soma.record( soma(0.5)._ref_v)
-    v_dend.record( dend(0.5)._ref_v)
-
-    # record spikes
-    spike_counter_soma = h.APCount(soma(0.5))
-    spike_counter_soma.thresh = 20
-    spike_counter_dend = h.APCount(dend(0.5))
-    spike_counter_dend.thresh = -10
-
-    spikes_soma = h.Vector() # soma
-    spikes_dend = h.Vector() # middle of dendrite
-
-    spike_counter_soma.record(spikes_soma)
-    spike_counter_dend.record(spikes_dend)
-
-    # record time stamps
-    t_vec = h.Vector()
-    t_vec.record(h._ref_t)
-
-    # finally it's time to run the simulation
-    h.run()
-
-    results.append(
-        {
-            "nseg": nseg,
-            "dt" : h.dt,
-            "measurements": {
-               "soma" : {
-                   "thresh" :  spike_counter_soma.thresh,
-                   "spikes" :  spikes_soma.to_python()
-               },
-               "dend" : {
-                   "thresh" :  spike_counter_dend.thresh,
-                   "spikes" :  spikes_dend.to_python()
-               },
-           }
-        }
-    )
-
-    if args.plot :
-        pyplot.plot(t_vec, v_soma,  'k', linewidth=1, label='soma ' + str(nseg))
-        pyplot.plot(t_vec, v_dend,  'b', linewidth=1, label='dend ' + str(nseg))
-
-# time the simulations
-end = timer()
-print "took ", end-start, " seconds"
-
-# save the spike info as in json format
-fp = open('simple_'+args.synapse+'_synapse.json', 'w')
-json.dump(results, fp, indent=2)
-
-if args.plot :
-    pyplot.xlabel('time (ms)')
-    pyplot.ylabel('mV')
-    pyplot.xlim([0, simdur])
-    pyplot.legend()
-
-    pyplot.show()
-
diff --git a/nrn/soma.py b/nrn/soma.py
index 9decb2c1700a6fd113530019835dad3e807c2b9b..867fa7cc7b82e2dabb28d4caa03ca88ca10be3f2 100644
--- a/nrn/soma.py
+++ b/nrn/soma.py
@@ -1,84 +1,21 @@
-from timeit import default_timer as timer
-import os.path
-from matplotlib import pyplot
-import numpy as np
-import json
-import argparse
-import sys
-from neuron import gui, h
-
-parser = argparse.ArgumentParser(description='generate spike train info for a soma with hh channels')
-parser.add_argument('--plot', action='store_true', dest='plot')
-
-# hack to make things work with nrniv ... -python:
-# throw away args before -python foo.py if -python present.
-
-if '-python' in sys.argv:
-    argv = sys.argv[sys.argv.index('-python')+2:]
-else:
-    argv = sys.argv
-
-args = parser.parse_args(argv)
-
-if args.plot :
-    print '-- plotting turned on'
-else :
-    print '-- plotting turned off'
-
-soma = h.Section(name='soma')
-
-soma.L = soma.diam = 18.8
-soma.Ra = 123
+#!/usr/bin/env python
+#coding: utf-8
 
-print "Surface area of soma =", h.area(0.5, sec=soma)
-
-soma.insert('hh')
-
-stim = h.IClamp(soma(0.5))
-stim.delay = 10
-stim.dur = 100
-stim.amp = 0.1
-
-spike_counter = h.APCount(soma(0.5))
-spike_counter.thresh = 0
-
-v_vec = h.Vector()        # Membrane potential vector
-t_vec = h.Vector()        # Time stamp vector
-s_vec = h.Vector()        # Time stamp vector
-v_vec.record(soma(0.5)._ref_v)
-t_vec.record(h._ref_t)
-spike_counter.record(s_vec)
-
-simdur = 120
-
-# initialize plot
-if args.plot :
-    pyplot.figure(figsize=(8,4)) # Default figsize is (8,6)
-
-h.tstop = simdur
+import json
+import nrn_validation as V
 
-# run neuron with multiple dt
-start = timer()
-results = []
-for dt in [0.05, 0.02, 0.01, 0.001, 0.0001]:
-    h.dt = dt
-    h.run()
-    results.append({"dt": dt, "spikes": s_vec.to_python()})
-    if args.plot :
-        pyplot.plot(t_vec, v_vec, label='neuron ' + str(dt))
+V.override_defaults_from_args()
 
-end = timer()
-print "took ", end-start, " seconds"
+# Soma-only model: no dendrites are attached, so no dendrite
+# geometry is needed here.
 
-# save the spike info as in json format
-fp = open('soma.json', 'w')
-json.dump(results, fp, indent=1)
+model = V.VModel()
+model.add_soma(18.8, Ra=123)
+model.add_iclamp(10, 100, 0.1)
 
-if args.plot :
-    pyplot.xlabel('time (ms)')
-    pyplot.ylabel('mV')
-    pyplot.xlim([0, 120])
-    pyplot.grid()
-    pyplot.legend()
-    pyplot.show()
+# NB: this doesn't seem to have converged with
+# the default dt of 0.001.
+data = V.run_nrn_sim(100, report_dt=None, model='soma')
+print json.dumps(data)
+V.nrn_stop()
 
diff --git a/scripts/tsplot b/scripts/tsplot
index 15977538a3ce5d977c7559e536d2e03af845e4e4..9ad6e2a5c714a5f634e37c31c88411f63923f130 100755
--- a/scripts/tsplot
+++ b/scripts/tsplot
@@ -9,6 +9,7 @@ import re
 import logging
 import matplotlib as M
 import matplotlib.pyplot as P
+import numbers
 from distutils.version import LooseVersion
 from itertools import chain, islice, cycle
 
@@ -17,7 +18,7 @@ if LooseVersion(M.__version__)<LooseVersion("1.5.0"):
     logging.critical("require matplotlib version ≥ 1.5.0")
     sys.exit(1)
 
-# Read timeseries data from multiple files, plot each in one planel, with common
+# Read timeseries data from multiple files, plot each in one panel, with common
 # time axis, and optionally sharing a vertical axis as governed by the configuration.
 
 def parse_clargs():
@@ -32,12 +33,18 @@ def parse_clargs():
     P = argparse.ArgumentParser(description='Plot time series data on one or more graphs.')
     P.add_argument('inputs', metavar='FILE', nargs='+',
                    help='time series data in JSON format')
+    P.add_argument('-A', '--abscissa', metavar='AXIS', dest='axis',
+                   type=unicode,
+                   help='use values from AXIS instead of \'time\' as abscissa')
     P.add_argument('-t', '--trange', metavar='RANGE', dest='trange',
                    type=parse_range_spec,
                    help='restrict time axis to RANGE (see below)')
     P.add_argument('-g', '--group', metavar='KEY,...', dest='groupby',
                    type=lambda s: s.split(','),
                    help='plot series with same KEYs on the same axes')
+    P.add_argument('-s', '--select', metavar='EXPR,...', dest='select',
+                   type=lambda s: s.split(','),
+                   help='select only series matching EXPR')
     P.add_argument('-o', '--output', metavar='FILE', dest='outfile',
                    help='save plot to file FILE')
     P.add_argument('--dpi', metavar='NUM', dest='dpi',
@@ -53,6 +60,8 @@ def parse_clargs():
     P.epilog =  'A range is specifed by a pair of floating point numbers min,max where '
     P.epilog += 'either may be omitted to indicate the minimum or maximum of the corresponding '
     P.epilog += 'values in the data.'
+    P.epilog += '\n'
+    P.epilog += 'Filter expressions are of the form KEY=VALUE; the operators !=, <, >, <=, >= and the substring operators ~ and !~ are also supported.'
 
     # modify args to avoid argparse having a fit when it encounters an option
     # argument of the form '<negative number>,...'
@@ -112,15 +121,49 @@ class TimeSeries:
         return self.meta.get('units',"")
 
     def trange(self):
-        return (min(self.t), max(self.t))
+        return self.t.min(), self.t.max()
 
     def yrange(self):
-        return (min(self.y), max(self.y))
-
+        return self.y.min(), self.y.max()
+
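+# Evaluate a single filter expression of the form KEY<op>VALUE against the
+# metadata dict v, e.g. run_select('units=mV', meta) or run_select('name~voltage', meta).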
+def run_select(expr, v):
+    m = re.match(r'([^=>!<~]+)(>=|<=|>|<|!=|=|!~|~)(.*)', expr)
+    if not m:
+        return True
+
+    key, op, test = m.groups()
+    if not key in v:
+        return False
+
+    val = v[key]
+    if op=='~':
+        return test in str(val)
+    elif op=='!~':
+        return test not in str(val)
+    else:
+        if isinstance(val, numbers.Number):
+            try:
+                test=float(test)
+            except ValueError:
+                pass   # non-numeric test value; leave as string for the comparisons below
+
+        if op=='=':
+            return val==test
+        elif op=='!=':
+            return val!=test
+        elif op=='<':
+            return val<test
+        elif op=='>':
+            return val>test
+        elif op=='<=':
+            return val<=test
+        elif op=='>=':
+            return val>=test
+        else:
+            return False
 
-def read_json_timeseries(source):
-    j = json.load(source)
 
+def read_json_timeseries(j, axis='time', select=[]):
     # Convention:
     #
     # Time series data is represented by an object with a subobject 'data' and optionally
@@ -134,12 +177,23 @@ def read_json_timeseries(source):
     # (units are common for all data series in the object) or by a map that
     # takes a label to a unit string.
 
+    # If given a list instead of a hash, collect time series from each entry.
+
     ts_list = []
-    jdata = j['data']
-    ncol = len(jdata)
+    if isinstance(j, list):
+        for o in j:
+            ts_list.extend(read_json_timeseries(o, axis, select))
+        return ts_list
+
+    try:
+        jdata = j['data']
+        ncol = len(jdata)
 
-    times = jdata['time']
-    nsample = len(times)
+        times = jdata[axis]
+        nsample = len(times)
+    except KeyError:
+        # This wasn't a time series after all.
+        return ts_list
 
     def units(label):
         try:
@@ -153,20 +207,29 @@ def read_json_timeseries(source):
 
     i = 1
     for key in jdata.keys():
-        if key=="time": continue
+        if key==axis: continue
 
         meta = dict(j.items() + {'label': key, 'data': None, 'units': units(key)}.items())
         del meta['data']
 
-        ts_list.append(TimeSeries(times, jdata[key], **meta))
+        if all([run_select(sel, meta) for sel in select]):
+            ts_list.append(TimeSeries(times, jdata[key], **meta))
 
     return ts_list
 
+def min_(a,b):
+    if a is None:
+        return b
+    elif b is None:
+        return a
+    else:
+        return min(a,b)
+
 def range_join(r, s):
-    return (min(r[0], s[0]), max(r[1], s[1]))
+    return (min_(r[0], s[0]), max(r[1], s[1]))
 
 def range_meet(r, s):
-    return (max(r[0], s[0]), min(r[1], s[1]))
+    return (max(r[0], s[0]), min_(r[1], s[1]))
 
 
 class PlotData:
@@ -186,7 +249,7 @@ class PlotData:
     def group_label(self):
         return self.group_key_label
 
-    def unique_labels(self):
+    def unique_labels(self, formatter=lambda x: x):
         # attempt to create unique labels for plots in the group based on
         # meta data
         labels = [s.label() for s in self.series]
@@ -209,7 +272,7 @@ class PlotData:
                 for i in xrange(n):
                     prefix = '' if k=='name' else k+'='
                     if vs[i] is not None:
-                        labels[i] += u', '+k+u'='+unicode(vs[i])
+                        labels[i] += u', '+k+u'='+unicode(formatter(vs[i]))
 
         except StopIteration:
             pass
@@ -242,7 +305,12 @@ def make_palette(cm_name, n, cmin=0, cmax=1):
                                M.cm.get_cmap(cm_name))
     return [smap.to_rgba((2*i+1)/float(2*n)) for i in xrange(n)]
 
-def plot_plots(plot_groups, save=None, dpi=None, scale=None):
+def round_numeric_(x):
+    # Round floats for tidier labels; other values pass through unchanged.
+    if not isinstance(x, float): return x
+    return "{:.6g}".format(x)
+
+def plot_plots(plot_groups, axis='time', save=None, dpi=None, scale=None):
     nplots = len(plot_groups)
     plot_groups = sorted(plot_groups, key=lambda g: g.group_label())
 
@@ -285,7 +353,7 @@ def plot_plots(plot_groups, save=None, dpi=None, scale=None):
         # together with a best-effort label
 
         series_by_unit = [[] for i in xrange(len(uniq_units))]
-        unique_labels = group.unique_labels()
+        unique_labels = group.unique_labels(formatter=round_numeric_)
 
         for si in xrange(len(group.series)):
             s = group.series[si]
@@ -340,7 +408,7 @@ def plot_plots(plot_groups, save=None, dpi=None, scale=None):
 
     # adapted from http://stackoverflow.com/questions/6963035
     axis_ymin = min([ax.get_position().ymin for ax in figure.axes])
-    figure.text(0.5, axis_ymin - float(3)/figure.dpi, 'time', ha='center', va='center')
+    figure.text(0.5, axis_ymin - float(3)/figure.dpi, axis, ha='center', va='center')
     if save:
         if scale:
             base = figure.get_size_inches()
@@ -352,9 +420,12 @@ def plot_plots(plot_groups, save=None, dpi=None, scale=None):
 
 args = parse_clargs()
 tss = []
+axis = args.axis if args.axis else 'time'
 for filename in args.inputs:
+    select = args.select if args.select else []
     with open(filename) as f:
-        tss.extend(read_json_timeseries(f))
+        j = json.load(f)
+        tss.extend(read_json_timeseries(j, axis, select))
 
 if args.trange:
     for ts in tss:
@@ -370,4 +441,4 @@ plots = gather_ts_plots(tss, groupby)
 if not args.outfile:
     M.interactive(False)
 
-plot_plots(plots, save=args.outfile, dpi=args.dpi, scale=args.scale)
+plot_plots(plots, axis=axis, save=args.outfile, dpi=args.dpi, scale=args.scale)
diff --git a/src/cell.hpp b/src/cell.hpp
index bd83b1b182238377cdd9e7642c60d0be31fe4892..11f938f52f3e4e69a35f9a3eb951ca2fce95cbd6 100644
--- a/src/cell.hpp
+++ b/src/cell.hpp
@@ -10,6 +10,7 @@
 #include "segment.hpp"
 #include "stimulus.hpp"
 #include "util/debug.hpp"
+#include "util/rangeutil.hpp"
 
 namespace nest {
 namespace mc {
@@ -50,6 +51,10 @@ struct probe_spec {
     probeKind kind;
 };
 
+// tag type used to select the cloning constructor below
+struct clone_cell_t {};
+constexpr clone_cell_t clone_cell{};
+
 /// high-level abstract representation of a cell and its segments
 class cell {
 public:
@@ -76,6 +81,19 @@ public:
     // constructor
     cell();
 
+    // sometimes we really do want a copy (pending big morphology
+    // refactor)
+    cell(clone_cell_t, const cell& other):
+        parents_(other.parents_),
+        stimuli_(other.stimuli_),
+        synapses_(other.synapses_),
+        spike_detectors_(other.spike_detectors_),
+        probes_(other.probes_)
+     {
+        util::assign_by(segments_, other.segments_,
+            [](const segment_ptr& p) { return p->clone(); });
+     }
+
     /// add a soma to the cell
     /// radius must be specified
     soma_segment* add_soma(value_type radius, point_type center=point_type());
@@ -181,7 +199,6 @@ public:
     probes() const { return probes_; }
 
 private:
-
     // storage for connections
     std::vector<index_type> parents_;
 
diff --git a/src/math.hpp b/src/math.hpp
index 006a429e06372412c1fa5dd7d84401a4df4e68f4..4d8ee51c103cc74107938135b8094276fde10c47 100644
--- a/src/math.hpp
+++ b/src/math.hpp
@@ -7,76 +7,90 @@
 namespace nest {
 namespace mc {
 namespace math {
-    template <typename T>
-    T constexpr pi()
-    {
-        return T(3.1415926535897932384626433832795);
-    }
-
-    template <typename T>
-    T constexpr mean(T a, T b)
-    {
-        return (a+b) / T(2);
-    }
-
-    template <typename T>
-    T constexpr mean(std::pair<T,T> const& p)
-    {
-        return (p.first+p.second) / T(2);
-    }
-
-    template <typename T>
-    T constexpr square(T a)
-    {
-        return a*a;
-    }
-
-    template <typename T>
-    T constexpr cube(T a)
-    {
-        return a*a*a;
-    }
-
-    // area of a circle
-    //   pi r-squared
-    template <typename T>
-    T constexpr area_circle(T r)
-    {
-        return pi<T>() * square(r);
-    }
-
-    // volume of a conic frustrum
-    template <typename T>
-    T constexpr area_frustrum(T L, T r1, T r2)
-    {
-        return pi<T>() * (r1+r2) * sqrt(square(L) + square(r1-r2));
-    }
-
-    // surface area of conic frustrum, not including the area of the
-    // circles at either end (i.e. just the area of the surface of rotation)
-    template <typename T>
-    T constexpr volume_frustrum(T L, T r1, T r2)
-    {
-        // this formulation uses one less multiplication
-        return pi<T>()/T(3) * (square(r1+r2) - r1*r2) * L;
-        //return pi<T>()/T(3) * (square(r1) + r1*r2 + square(r2)) * L;
-    }
-
-    // volume of a sphere
-    //   four-thirds pi r-cubed
-    template <typename T>
-    T constexpr volume_sphere(T r)
-    {
-        return T(4)/T(3) * pi<T>() * cube(r);
-    }
-
-    // surface area of a sphere
-    //   four pi r-squared
-    template <typename T>
-    T constexpr area_sphere(T r)
-    {
-        return T(4) * pi<T>() * square(r);
-    }
+
+template <typename T>
+T constexpr pi()
+{
+    return T(3.1415926535897932384626433832795);
+}
+
+template <typename T>
+T constexpr mean(T a, T b)
+{
+    return (a+b) / T(2);
+}
+
+template <typename T>
+T constexpr mean(std::pair<T,T> const& p)
+{
+    return (p.first+p.second) / T(2);
+}
+
+template <typename T>
+T constexpr square(T a)
+{
+    return a*a;
+}
+
+template <typename T>
+T constexpr cube(T a)
+{
+    return a*a*a;
+}
+
+// area of a circle
+//   pi r-squared
+template <typename T>
+T constexpr area_circle(T r)
+{
+    return pi<T>() * square(r);
+}
+
+// lateral surface area of a conic frustum (excluding the end faces):
+//   pi (r1+r2) sqrt(L^2 + (r1-r2)^2)
+template <typename T>
+T constexpr area_frustrum(T L, T r1, T r2)
+{
+    return pi<T>() * (r1+r2) * sqrt(square(L) + square(r1-r2));
+}
+
+// volume of a conic frustum:
+//   pi/3 (r1^2 + r1 r2 + r2^2) L
+template <typename T>
+T constexpr volume_frustrum(T L, T r1, T r2)
+{
+    // this formulation uses one less multiplication
+    return pi<T>()/T(3) * (square(r1+r2) - r1*r2) * L;
+    //return pi<T>()/T(3) * (square(r1) + r1*r2 + square(r2)) * L;
+}
+
+// volume of a sphere
+//   four-thirds pi r-cubed
+template <typename T>
+T constexpr volume_sphere(T r)
+{
+    return T(4)/T(3) * pi<T>() * cube(r);
+}
+
+// surface area of a sphere
+//   four pi r-squared
+template <typename T>
+T constexpr area_sphere(T r)
+{
+    return T(4) * pi<T>() * square(r);
+}
+
+// linear interpolation in interval [a,b]
+template <typename T, typename U>
+T constexpr lerp(T a, T b, U u) {
+    // (1-u)*a + u*b
+    return std::fma(u, b, std::fma(-u, a, a));
+}
+
+// -1, 0 or 1 according to sign of parameter
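+// (e.g. signum(-3.2) == -1, signum(0) == 0, signum(7) == 1)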
+template <typename T>
+int signum(T x) {
+    return (x>T(0)) - (x<T(0));
+}
 
 } // namespace math
 } // namespace mc
diff --git a/src/model.hpp b/src/model.hpp
index 2298e5af8365151654b2ca8a266f0dce2f82041b..5521913ae078bf3ca6e5045e2dca83403ae076d1 100644
--- a/src/model.hpp
+++ b/src/model.hpp
@@ -91,6 +91,11 @@ public:
         future_events().resize(num_groups());
     }
 
+    // one cell per group:
+    model(const recipe& rec):
+        model(rec, util::partition_view(util::make_span(0, rec.num_cells()+1)))
+    {}
+
     void reset() {
         t_ = 0.;
         for (auto& group: cell_groups_) {
@@ -220,6 +225,11 @@ public:
         return bounds.second-bounds.first;
     }
 
+    // access cell_group directly
+    cell_group_type& group(int i) {
+        return cell_groups_[i];
+    }
+
     // register a callback that will perform a export of the global
     // spike vector
     void set_global_spike_callback(spike_export_function export_callback) {
diff --git a/src/recipe.hpp b/src/recipe.hpp
index b451a1a1cffbd2139707e8f8d8a60fa6e2f4e037..fbc78656c0038b67c2bab89364730a2cba06ed71 100644
--- a/src/recipe.hpp
+++ b/src/recipe.hpp
@@ -50,5 +50,41 @@ public:
     virtual std::vector<cell_connection> connections_on(cell_gid_type) const =0;
 };
 
+
+/*
+ * A recipe consisting of a single, unconnected cell is particularly
+ * simple. Note that it keeps a reference to the provided cell, so be
+ * aware of lifetime issues.
+ */
+
+class singleton_recipe: public recipe {
+public:
+    singleton_recipe(const cell& the_cell): cell_(the_cell) {}
+
+    cell_size_type num_cells() const override {
+        return 1;
+    }
+
+    cell get_cell(cell_gid_type) const override {
+        return cell(clone_cell, cell_);
+    }
+
+    cell_count_info get_cell_count_info(cell_gid_type) const override {
+        cell_count_info k;
+        k.num_sources = cell_.detectors().size();
+        k.num_targets = cell_.synapses().size();
+        k.num_probes = cell_.probes().size();
+
+        return k;
+    }
+
+    std::vector<cell_connection> connections_on(cell_gid_type) const override {
+        return std::vector<cell_connection>{};
+    }
+
+private:
+    const cell& cell_;
+};
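+
+// (Used e.g. in the validation tests as
+// `model<lowered_cell> m(singleton_recipe{c});`.)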
+
 } // namespace mc
 } // namespace nest
diff --git a/src/segment.hpp b/src/segment.hpp
index 5a9782b9ce41d8748bb5af6234ab9640c349f360..eedb5095f89166deb5f71735da28d0a74a3ab0cb 100644
--- a/src/segment.hpp
+++ b/src/segment.hpp
@@ -34,12 +34,14 @@ class cable_segment;
 
 // abstract base class for a cell segment
 class segment {
-    public:
-
+public:
     using value_type = double;
     using size_type = cell_local_size_type;
     using point_type = point<value_type>;
 
+    // (Yet more motivation for a separate morphology description class!)
+    virtual std::unique_ptr<segment> clone() const = 0;
+
     segmentKind kind() const {
         return kind_;
     }
@@ -149,8 +151,7 @@ class segment {
         return mechanisms_;
     }
 
-    protected:
-
+protected:
     segmentKind kind_;
     std::vector<parameter_list> mechanisms_;
 };
@@ -167,6 +168,11 @@ public:
         kind_ = segmentKind::none;
     }
 
+    std::unique_ptr<segment> clone() const override {
+        // use default copy constructor
+        return util::make_unique<placeholder_segment>(*this);
+    }
+
     value_type volume() const override
     {
         return std::numeric_limits<value_type>::quiet_NaN();
@@ -187,8 +193,7 @@ public:
         return 0;
     }
 
-    virtual void set_compartments(size_type) override
-    { }
+    virtual void set_compartments(size_type) override {}
 };
 
 class soma_segment : public segment {
@@ -201,19 +206,24 @@ public:
 
     soma_segment() = delete;
 
-    soma_segment(value_type r)
-    :   radius_{r}
+    soma_segment(value_type r):
+        radius_{r}
     {
         kind_ = segmentKind::soma;
         mechanisms_.push_back(membrane_parameters());
     }
 
-    soma_segment(value_type r, point_type const& c)
-    :   soma_segment(r)
+    soma_segment(value_type r, point_type const& c):
+        soma_segment(r)
     {
         center_ = c;
     }
 
+    std::unique_ptr<segment> clone() const override {
+        // use default copy constructor
+        return util::make_unique<soma_segment>(*this);
+    }
+
     value_type volume() const override
     {
         return math::volume_sphere(radius_);
@@ -250,11 +260,9 @@ public:
         return 1;
     }
 
-    void set_compartments(size_type n) override
-    { }
-
-    private :
+    void set_compartments(size_type n) override {}
 
+private:
     // store the center and radius of the soma
     // note that the center may be undefined
     value_type radius_;
@@ -327,6 +335,11 @@ public:
     :   cable_segment(k, {r1, r2}, {p1, p2})
     { }
 
+    std::unique_ptr<segment> clone() const override {
+        // use default copy constructor
+        return util::make_unique<cable_segment>(*this);
+    }
+
     value_type volume() const override
     {
         auto sum = value_type{0};
diff --git a/src/simple_sampler.hpp b/src/simple_sampler.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..cb05573468928ca108e585a21fabd9a74d2bac6e
--- /dev/null
+++ b/src/simple_sampler.hpp
@@ -0,0 +1,77 @@
+#pragma once
+
+/*
+ * Simple(st?) implementation of a recorder of scalar
+ * trace data from a cell probe, with some metadata.
+ */
+
+#include <functional>
+#include <vector>
+
+#include <common_types.hpp>
+#include <util/optional.hpp>
+#include <util/deduce_return.hpp>
+#include <util/transform.hpp>
+
+namespace nest {
+namespace mc {
+
+struct trace_entry {
+    float t;
+    double v;
+};
+
+using trace_data = std::vector<trace_entry>;
+
+// NB: work-around for lack of function return type deduction
+// in C++11; can't use lambda within DEDUCED_RETURN_TYPE.
+
+namespace impl {
+    inline float time(const trace_entry& x) { return x.t; }
+    inline double value(const trace_entry& x) { return x.v; }
+}
+
+inline auto times(const trace_data& trace) DEDUCED_RETURN_TYPE(
+   util::transform_view(trace, impl::time)
+)
+
+inline auto values(const trace_data& trace) DEDUCED_RETURN_TYPE(
+   util::transform_view(trace, impl::value)
+)
+
+class simple_sampler {
+public:
+    trace_data trace;
+
+    simple_sampler(float dt, float t0=0):
+        t0_(t0),
+        sample_dt_(dt),
+        t_next_sample_(t0)
+    {}
+
+    void reset() {
+        trace.clear();
+        t_next_sample_ = t0_;
+    }
+
+    template <typename Time = float, typename Value = double>
+    std::function<util::optional<Time> (Time, Value)> sampler() {
+        return [&](Time t, Value v) -> util::optional<Time> {
+            if (t<(Time)t_next_sample_) {
+                return (Time)t_next_sample_;
+            }
+            else {
+                trace.push_back({float(t), double(v)});
+                return t_next_sample_+=sample_dt_;
+            }
+        };
+    }
+
+private:
+    float t0_ = 0;
+    float sample_dt_ = 0;
+    float t_next_sample_ = 0;
+};
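+
+// Typical use (see e.g. the validation tests): construct with the desired
+// sampling interval, pass the functor returned by sampler() to
+// model::attach_sampler, run the model, then read the recorded (t, v)
+// points from `trace`.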
+
+} // namespace mc
+} // namespace nest
diff --git a/src/util/deduce_return.hpp b/src/util/deduce_return.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..1211ebfbd1368f249f8a077fa07d6acddd6c1d28
--- /dev/null
+++ b/src/util/deduce_return.hpp
@@ -0,0 +1,10 @@
+#pragma once
+
+/* Just one macro, to fill the gap before C++14 */
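+//
+// Usage (illustrative):
+//     auto twice(int x) DEDUCED_RETURN_TYPE(x+x)
+// expands to
+//     auto twice(int x) -> decltype(x+x) { return x+x; }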
+
+#ifdef DEDUCED_RETURN_TYPE
+#undef DEDUCED_RETURN_TYPE
+#endif
+
+#define DEDUCED_RETURN_TYPE(expr) -> decltype(expr) { return expr; }
+
diff --git a/src/util/meta.hpp b/src/util/meta.hpp
index e3d7fc9566a221425bc0d326debaa412a50b16bc..0e58b49e7090b494547711247fc6152253244ad0 100644
--- a/src/util/meta.hpp
+++ b/src/util/meta.hpp
@@ -40,6 +40,16 @@ constexpr auto cend(const T& c) -> decltype(std::end(c)) {
     return std::end(c);
 }
 
+template <typename T>
+constexpr bool empty(const T& c) {
+    return c.empty();
+}
+
+template <typename T, std::size_t N>
+constexpr bool empty(const T (& c)[N]) noexcept {
+    return false; // N cannot be zero
+}
+
 // Types associated with a container or sequence
 
 template <typename Seq>
@@ -55,7 +65,7 @@ struct sequence_traits {
     using const_sentinel = decltype(cend(std::declval<Seq&>()));
 };
 
-// Convenience short cuts
+// Convenience shortcuts for `enable_if`
 
 template <typename T>
 using enable_if_copy_constructible_t =
@@ -188,6 +198,51 @@ public:
     constexpr static bool value = test(nullptr);
 };
 
+template <typename S>
+struct is_sequence {
+private:
+    constexpr static bool test(...) { return false; }
+
+    template <typename U = S>
+    constexpr static bool test(decltype(std::begin(std::declval<U>()))*) { return true; }
+
+public:
+    constexpr static bool value = test(nullptr);
+};
+
+template <typename T>
+using enable_if_sequence_t =
+    enable_if_t<is_sequence<T>::value>;
+
+// No generic lambdas in C++11, so some convenience accessors for pairs that
+// are type-generic
+
+struct first_t {
+    template <typename U, typename V>
+    U& operator()(std::pair<U, V>& p) {
+        return p.first;
+    }
+
+    template <typename U, typename V>
+    const U& operator()(const std::pair<U, V>& p) const {
+        return p.first;
+    }
+};
+constexpr first_t first{};
+
+struct second_t {
+    template <typename U, typename V>
+    V& operator()(std::pair<U, V>& p) {
+        return p.second;
+    }
+
+    template <typename U, typename V>
+    const V& operator()(const std::pair<U, V>& p) const {
+        return p.second;
+    }
+};
+constexpr second_t second{};
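+
+// For example, `util::transform_view(pairs, util::first)` gives a view over
+// the first element of each pair, where a generic lambda would otherwise be
+// required.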
+
 
 
 } // namespace util
diff --git a/src/util/optional.hpp b/src/util/optional.hpp
index 40b40d7c3ff27352638c789575172a5cd5fee8d4..56a75adda1cadf62eb3a9d12bd9c6b6b517027e3 100644
--- a/src/util/optional.hpp
+++ b/src/util/optional.hpp
@@ -115,7 +115,7 @@ namespace detail {
         }
 
         pointer operator->() { return data.ptr(); }
-        const_pointer operator->() const { return data.ptr(); }
+        const_pointer operator->() const { return data.cptr(); }
 
         reference operator*() { return ref(); }
         const_reference operator*() const { return ref(); }
@@ -361,6 +361,7 @@ struct optional<void>: detail::optional_base<void> {
     using base::reset;
 
     optional(): base() {}
+    optional(nothing_t): base() {}
 
     template <typename T>
     optional(T): base(true, true) {}
diff --git a/src/util/path.hpp b/src/util/path.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..6df1194d5d0285dbb06de9acb2fe16d2887f3614
--- /dev/null
+++ b/src/util/path.hpp
@@ -0,0 +1,276 @@
+#pragma once
+
+/*
+ * Small subset of C++17 filesystem::path functionality
+ *
+ * Missing functionality:
+ *   * Wide character methods
+ *   * Locale-aware conversions
+ *   * Path decomposition
+ *   * Path element queries
+ *   * Lexical normalizations and comparisons
+ *   * Path element iteration
+ * 
+ * If we do want to support non-POSIX paths properly, consider
+ * making posix::path into a generic `basic_path` parameterized
+ * on `value_type`, `preferred_separator`, and using CRTP to
+ * handle system-specific conversions.
+ */
+
+#include <string>
+#include <iostream>
+
+#include <util/meta.hpp>
+#include <util/rangeutil.hpp>
+
+namespace nest {
+namespace mc {
+namespace util {
+
+namespace posix {
+    class path {
+    public:
+        using value_type = char;
+        using string_type = std::basic_string<value_type>;
+        static constexpr value_type preferred_separator = '/';
+
+        path() = default;
+        path(const path&) = default;
+        path(path&&) = default;
+
+        path& operator=(const path&) = default;
+        path& operator=(path&&) = default;
+
+        // Swap internals
+
+        void swap(path& other) {
+            p_.swap(other.p_);
+        }
+
+        // Construct or assign from value_type string or sequence.
+
+        template <typename Source>
+        path(const Source& source) { assign(source); }
+
+        template <typename Iter>
+        path(Iter b, Iter e) { assign(b, e); }
+
+        template <typename Source>
+        path& operator=(const Source& source) { return assign(source); }
+
+        path& assign(const path& other) {
+            p_ = other.p_;
+            return *this;
+        }
+
+        path& assign(const string_type& source) {
+            p_ = source;
+            return *this;
+        }
+
+        path& assign(const value_type* source) {
+            p_ = source;
+            return *this;
+        }
+
+        template <typename Seq, typename = enable_if_sequence_t<Seq>>
+        path& assign(const Seq& seq) {
+            util::assign(p_, seq);
+            return *this;
+        }
+
+        template <typename Iter>
+        path& assign(Iter b, Iter e) {
+            p_.assign(b, e);
+            return *this;
+        }
+
+        // Empty?
+        bool empty() const { return p_.empty(); }
+
+        // Make empty
+        void clear() { p_.clear(); }
+
+        // Append path components
+
+        template <typename Source>
+        path& append(const Source& source) {
+            return append(path(source));
+        }
+
+        template <typename Iter>
+        path& append(Iter b, Iter e) {
+            return append(path(b, e));
+        }
+
+        template <typename Source>
+        path& operator/=(const Source& source) {
+            return append(path(source));
+        }
+
+        path& append(const path& tail) {
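+            // Insert a separator only if neither side already supplies one;
+            // if both sides supply one, drop the duplicate from `tail`.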
+            if (!p_.empty() &&
+                !is_separator(p_.back()) &&
+                !tail.p_.empty() &&
+                !is_separator(tail.p_.front()))
+            {
+                p_ += preferred_separator;
+            }
+            if (!p_.empty() &&
+                !tail.empty() &&
+                is_separator(p_.back()) &&
+                is_separator(tail.p_.front()))
+            {
+                p_ += tail.p_.substr(1);
+            }
+            else {
+                p_ += tail.p_;
+            }
+            return *this;
+        }
+
+        // Concat to path
+
+        template <typename Source>
+        path& concat(const Source& source) {
+            return concat(path(source));
+        }
+
+        template <typename Iter>
+        path& concat(Iter b, Iter e) {
+            return concat(path(b, e));
+        }
+
+        template <typename Source>
+        path& operator+=(const Source& source) {
+            return concat(path(source));
+        }
+
+        path& concat(const path& tail) {
+            p_ += tail.p_;
+            return *this;
+        }
+
+        // Native path string
+
+        const value_type* c_str() const { return p_.c_str(); }
+        const string_type& native() const { return p_; }
+        operator string_type() const { return p_; }
+
+        // Generic path string (same for POSIX)
+
+        std::string generic_string() const { return p_; }
+
+        // Queries
+
+        bool is_absolute() const {
+            return !p_.empty() && p_.front()==preferred_separator;
+        }
+
+        bool is_relative() const {
+            return !is_absolute();
+        }
+
+        // Compare
+
+        int compare(const string_type& s) const {
+            return compare(path(s));
+        }
+
+        int compare(const value_type* s) const {
+            return compare(path(s));
+        }
+
+        int compare(const path& other) const {
+            // TODO: replace with cleaner implementation if/when iterators
+            // are implemented.
+
+            return canonical().compare(other.canonical());
+        }
+
+        // Non-member functions
+
+        friend path operator/(const path& a, const path& b) {
+            path p(a);
+            return p/=b;
+        }
+
+        friend std::size_t hash_value(const path& p) {
+            std::hash<path::string_type> hash;
+            return hash(p.p_);
+        }
+
+        friend std::ostream& operator<<(std::ostream& o, const path& p) {
+            return o << p.native();
+        }
+
+        friend std::istream& operator>>(std::istream& i, path& p) {
+            path::string_type s;
+            i >> s;
+            p = s;
+            return i;
+        }
+
+        friend void swap(path& lhs, path& rhs) {
+            lhs.swap(rhs);
+        }
+
+        friend bool operator<(const path& p, const path& q) {
+            return p.compare(q)<0;
+        }
+
+        friend bool operator>(const path& p, const path& q) {
+            return p.compare(q)>0;
+        }
+
+        friend bool operator==(const path& p, const path& q) {
+            return p.compare(q)==0;
+        }
+
+        friend bool operator!=(const path& p, const path& q) {
+            return p.compare(q)!=0;
+        }
+
+        friend bool operator<=(const path& p, const path& q) {
+            return p.compare(q)<=0;
+        }
+
+        friend bool operator>=(const path& p, const path& q) {
+            return p.compare(q)>=0;
+        }
+
+    protected:
+        static bool is_separator(value_type c) {
+            return c=='/' || c==preferred_separator;
+        }
+
+        std::string canonical() const {
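+            // Collapse repeated separators and rewrite a trailing separator
+            // as "/.", so that e.g. "/a//b/" and "/a/b/." compare equal.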
+            std::string n;
+            value_type prev = 0;
+            for (value_type c: p_) {
+                if (is_separator(c)) {
+                    if (!is_separator(prev)) {
+                        n += '/';
+                    }
+                }
+                else {
+                    n += c;
+                }
+                prev = c;
+            }
+            if (!n.empty() && n.back()=='/') {
+                n += '.';
+            }
+            return n;
+        }
+
+        string_type p_;
+    };
+} // namespace posix
+
+using path = posix::path;
+
+} // namespace util
+} // namespace mc
+} // namespace nest
+
diff --git a/src/util/rangeutil.hpp b/src/util/rangeutil.hpp
index d87edc93be85b7211717d979bbbd86506b174574..46101e3eef8160124d73d60e730c31d34c85f7c1 100644
--- a/src/util/rangeutil.hpp
+++ b/src/util/rangeutil.hpp
@@ -5,6 +5,7 @@
  * with ranges.
  */
 
+#include <algorithm>
+#include <functional>
 #include <iterator>
+#include <numeric>
 
 #include <util/meta.hpp>
@@ -78,14 +79,14 @@ AssignableContainer& assign_by(AssignableContainer& c, const Seq& seq, const Pro
 // Note that a const range reference may wrap non-const iterators.
 
 template <typename Seq>
-enable_if_t<!std::is_const<typename util::sequence_traits<Seq>::reference>::value>
+enable_if_t<!std::is_const<typename sequence_traits<Seq>::reference>::value>
 sort(Seq& seq) {
     auto canon = canonical_view(seq);
     std::sort(std::begin(canon), std::end(canon));
 }
 
 template <typename Seq>
-enable_if_t<!std::is_const<typename util::sequence_traits<Seq>::reference>::value>
+enable_if_t<!std::is_const<typename sequence_traits<Seq>::reference>::value>
 sort(const Seq& seq) {
     auto canon = canonical_view(seq);
     std::sort(std::begin(canon), std::end(canon));
@@ -94,7 +95,7 @@ sort(const Seq& seq) {
 // Sort in-place by projection `proj`
 
 template <typename Seq, typename Proj>
-enable_if_t<!std::is_const<typename util::sequence_traits<Seq>::reference>::value>
+enable_if_t<!std::is_const<typename sequence_traits<Seq>::reference>::value>
 sort_by(Seq& seq, const Proj& proj) {
     using value_type = typename sequence_traits<Seq>::value_type;
     auto canon = canonical_view(seq);
@@ -106,7 +107,7 @@ sort_by(Seq& seq, const Proj& proj) {
 }
 
 template <typename Seq, typename Proj>
-enable_if_t<!std::is_const<typename util::sequence_traits<Seq>::reference>::value>
+enable_if_t<!std::is_const<typename sequence_traits<Seq>::reference>::value>
 sort_by(const Seq& seq, const Proj& proj) {
     using value_type = typename sequence_traits<Seq>::value_type;
     auto canon = canonical_view(seq);
@@ -117,6 +118,77 @@ sort_by(const Seq& seq, const Proj& proj) {
         });
 }
 
+// Accumulate by projection `proj`
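+// (e.g. `sum_by(words, [](const std::string& s) { return s.size(); })`
+// totals the lengths of the strings in `words`.)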
+
+template <
+    typename Seq,
+    typename Proj,
+    typename Value = typename transform_iterator<typename sequence_traits<Seq>::const_iterator, Proj>::value_type
+>
+Value sum_by(const Seq& seq, const Proj& proj, Value base = Value{}) {
+    auto canon = canonical_view(transform_view(seq, proj));
+    return std::accumulate(std::begin(canon), std::end(canon), base);
+}
+
+// Maximum element by projection `proj`
+// - returns an iterator `i` into supplied sequence which has the maximum
+//   value of `proj(*i)`.
+
+template <typename Seq, typename Proj>
+typename sequence_traits<Seq>::iterator
+max_element_by(Seq& seq, const Proj& proj) {
+    using value_type = typename sequence_traits<Seq>::value_type;
+    auto canon = canonical_view(seq);
+
+    return std::max_element(std::begin(canon), std::end(canon),
+        [&proj](const value_type& a, const value_type& b) {
+            return proj(a) < proj(b);
+        });
+}
+
+template <typename Seq, typename Proj>
+typename sequence_traits<Seq>::iterator
+max_element_by(const Seq& seq, const Proj& proj) {
+    using value_type = typename sequence_traits<Seq>::value_type;
+    auto canon = canonical_view(seq);
+
+    return std::max_element(std::begin(canon), std::end(canon),
+        [&proj](const value_type& a, const value_type& b) {
+            return proj(a) < proj(b);
+        });
+}
+
+// Maximum value
+//
+// Value semantics (rather than iterator semantics) means it also works with
+// input iterators. Returns a default-constructed value if the sequence is
+// empty.
+//
+// (Consider making generic associative reduction with TBB implementation
+// for random-access iterators?)
+
+template <
+    typename Seq,
+    typename Value = typename sequence_traits<Seq>::value_type,
+    typename Compare = std::less<Value>
+>
+Value max_value(const Seq& seq, Compare cmp = Compare{}) {
+    if (util::empty(seq)) {
+        return Value{};
+    }
+
+    auto i = std::begin(seq);
+    auto e = std::end(seq);
+    auto m = *i;
+    while (++i!=e) {
+        Value x = *i;
+        if (cmp(m, x)) {
+            m = std::move(x);
+        }
+    }
+    return m;
+}
+
 } // namespace util
 } // namespace mc
 } // namespace nest
diff --git a/src/util/sentinel.hpp b/src/util/sentinel.hpp
index eebbfd2382359f388f84567701678713a177feee..3cfd84e381ee7f4ffb52ff853291c9c79bcb86b5 100644
--- a/src/util/sentinel.hpp
+++ b/src/util/sentinel.hpp
@@ -29,8 +29,6 @@ template <typename I, typename S>
 class sentinel_iterator {
     nest::mc::util::either<I, S> e_;
 
-    bool is_sentinel() const { return e_.index()!=0; }
-
     I& iter() {
         EXPECTS(!is_sentinel());
         return e_.template unsafe_get<0>();
@@ -163,6 +161,16 @@ public:
     bool operator>(const sentinel_iterator& x) const {
         return !(x<=*this);
     }
+
+    // access to underlying iterator/sentinel
+
+    bool is_sentinel() const { return e_.index()!=0; }
+    bool is_iterator() const { return e_.index()==0; }
+
+    // default conversion to iterator, defined only if `is_iterator()` is true
+    // (this really simplifies e.g. `max_element_by`, but it might not be
+    // a super good idea.)
+    operator I() const { return iter(); }
 };
 
 template <typename I, typename S>
@@ -179,6 +187,7 @@ sentinel_iterator_t<I, S> make_sentinel_end(const I& i, const S& s) {
     return sentinel_iterator_t<I, S>(s);
 }
 
+
 } // namespace util
 } // namespace mc
 } // namespace nest
diff --git a/src/util/transform.hpp b/src/util/transform.hpp
index a5ab0b348a61fb0f3a174058dc31fdb128776e24..5982cfea668354e1f13b5893336f656cca60f3e5 100644
--- a/src/util/transform.hpp
+++ b/src/util/transform.hpp
@@ -17,6 +17,9 @@ namespace nest {
 namespace mc {
 namespace util {
 
+/* Note: this is effectively only an input iterator if F is not assignable,
+ * e.g. when F is a lambda. */
+
 template <typename I, typename F>
 class transform_iterator: public iterator_adaptor<transform_iterator<I, F>, I> {
     using base = iterator_adaptor<transform_iterator<I, F>, I>;
@@ -33,7 +36,7 @@ class transform_iterator: public iterator_adaptor<transform_iterator<I, F>, I> {
 
 public:
     using typename base::difference_type;
-    using value_type = typename std::result_of<F (inner_value_type)>::type;
+    using value_type = util::decay_t<typename std::result_of<F (inner_value_type)>::type>;
     using pointer = const value_type*;
     using reference = const value_type&;
 
@@ -92,7 +95,6 @@ transform_view(const Seq& s, const F& f) {
     return {make_transform_iterator(cbegin(s), f), make_transform_iterator(cend(s), f)};
 }
 
-
 template <
     typename Seq,
     typename F,
@@ -105,7 +107,6 @@ transform_view(const Seq& s, const F& f) {
     return {make_transform_iterator(cbegin(s), f), cend(s)};
 }
 
-
 } // namespace util
 } // namespace mc
 } // namespace nest
diff --git a/tests/test_common_cells.hpp b/tests/test_common_cells.hpp
index c4641c879950fb6a5249cc25f38c3e6006df69ea..9e302365dbea18cff2b4f82a7e74905c9143c5ed 100644
--- a/tests/test_common_cells.hpp
+++ b/tests/test_common_cells.hpp
@@ -16,14 +16,16 @@ namespace mc {
  *    soma centre, t=[10 ms, 110 ms), 0.1 nA
  */
 
-inline cell make_cell_soma_only() {
+inline cell make_cell_soma_only(bool with_stim = true) {
     cell c;
 
     auto soma = c.add_soma(18.8/2.0);
     soma->mechanism("membrane").set("r_L", 123);
     soma->add_mechanism(hh_parameters());
 
-    c.add_stimulus({0,0.5}, {10., 100., 0.1});
+    if (with_stim) {
+        c.add_stimulus({0,0.5}, {10., 100., 0.1});
+    }
 
     return c;
 }
@@ -46,7 +48,7 @@ inline cell make_cell_soma_only() {
  *    end of dendrite, t=[5 ms, 85 ms), 0.3 nA
  */
 
-inline cell make_cell_ball_and_stick() {
+inline cell make_cell_ball_and_stick(bool with_stim = true) {
     cell c;
 
     auto soma = c.add_soma(12.6157/2.0);
@@ -57,8 +59,45 @@ inline cell make_cell_ball_and_stick() {
     dendrite->mechanism("membrane").set("r_L", 100);
     dendrite->set_compartments(4);
 
-    c.add_stimulus({1,1}, {5., 80., 0.3});
+    if (with_stim) {
+        c.add_stimulus({1,1}, {5., 80., 0.3});
+    }
+    return c;
+}
+
+/*
+ * Create cell with a soma and unbranched tapered dendrite:
+ *
+ * Soma:
+ *    mechanisms: HH
+ *    diameter: 12.6157 µm
+ *
+ * Dendrite:
+ *    mechanisms: passive
+ *    diameter proximal: 1 µm
+ *    diameter distal: 0.2 µm
+ *    length: 200 µm
+ *    axial resistivity: 100 Ω·cm
+ *    compartments: 4
+ *
+ * Stimulus:
+ *    end of dendrite, t=[5 ms, 85 ms), 0.3 nA
+ */
+
+inline cell make_cell_ball_and_taper(bool with_stim = true) {
+    cell c;
+
+    auto soma = c.add_soma(12.6157/2.0);
+    soma->add_mechanism(hh_parameters());
+
+    auto dendrite = c.add_cable(0, segmentKind::dendrite, 1.0/2, 0.2/2, 200.0);
+    dendrite->add_mechanism(pas_parameters());
+    dendrite->mechanism("membrane").set("r_L", 100);
+    dendrite->set_compartments(4);
 
+    if (with_stim) {
+        c.add_stimulus({1,1}, {5., 80., 0.3});
+    }
     return c;
 }
 
@@ -83,7 +122,7 @@ inline cell make_cell_ball_and_stick() {
  *    end of second terminal branch, t=[40 ms, 50 ms), -0.2 nA
  */
 
-inline cell make_cell_ball_and_3sticks() {
+inline cell make_cell_ball_and_3stick(bool with_stim = true) {
     cell c;
 
     auto soma = c.add_soma(12.6157/2.0);
@@ -102,9 +141,26 @@ inline cell make_cell_ball_and_3sticks() {
         }
     }
 
-    c.add_stimulus({2,1}, {5.,  80., 0.45});
-    c.add_stimulus({3,1}, {40., 10.,-0.2});
+    if (with_stim) {
+        c.add_stimulus({2,1}, {5.,  80., 0.45});
+        c.add_stimulus({3,1}, {40., 10.,-0.2});
+    }
+    return c;
+}
 
+/*
+ * Attach voltage probes at each cable mid-point and end-point,
+ * and at soma mid-point.
+ */
+
+inline cell& add_common_voltage_probes(cell& c) {
+    auto ns = c.num_segments();
+    for (auto i=0u; i<ns; ++i) {
+        c.add_probe({{i, 0.5}, probeKind::membrane_voltage});
+        if (i>0) {
+            c.add_probe({{i, 1.0}, probeKind::membrane_voltage});
+        }
+    }
     return c;
 }
 
diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt
index b67683ecebd803018185f9b09b18be7a774dd097..b6a7d811f24a88fdd618ea80e2e2ac17d6e919f5 100644
--- a/tests/unit/CMakeLists.txt
+++ b/tests/unit/CMakeLists.txt
@@ -18,6 +18,7 @@ set(TEST_SOURCES
     test_optional.cpp
     test_parameters.cpp
     test_partition.cpp
+    test_path.cpp
     test_point.cpp
     test_probe.cpp
     test_segment.cpp
diff --git a/tests/unit/test_fvm.cpp b/tests/unit/test_fvm.cpp
index 45c038bf529f80e3339753662bf26e5a1762c168..34318ea90e8fa3173e22f6c93501cc9757603dc3 100644
--- a/tests/unit/test_fvm.cpp
+++ b/tests/unit/test_fvm.cpp
@@ -14,7 +14,7 @@ TEST(fvm, cable)
 {
     using namespace nest::mc;
 
-    nest::mc::cell cell = make_cell_ball_and_3sticks();
+    nest::mc::cell cell = make_cell_ball_and_3stick();
 
     using fvm_cell = fvm::fvm_cell<double, cell_lid_type>;
 
diff --git a/tests/unit/test_fvm_multi.cpp b/tests/unit/test_fvm_multi.cpp
index 7310d8831f952548496ddd387c97add8c5a8dfe0..f6f0a0b27bf422f864e9739ccc7e6bcfc8390bb2 100644
--- a/tests/unit/test_fvm_multi.cpp
+++ b/tests/unit/test_fvm_multi.cpp
@@ -14,7 +14,7 @@ TEST(fvm_multi, cable)
 {
     using namespace nest::mc;
 
-    nest::mc::cell cell=make_cell_ball_and_3sticks();
+    nest::mc::cell cell=make_cell_ball_and_3stick();
 
     using fvm_cell = fvm::fvm_multicell<double, cell_lid_type>;
 
@@ -84,7 +84,7 @@ TEST(fvm_multi, multi_init)
 
     nest::mc::cell cells[] = {
         make_cell_ball_and_stick(),
-        make_cell_ball_and_3sticks()
+        make_cell_ball_and_3stick()
     };
 
     EXPECT_EQ(cells[0].num_segments(), 2u);
diff --git a/tests/unit/test_optional.cpp b/tests/unit/test_optional.cpp
index 0c7758bcf84708cc873827bad5d33cceceabbf18..fe34cc3dc1278f02b866aefc3c0b8942a1169d98 100644
--- a/tests/unit/test_optional.cpp
+++ b/tests/unit/test_optional.cpp
@@ -215,13 +215,14 @@ TEST(optionalm,bind) {
 }
 
 TEST(optionalm,void) {
-    optional<void> a,b(true),c(a),d=b,e(false);
+    optional<void> a,b(true),c(a),d=b,e(false),f(nothing);
 
     EXPECT_FALSE((bool)a);
     EXPECT_TRUE((bool)b);
     EXPECT_FALSE((bool)c);
     EXPECT_TRUE((bool)d);
     EXPECT_TRUE((bool)e);
+    EXPECT_FALSE((bool)f);
 
     auto x=a >> []() { return 1; };
     EXPECT_FALSE((bool)x);
diff --git a/tests/unit/test_path.cpp b/tests/unit/test_path.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..2fea6e5f5ab3256defa7e702da6cdec1d1c61a12
--- /dev/null
+++ b/tests/unit/test_path.cpp
@@ -0,0 +1,204 @@
+#include "gtest.h"
+
+#include <cstring>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include <util/path.hpp>
+
+using namespace nest::mc::util;
+
+TEST(path, posix_ctor) {
+    // test constructor and assignment overloads with sample character sequences.
+    posix::path p1;
+    EXPECT_EQ("", p1.native());
+    EXPECT_TRUE(p1.empty());
+
+    const char* cs = "foo/bar";
+    std::string str_cs(cs);
+    std::vector<char> vec_cs(str_cs.begin(), str_cs.end());
+
+    posix::path p2(cs);
+    posix::path p3(str_cs);
+    posix::path p4(str_cs.begin(), str_cs.end());
+    posix::path p5(vec_cs.begin(), vec_cs.end());
+
+    EXPECT_FALSE(p2.empty());
+    EXPECT_EQ(str_cs, p2.native());
+    EXPECT_EQ(str_cs, p3.native());
+    EXPECT_EQ(str_cs, p4.native());
+    EXPECT_EQ(str_cs, p5.native());
+
+    posix::path p6(p2);
+    EXPECT_EQ(str_cs, p6.native());
+
+    posix::path p7(std::move(p6));
+    EXPECT_EQ(str_cs, p7.native());
+
+    // test operator= overloads (and ref return values)
+    posix::path p;
+    EXPECT_EQ(str_cs, (p=p2).native());
+    EXPECT_EQ(str_cs, (p=cs).native());
+    EXPECT_EQ(str_cs, (p=str_cs).native());
+    EXPECT_EQ(str_cs, (p=vec_cs).native());
+    EXPECT_EQ(str_cs, (p=std::move(p7)).native());
+
+    // test assign overloads (and ref return values)
+    EXPECT_EQ(str_cs, p.assign(p2).native());
+    EXPECT_EQ(str_cs, p.assign(cs).native());
+    EXPECT_EQ(str_cs, p.assign(str_cs).native());
+    EXPECT_EQ(str_cs, p.assign(vec_cs).native());
+    EXPECT_EQ(str_cs, p.assign(vec_cs.begin(), vec_cs.end()).native());
+}
+
+TEST(path, posix_native) {
+    // native path should match string argument exactly
+    std::string ps = "/abs/path";
+    std::string qs = "rel/path.ext";
+
+    EXPECT_EQ(ps, posix::path{ps}.native());
+    EXPECT_EQ(qs, posix::path{qs}.native());
+
+    // default string conversion
+    std::string ps_bis = posix::path{ps};
+    std::string qs_bis = posix::path{qs};
+
+    EXPECT_EQ(ps, ps_bis);
+    EXPECT_EQ(qs, qs_bis);
+
+    // cstr
+    const char *c = posix::path{ps}.c_str();
+    EXPECT_TRUE(!std::strcmp(c, ps.c_str()));
+}
+
+TEST(path, posix_generic) {
+    // expect native and generic to be same for POSIX paths
+    path p("/abs/path"), q("rel/path.ext");
+
+    EXPECT_EQ(p.generic_string(), p.native());
+    EXPECT_EQ(q.generic_string(), q.native());
+}
+
+TEST(path, posix_append) {
+    posix::path p1{""}, q1{"rel"};
+
+    posix::path p(p1);
+    p.append(q1);
+    EXPECT_EQ(p1/q1, p);
+
+    p = p1;
+    p /= q1;
+    EXPECT_EQ(p1/q1, p);
+    EXPECT_EQ("rel", p.native());
+
+    posix::path p2{"ab"}, q2{"rel"};
+
+    p = p2;
+    p.append(q2);
+    EXPECT_EQ(p2/q2, p);
+
+    p = p2;
+    p /= q2;
+    EXPECT_EQ(p2/q2, p);
+    EXPECT_EQ("ab/rel", p.native());
+
+    EXPECT_EQ("foo/bar", (posix::path("foo/")/posix::path("/bar")).native());
+    EXPECT_EQ("foo/bar", (posix::path("foo")/posix::path("/bar")).native());
+    EXPECT_EQ("foo/bar", (posix::path("foo/")/posix::path("bar")).native());
+    EXPECT_EQ("/foo/bar/", (posix::path("/foo/")/posix::path("/bar/")).native());
+}
+
+TEST(path, compare) {
+    posix::path p1("/a/b"), p2("/a//b"), p3("/a/b/c/."), p4("/a/b/c/"), p5("a/bb/c"), p6("a/b/c/");
+
+    EXPECT_EQ(p1, p2);
+    EXPECT_LE(p1, p2);
+    EXPECT_GE(p1, p2);
+    EXPECT_EQ(p3, p4);
+    EXPECT_LE(p3, p4);
+    EXPECT_GE(p3, p4);
+
+    EXPECT_LT(p1, p3);
+    EXPECT_LE(p1, p3);
+    EXPECT_GT(p3, p1);
+    EXPECT_GE(p3, p1);
+    EXPECT_NE(p3, p1);
+
+    EXPECT_NE(p4, p6);
+
+    EXPECT_LT(p4, p5);
+    EXPECT_LE(p4, p5);
+    EXPECT_GT(p5, p4);
+    EXPECT_GE(p5, p4);
+    EXPECT_NE(p5, p4);
+}
+
+TEST(path, posix_concat) {
+    posix::path p1{""}, q1{"tail"};
+
+    posix::path p(p1);
+    p.concat(q1);
+    EXPECT_EQ("tail", p.native());
+
+    p = p1;
+    p += q1;
+    EXPECT_EQ("tail", p.native());
+
+    posix::path p2{"ab"}, q2{"cd"};
+
+    p = p2;
+    p.concat(q2);
+    EXPECT_EQ("abcd", p.native());
+
+    p = p2;
+    p += q2;
+    EXPECT_EQ("abcd", p.native());
+}
+
+TEST(path, posix_absrel_query) {
+    posix::path p1("/abc/def");
+    EXPECT_FALSE(p1.is_relative());
+    EXPECT_TRUE(p1.is_absolute());
+
+    posix::path p2("abc/def");
+    EXPECT_TRUE(p2.is_relative());
+    EXPECT_FALSE(p2.is_absolute());
+
+    posix::path p3("");
+    EXPECT_TRUE(p3.is_relative());
+    EXPECT_FALSE(p3.is_absolute());
+
+    posix::path p4("/");
+    EXPECT_FALSE(p4.is_relative());
+    EXPECT_TRUE(p4.is_absolute());
+
+    posix::path p5("..");
+    EXPECT_TRUE(p5.is_relative());
+    EXPECT_FALSE(p5.is_absolute());
+}
+
+TEST(path, posix_swap) {
+    posix::path p1("foo"), p2("/bar");
+    p1.swap(p2);
+
+    EXPECT_EQ("foo", p2.native());
+    EXPECT_EQ("/bar", p1.native());
+
+    swap(p1, p2);
+
+    EXPECT_EQ("foo", p1.native());
+    EXPECT_EQ("/bar", p2.native());
+}
+
+TEST(path, posix_iostream) {
+    std::istringstream ss("/quux/xyzzy");
+    posix::path p;
+    ss >> p;
+    EXPECT_EQ("/quux/xyzzy", p.native());
+
+    std::ostringstream uu;
+    uu << p;
+    EXPECT_EQ("/quux/xyzzy", uu.str());
+}
+
diff --git a/tests/unit/test_range.cpp b/tests/unit/test_range.cpp
index 1f1e3cb60847ba896eb3bd2ffed447f43dc0c93b..90df4067f743e4e1baa1a6bdefa0442f9bc34b4c 100644
--- a/tests/unit/test_range.cpp
+++ b/tests/unit/test_range.cpp
@@ -16,6 +16,7 @@
 #include <util/range.hpp>
 #include <util/rangeutil.hpp>
 #include <util/sentinel.hpp>
+#include <util/transform.hpp>
 
 using namespace nest::mc;
 
@@ -194,6 +195,32 @@ TEST(range, strictify) {
     EXPECT_EQ(cstr+11, ptr_range.right);
 }
 
+TEST(range, max_element_by) {
+    const char *cstr = "helloworld";
+    auto cstr_range = util::make_range(cstr, null_terminated);
+
+    auto i = util::max_element_by(cstr_range,
+        [](char c) -> int { return -c; });
+
+    EXPECT_TRUE((std::is_same<const char *, decltype(i)>::value));
+    EXPECT_EQ('d', *i);
+    EXPECT_EQ(cstr+9, i);
+}
+
+TEST(range, max_value) {
+    const char *cstr = "hello world";
+    auto cstr_range = util::make_range(cstr, null_terminated);
+
+    // use a lambda to get a range over non-assignable iterators
+    // (at least until we specialize `transform_iterator` for
+    // non-copyable functors passed by const reference).
+    auto i = util::max_value(
+        util::transform_view(cstr_range, [](char c) { return c+1; }));
+
+    EXPECT_EQ('x', i);
+}
+
+
 template <typename V>
 class counter_range: public ::testing::Test {};
 
@@ -312,6 +339,20 @@ TEST(range, sort) {
     EXPECT_EQ(std::string("ywohd"), cstr);
 }
 
+TEST(range, sum_by) {
+    std::string words[] = { "fish", "cakes", "!" };
+    auto prepend_ = [](const std::string& x) { return "_"+x; };
+
+    auto result = util::sum_by(words, prepend_);
+    EXPECT_EQ("_fish_cakes_!", result);
+
+    result = util::sum_by(words, prepend_, std::string("tasty"));
+    EXPECT_EQ("tasty_fish_cakes_!", result);
+
+    auto count = util::sum_by(words, [](const std::string &x) { return x.size(); });
+    EXPECT_EQ(10u, count);
+}
+
 #ifdef WITH_TBB
 
 TEST(range, tbb_split) {
diff --git a/tests/validation/CMakeLists.txt b/tests/validation/CMakeLists.txt
index ae892560492dccca331036ad1481d2f4ce5da7bb..5f61ecc32af9c2083cc69f3945bf8bc8d58417db 100644
--- a/tests/validation/CMakeLists.txt
+++ b/tests/validation/CMakeLists.txt
@@ -6,6 +6,7 @@ set(VALIDATION_SOURCES
 
     # support code
     validation_data.cpp
+    trace_analysis.cpp
 
     # unit test driver
     validate.cpp
@@ -34,5 +35,9 @@ foreach(target ${TARGETS})
         PROPERTIES
         RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/tests"
     )
+
+    if (BUILD_VALIDATION_DATA)
+        add_dependencies(${target} validation_data)
+    endif()
 endforeach()
 
diff --git a/tests/validation/tinyopt.hpp b/tests/validation/tinyopt.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..85a2e4a7dae008819827f79319fd32badd99b01e
--- /dev/null
+++ b/tests/validation/tinyopt.hpp
@@ -0,0 +1,102 @@
+#pragma once
+
+#include <cstring>
+#include <iostream>
+#include <sstream>
+#include <stdexcept>
+#include <string>
+
+#include "gtest.h"
+
+#include <communication/global_policy.hpp>
+
+#include "util/optional.hpp"
+
+namespace nest {
+namespace mc {
+namespace to {
+
+struct parse_opt_error: public std::runtime_error {
+    parse_opt_error(const std::string& s): std::runtime_error(s) {}
+    parse_opt_error(const char *arg, const std::string& s):
+        std::runtime_error(s+": "+arg) {}
+};
+
+inline void usage(const char* argv0, const std::string& usage_str) {
+    const char* basename = std::strrchr(argv0, '/');
+    basename = basename? basename+1: argv0;
+
+    std::cout << "Usage: " << basename << " " << usage_str << "\n";
+}
+
+inline void usage(const char* argv0, const std::string& usage_str, const std::string& parse_err) {
+    const char* basename = std::strrchr(argv0, '/');
+    basename = basename? basename+1: argv0;
+
+    std::cerr << basename << ": " << parse_err << "\n";
+    std::cerr << "Usage: " << basename << " " << usage_str << "\n";
+}
+
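+// Try to parse the option at *argp: accepts the short form `-x VALUE` and
+// the long forms `--opt VALUE` or `--opt=VALUE`. On a match, the parsed
+// value is returned and argp is advanced past the consumed arguments;
+// otherwise `nothing` is returned and argp is left untouched.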
+template <typename V = std::string>
+util::optional<V> parse_opt(char **& argp, char shortopt, const char* longopt=nullptr) {
+    const char* arg = argp[0];
+
+    if (!arg || arg[0]!='-') {
+        return util::nothing;
+    }
+
+    std::stringstream buf;
+
+    if (arg[1]=='-' && longopt) {
+        const char* rest = arg+2;
+        const char* eq = std::strchr(rest, '=');
+
+        if (!std::strcmp(rest, longopt)) {
+            buf.str(argp[1]? argp[1]: throw parse_opt_error(arg, "missing argument"));
+            argp += 2;
+        }
+        else if (eq && !std::strncmp(rest, longopt,  eq-rest)) {
+            buf.str(eq+1);
+            argp += 1;
+        }
+        else {
+            return util::nothing;
+        }
+    }
+    else if (shortopt && arg[1]==shortopt && arg[2]==0) {
+        buf.str(argp[1]? argp[1]: throw parse_opt_error(arg, "missing argument"));
+        argp += 2;
+    }
+    else {
+        return util::nothing;
+    }
+
+    V v;
+    if (!(buf >> v)) {
+        throw parse_opt_error(arg, "failed to parse option argument");
+    }
+    return v;
+}
+
+template <>
+inline util::optional<void> parse_opt(char **& argp, char shortopt, const char* longopt) {
+    if (!*argp || *argp[0]!='-') {
+        return util::nothing;
+    }
+    else if (argp[0][1]=='-' && longopt && !std::strcmp(argp[0]+2, longopt)) {
+        ++argp;
+        return true;
+    }
+    else if (shortopt && argp[0][1]==shortopt && argp[0][2]==0) {
+        ++argp;
+        return true;
+    }
+    else {
+        return util::nothing;
+    }
+}
+
+
+} // namespace to
+} // namespace mc
+} // namespace nest
diff --git a/tests/validation/trace_analysis.cpp b/tests/validation/trace_analysis.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..d641e34bcbad20ea1c64aece9d47f9c17a4856f0
--- /dev/null
+++ b/tests/validation/trace_analysis.cpp
@@ -0,0 +1,102 @@
+#include <algorithm>
+#include <cmath>
+#include <fstream>
+#include <string>
+
+#include <json/json.hpp>
+
+#include "gtest.h"
+
+#include <math.hpp>
+#include <simple_sampler.hpp>
+#include <util/optional.hpp>
+#include <util/partition.hpp>
+#include <util/rangeutil.hpp>
+
+#include "trace_analysis.hpp"
+
+namespace nest {
+namespace mc {
+
+struct trace_interpolant {
+    trace_interpolant(const trace_data& trace): trace_(trace) {}
+
+    double operator()(float t) const {
+        if (trace_.empty()) return std::nan("");
+
+        auto tx = times(trace_);
+        auto vx = values(trace_);
+
+        // special case for end points
+        if (t<tx.front()) return vx.front();
+        if (t>=tx.back()) return vx.back();
+
+        auto part = util::partition_view(tx);
+        auto i = part.index(t);
+        EXPECTS(i != part.npos);
+        auto p = part[i];
+        return math::lerp(vx[i], vx[i+1], (t-p.first)/(p.second-p.first));
+    }
+
+    const trace_data& trace_;
+};
+
+double linf_distance(const trace_data& u, const trace_data& ref) {
+    trace_interpolant f{ref};
+
+    return util::max_value(
+            util::transform_view(u,
+                [&](trace_entry x) { return std::abs(x.v-f(x.t)); }));
+}
+
+std::vector<trace_peak> local_maxima(const trace_data& u) {
+    std::vector<trace_peak> peaks;
+    if (u.size()<2) return peaks;
+
+    auto tx = times(u);
+    auto vx = values(u);
+
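+    // Track the sign of successive differences: a rise (+) followed by a
+    // fall (-) brackets a local maximum between the start of the rise and
+    // the current sample.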
+    int s_prev = math::signum(vx[1]-vx[0]);
+    std::size_t i_start = 0;
+
+    for (std::size_t i = 2; i<u.size()-1; ++i) {
+        int s = math::signum(vx[i]-vx[i-1]);
+        if (s_prev==1 && s==-1) {
+            // found a peak between i_start and i;
+            // the observed peak value is at i-1.
+            float t0 = tx[i_start];
+            float t1 = tx[i];
+
+            peaks.push_back({(t0+t1)/2, vx[i-1], (t1-t0)/2});
+        }
+
+        if (s!=0) {
+            s_prev = s;
+            if (s_prev>0) {
+                i_start = i-1;
+            }
+        }
+    }
+    return peaks;
+}
+
+util::optional<trace_peak> peak_delta(const trace_data& a, const trace_data& b) {
+    auto p = local_maxima(a);
+    auto q = local_maxima(b);
+
+    // No comparison is possible if the peak counts differ, and there is
+    // nothing to report if neither trace has any peaks.
+    if (p.size()!=q.size() || p.empty()) return util::nothing;
+
+    auto max_delta = p[0]-q[0];
+
+    for (std::size_t i = 1u; i<p.size(); ++i) {
+        auto delta = p[i]-q[i];
+        // pick maximum delta by greatest lower bound on dt
+        if (std::abs(delta.t)-delta.t_err>std::abs(max_delta.t)-max_delta.t_err) {
+            max_delta = delta;
+        }
+    }
+    return max_delta;
+}
+
+} // namespace mc
+} // namespace nest
+
diff --git a/tests/validation/trace_analysis.hpp b/tests/validation/trace_analysis.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..36dc6a91d84e3ea78ab3bb49e599a6f0434a1369
--- /dev/null
+++ b/tests/validation/trace_analysis.hpp
@@ -0,0 +1,106 @@
+#pragma once
+
+#include <vector>
+
+#include "gtest.h"
+
+#include <simple_sampler.hpp>
+#include <util/optional.hpp>
+#include <util/path.hpp>
+#include <util/rangeutil.hpp>
+
+namespace nest {
+namespace mc {
+
+/* Trace data comparison */
+
+// Compute max |v_i - f(t_i)| where (t_i, v_i) are the samples of the
+// first trace `u` and f is the piecewise linear interpolant of the
+// second trace `ref`.
+
+double linf_distance(const trace_data& u, const trace_data& ref);
+
+// Find local maxima (peaks) in a trace, excluding end points.
+
+struct trace_peak {
+    float t;
+    double v;
+    float t_err;
+
+    friend trace_peak operator-(trace_peak x, trace_peak y) {
+        return {x.t-y.t, x.v-y.v, x.t_err+y.t_err};
+    }
+};
+
+std::vector<trace_peak> local_maxima(const trace_data& u);
+
+// Compare differences in peak times across two traces.
+// Returns the largest magnitude displacement between corresponding peaks,
+// together with a sampling error bound, or `nothing` if the number of
+// peaks differ (or if there are no peaks).
+
+util::optional<trace_peak> peak_delta(const trace_data& a, const trace_data& b);
+
+// Record of error data for convergence testing.
+
+template <typename Param>
+struct conv_entry {
+    std::string id;
+    Param param;
+    double linf;
+    util::optional<trace_peak> pd;
+};
+
+template <typename Param>
+using conv_data = std::vector<conv_entry<Param>>;
+
+// Assert error convergence (gtest).
+
+template <typename Param>
+void assert_convergence(const conv_data<Param>& cs) {
+    if (cs.size()<2) return;
+
+    auto tbound = [](trace_peak p) { return std::abs(p.t)+p.t_err; };
+    auto smallest_pd = cs[0].pd;
+
+    for (unsigned i = 1; i<cs.size(); ++i) {
+        const auto& p = cs[i-1];
+        const auto& c = cs[i];
+
+        EXPECT_LE(c.linf, p.linf) << "L∞ error increase";
+        EXPECT_TRUE(c.pd || (!c.pd && !p.pd)) << "divergence in peak count";
+
+        if (c.pd && smallest_pd) {
+            double t = std::abs(c.pd->t);
+            EXPECT_LE(t, c.pd->t_err+tbound(*smallest_pd)) << "divergence in max peak displacement";
+        }
+
+        if (c.pd && (!smallest_pd || tbound(*c.pd)<tbound(*smallest_pd))) {
+            smallest_pd = c.pd;
+        }
+    }
+}
+
+// Report table of convergence results.
+// (Takes collection with pair<string, conv_data>
+// entries.)
+
+template <typename Map>
+void report_conv_table(std::ostream& out, const Map& tbl, const std::string& param_name) {
+    out << "location," << param_name << ",linf,peak_dt,peak_dt_err\n";
+    for (const auto& p: tbl) {
+        const auto& location = p.first;
+        for (const auto& c: p.second) {
+            out << location << "," << c.param << "," << c.linf << ",";
+            if (c.pd) {
+                out << c.pd->t << "," << c.pd->t_err << "\n";
+            }
+            else {
+                out << "NA,NA\n";
+            }
+        }
+    }
+}
+
+} // namespace mc
+} // namespace nest
diff --git a/tests/validation/validate.cpp b/tests/validation/validate.cpp
index b89a7b54223d7c2342c9882749c28a886782b316..e3d457b939cac1c584588c8c1f94672a2d655587 100644
--- a/tests/validation/validate.cpp
+++ b/tests/validation/validate.cpp
@@ -1,31 +1,69 @@
 #include <cstring>
 #include <iostream>
-#include <fstream>
-#include <numeric>
-#include <vector>
+#include <sstream>
+#include <string>
+#include <exception>
 
 #include "gtest.h"
+
+#include <communication/global_policy.hpp>
+
+#include "tinyopt.hpp"
 #include "validation_data.hpp"
 
-int usage(const char* argv0) {
-    std::cerr << "usage: " << argv0 << " [-p|--path validation_data_directory]\n";
-    return 1;
-}
+using namespace nest::mc;
+
+const char* usage_str =
+"[OPTION]...\n"
+"\n"
+"  -v, --verbose       Print results to stdout\n"
+"  -o, --output=FILE   Save traces from simulations to FILE\n"
+"  -p, --path=DIR      Look for validation reference data in DIR\n"
+"  -m, --max-comp=N    Run convergence tests to a maximum of N\n"
+"                      compartments per segment\n"
+"  -h, --help          Display usage information and exit\n";
 
 int main(int argc, char **argv) {
+    using to::parse_opt;
+
+    communication::global_policy_guard global_guard(argc, argv);
     ::testing::InitGoogleTest(&argc, argv);
 
-    if (argv[1] && (!std::strcmp(argv[1], "-p") || !std::strcmp(argv[1], "--path"))) {
-        if (argv[2]) {
-            testing::g_validation_data.set_path(argv[2]);
-        }
-        else {
-            return usage(argv[0]);
+    int rv = 0;
+    try {
+        auto arg = argv+1;
+        while (*arg) {
+            if (auto o = parse_opt<std::string>(arg, 'p', "path")) {
+                g_trace_io.set_datadir(*o);
+            }
+            else if (auto o = parse_opt<std::string>(arg, 'o', "output")) {
+                g_trace_io.set_output(*o);
+            }
+            else if (auto o = parse_opt<int>(arg, 'm', "max-comp")) {
+                g_trace_io.set_max_ncomp(*o);
+            }
+            else if (auto o = parse_opt<void>(arg, 'v', "verbose")) {
+                g_trace_io.set_verbose(true);
+            }
+            else if (auto o = parse_opt<void>(arg, 'h', "help")) {
+                to::usage(argv[0], usage_str);
+                return 0;
+            }
+            else {
+                throw to::parse_opt_error(*arg, "unrecognized option");
+            }
         }
+
+        rv = RUN_ALL_TESTS();
+    }
+    catch (to::parse_opt_error& e) {
+        to::usage(argv[0], usage_str, e.what());
+        return 1;
     }
-    else if (argv[1]) {
-        return usage(argv[0]);
+    catch (std::exception& e) {
+        std::cerr << "caught exception: " << e.what() << "\n";
+        return 2;
     }
 
-    return RUN_ALL_TESTS();
+    return rv;
 }
diff --git a/tests/validation/validate_ball_and_stick.cpp b/tests/validation/validate_ball_and_stick.cpp
index 62beaa74a346c1635ca47018646e10999b7f5c54..0a57fa840e52e70ea7dcaef4dc14c0f54ee86d93 100644
--- a/tests/validation/validate_ball_and_stick.cpp
+++ b/tests/validation/validate_ball_and_stick.cpp
@@ -1,293 +1,296 @@
 #include <fstream>
+#include <utility>
+
 #include <json/json.hpp>
 
 #include <common_types.hpp>
 #include <cell.hpp>
-//#include <fvm_cell.hpp>
 #include <fvm_multicell.hpp>
-#include <util/range.hpp>
+#include <model.hpp>
+#include <recipe.hpp>
+#include <simple_sampler.hpp>
+#include <util/rangeutil.hpp>
+#include <util/transform.hpp>
 
 #include "gtest.h"
 
 #include "../test_common_cells.hpp"
 #include "../test_util.hpp"
+#include "trace_analysis.hpp"
 #include "validation_data.hpp"
 
-// compares results with those generated by nrn/ball_and_stick.py
-TEST(ball_and_stick, neuron_baseline)
-{
-    using namespace nest::mc;
+using namespace nest::mc;
+
+// TODO: further consolidate common code
+
+TEST(ball_and_stick, neuron_ref) {
+    // compare voltages against reference data produced from
+    // nrn/ball_and_stick.py
+
     using namespace nlohmann;
 
-    nest::mc::cell cell = make_cell_ball_and_stick();
-
-    // load data from file
-    auto cell_data = testing::g_validation_data.load("ball_and_stick.json");
-    EXPECT_TRUE(cell_data.size()>0);
-    if(cell_data.size()==0) return;
-
-    json& nrn =
-        *std::max_element(
-            cell_data.begin(), cell_data.end(),
-            [](json& lhs, json& rhs) {return lhs["nseg"]<rhs["nseg"];}
-        );
-
-    auto& measurements = nrn["measurements"];
-
-    double dt = nrn["dt"];
-    double tfinal =   100.; // ms
-    int nt = tfinal/dt;
-
-    // inline type for storing the results of a simulation along with
-    // the information required to compare two simulations for accuracy
-    struct result {
-        std::vector<std::vector<double>> spikes;
-        std::vector<std::vector<double>> baseline_spikes;
-        std::vector<testing::spike_comparison> comparisons;
-        std::vector<double> thresh;
-        int n_comparments;
-
-        result(int nseg, double dt,
-          std::vector<std::vector<double>> &v,
-          json& measurements
-        ) {
-            n_comparments = nseg;
-            baseline_spikes = {
-                measurements["soma"]["spikes"],
-                measurements["dend"]["spikes"],
-                measurements["clamp"]["spikes"]
-            };
-            thresh = {
-                measurements["soma"]["thresh"],
-                measurements["dend"]["thresh"],
-                measurements["clamp"]["thresh"]
-            };
-            for(auto i=0; i<3; ++i) {
-                // calculate the NEST MC spike times
-                spikes.push_back
-                    (testing::find_spikes(v[i], thresh[i], dt));
-                // compare NEST MC and baseline NEURON spike times
-                comparisons.push_back
-                    (testing::compare_spikes(spikes[i], baseline_spikes[i]));
-            }
-        }
+    using lowered_cell = fvm::fvm_multicell<double, cell_local_size_type>;
+    auto& V = g_trace_io;
+
+    bool verbose = V.verbose();
+    int max_ncomp = V.max_ncomp();
+
+    // load validation data
+    auto ref_data = V.load_traces("neuron_ball_and_stick.json");
+    bool run_validation =
+        ref_data.count("soma.mid") &&
+        ref_data.count("dend.mid") &&
+        ref_data.count("dend.end");
+
+    EXPECT_TRUE(run_validation);
+
+    // generate test data
+    cell c = make_cell_ball_and_stick();
+    add_common_voltage_probes(c);
+
+    float sample_dt = .025;
+    std::pair<const char *, simple_sampler> samplers[] = {
+        {"soma.mid", simple_sampler(sample_dt)},
+        {"dend.mid", simple_sampler(sample_dt)},
+        {"dend.end", simple_sampler(sample_dt)}
     };
 
-    using fvm_cell = fvm::fvm_multicell<double, cell_local_size_type>;
-    std::vector<fvm_cell::detector_handle> detectors(cell.detectors().size());
-    std::vector<fvm_cell::target_handle> targets(cell.synapses().size());
-    std::vector<fvm_cell::probe_handle> probes(cell.probes().size());
-
-    std::vector<result> results;
-    for (auto run_index=0u; run_index<cell_data.size(); ++run_index) {
-        auto& run = cell_data[run_index];
-        int num_compartments = run["nseg"];
-        cell.segment(1)->set_compartments(num_compartments);
-        std::vector<std::vector<double>> v(3);
-
-        // make the lowered finite volume cell
-
-        fvm_cell model;
-        model.initialize(util::singleton_view(cell), detectors, targets, probes);
-        auto graph = cell.model();
-
-        // run the simulation
-        auto soma_comp = nest::mc::find_compartment_index({0,0}, graph);
-        auto dend_comp = nest::mc::find_compartment_index({1,0.5}, graph);
-        auto clamp_comp = nest::mc::find_compartment_index({1,1.0}, graph);
-        v[0].push_back(model.voltage()[soma_comp]);
-        v[1].push_back(model.voltage()[dend_comp]);
-        v[2].push_back(model.voltage()[clamp_comp]);
-        for(auto i=0; i<nt; ++i) {
-            model.advance(dt);
-            // save voltage at soma
-            v[0].push_back(model.voltage()[soma_comp]);
-            v[1].push_back(model.voltage()[dend_comp]);
-            v[2].push_back(model.voltage()[clamp_comp]);
+    std::map<std::string, std::vector<conv_entry<int>>> conv_results;
+
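+    // Sweep the spatial resolution: double the compartment count each
+    // iteration, up to the configured max_ncomp, collecting error metrics
+    // against the reference trace for each probe site.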
+    for (int ncomp = 10; ncomp<max_ncomp; ncomp*=2) {
+        for (auto& se: samplers) {
+            se.second.reset();
         }
+        c.cable(1)->set_compartments(ncomp);
+        model<lowered_cell> m(singleton_recipe{c});
 
-        results.push_back( {run["nseg"], dt, v, measurements} );
-    }
+        // the ball-and-stick cell should have three voltage probes:
+        // centre of soma, centre of dendrite, end of dendrite.
+
+        m.attach_sampler({0u, 0u}, samplers[0].second.sampler<>());
+        m.attach_sampler({0u, 1u}, samplers[1].second.sampler<>());
+        m.attach_sampler({0u, 2u}, samplers[2].second.sampler<>());
+
+        m.run(100, 0.001);
+
+        for (auto& se: samplers) {
+            std::string key = se.first;
+            const simple_sampler& s = se.second;
+
+            // save trace
+            json meta = {
+                {"name", "membrane voltage"},
+                {"model", "ball_and_stick"},
+                {"sim", "nestmc"},
+                {"ncomp", ncomp},
+                {"units", "mV"}};
 
-    // print results
-    auto colors = {memory::util::kWhite, memory::util::kGreen, memory::util::kYellow};
-    for(auto& r : results){
-        auto color = colors.begin();
-        for(auto const& result : r.comparisons) {
-            std::cout << std::setw(5) << r.n_comparments << " compartments : ";
-            std::cout << memory::util::colorize(util::pprintf("%\n", result), *(color++));
+            V.save_trace(key, s.trace, meta);
+
+            // compute metrics
+            if (run_validation) {
+                double linf = linf_distance(s.trace, ref_data[key]);
+                auto pd = peak_delta(s.trace, ref_data[key]);
+
+                conv_results[key].push_back({key, ncomp, linf, pd});
+            }
         }
     }
 
-    // sort results in ascending order of compartments
-    std::sort(
-        results.begin(), results.end(),
-        [](const result& l, const result& r)
-            {return l.n_comparments<r.n_comparments;}
-    );
-
-    // the strategy for testing is the following:
-    //  1. test that the solution converges to the finest reference solution as
-    //     the number of compartments increases (i.e. spatial resolution is
-    //     refined)
-    for(auto i=1u; i<results.size(); ++i) {
-        for(auto j=0; j<3; ++j) {
-            EXPECT_TRUE(
-                  results[i].comparisons[j].max_relative_error()
-                < results[i-1].comparisons[j].max_relative_error()
-            );
-        }
+    if (verbose && run_validation) {
+        report_conv_table(std::cout, conv_results, "ncomp");
     }
 
-    //  2. test that the best solution (i.e. with most compartments) matches the
-    //     reference solution closely (less than 0.5% over the course of 100ms
-    //     simulation)
-    auto tol = 0.5;
-    for(auto j=0; j<3; ++j) {
-        EXPECT_TRUE(results.back().comparisons[j].max_relative_error()*100<tol);
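+    // Check that the error metrics for each probe site converge as the
+    // compartment count is refined.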
+    for (auto key: util::transform_view(samplers, util::first)) {
+        SCOPED_TRACE(key);
+
+        const auto& results = conv_results[key];
+        assert_convergence(results);
     }
 }
 
-// compares results with those generated by nrn/ball_and_3sticks.py
-TEST(ball_and_3stick, neuron_baseline)
-{
-    using namespace nest::mc;
+TEST(ball_and_taper, neuron_ref) {
+    // compare voltages against reference data produced from
+    // nrn/ball_and_taper.py
+
     using namespace nlohmann;
 
-    nest::mc::cell cell = make_cell_ball_and_3sticks();
-
-    // load data from file
-    auto cell_data = testing::g_validation_data.load("ball_and_3stick.json");
-    EXPECT_TRUE(cell_data.size()>0);
-    if(cell_data.size()==0) return;
-
-    json& nrn =
-        *std::max_element(
-            cell_data.begin(), cell_data.end(),
-            [](json& lhs, json& rhs) {return lhs["nseg"]<rhs["nseg"];}
-        );
-
-    auto& measurements = nrn["measurements"];
-
-    double dt = nrn["dt"];
-    double tfinal =   100.; // ms
-    int nt = tfinal/dt;
-
-    // inline type for storing the results of a simulation along with
-    // the information required to compare two simulations for accuracy
-    struct result {
-        std::vector<std::vector<double>> spikes;
-        std::vector<std::vector<double>> baseline_spikes;
-        std::vector<testing::spike_comparison> comparisons;
-        std::vector<double> thresh;
-        int n_comparments;
-
-        result(int nseg, double dt,
-          std::vector<std::vector<double>> &v,
-          json& measurements
-        ) {
-            n_comparments = nseg;
-            baseline_spikes = {
-                measurements["soma"]["spikes"],
-                measurements["dend"]["spikes"],
-                measurements["clamp"]["spikes"]
-            };
-            thresh = {
-                measurements["soma"]["thresh"],
-                measurements["dend"]["thresh"],
-                measurements["clamp"]["thresh"]
-            };
-            for(auto i=0; i<3; ++i) {
-                // calculate the NEST MC spike times
-                spikes.push_back
-                    (testing::find_spikes(v[i], thresh[i], dt));
-                // compare NEST MC and baseline NEURON spike times
-                comparisons.push_back
-                    (testing::compare_spikes(spikes[i], baseline_spikes[i]));
+    using lowered_cell = fvm::fvm_multicell<double, cell_local_size_type>;
+    auto& V = g_trace_io;
+
+    bool verbose = V.verbose();
+    int max_ncomp = V.max_ncomp();
+
+    // load validation data
+    auto ref_data = V.load_traces("neuron_ball_and_taper.json");
+    bool run_validation =
+        ref_data.count("soma.mid") &&
+        ref_data.count("taper.mid") &&
+        ref_data.count("taper.end");
+
+    EXPECT_TRUE(run_validation);
+
+    // generate test data
+    cell c = make_cell_ball_and_taper();
+    add_common_voltage_probes(c);
+
+    float sample_dt = .025;
+    std::pair<const char *, simple_sampler> samplers[] = {
+        {"soma.mid", simple_sampler(sample_dt)},
+        {"taper.mid", simple_sampler(sample_dt)},
+        {"taper.end", simple_sampler(sample_dt)}
+    };
+
+    std::map<std::string, std::vector<conv_entry<int>>> conv_results;
+
+    for (int ncomp = 10; ncomp<max_ncomp; ncomp*=2) {
+        for (auto& se: samplers) {
+            se.second.reset();
+        }
+        c.cable(1)->set_compartments(ncomp);
+        model<lowered_cell> m(singleton_recipe{c});
+
+        // the ball-and-taper cell should have three voltage probes:
+        // centre of soma, centre of taper, end of taper.
+
+        m.attach_sampler({0u, 0u}, samplers[0].second.sampler<>());
+        m.attach_sampler({0u, 1u}, samplers[1].second.sampler<>());
+        m.attach_sampler({0u, 2u}, samplers[2].second.sampler<>());
+
+        m.run(100, 0.001);
+
+        for (auto& se: samplers) {
+            std::string key = se.first;
+            const simple_sampler& s = se.second;
+
+            // save trace
+            json meta = {
+                {"name", "membrane voltage"},
+                {"model", "ball_and_taper"},
+                {"sim", "nestmc"},
+                {"ncomp", ncomp},
+                {"units", "mV"}};
+
+            V.save_trace(key, s.trace, meta);
+
+            // compute metrics
+            if (run_validation) {
+                double linf = linf_distance(s.trace, ref_data[key]);
+                auto pd = peak_delta(s.trace, ref_data[key]);
+
+                conv_results[key].push_back({key, ncomp, linf, pd});
             }
         }
+    }
+
+    if (verbose && run_validation) {
+        report_conv_table(std::cout, conv_results, "ncomp");
+    }
+
+    for (auto key: util::transform_view(samplers, util::first)) {
+        SCOPED_TRACE(key);
+
+        const auto& results = conv_results[key];
+        assert_convergence(results);
+    }
+}
+
+TEST(ball_and_3stick, neuron_ref) {
+    // compare voltages against reference data produced from
+    // nrn/ball_and_3stick.py
+
+    using namespace nlohmann;
+
+    using lowered_cell = fvm::fvm_multicell<double, cell_local_size_type>;
+    auto& V = g_trace_io;
+
+    bool verbose = V.verbose();
+    int max_ncomp = V.max_ncomp();
+
+    // load validation data
+    auto ref_data = V.load_traces("neuron_ball_and_3stick.json");
+    bool run_validation =
+        ref_data.count("soma.mid") &&
+        ref_data.count("dend1.mid") &&
+        ref_data.count("dend1.end") &&
+        ref_data.count("dend2.mid") &&
+        ref_data.count("dend2.end") &&
+        ref_data.count("dend3.mid") &&
+        ref_data.count("dend3.end");
+
+    EXPECT_TRUE(run_validation);
+
+    // generate test data
+    cell c = make_cell_ball_and_3stick();
+    add_common_voltage_probes(c);
+
+    float sample_dt = .025;
+    std::pair<const char *, simple_sampler> samplers[] = {
+        {"soma.mid", simple_sampler(sample_dt)},
+        {"dend1.mid", simple_sampler(sample_dt)},
+        {"dend1.end", simple_sampler(sample_dt)},
+        {"dend2.mid", simple_sampler(sample_dt)},
+        {"dend2.end", simple_sampler(sample_dt)},
+        {"dend3.mid", simple_sampler(sample_dt)},
+        {"dend3.end", simple_sampler(sample_dt)}
     };
 
-    using fvm_cell = fvm::fvm_multicell<double, cell_local_size_type>;
-    std::vector<fvm_cell::detector_handle> detectors(cell.detectors().size());
-    std::vector<fvm_cell::target_handle> targets(cell.synapses().size());
-    std::vector<fvm_cell::probe_handle> probes(cell.probes().size());
-
-    std::vector<result> results;
-    auto start = testing::tic();
-    for(auto run_index=0u; run_index<cell_data.size(); ++run_index) {
-        auto& run = cell_data[run_index];
-        int num_compartments = run["nseg"];
-        for (auto& seg: cell.segments()) {
-            if (seg->is_dendrite()) {
-                seg->set_compartments(num_compartments);
-            }
+    std::map<std::string, std::vector<conv_entry<int>>> conv_results;
+
+    for (int ncomp = 10; ncomp<max_ncomp; ncomp*=2) {
+        for (auto& se: samplers) {
+            se.second.reset();
         }
-        std::vector<std::vector<double>> v(3);
-
-        // make the lowered finite volume cell
-        fvm_cell model;
-        model.initialize(util::singleton_view(cell), detectors, targets, probes);
-        auto graph = cell.model();
-
-        // set initial conditions
-
-        // run the simulation
-        auto soma_comp  = nest::mc::find_compartment_index({0,0.}, graph);
-        auto dend_comp  = nest::mc::find_compartment_index({1,1.}, graph);
-        auto clamp_comp = nest::mc::find_compartment_index({2,1.}, graph);
-        v[0].push_back(model.voltage()[soma_comp]);
-        v[1].push_back(model.voltage()[dend_comp]);
-        v[2].push_back(model.voltage()[clamp_comp]);
-        for(auto i=0; i<nt; ++i) {
-            model.advance(dt);
-            // save voltage at soma
-            v[0].push_back(model.voltage()[soma_comp]);
-            v[1].push_back(model.voltage()[dend_comp]);
-            v[2].push_back(model.voltage()[clamp_comp]);
+        c.cable(1)->set_compartments(ncomp);
+        c.cable(2)->set_compartments(ncomp);
+        c.cable(3)->set_compartments(ncomp);
+        model<lowered_cell> m(singleton_recipe{c});
+
+        // the ball-and-3stick cell should have seven voltage probes:
+        // the centre of the soma, followed by the centre and the end
+        // of each of the three dendrite sections.
+
+        for (unsigned i = 0; i < util::size(samplers); ++i) {
+            m.attach_sampler({0u, i}, samplers[i].second.sampler<>());
         }
 
-        results.push_back( {num_compartments, dt, v, measurements} );
-    }
-    auto time_taken = testing::toc(start);
-    std::cout << "took " << time_taken << " seconds\n";
-
-    // print results
-    auto colors = {memory::util::kWhite, memory::util::kGreen, memory::util::kYellow};
-    for(auto& r : results){
-        auto color = colors.begin();
-        for(auto const& result : r.comparisons) {
-            std::cout << std::setw(5) << r.n_comparments << " compartments : ";
-            std::cout << memory::util::colorize(util::pprintf("%\n", result), *(color++));
+        m.run(100, 0.001);
+
+        for (auto& se: samplers) {
+            std::string key = se.first;
+            const simple_sampler& s = se.second;
+
+            // save trace
+            json meta = {
+                {"name", "membrane voltage"},
+                {"model", "ball_and_3stick"},
+                {"sim", "nestmc"},
+                {"ncomp", ncomp},
+                {"units", "mV"}};
+
+            V.save_trace(key, s.trace, meta);
+
+            // compute metrics
+            if (run_validation) {
+                double linf = linf_distance(s.trace, ref_data[key]);
+                auto pd = peak_delta(s.trace, ref_data[key]);
+
+                conv_results[key].push_back({key, ncomp, linf, pd});
+            }
         }
     }
 
-    // sort results in ascending order of compartments
-    std::sort(
-        results.begin(), results.end(),
-        [](const result& l, const result& r)
-            {return l.n_comparments<r.n_comparments;}
-    );
-
-    // the strategy for testing is the following:
-    //  1. test that the solution converges to the finest reference solution as
-    //     the number of compartments increases (i.e. spatial resolution is
-    //     refined)
-    for(auto i=1u; i<results.size(); ++i) {
-        for(auto j=0; j<3; ++j) {
-            EXPECT_TRUE(
-                  results[i].comparisons[j].max_relative_error()
-                < results[i-1].comparisons[j].max_relative_error()
-            );
-        }
+    if (verbose && run_validation) {
+        report_conv_table(std::cout, conv_results, "ncomp");
     }
 
-    //  2. test that the best solution (i.e. with most compartments) matches the
-    //     reference solution closely (less than 0.5% over the course of 100ms
-    //     simulation)
-    auto tol = 0.5;
-    for(auto j=0; j<3; ++j) {
-        EXPECT_TRUE(results.back().comparisons[j].max_relative_error()*100<tol);
+    for (auto key: util::transform_view(samplers, util::first)) {
+        SCOPED_TRACE(key);
+
+        const auto& results = conv_results[key];
+        assert_convergence(results);
     }
 }
 
diff --git a/tests/validation/validate_soma.cpp b/tests/validation/validate_soma.cpp
index b78f1e38905d38e8cc06481ac7f12ec00e69a6ce..26d919b07c10a5966734c0e12dd7a7296e55f139 100644
--- a/tests/validation/validate_soma.cpp
+++ b/tests/validation/validate_soma.cpp
@@ -1,151 +1,82 @@
 #include <fstream>
+#include <utility>
+
 #include <json/json.hpp>
 
 #include <common_types.hpp>
 #include <cell.hpp>
-#include <fvm_cell.hpp>
+#include <fvm_multicell.hpp>
+#include <model.hpp>
+#include <recipe.hpp>
+#include <simple_sampler.hpp>
 #include <util/rangeutil.hpp>
 
 #include "gtest.h"
 
 #include "../test_util.hpp"
 #include "../test_common_cells.hpp"
+#include "trace_analysis.hpp"
 #include "validation_data.hpp"
 
-// compares results with those generated by nrn/soma.py
-// single compartment model with HH channels
-TEST(soma, neuron_baseline)
-{
-    using namespace nest::mc;
+using namespace nest::mc;
+
+TEST(soma, neuron_ref) {
+    // compare voltages against reference data produced from
+    // nrn/soma.py
+
     using namespace nlohmann;
 
-    nest::mc::cell cell = make_cell_soma_only();
-
-    // make the lowered finite volume cell
-    using fvm_cell = fvm::fvm_cell<double, cell_local_size_type>;
-    std::vector<fvm_cell::detector_handle> detectors(cell.detectors().size());
-    std::vector<fvm_cell::target_handle> targets(cell.synapses().size());
-    std::vector<fvm_cell::probe_handle> probes(cell.probes().size());
-
-    fvm_cell model;
-    model.initialize(util::singleton_view(cell), detectors, targets, probes);
-
-    // load data from file
-    auto cell_data = testing::g_validation_data.load("soma.json");
-    EXPECT_TRUE(cell_data.size()>0);
-    if(cell_data.size()==0) return;
-
-    json& nrn =
-        *std::min_element(
-            cell_data.begin(), cell_data.end(),
-            [](json& lhs, json& rhs) {return lhs["dt"]<rhs["dt"];}
-        );
-    std::vector<double> nrn_spike_times = nrn["spikes"];
-
-    std::cout << "baseline with time step size " << nrn["dt"] << " ms\n";
-    std::cout << "baseline spikes : " << nrn["spikes"] << "\n";
-
-    for(auto& run : cell_data) {
-        std::vector<double> v;
-        double dt = run["dt"];
-
-        // set initial conditions
-        model.reset();
-
-        // run the simulation
-        auto tfinal =   120.; // ms
-        int nt = tfinal/dt;
-        v.push_back(model.voltage()[0]);
-        for(auto i=0; i<nt; ++i) {
-            model.advance(dt);
-            // save voltage at soma
-            v.push_back(model.voltage()[0]);
-        }
+    using lowered_cell = fvm::fvm_multicell<double, cell_local_size_type>;
+    auto& V = g_trace_io;
 
-        // get the spike times from the NEST MC simulation
-        auto nst_spike_times = testing::find_spikes(v, 0., dt);
-        // compare NEST MC and baseline NEURON results
-        auto comparison = testing::compare_spikes(nst_spike_times, nrn_spike_times);
+    bool verbose = V.verbose();
 
-        // Assert that relative error is less than 1%.
-        // For a 100 ms simulation this asserts that the difference between
-        // NEST and the most accurate NEURON simulation is less than 1 ms.
-        std::cout << "MAX ERROR @ " << dt << " is " << comparison.max_relative_error()*100. << "\n";
-        EXPECT_TRUE(comparison.max_relative_error()*100. < 1.);
-    }
-}
+    // load validation data
+    auto ref_data = V.load_traces("neuron_soma.json");
+    const char* key = "soma.mid";
+    bool run_validation = ref_data.count(key);
+    EXPECT_TRUE(run_validation);
 
-// check if solution converges
-TEST(soma, convergence)
-{
-    using namespace nest::mc;
-
-    nest::mc::cell cell = make_cell_soma_only();
-
-    // make the lowered finite volume cell
-    using fvm_cell = fvm::fvm_cell<double, cell_local_size_type>;
-    std::vector<fvm_cell::detector_handle> detectors(cell.detectors().size());
-    std::vector<fvm_cell::target_handle> targets(cell.synapses().size());
-    std::vector<fvm_cell::probe_handle> probes(cell.probes().size());
-
-    fvm_cell model;
-    model.initialize(util::singleton_view(cell), detectors, targets, probes);
-
-    // generate baseline solution with small dt=0.0001
-    std::vector<double> baseline_spike_times;
-    {
-        auto dt = 1e-4;
-        std::vector<double> v;
-
-        // set initial conditions
-        model.reset();
-
-        // run the simulation
-        auto tfinal =   120.; // ms
-        int nt = tfinal/dt;
-        v.push_back(model.voltage()[0]);
-        for(auto i=0; i<nt; ++i) {
-            model.advance(dt);
-            // save voltage at soma
-            v.push_back(model.voltage()[0]);
-        }
+    // generate test data
+    cell c = make_cell_soma_only();
+    add_common_voltage_probes(c);
 
-        baseline_spike_times = testing::find_spikes(v, 0., dt);
-    }
+    float sample_dt = .025;
+    simple_sampler sampler(sample_dt);
+    conv_data<float> convs;
 
-    testing::spike_comparison previous;
-    for(auto dt : {0.05, 0.02, 0.01, 0.005, 0.001}) {
-        std::vector<double> v;
-
-        // set initial conditions
-        model.reset();
-
-        // run the simulation
-        auto tfinal =   120.; // ms
-        int nt = tfinal/dt;
-        v.push_back(model.voltage()[0]);
-        for(auto i=0; i<nt; ++i) {
-            model.advance(dt);
-            // save voltage at soma
-            v.push_back(model.voltage()[0]);
-        }
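+    // Sweep the integration time step: each run is compared against the
+    // reference trace and the error metrics are collected for a
+    // convergence check.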
+    for (auto dt: {0.05f, 0.02f, 0.01f, 0.005f, 0.001f}) {
+        sampler.reset();
+        model<lowered_cell> m(singleton_recipe{c});
 
-        // get the spike times from the NEST MC simulation
-        auto nst_spike_times = testing::find_spikes(v, 0., dt);
+        m.attach_sampler({0u, 0u}, sampler.sampler<>());
+        m.run(100, dt);
 
-        // compare NEST MC and baseline NEURON results
-        auto comparison = testing::compare_spikes(nst_spike_times, baseline_spike_times);
-        std::cout << "dt " << std::setw(8) << dt << " : " << comparison << "\n";
-        if(!previous.is_valid()) {
-            previous = std::move(comparison);
-        }
-        else {
-            // Assert that relative error is less than 1%.
-            // For a 100 ms simulation this asserts that the difference between
-            // NEST and the most accurate NEURON simulation is less than 1 ms.
-            EXPECT_TRUE(comparison.max_relative_error() < previous.max_relative_error());
-            EXPECT_TRUE(comparison.rms < previous.rms);
-            EXPECT_TRUE(comparison.max < previous.max);
+        // save trace
+        auto& trace = sampler.trace;
+        json meta = {
+            {"name", "membrane voltage"},
+            {"model", "soma"},
+            {"sim", "nestmc"},
+            {"dt", dt},
+            {"units", "mV"}};
+
+        V.save_trace(key, trace, meta);
+
+        // compute metrics
+        if (run_validation) {
+            double linf = linf_distance(trace, ref_data[key]);
+            auto pd = peak_delta(trace, ref_data[key]);
+
+            convs.push_back({key, dt, linf, pd});
         }
     }
+
+    if (verbose && run_validation) {
+        std::map<std::string, std::vector<conv_entry<float>>> conv_results = {{key, convs}};
+        report_conv_table(std::cout, conv_results, "dt");
+    }
+
+    SCOPED_TRACE(key);
+    assert_convergence(convs);
 }
diff --git a/tests/validation/validate_synapses.cpp b/tests/validation/validate_synapses.cpp
index 4c767e18833e6f491c5fecf6c8b1c5fba8ea731c..65c99ba345cc6ae7ed52b0703974386c2929bba5 100644
--- a/tests/validation/validate_synapses.cpp
+++ b/tests/validation/validate_synapses.cpp
@@ -1,82 +1,49 @@
 #include <fstream>
-#include <iterator>
+#include <utility>
 
 #include <json/json.hpp>
 
 #include <common_types.hpp>
 #include <cell.hpp>
-#include <cell_group.hpp>
-#include <fvm_cell.hpp>
-#include <mechanism_interface.hpp>
+#include <fvm_multicell.hpp>
+#include <model.hpp>
+#include <recipe.hpp>
+#include <simple_sampler.hpp>
 #include <util/rangeutil.hpp>
+#include <util/transform.hpp>
 
 #include "gtest.h"
+
+#include "../test_common_cells.hpp"
 #include "../test_util.hpp"
+#include "trace_analysis.hpp"
 #include "validation_data.hpp"
 
-// For storing the results of a simulation along with the information required
-// to compare two simulations for accuracy.
-struct result {
-    std::vector<std::vector<double>> spikes;
-    std::vector<std::vector<double>> baseline_spikes;
-    std::vector<testing::spike_comparison> comparisons;
-    std::vector<double> thresh;
-    int n_comparments;
-
-    result(int nseg, double dt,
-      std::vector<std::vector<double>> &v,
-      nlohmann::json& measurements
-    ) {
-        n_comparments = nseg;
-        baseline_spikes = {
-            measurements["soma"]["spikes"],
-            measurements["dend"]["spikes"]
-        };
-        thresh = {
-            measurements["soma"]["thresh"],
-            measurements["dend"]["thresh"]
-        };
-        for(auto i=0u; i<v.size(); ++i) {
-            // calculate the NEST MC spike times
-            spikes.push_back
-                (testing::find_spikes(v[i], thresh[i], dt));
-            // compare NEST MC and baseline NEURON spike times
-            comparisons.push_back
-                (testing::compare_spikes(spikes[i], baseline_spikes[i]));
-        }
-    }
-};
+using namespace nest::mc;
 
-// compares results with those generated by nrn/simple_synapse.py
-void run_neuron_baseline(const char* syn_type, const char* data_file)
-{
-    using namespace nest::mc;
+void run_synapse_test(const char* syn_type, const char* ref_file) {
     using namespace nlohmann;
-    using lowered_cell = fvm::fvm_cell<double, cell_local_size_type>;
 
-    nest::mc::cell cell;
+    using lowered_cell = fvm::fvm_multicell<double, cell_local_size_type>;
+    auto& V = g_trace_io;
 
-    // Soma with diameter 12.6157 um and HH channel
-    auto soma = cell.add_soma(12.6157/2.0);
-    soma->add_mechanism(hh_parameters());
+    bool verbose = V.verbose();
+    int max_ncomp = V.max_ncomp();
 
-    // add dendrite of length 200 um and diameter 1 um with passive channel
-    auto dendrite = cell.add_cable(0, segmentKind::dendrite, 0.5, 0.5, 200);
-    dendrite->add_mechanism(pas_parameters());
+    // load validation data
+    auto ref_data = V.load_traces(ref_file);
+    bool run_validation =
+        ref_data.count("soma.mid") &&
+        ref_data.count("dend.mid") &&
+        ref_data.count("dend.end");
 
-    dendrite->mechanism("membrane").set("r_L", 100);
-    soma->mechanism("membrane").set("r_L", 100);
+    EXPECT_TRUE(run_validation);
 
-    // add synapse
+    // generate test data
+    cell c = make_cell_ball_and_stick(false); // no stimuli
     parameter_list syn_default(syn_type);
-    cell.add_synapse({1, 0.5}, syn_default);
-
-    // add probes
-    auto probe_soma_idx = cell.add_probe({{0,0}, probeKind::membrane_voltage});
-    auto probe_dend_idx = cell.add_probe({{1,0.5}, probeKind::membrane_voltage});
-
-    cell_member_type probe_soma{0u, probe_soma_idx};
-    cell_member_type probe_dend{0u, probe_dend_idx};
+    c.add_synapse({1, 0.5}, syn_default);
+    add_common_voltage_probes(c);
 
     // injected spike events
     postsynaptic_spike_event<float> synthetic_events[] = {
@@ -85,93 +52,75 @@ void run_neuron_baseline(const char* syn_type, const char* data_file)
         {{0u, 0u}, 40.0, 0.04}
     };
 
-    // load data from file
-    auto cell_data = testing::g_validation_data.load(data_file);
-    EXPECT_TRUE(cell_data.size()>0);
-    if(cell_data.size()==0) return;
-
-    json& nrn =
-        *std::max_element(
-            cell_data.begin(), cell_data.end(),
-            [](json& lhs, json& rhs) {return lhs["nseg"]<rhs["nseg"];}
-        );
-
-    auto& measurements = nrn["measurements"];
-
-    double dt = nrn["dt"];
-    double tfinal = 50.; // ms
-
-    std::vector<result> results;
-    for(auto run_index=0u; run_index<cell_data.size(); ++run_index) {
-        auto& run = cell_data[run_index];
-        int num_compartments = run["nseg"];
-        dendrite->set_compartments(num_compartments);
-        std::vector<std::vector<double>> v(2);
-
-        // make the lowered finite volume cell
-        cell_group<lowered_cell> group(0, util::singleton_view(cell));
-
-        // add the 3 spike events to the queue
-        group.enqueue_events(synthetic_events);
-
-        // run the simulation
-        v[0].push_back(group.probe(probe_soma));
-        v[1].push_back(group.probe(probe_dend));
-        double t  = 0.;
-        while(t < tfinal) {
-            t += dt;
-            group.advance(t, dt);
-            // save voltage at soma and dendrite
-            v[0].push_back(group.probe(probe_soma));
-            v[1].push_back(group.probe(probe_dend));
+    float sample_dt = .025;
+    std::pair<const char *, simple_sampler> samplers[] = {
+        {"soma.mid", simple_sampler(sample_dt)},
+        {"dend.mid", simple_sampler(sample_dt)},
+        {"dend.end", simple_sampler(sample_dt)}
+    };
+
+    std::map<std::string, std::vector<conv_entry<int>>> conv_results;
+
+    for (int ncomp = 10; ncomp<max_ncomp; ncomp*=2) {
+        for (auto& se: samplers) {
+            se.second.reset();
         }
+        c.cable(1)->set_compartments(ncomp);
+        model<lowered_cell> m(singleton_recipe{c});
 
-        results.push_back({num_compartments, dt, v, measurements});
-    }
+        // the ball-and-stick cell should have three voltage probes:
+        // centre of soma, centre of dendrite, end of dendrite.
 
-    // print results
-    auto colors = {memory::util::kWhite, memory::util::kGreen, memory::util::kYellow};
-    for(auto& r : results){
-        auto color = colors.begin();
-        for(auto const& result : r.comparisons) {
-            std::cout << std::setw(5) << r.n_comparments << " compartments : ";
-            std::cout << memory::util::colorize(util::pprintf("%\n", result), *(color++));
+        m.attach_sampler({0u, 0u}, samplers[0].second.sampler<>());
+        m.attach_sampler({0u, 1u}, samplers[1].second.sampler<>());
+        m.attach_sampler({0u, 2u}, samplers[2].second.sampler<>());
+
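+        // queue the three synthetic post-synaptic spike events defined
+        // above before running the simulation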
+        m.group(0).enqueue_events(synthetic_events);
+        m.run(70, 0.001);
+
+        for (auto& se: samplers) {
+            std::string key = se.first;
+            const simple_sampler& s = se.second;
+
+            // save trace
+            json meta = {
+                {"name", "membrane voltage"},
+                {"model", syn_type},
+                {"sim", "nestmc"},
+                {"ncomp", ncomp},
+                {"units", "mV"}};
+
+            V.save_trace(key, s.trace, meta);
+
+            // compute metrics
+            if (run_validation) {
+                double linf = linf_distance(s.trace, ref_data[key]);
+                auto pd = peak_delta(s.trace, ref_data[key]);
+
+                conv_results[key].push_back({key, ncomp, linf, pd});
+            }
         }
     }
 
-    // sort results in ascending order of compartments
-    std::sort(
-        results.begin(), results.end(),
-        [](const result& l, const result& r)
-            {return l.n_comparments<r.n_comparments;}
-    );
-
-    // the strategy for testing is the following:
-    //  1. test that the solution converges to the finest reference solution as
-    //     the number of compartments increases (i.e. spatial resolution is
-    //     refined)
-    for(auto j=0; j<2; ++j) {
-        EXPECT_TRUE(
-              results.back().comparisons[j].max_relative_error()
-            < results.front().comparisons[j].max_relative_error()
-        );
+    if (verbose && run_validation) {
+        report_conv_table(std::cout, conv_results, "ncomp");
     }
 
-    //  2. test that the best solution (i.e. with most compartments) matches the
-    //     reference solution closely (less than 0.5% over the course of 100ms
-    //     simulation)
-    auto tol = 0.5;
-    for(auto j=0; j<2; ++j) {
-        EXPECT_TRUE(results.back().comparisons[j].max_relative_error()*100<tol);
+    for (auto key: util::transform_view(samplers, util::first)) {
+        SCOPED_TRACE(key);
+
+        const auto& results = conv_results[key];
+        assert_convergence(results);
     }
 }
 
-TEST(simple_synapse, expsyn_neuron_baseline) {
+TEST(simple_synapse, expsyn_neuron_ref) {
     SCOPED_TRACE("expsyn");
-    run_neuron_baseline("expsyn","simple_exp_synapse.json");
+    run_synapse_test("expsyn", "neuron_simple_exp_synapse.json");
 }
 
-TEST(simple_synapse, exp2syn_neuron_baseline) {
+TEST(simple_synapse, exp2syn_neuron_ref) {
     SCOPED_TRACE("exp2syn");
-    run_neuron_baseline("exp2syn","simple_exp2_synapse.json");
+    run_synapse_test("exp2syn", "neuron_simple_exp2_synapse.json");
 }
diff --git a/tests/validation/validation_data.cpp b/tests/validation/validation_data.cpp
index 306adbaf38c02171f56a042f62662702d4faa3be..942ce8a7f1b6c9d4f5fa0b33d12f859d72c85ce2 100644
--- a/tests/validation/validation_data.cpp
+++ b/tests/validation/validation_data.cpp
@@ -1,21 +1,81 @@
+#include <algorithm>
+#include <fstream>
+#include <stdexcept>
+#include <string>
+
+#include <json/json.hpp>
+
+#include <simple_sampler.hpp>
+#include <util/path.hpp>
+
 #include "validation_data.hpp"
 
-#include <fstream>
+namespace nest {
+namespace mc {
+
+trace_io g_trace_io;
+
+void trace_io::save_trace(const std::string& label, const trace_data& data, const nlohmann::json& meta) {
+    save_trace("time", label, data, meta);
+}
+
+void trace_io::save_trace(const std::string& abscissa, const std::string& label, const trace_data& data, const nlohmann::json& meta) {
+    using namespace nest::mc;
+
+    nlohmann::json j = meta;
+    j["data"] = {
+        {abscissa, times(data)},
+        {label, values(data)}
+    };
+
+    jtraces_ += std::move(j);
+}
+
+template <typename Seq1, typename Seq2>
+static trace_data zip_trace_data(const Seq1& ts, const Seq2& vs) {
+    trace_data trace;
+
+    auto ti = std::begin(ts);
+    auto te = std::end(ts);
+    auto vi = std::begin(vs);
+    auto ve = std::end(vs);
 
-namespace testing {
+    while (ti!=te && vi!=ve) {
+        trace.push_back({*ti++, *vi++});
+    }
+    return trace;
+}
 
-nlohmann::json data_loader::load(const std::string& name) const {
-    std::string data_path=path_+'/'+name;
-    std::ifstream fid(data_path);
+static void parse_trace_json(const nlohmann::json& j, std::map<std::string, trace_data>& traces) {
+    if (j.is_array()) {
+        for (auto& i: j) parse_trace_json(i, traces);
+    }
+    else if (j.is_object() && j.count("data")>0 && j["data"].count("time")>0) {
+        auto data = j["data"];
+        auto time = data["time"].get<std::vector<float>>();
+        for (const auto& p: nlohmann::json::iterator_wrapper(data)) {
+            if (p.key()=="time") continue;
+
+            traces[p.key()] = zip_trace_data(time, p.value().get<std::vector<double>>());
+        }
+    }
+}
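+
+// For reference, parse_trace_json() above accepts either a single JSON
+// object or an array of such objects; each object is expected to carry its
+// samples under "data", with a "time" vector plus one named series per
+// trace key. A hypothetical entry (names and values are illustrative only,
+// not taken from the reference files) might look like:
+//
+//     {
+//         "units": "mV",
+//         "data": {
+//             "time":     [0.0, 0.025, 0.05],
+//             "soma.mid": [-65.0, -64.9, -64.7]
+//         }
+//     }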
+
+std::map<std::string, trace_data> trace_io::load_traces(const util::path& name) {
+    util::path file  = datadir_/name;
+    std::ifstream fid(file);
     if (!fid) {
-        throw std::runtime_error("unable to load validation data: "+data_path);
+        throw std::runtime_error("unable to load validation data: "+file.native());
     }
 
     nlohmann::json data;
     fid >> data;
-    return data;
+
+    std::map<std::string, trace_data> traces;
+    parse_trace_json(data, traces);
+    return traces;
 }
 
-data_loader g_validation_data;
+} // namespace mc
+} // namespace nest
 
-} // namespace testing
diff --git a/tests/validation/validation_data.hpp b/tests/validation/validation_data.hpp
index d9b30989ea9b980257d9f3ad18967ece133b314a..d9026da3df5ce1e53638c6b2a4ebf6987fa3cde6 100644
--- a/tests/validation/validation_data.hpp
+++ b/tests/validation/validation_data.hpp
@@ -1,28 +1,83 @@
 #pragma once
 
+#include <fstream>
+#include <stdexcept>
 #include <string>
+#include <utility>
 
 #include <json/json.hpp>
 
+#include <simple_sampler.hpp>
+#include <util/path.hpp>
+
 #ifndef DATADIR
 #define DATADIR "../data"
 #endif
 
-namespace testing {
+namespace nest {
+namespace mc {
+
+/*
+ * Class manages input (loading and parsing) of JSON
+ * reference traces, and output of saved traces from
+ * the validation tests.
+ *
+ * The global object has its paths initialised by the test
+ * main() and will write all saved traces to a single
+ * output JSON file, if specified.
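+ *
+ * A minimal usage sketch (the file name, trace label and metadata below
+ * are illustrative only):
+ *
+ *     auto& V = g_trace_io;
+ *     auto ref = V.load_traces("neuron_soma.json");  // key -> trace_data
+ *     V.save_trace("soma.mid", trace, {{"units", "mV"}});
+ *     V.write_traces();  // flush saved traces to the output file, if set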
+ */
 
-class data_loader {
+class trace_io {
 public:
-    // set where to find the validation JSON files
-    void set_path(const std::string& path) { path_=path; }
+    void clear_traces() {
+        jtraces_ = nlohmann::json::array();
+    }
+
+    void write_traces() {
+        if (f_) {
+            f_ << jtraces_;
+            f_.close();
+        }
+    }
+
+    void save_trace(const std::string& label, const trace_data& data, const nlohmann::json& meta);
+    void save_trace(const std::string& abscissa, const std::string& label, const trace_data& data, const nlohmann::json& meta);
+    std::map<std::string, trace_data> load_traces(const util::path& name);
+
+    // common flags, options set by driver
+
+    void set_verbose(bool v) { verbose_flag_ = v; }
+    bool verbose() const { return verbose_flag_; }
+
+    void set_max_ncomp(int ncomp) { max_ncomp_ = ncomp; }
+    int max_ncomp() const { return max_ncomp_; }
+
+    void set_datadir(const util::path& dir) { datadir_ = dir; }
+
+    void set_output(const util::path& file) {
+        f_.open(file);
+        if (!f_) {
+            throw std::runtime_error("unable to open file for writing");
+        }
+    }
+
+    // write traces on exit
 
-    // load JSON file from path plus name, throw exception
-    // if cannot be found or loaded.
-    nlohmann::json load(const std::string& name) const;
+    ~trace_io() {
+        if (f_) {
+            write_traces();
+        }
+    }
 
 private:
-    std::string path_=DATADIR "/validation";
+    util::path datadir_ = DATADIR "/validation";
+    std::ofstream f_;
+    nlohmann::json jtraces_ = nlohmann::json::array();
+    bool verbose_flag_ = false;
+    int max_ncomp_ = 1000;
 };
 
-extern data_loader g_validation_data;
+extern trace_io g_trace_io;
 
-} // namespace testing
+} // namespace mc
+} // namespace nest