diff --git a/.github/workflows/basic.yml b/.github/workflows/basic.yml
index c4492e85a3d6b22f7f50e6afed4e1a573eca6368..2f1114177ce8271069c23250d2e3b16ea1915655 100644
--- a/.github/workflows/basic.yml
+++ b/.github/workflows/basic.yml
@@ -180,5 +180,5 @@ jobs:
         run: scripts/run_python_examples.sh
       - name: Build and test a catalogue
         run: |
-          build-catalogue -v default mechanisms/default
+          arbor-build-catalogue -v default mechanisms/default
           ./scripts/test-catalogue.py ./default-catalogue.so
diff --git a/.github/workflows/ciwheel.yml b/.github/workflows/ciwheel.yml
index e8078084b6d2870a29d7cf5a7f89e8e29fccf2be..42b54a3296c17cdc41e50b69c11b31821cc013b5 100644
--- a/.github/workflows/ciwheel.yml
+++ b/.github/workflows/ciwheel.yml
@@ -6,6 +6,9 @@ on:
     tags:
       - v*
 
+  schedule:
+    - cron: '0 2 * * 0' # run at 2 AM every Sunday
+
 jobs:
   build_binary_wheels:
     name: Build wheels on ${{ matrix.os }}
@@ -22,29 +25,30 @@
 
       - name: Build wheels Linux
         if: ${{ startsWith(matrix.os, 'ubuntu') }}
-        uses: pypa/cibuildwheel@v1.9.0
+        uses: pypa/cibuildwheel@v2.3.0
         with:
           output-dir: dist
         env:
-          CIBW_BEFORE_BUILD: python -m pip install numpy setuptools
+          CIBW_BEFORE_ALL: yum -y install libxml2-devel
+          CIBW_BEFORE_BUILD: python -m pip install numpy setuptools scikit-build ninja cmake
           CIBW_BUILD: "cp3?-manylinux_x86_64"
-          CIBW_SKIP: "cp35-*"
           CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
           CIBW_ARCHS_LINUX: x86_64
-          # CIBW_TEST_COMMAND: TODO
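+          # repair with auditwheel, then fix the RPATHs of the bundled libs (see scripts/patchwheel.py)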
+          CIBW_REPAIR_WHEEL_COMMAND: 'auditwheel repair -w {dest_dir} {wheel} && python /project/scripts/patchwheel.py {dest_dir}'
+          CIBW_TEST_COMMAND: python -m unittest discover -v -s {project}/python
 
       - name: Build wheels macos
         if: ${{ startsWith(matrix.os, 'macos') }}
-        uses: pypa/cibuildwheel@v1.9.0
+        uses: pypa/cibuildwheel@v2.3.0
         with:
           output-dir: dist
         env:
           MACOSX_DEPLOYMENT_TARGET: "10.15" #needed to undo some CIBW settings
-          CIBW_BEFORE_BUILD: python -m pip install numpy setuptools
+          CIBW_BEFORE_BUILD: python -m pip install numpy setuptools scikit-build ninja cmake
           CIBW_BUILD: "cp3?-macosx_x86_64"
-          CIBW_SKIP: "cp35-*"
           CIBW_ARCHS_MACOS: x86_64 universal2
-          # CIBW_TEST_COMMAND: TODO
+          CIBW_TEST_COMMAND: python -m unittest discover -v -s {project}/python
 
       # this action runs auditwheel automatically with the following args:
       # https://cibuildwheel.readthedocs.io/en/stable/options/#repair-wheel-command
@@ -61,6 +64,8 @@ jobs:
     steps:
       - name: Set up Python
         uses: actions/setup-python@v2
+      - name: Get packages
+        run: python -m pip install numpy setuptools scikit-build ninja cmake
       - uses: actions/checkout@v2
         with:
           fetch-depth: 0
diff --git a/CMakeLists.txt b/CMakeLists.txt
index c5e6e846b2606d636456acb8a83ea7255e0f5f31..d1a95db559e5f07afe811f3653df7f66c8d4a7ce 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -13,6 +13,10 @@ enable_language(CXX)
 
 include(GNUInstallDirs)
 
+# Effectively adds '-fpic' flag to CXX_FLAGS. Needed for dynamic catalogues.
+set(CMAKE_POSITION_INDEPENDENT_CODE ON)
+
+
 # Turn on this option to force the compilers to produce color output when output is
 # redirected from the terminal (e.g. when using ninja or a pager).
 
@@ -107,6 +111,9 @@ endif()
 # as to enable CUDA tests in generator expressions.)
 if(ARB_GPU STREQUAL "cuda")
     set(ARB_WITH_NVCC TRUE)
+    # CMake 3.18 and later set the default CUDA architecture for each
+    # target according to CMAKE_CUDA_ARCHITECTURES.
+
     # This fixes nvcc picking up a wrong host compiler for linking, causing
     # issues with outdated libraries, eg libstdc++ and std::filesystem. Must
     # happen before all calls to enable_language(CUDA)
@@ -164,6 +171,10 @@ set(CMAKE_CXX_EXTENSIONS OFF)
 
 # Data and internal scripts go here
 set(ARB_INSTALL_DATADIR ${CMAKE_INSTALL_FULL_DATAROOTDIR}/arbor)
+# Derived paths for arbor-build-catalogue
+file(RELATIVE_PATH ARB_REL_DATADIR ${CMAKE_INSTALL_FULL_BINDIR} ${CMAKE_INSTALL_FULL_DATAROOTDIR}/arbor)
+file(RELATIVE_PATH ARB_REL_PACKAGEDIR ${CMAKE_INSTALL_FULL_BINDIR} ${CMAKE_INSTALL_FULL_LIBDIR}/cmake/arbor)
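+# (with the default GNUInstallDirs layout these resolve to ../share/arbor and ../lib/cmake/arbor)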
 
 # Interface library `arbor-config-defs` collects configure-time defines
 # for arbor, arborenv, arborio, of the form ARB_HAVE_XXX. These
@@ -208,8 +218,8 @@ install(TARGETS arborio-public-deps EXPORT arborio-targets)
 
 # Add scripts and supporting CMake for setting up external catalogues
 
-configure_file(scripts/build-catalogue.in ${CMAKE_CURRENT_BINARY_DIR}/build-catalogue @ONLY)
-install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/build-catalogue DESTINATION ${CMAKE_INSTALL_FULL_BINDIR})
+configure_file(scripts/build-catalogue.in ${CMAKE_CURRENT_BINARY_DIR}/arbor-build-catalogue @ONLY)
+install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/arbor-build-catalogue DESTINATION ${CMAKE_INSTALL_FULL_BINDIR})
 install(FILES mechanisms/BuildModules.cmake DESTINATION ${ARB_INSTALL_DATADIR})
 install(FILES mechanisms/generate_catalogue DESTINATION ${ARB_INSTALL_DATADIR} PERMISSIONS OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
 # External libraries in `ext` sub-directory: json, tinyopt and randon123.
@@ -290,9 +300,6 @@ if(ARB_WITH_PYTHON)
         find_package(Python3 ${arb_py_version} COMPONENTS Interpreter Development REQUIRED)
     endif()
 
-    # Required to link the dynamic libraries for python modules.
-    # Effectively adds '-fpic' flag to CXX_FLAGS.
-    set(CMAKE_POSITION_INDEPENDENT_CODE ON)
 else()
     # If not building the Python module, the interpreter is still required
     # to build some targets, e.g. when building the documentation.
diff --git a/arbor/include/CMakeLists.txt b/arbor/include/CMakeLists.txt
index 82415e4f7c1f965f1a0f9c0d782a761fe5fe1999..1cfc13b0c09e2d916aebdbb09e6fb7559ced2e2a 100644
--- a/arbor/include/CMakeLists.txt
+++ b/arbor/include/CMakeLists.txt
@@ -46,6 +46,10 @@ if(ARB_WITH_PROFILING)
     # define ARB_PROFILE_ENABLED in version.hpp
     list(APPEND arb_features PROFILE)
 endif()
+if(ARB_USE_BUNDLED_LIBS)
+    # define ARB_BUNDLED_ENABLED in version.hpp
+    list(APPEND arb_features BUNDLED)
+endif()
 if(ARB_VECTORIZE)
     list(APPEND arb_features VECTORIZE)
 endif()
diff --git a/arborio/CMakeLists.txt b/arborio/CMakeLists.txt
index 104afdacefb5553d3cb727ad51dc9adcff7376a0..4c1eff4abcc8d7ba00ed414cdaf8e8fc9817f9e1 100644
--- a/arborio/CMakeLists.txt
+++ b/arborio/CMakeLists.txt
@@ -11,8 +11,7 @@ if(ARB_WITH_NEUROML)
             neuroml.cpp
             nml_parse_morphology.cpp
             xml.cpp
-            xmlwrap.cpp
-        )
+            xmlwrap.cpp)
     find_package(LibXml2 REQUIRED)
 endif()
 
diff --git a/ci/gitlab-cscs.yml b/ci/gitlab-cscs.yml
index 39392368a5c8a6759250a2aad943386eeaf9be77..819c4431e3978dd366cd08f1f08b4f2fe0f41c9e 100644
--- a/ci/gitlab-cscs.yml
+++ b/ci/gitlab-cscs.yml
@@ -101,7 +101,7 @@ single_node_release:
   only: ['master', 'staging', 'trying']
   stage: test
   script:
-    - unit
+    - unit --gtest_filter="-mechcat.loading"
     - unit-local
     - unit-modcc
   variables:
diff --git a/cmake/arbor-config.cmake.in b/cmake/arbor-config.cmake.in
index 5ddbc0cc3817e4a54c4c0a758844fc34335f5d8e..c9edecdc53f8294b1fef7cee6c393ab51207c819 100644
--- a/cmake/arbor-config.cmake.in
+++ b/cmake/arbor-config.cmake.in
@@ -45,6 +45,7 @@ function(_append_property target property)
 endfunction()
 
 set(ARB_VECTORIZE @ARB_VECTORIZE@)
+set(ARB_WITH_GPU @ARB_WITH_GPU@)
 set(ARB_ARCH @ARB_ARCH@)
 set(ARB_MODCC_FLAGS @ARB_MODCC_FLAGS@)
 set(ARB_CXX @CMAKE_CXX_COMPILER@)
diff --git a/doc/concepts/mechanisms.rst b/doc/concepts/mechanisms.rst
index 445347df7a84874a68ec94a5cb16be06512c1f42..11bb5eda7ee6255fec3143ff1fb87f84ee22d140 100644
--- a/doc/concepts/mechanisms.rst
+++ b/doc/concepts/mechanisms.rst
@@ -90,17 +90,17 @@ This will produce a catalogue loadable at runtime by calling ``load_catalogue``
 with a filename in both C++ and Python. The steps are
 
 1. Prepare a directory containing your NMODL files (.mod suffixes required)
-2. Call ``build-catalogue`` installed by arbor
+2. Call ``arbor-build-catalogue`` installed by arbor
 
    .. code-block :: bash
 
-     build-catalogue <name> <path/to/nmodl>
-
-All files with the suffix ``.mod`` located in ``<path/to/nmodl>`` will be baked into
-a catalogue named ``lib<name>-catalogue.so`` and placed into your current working
-directory. Note that these files are platform-specific and should only be used
-on the combination of OS, compiler, arbor, and machine they were built with.
+     arbor-build-catalogue <name> <path/to/nmodl>
 
+All files with the suffix ``.mod`` located in ``<path/to/nmodl>`` will be baked
+into a catalogue named ``<name>-catalogue.so`` and placed into your current
+working directory. Note that these files are platform-specific and should only
+be used on the combination of OS, compiler, Arbor, and machine they were built
+with. See our internal documentation for more advanced usage of the builder.
 Errors might be diagnosable by passing the ``-v`` flag.
 
 This catalogue can then be used similarly to the built-in ones
diff --git a/doc/install/python.rst b/doc/install/python.rst
index 766cdab8cb7759906b2775df132360c8fbcfa6a8..3b931ad38ea1319125289433abeda37b15cd7989 100644
--- a/doc/install/python.rst
+++ b/doc/install/python.rst
@@ -6,13 +6,17 @@ Python Installation
 Arbor's Python API will be the most convenient interface for most users.
 
 .. note::
-    Arbor requires Python version 3.6 and later. It is advised that you update `pip` as well.
+    Arbor requires Python 3.6 or later. It is advised that you update ``pip`` as well.
+    We strongly encourage using ``pip`` to install Arbor.
+
+    To get help in case of problems installing with ``pip``, run it with the ``--verbose`` flag, and attach the output
+    (along with the ``pip`` command itself) to a ticket on `Arbor's issues page <https://github.com/arbor-sim/arbor/issues>`_.
 
 Getting Arbor
 -------------
 
 Every point release of Arbor is pushed to the Python Package Index.
-For x86-64 Linux and MacOS plaftorms, we provide binary wheels.
+For x86-64 Linux and MacOS platforms, we provide binary wheels.
 The easiest way to get Arbor is with
 `pip <https://packaging.python.org/tutorials/installing-packages>`_:
 
@@ -20,15 +24,6 @@ The easiest way to get Arbor is with
 
     pip3 install arbor
 
-.. note::
-    For other platforms, `pip` will build Arbor from source.
-    You will need to have some development packages installed in order to build Arbor this way.
-
-    * Ubuntu/Debian: `git cmake gcc python3-dev python3-pip libxml2-dev`
-    * Fedora/CentOS/OpenSuse: `git cmake gcc-c++ python3-devel python3-pip libxml2-devel`
-    * MacOS: get `brew` `here <https://brew.sh>`_ and run `brew install cmake clang python3 libxml2`
-    * Windows: the simplest way is to use `WSL <https://docs.microsoft.com/en-us/windows/wsl/install-win10>`_ and then follow the instructions for Ubuntu.
-
 To test that Arbor is available, try the following in a Python interpreter
 to see information about the version and enabled features:
 
@@ -41,9 +36,20 @@ to see information about the version and enabled features:
 You are now ready to use Arbor! You can continue reading these documentation pages, have a look at the
 :ref:`Python API reference<pyoverview>`, or visit the :ref:`tutorial`.
 
-.. Note::
-    To get help in case of problems installing with pip, run pip with the ``--verbose`` flag, and attach the output
-    (along with the pip command itself) to a ticket on `Arbor's issues page <https://github.com/arbor-sim/arbor/issues>`_.
+.. Warning::
+
+    For builds from Arbor's source, you will need to have some development packages installed. When installing
+    Arbor on any platform other than those listed above, ``pip`` will attempt a build from source and thus
+    requires these packages as well.
+
+    * Ubuntu/Debian: `git cmake gcc python3-dev python3-pip libxml2-dev`
+    * Fedora/CentOS/OpenSuse: `git cmake gcc-c++ python3-devel python3-pip libxml2-devel`
+    * MacOS: get `brew` `here <https://brew.sh>`_ and run `brew install cmake clang python3 libxml2`
+    * Windows: the simplest way is to use `WSL <https://docs.microsoft.com/en-us/windows/wsl/install-win10>`_ and then follow the instructions for Ubuntu.
+
+    In addition, you'll need a few Python packages present:
+
+    ``pip3 install ninja scikit-build wheel setuptools numpy``
 
 .. _in_python_custom:
 
@@ -71,88 +77,99 @@ Every time you make changes to the code, you'll have to repeat the second step.
 Advanced options
 ^^^^^^^^^^^^^^^^
 
-By default Arbor is installed with multi-threading enabled. To enable more advanced forms of parallelism,
-Arbor comes with a few compilation options. These can be used on both local (``pip3 install ./arbor``) and
-remote (``pip3 install arbor``) copies of Arbor. Below we assume you are working off a local copy.
+By default Arbor is installed with multi-threading enabled. To enable more
+advanced forms of parallelism and other features, Arbor comes with a few
+compilation options. These are of the form ``-D<KEY>=<VALUE>``; they must be
+appended to the ``pip`` invocation via ``--install-option="-D<...>"`` (one per
+option) and can be used on both local (``pip3 install ./arbor``) and remote
+(``pip3 install arbor``) copies of Arbor. See the examples below.
+
+.. Note::
 
-The following optional flags can be used to configure the installation:
+   If you run into build issues while experimenting with build options, be sure
+   to remove the ``_skbuild`` directory. If you had Arbor installed already,
+   you may need to remove it before you can (re)compile it with the flags you need.
+
+   Also, make sure to pass each option individually via
+   ``--install-option="..."``.
+
+The following flags can be used to configure the installation:
+
+* ``ARB_WITH_NEUROML=<ON|OFF>``: Enable support for NeuroML2 morphologies,
+  requires the ``libxml2`` library. Default ``OFF``.
+* ``ARB_WITH_MPI=<ON|OFF>``: Enable MPI support, requires MPI library.
+  Default ``OFF``.
+* ``ARB_GPU=<none|cuda|cuda-clang|hip>``: Enable GPU support for NVIDIA GPUs
+  with nvcc using ``cuda``, or with clang using ``cuda-clang`` (both require
+  cudaruntime). Enable GPU support for AMD GPUs with hipcc using ``hip``. By
+  default set to ``none``, which disables GPU support.
+* ``ARB_VECTORIZE=<ON|OFF>``: Enable vectorization. The architecture argument,
+  documented below, may also have to be set appropriately to generate
+  vectorized code. See :ref:`install-architecture` for details.
+* ``ARB_ARCH=<native|*>``: CPU micro-architecture to target. The advised
+  default is ``native``. See `here
+  <https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html>`_ for a full list of
+  options.
+
+.. note::
 
-* ``--mpi``: Enable MPI support (requires MPI library).
-* ``--gpu``: Enable GPU support for NVIDIA GPUs with nvcc using ``cuda``, or with clang using ``cuda-clang`` (both require cudaruntime).
-  Enable GPU support for AMD GPUs with hipcc using ``hip``. By default set to ``none``, which disables gpu support.
-* ``--vec``: Enable vectorization. The ``--arch`` argument, documented below, may also have to be set appropriately to generated vectorized code.
-  See :ref:`install-architecture` for details.
-* ``--arch``: CPU micro-architecture to target. The advised default is ``native``.
-  See `here <https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html>`_ for a full list of options.
-* ``--makejobs``: Specify the amount of jobs to ``make`` the project with for faster build times on multicore systems. By default set to ``2``.
+   There are more advanced flags that can be set. We are using ``scikit-build``
+   and ``CMake`` under the hood, so all flags and options valid in ``CMake`` can
+   be used in this fashion.
+
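+   For instance, a hypothetical invocation pinning the C++ compiler via a
+   generic CMake variable (the compiler named here is purely illustrative):
+
+   .. code-block:: bash
+
+      pip3 install ./arbor --install-option="-DCMAKE_CXX_COMPILER=g++-11"
+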
+   Detailed instructions on how to install using CMake are in the :ref:`Python
+   configuration <install-python>` section of the :ref:`installation guide
+   <in_build_install>`. CMake is recommended if you need more control over
+   compilation and installation, plan to use Arbor with C++, or if you are
+   integrating with package managers such as Spack and EasyBuild.
+
+In the examples below we assume you are installing from a local copy.
 
 **Vanilla install** with no additional features enabled:
 
 .. code-block:: bash
 
-    pip3 install arbor
+    pip3 install ./arbor
 
 **With MPI support**. This might require loading an MPI module or setting the ``CC`` and ``CXX``
 :ref:`environment variables <install-mpi>`:
 
 .. code-block:: bash
 
-    pip3 install --install-option='--mpi' ./arbor
+    pip3 install ./arbor --install-option="-DARB_WITH_MPI=ON"
 
 **Compile with** :ref:`vectorization <install-vectorize>` on a system with a SkyLake
 :ref:`architecture <install-architecture>`:
 
 .. code-block:: bash
 
-    pip3 install --install-option='--vec' --install-option='--arch=skylake' arbor
+    pip3 install ./arbor --install-option="-DARB_VECTORIZE=ON" --install-option="-DARB_ARCH=skylake"
 
 **Enable NVIDIA GPUs (compiled with nvcc)**. This requires the :ref:`CUDA toolkit <install-gpu>`:
 
 .. code-block:: bash
 
-    pip3 install --install-option='--gpu=cuda' ./arbor
+    pip3 install ./arbor --install-option="-DARB_GPU=cuda"
 
 **Enable NVIDIA GPUs (compiled with clang)**. This also requires the :ref:`CUDA toolkit <install-gpu>`:
 
 .. code-block:: bash
 
-    pip3 install --install-option='--gpu=cuda-clang' ./arbor
+    pip3 install ./arbor --install-option="-DARB_GPU=cuda-clang"
 
 **Enable AMD GPUs (compiled with hipcc)**. This requires setting the ``CC`` and ``CXX``
 :ref:`environment variables <install-gpu>`
 
 .. code-block:: bash
 
-    pip3 install --install-option='--gpu=hip' ./arbor
-
-.. Note::
-    Setuptools compiles the Arbor C++ library and wrapper, as well as dependencies you did not have installed
-    yet (e.g. `numpy`). It may take a few minutes. Pass the ``--verbose`` flag to pip
-    to see the individual steps being performed if you are concerned that progress
-    is halting.
-
-    If you had Arbor installed already, you may need to remove it first before you can (re)compile
-    it with the flags you need.
-
-.. Note::
-    Detailed instructions on how to install using CMake are in the
-    :ref:`Python configuration <install-python>` section of the :ref:`installation guide <in_build_install>`.
-    CMake is recommended if you need more control over compilation and installation, plan to use Arbor with C++,
-    or if you are integrating with package managers such as Spack and EasyBuild.
-
-Dependencies
-^^^^^^^^^^^^
-
-If a downstream dependency requires Arbor be built with
-a specific feature enabled, use ``requirements.txt`` to
-`define the constraints <https://pip.pypa.io/en/stable/reference/pip_install/#per-requirement-overrides>`_.
-For example, a package that depends on `arbor` version 0.3 or later
-with MPI support would add the following to its requirements:
-
-.. code-block:: python
-
-    arbor >= 0.3 --install-option='--gpu=cuda' \
-                 --install-option='--mpi'
+    pip3 install ./arbor --install-option="-DARB_GPU=hip"
 
 Note on performance
 -------------------
diff --git a/doc/internals/extending_catalogues.rst b/doc/internals/extending_catalogues.rst
index 1e954d19108766af77753c7db098cfc8a320e1f7..d60182ecc2d5a9529f9755aeaa41be4147c678b2 100644
--- a/doc/internals/extending_catalogues.rst
+++ b/doc/internals/extending_catalogues.rst
@@ -12,7 +12,7 @@ all likelihood interested in the former.
    If you are coming from NEURON and looking for the equivalent of
    ``nrnivmodl``, please read on :ref:`here <mechanisms_dynamic>`.
 
-   Following this path is for developers rather than end-users.
+   The instructions below are for developers rather than end-users.
 
 This requires a copy of the Arbor source tree and the compiler toolchain used to
 build Arbor in addition to the installed library. Following these steps will
@@ -21,7 +21,11 @@ produce a catalogue of the same level of integration as the built-in catalogues
 
 1. Go to the Arbor source tree.
 2. Create a new directory under *mechanisms*.
-3. Add your .mod files.
+3. Populate it with your mechanism sources:
+
+   1. Add any ``.mod`` files you wish to integrate.
+   2. Add any raw C++ files to be included in the catalogue.
+
 4. Edit *mechanisms/CMakeLists.txt* to add a definition like this (example from
    *default* catalogue)
 
@@ -31,7 +35,8 @@ produce a catalogue of the same level of integration as the built-in catalogues
        NAME default                                                   # Name of your catalogue
        SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/default"                  # Directory name (added above)
        OUTPUT "CAT_DEFAULT_SOURCES"                                   # Variable name to store C++ files into (see below)
-       MECHS exp2syn expsyn expsyn_stdp hh kamt kdrmt nax nernst pas  # Space separated list of mechanisms w/o .mod suffix.
+       MOD exp2syn expsyn expsyn_stdp hh kamt kdrmt nax nernst pas    # Space separated list of NMODL mechanism names
+       CXX                                                            # Space separated list of raw C++ mechanism names
        PREFIX "${PROJECT_SOURCE_DIR}/mechanisms"                      # where does 'generate_catalogue' live, do not change
        STANDALONE FALSE                                               # build as shared object, must be OFF
        VERBOSE OFF)                                                   # Print debug info at configuration time
@@ -49,4 +54,10 @@ produce a catalogue of the same level of integration as the built-in catalogues
 6. Add a ``global_NAME_catalogue`` function in ``mechcat.hpp`` and ``mechcat.cpp``
 7. Bind this function in ``python/mechanisms.cpp``.
 
-All steps can be more or less copied from the surrounding code.
+All steps can be directly adapted from the surrounding code.
+
+.. note::
+
+   If you have special requirements, you can write mechanisms in C/C++ directly
+   against Arbor's ABI. These need to adhere to the calling convention of the
+   ABI. See :ref:`here <abi_raw>` for more.
diff --git a/doc/internals/mechanism_abi.rst b/doc/internals/mechanism_abi.rst
index df5d4250d32cb9344a2f8cce402e4f5beec3898c..a8c9205900f1ca9d98194dfe7f72536657e62316 100644
--- a/doc/internals/mechanism_abi.rst
+++ b/doc/internals/mechanism_abi.rst
@@ -454,3 +454,68 @@ supported. The naming scheme is shown in the example below
   arb_mechanism_type make_arb_default_catalogue_pas();
   arb_mechanism_interface* make_arb_default_catalogue_pas_interface_multicore();
   arb_mechanism_interface* make_arb_default_catalogue_pas_interface_gpu();
+
+.. _abi_raw:
+
+Writing Mechanisms Directly Against the ABI
+-------------------------------------------
+
+.. warning::
+
+   This is a measure of last resort. Usage is not recommended unless you have a
+   dire need and a solid understanding of Arbor, its internals, and the ABI.
+
+If your use case requires features not supported in NMODL, you can write mechanisms
+in C++ directly. See the ABI documentation above for the callbacks that need to be
+filled in. These mechanisms can be compiled with ``arbor-build-catalogue`` as well
+and must be present in the same folder as the NMODL files. Example:
+
+.. code-block::
+
+   $> ls mod
+   A.mod
+   B.hpp B_cpu.cpp B_gpu.cpp B_gpu.cu
+   C.hpp C_cpu.cpp C_gpu.cpp C_gpu.cu
+   $> arbor-build-catalogue my mod --raw B C
+   Building catalogue 'my' from mechanisms in 'mod'
+   * NMODL
+     * A
+   * Raw
+     * B
+     * C
+
+The ``--raw`` flag must go last: it greedily consumes all following arguments.
+
+For this to compile, the following must be upheld:
+
+- For each mechanism ``M``, the following files must be present:
+
+  - ``M.hpp`` must define the mechanism metadata and declare the used interfaces.
+  - ``M_cpu.cpp`` must define the CPU interface. (You can disable this for raw
+    mechanisms by passing ``-C``.)
+
+  - If GPU support is used
+
+    - ``M_gpu.cpp`` must define the GPU interface.
+    - ``M_gpu.cu``  must define CUDA kernels.
+
+- The interface names must adhere to the chosen catalogue name, e.g. here ``make_arb_my_catalogue_B_interface_multicore();``
+
+  - Names may only contain alphanumeric characters and underscores.
+  - Names must not contain multiple successive underscores.
+  - In general, a valid C++ identifier should be used.
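+
+A minimal sketch of what ``B.hpp`` from the example above might declare for
+the catalogue ``my``; the names follow the convention just described, and we
+assume the ABI types come from ``arbor/mechanism_abi.h``:
+
+.. code-block:: cpp
+
+   #pragma once
+   #include <arbor/mechanism_abi.h>
+
+   // Declarations only; the definitions go into B_cpu.cpp (and, with GPU
+   // support, B_gpu.cpp/B_gpu.cu).
+   arb_mechanism_type make_arb_my_catalogue_B();
+   arb_mechanism_interface* make_arb_my_catalogue_B_interface_multicore();
+   arb_mechanism_interface* make_arb_my_catalogue_B_interface_gpu();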
diff --git a/doc/python/hardware.rst b/doc/python/hardware.rst
index 4cbfe56a3d612b562e312f1728a9b360d1b3ac62..bf38e6b6d4196be51a0f5c33b7243a5c4a3a46bf 100644
--- a/doc/python/hardware.rst
+++ b/doc/python/hardware.rst
@@ -27,6 +27,8 @@ Helper functions for checking cmake or environment variables, as well as configu
     * ``ARB_GPU_ENABLED``
     * ``ARB_VECTORIZE``
     * ``ARB_WITH_PROFILING``
+    * ``ARB_WITH_NEUROML``
+    * ``ARB_USE_BUNDLED_LIBS``
     * ``ARB_VERSION``
     * ``ARB_ARCH``
 
@@ -37,7 +39,7 @@ Helper functions for checking cmake or environment variables, as well as configu
             import arbor
             arbor.config()
 
-            {'mpi': True, 'mpi4py': True, 'gpu': False, 'vectorize': True, 'profiling': True, 'version': '0.5.3-dev', 'arch': 'native'}
+            {'mpi': True, 'mpi4py': True, 'gpu': False, 'vectorize': True, 'profiling': True, 'neuroml': True, 'bundled': True, 'version': '0.5.3-dev', 'arch': 'native'}
 
 .. function:: mpi_init()
 
diff --git a/lmorpho/lsystem.hpp b/lmorpho/lsystem.hpp
index 3130ba64877c05f97cedeb0f6fa1ea58504a41aa..9dfc45353a25a9bce3c1a6fcac6f5fb1eafe83c6 100644
--- a/lmorpho/lsystem.hpp
+++ b/lmorpho/lsystem.hpp
@@ -8,7 +8,7 @@ struct lsys_param;
 
 using lsys_generator = std::minstd_rand;
 
-class lsys_distribution_param;
+struct lsys_distribution_param;
 arb::segment_tree generate_morphology(const lsys_distribution_param& soma, std::vector<lsys_param> Ps, lsys_generator& g);
 
 // The distribution parameters used in the specification of the L-system parameters.
diff --git a/mechanisms/BuildModules.cmake b/mechanisms/BuildModules.cmake
index 38f466f0abaa2829818f37fe6e77336d3eacdd03..3aadf9fcb4a8aa6dc6e3e24633390667f386bb33 100644
--- a/mechanisms/BuildModules.cmake
+++ b/mechanisms/BuildModules.cmake
@@ -58,7 +58,7 @@ function(build_modules)
 endfunction()
 
 function("make_catalogue")
-  cmake_parse_arguments(MK_CAT "" "NAME;SOURCES;OUTPUT;PREFIX;STANDALONE;VERBOSE" "CXX_FLAGS_TARGET;MECHS" ${ARGN})
+  cmake_parse_arguments(MK_CAT "" "NAME;SOURCES;OUTPUT;PREFIX;STANDALONE;VERBOSE" "CXX_FLAGS_TARGET;MOD;CXX" ${ARGN})
   set(MK_CAT_OUT_DIR "${CMAKE_CURRENT_BINARY_DIR}/generated/${MK_CAT_NAME}")
 
   # Need to set ARB_WITH_EXTERNAL_MODCC *and* modcc
@@ -69,7 +69,8 @@ function("make_catalogue")
 
   if(MK_CAT_VERBOSE)
     message("Catalogue name:       ${MK_CAT_NAME}")
-    message("Catalogue mechanisms: ${MK_CAT_MECHS}")
+    message("Catalogue mechanisms: ${MK_CAT_MOD}")
+    message("Extra cxx files:      ${MK_CAT_CXX}")
     message("Catalogue sources:    ${MK_CAT_SOURCES}")
     message("Catalogue output:     ${MK_CAT_OUT_DIR}")
     message("Build as standalone:  ${MK_CAT_STANDALONE}")
@@ -86,7 +87,7 @@ function("make_catalogue")
   endif()
 
   build_modules(
-    ${MK_CAT_MECHS}
+    ${MK_CAT_MOD}
     SOURCE_DIR "${MK_CAT_SOURCES}"
     DEST_DIR "${MK_CAT_OUT_DIR}"
     ${external_modcc} # NB: expands to 'MODCC <binary>' to add an optional argument
@@ -102,7 +103,7 @@ function("make_catalogue")
 
   add_custom_command(
     OUTPUT  ${catalogue_${MK_CAT_NAME}_source}
-    COMMAND ${MK_CAT_PREFIX}/generate_catalogue ${catalogue_${MK_CAT_NAME}_options} ${MK_CAT_MECHS}
+    COMMAND ${MK_CAT_PREFIX}/generate_catalogue ${catalogue_${MK_CAT_NAME}_options} ${MK_CAT_MOD} ${MK_CAT_CXX}
     COMMENT "Building catalogue ${MK_CAT_NAME}"
     DEPENDS ${MK_CAT_PREFIX}/generate_catalogue)
 
@@ -110,7 +111,13 @@ function("make_catalogue")
   add_dependencies(build_catalogue_${MK_CAT_NAME}_mods ${MK_CAT_NAME}_catalogue_cpp_target)
   add_dependencies(build_all_mods build_catalogue_${MK_CAT_NAME}_mods)
 
-  foreach(mech ${MK_CAT_MECHS})
+  foreach(mech ${MK_CAT_MOD})
+    list(APPEND catalogue_${MK_CAT_NAME}_source ${MK_CAT_OUT_DIR}/${mech}_cpu.cpp)
+    if(ARB_WITH_GPU)
+      list(APPEND catalogue_${MK_CAT_NAME}_source ${MK_CAT_OUT_DIR}/${mech}_gpu.cpp ${MK_CAT_OUT_DIR}/${mech}_gpu.cu)
+    endif()
+  endforeach()
+  foreach(mech ${MK_CAT_CXX})
     list(APPEND catalogue_${MK_CAT_NAME}_source ${MK_CAT_OUT_DIR}/${mech}_cpu.cpp)
     if(ARB_WITH_GPU)
       list(APPEND catalogue_${MK_CAT_NAME}_source ${MK_CAT_OUT_DIR}/${mech}_gpu.cpp ${MK_CAT_OUT_DIR}/${mech}_gpu.cu)
diff --git a/mechanisms/CMakeLists.txt b/mechanisms/CMakeLists.txt
index ac69ce9402b10266f1a83fdcbe850b06682e0e8a..a219475fac58e26ffe6eda487ad67bd256bbd507 100644
--- a/mechanisms/CMakeLists.txt
+++ b/mechanisms/CMakeLists.txt
@@ -5,7 +5,8 @@ make_catalogue(
   NAME bbp
   SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/bbp"
   OUTPUT "CAT_BBP_SOURCES"
-  MECHS CaDynamics_E2 Ca_HVA Ca_LVAst Ih Im K_Pst K_Tst Nap_Et2 NaTa_t NaTs2_t SK_E2 SKv3_1
+  MOD CaDynamics_E2 Ca_HVA Ca_LVAst Ih Im K_Pst K_Tst Nap_Et2 NaTa_t NaTs2_t SK_E2 SKv3_1
+  CXX
   PREFIX "${PROJECT_SOURCE_DIR}/mechanisms"
   CXX_FLAGS_TARGET "${ARB_CXX_FLAGS_TARGET_FULL}"
   STANDALONE FALSE
@@ -15,7 +16,8 @@ make_catalogue(
   NAME allen
   SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/allen"
   OUTPUT "CAT_ALLEN_SOURCES"
-  MECHS CaDynamics Ca_HVA Ca_LVA Ih Im Im_v2 K_P K_T Kd Kv2like Kv3_1 NaTa NaTs NaV Nap SK
+  MOD CaDynamics Ca_HVA Ca_LVA Ih Im Im_v2 K_P K_T Kd Kv2like Kv3_1 NaTa NaTs NaV Nap SK
+  CXX
   PREFIX "${PROJECT_SOURCE_DIR}/mechanisms"
   CXX_FLAGS_TARGET "${ARB_CXX_FLAGS_TARGET_FULL}"
   STANDALONE FALSE
@@ -25,7 +27,8 @@ make_catalogue(
   NAME default
   SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/default"
   OUTPUT "CAT_DEFAULT_SOURCES"
-  MECHS exp2syn expsyn expsyn_stdp hh kamt kdrmt nax nernst pas gj
+  MOD exp2syn expsyn expsyn_stdp hh kamt kdrmt nax nernst pas gj
+  CXX
   PREFIX "${PROJECT_SOURCE_DIR}/mechanisms"
   CXX_FLAGS_TARGET "${ARB_CXX_FLAGS_TARGET_FULL}"
   STANDALONE FALSE
diff --git a/mechanisms/default/expsyn.mod b/mechanisms/default/expsyn.mod
index c1f7d24e9d84b4cdf23c81b05ff491f0086631ae..1ac2f3e55effc24fea871802dfab5981456afc9f 100644
--- a/mechanisms/default/expsyn.mod
+++ b/mechanisms/default/expsyn.mod
@@ -36,4 +36,3 @@ DERIVATIVE state {
 NET_RECEIVE(weight) {
     g = g + weight
 }
-
diff --git a/python/config.cpp b/python/config.cpp
index 3de7f86d7d07f164a4dcbb3155aa407c056d1c57..efe50545edb5d0d0ab711225bdb0734c1e22c75b 100644
--- a/python/config.cpp
+++ b/python/config.cpp
@@ -38,6 +38,16 @@ pybind11::dict config() {
     dict[pybind11::str("profiling")] = pybind11::bool_(true);
 #else
     dict[pybind11::str("profiling")] = pybind11::bool_(false);
+#endif
+#ifdef ARB_NEUROML_ENABLED
+    dict[pybind11::str("neuroml")] = pybind11::bool_(true);
+#else
+    dict[pybind11::str("neuroml")] = pybind11::bool_(false);
+#endif
+#ifdef ARB_BUNDLED_ENABLED
+    dict[pybind11::str("bundled")] = pybind11::bool_(true);
+#else
+    dict[pybind11::str("bundled")] = pybind11::bool_(false);
 #endif
     dict[pybind11::str("version")] = pybind11::str(ARB_VERSION);
     dict[pybind11::str("source")]  = pybind11::str(ARB_SOURCE_ID);
diff --git a/python/test/fixtures.py b/python/test/fixtures.py
index 38142fb3e577d41179e7162f5eeb16d96aee0f8e..a0d90a56b842783d4c7d53924280a02e1c0a0774 100644
--- a/python/test/fixtures.py
+++ b/python/test/fixtures.py
@@ -79,7 +79,7 @@ class _BuildCatError(Exception): pass
 
 def _build_cat_local(name, path):
     try:
-        subprocess.run(["build-catalogue", name, str(path)], check=True, stderr=subprocess.PIPE)
+        subprocess.run(["arbor-build-catalogue", name, str(path)], check=True, stderr=subprocess.PIPE)
     except subprocess.CalledProcessError as e:
         raise _BuildCatError("Tests can't build catalogues:\n" + e.stderr.decode()) from None
 
diff --git a/scripts/build-catalogue.in b/scripts/build-catalogue.in
index e79a45dc4d2f9d077305576db866947e4915ac58..de4bcd1ca13491b9540d738481b846be75242fdd 100755
--- a/scripts/build-catalogue.in
+++ b/scripts/build-catalogue.in
@@ -6,10 +6,10 @@ from tempfile import mkdtemp, TemporaryDirectory
 import os
 from pathlib import Path
 import shutil
-import stat
 import string
 import argparse
 import re
+
 def parse_arguments():
     def append_slash(s):
         return s+'/' if s and not s.endswith('/') else s
@@ -39,6 +39,16 @@ def parse_arguments():
                         type=str,
                         help='Catalogue name.')
 
+    parser.add_argument('--raw',
+                        metavar='raw',
+                        nargs='+',
+                        default=[],
+                        type=str,
+                        help='''Advanced: raw mechanisms as C++ files. For each <name>,
+<name>.hpp must be present in the target directory,
+plus <name>_cpu.cpp (unless -C is given) and, with
+GPU support, <name>_gpu.cpp and <name>_gpu.cu.''')
+
     parser.add_argument('modpfx',
                         metavar='modpfx',
                         type=str,
@@ -52,6 +62,15 @@ def parse_arguments():
                         action='store_true',
                         help='Less output.')
 
+    parser.add_argument('-g', '--gpu',
+                        metavar='gpu',
+                        help='Enable GPU support. Currently only cuda is supported; for hip|cuda-clang see https://github.com/arbor-sim/arbor/issues/1783.')
+
+    parser.add_argument('-C', '--no-cpu',
+                        action='store_true',
+                        default=False,
+                        help='Disable CPU support.')
+
     parser.add_argument('-d', '--debug',
                         nargs="?",
                         metavar="path",
@@ -64,7 +83,6 @@ def parse_arguments():
                         action='help',
                         help='Display this help and exit.')
 
-
     return vars(parser.parse_args())
 
 args    = parse_arguments()
@@ -74,32 +92,68 @@ name    = re.sub(r'_+', r'_',
                         args['name']))
 
 mod_dir = pwd / Path(args['modpfx'])
-mods    = [ f[:-4] for f in os.listdir(mod_dir) if f.endswith('.mod') ]
-verbose = args['verbose'] and not args['quiet']
+mods    = [f[:-4] for f in os.listdir(mod_dir) if f.endswith('.mod')]
 quiet   = args['quiet']
+verbose = args['verbose'] and not quiet
 debug   = args['debug']
+raw     = args['raw']
+gpu     = args['gpu']
+cpu     = not args['no_cpu']
+
+if gpu:
+    if gpu == 'cuda':
+        gpu_support = """
+add_compile_definitions(ARB_CUDA)
+add_compile_definitions(ARB_HAVE_GPU)
+
+enable_language(CUDA)
+set(CMAKE_CUDA_HOST_COMPILER @CMAKE_CXX_COMPILER@)
+set(CMAKE_CUDA_ARCHITECTURES @CMAKE_CUDA_ARCHITECTURES@)
+"""
+    else:
+        print(f"Unsupported GPU target: {gpu}. If you need support for HIP or Clang-CUDA, please check here: https://github.com/arbor-sim/arbor/issues/1783")
+        exit(-1)
+else:
+    gpu_support = """
+# GPU: Disabled
+"""
+
+this_path = Path(__file__).parent
+data_path = (this_path / "@ARB_REL_DATADIR@").resolve()
+pack_path = (this_path / "@ARB_REL_PACKAGEDIR@").resolve()
+exec_path = this_path.resolve()
+
+for path in [exec_path / 'modcc',
+             data_path / 'generate_catalogue',
+             data_path / 'BuildModules.cmake',
+             pack_path / 'arbor-config.cmake',]:
+    if not path.exists():
+        print(f'Could not find required tool: {path}. Please check your installation.')
+        exit(-1)
 
 cmake = f"""
 cmake_minimum_required(VERSION 3.9)
 project({name}-cat LANGUAGES CXX)
 
+set(arbor_DIR {pack_path})
 find_package(arbor REQUIRED)
-
+{gpu_support}
 set(CMAKE_BUILD_TYPE release)
-set(CMAKE_CXX_COMPILER ${{ARB_CXX}})
-set(CMAKE_CXX_FLAGS    ${{ARB_CXX_FLAGS}})
+set(CMAKE_CXX_COMPILER  ${{ARB_CXX}})
+set(CMAKE_CXX_FLAGS     ${{ARB_CXX_FLAGS}})
 
 include(BuildModules.cmake)
 
 set(ARB_WITH_EXTERNAL_MODCC true)
-find_program(modcc NAMES modcc)
+find_program(modcc NAMES modcc PATHS {exec_path})
 
 make_catalogue(
   NAME {name}
   SOURCES "${{CMAKE_CURRENT_SOURCE_DIR}}/mod"
   OUTPUT "CAT_{name.upper()}_SOURCES"
-  MECHS {' '.join(mods)}
-  PREFIX @ARB_INSTALL_DATADIR@
+  MOD {' '.join(mods)}
+  CXX {' '.join(raw)}
+  PREFIX {data_path}
   CXX_FLAGS_TARGET ${{ARB_CXX_FLAGS_TARGET}}
   STANDALONE ON
   VERBOSE {"ON" if verbose else "OFF"})
@@ -109,8 +163,14 @@ if not quiet:
     print(f"Building catalogue '{name}' from mechanisms in {mod_dir}")
     if debug:
         print("Debug mode enabled.")
-    for m in mods:
-        print(" *", m)
+    if mods:
+        print(" * NMODL")
+        for m in mods:
+            print("   *", m)
+    if raw:
+        print(" * Raw")
+        for m in raw:
+            print("   *", m)
 
 if debug:
     # Overwrite the local reference to `TemporaryDirectory` with a context
@@ -141,8 +201,25 @@ with TemporaryDirectory() as tmp:
     os.chdir(tmp / 'build')
     with open(tmp / 'CMakeLists.txt', 'w') as fd:
         fd.write(cmake)
-    shutil.copy2(f'@ARB_INSTALL_DATADIR@/BuildModules.cmake', tmp)
-    shutil.copy2(f'@ARB_INSTALL_DATADIR@/generate_catalogue', tmp)
+    shutil.copy2(f'{data_path}/BuildModules.cmake', tmp)
+    shutil.copy2(f'{data_path}/generate_catalogue', tmp)
+
+    out = tmp / 'build' / 'generated' / name
+    os.makedirs(out, exist_ok=True)
+    sfx = [".hpp"]
+    if cpu:
+        sfx += ["_cpu.cpp"]
+    if gpu:
+        sfx += ["_gpu.cpp", "_gpu.cu"]
+    for e in raw:
+        for s in sfx:
+            fn = mod_dir / (e + s)
+            if not fn.exists():
+                print(f'Could not find required file: {fn}. Please check your C++ mechanisms.')
+                exit(-1)
+            else:
+                shutil.copy2(fn, out / (e + s))
+
     cmake_cmd = 'cmake ..'
     make_cmd = 'make'
     if verbose:
diff --git a/scripts/build-wheels.sh b/scripts/build-wheels.sh
new file mode 100755
index 0000000000000000000000000000000000000000..0b881c71b09dfe9fa288056b4270da65dc23be32
--- /dev/null
+++ b/scripts/build-wheels.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+# A script that can be run in the PyPA manylinux containers if you want to produce uploadable wheels for PyPI.
+# Steps:
+# 1. Prepare a (temporary) working directory (referred to as $LOCAL_WORK_DIR). 
+# 2. Have the version of Arbor you want to build manylinux compliant wheels for available at $LOCAL_WORK_DIR/arbor
+# 3. Start an instance of the docker image with $LOCAL_WORK_DIR mounted at /src_dir
+#    Then, run /src_dir/arbor/scripts/build-wheels.sh
+#    Using podman, the following command can be used:
+#    podman run -v $LOCAL_WORK_DIR:/src_dir:Z -ti quay.io/pypa/manylinux2014_x86_64 /src_dir/arbor/scripts/build-wheels.sh
+# 4. After the run is complete, find in $LOCAL_WORK_DIR/wheelhouse the wheels ready for PyPI.
+#    $LOCAL_WORK_DIR/builtwheel contains the wheels before auditwheel has processed them. They can be discarded,
+#    or used for analysis in case of build failure.
+
+set -e -u -x
+
+yum -y install libxml2-devel
+/opt/python/cp310-cp310/bin/pip install ninja cmake
+
+rm -rf /src_dir/arbor/_skbuild
+
+export CIBUILDWHEEL=1 #Set condition for cmake
+
+for PYBIN in /opt/python/cp*/bin; do
+    "${PYBIN}/python" -m pip install wheel scikit-build auditwheel
+    export PATH="${PYBIN}":$PATH
+    "${PYBIN}/python" -m pip wheel --wheel-dir="/src_dir/builtwheel${PYBIN}/" /src_dir/arbor
+    "${PYBIN}/python" -m auditwheel repair /src_dir/builtwheel${PYBIN}/arbor*.whl -w /src_dir/wheelhouse
+done
+
+/opt/python/cp310-cp310/bin/python /src_dir/arbor/scripts/patchwheel.py /src_dir/wheelhouse
+
+# Todo: Install packages and test
diff --git a/scripts/patchwheel.py b/scripts/patchwheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a45d375884a4b0c73f9d4a8feb9a6a839f83426
--- /dev/null
+++ b/scripts/patchwheel.py
@@ -0,0 +1,45 @@
+import shutil, subprocess, argparse
+from pathlib import Path
+
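+# Typical invocation (cf. scripts/build-wheels.sh):
+#   python3 patchwheel.py /src_dir/wheelhouse
+# Wheels in the given directory are patched in place; the pre-patch wheels are
+# moved to <path>/old, which is deleted at the end unless --keepold is given.
+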
+def parse_arguments():
+    parser = argparse.ArgumentParser(description='Patch Arbor wheels built with scikit-build and corrected by auditwheel. Linux only.')
+    parser.add_argument('path', type=dir_path, help='The path where your wheels are located. They will be patched in place.')
+    parser.add_argument('-ko', '--keepold', action='store_true', help='Keep the old wheels in <path>/old instead of deleting them.')
+
+    return parser.parse_args()
+
+def dir_path(path):
+    path = Path(path)
+    if Path.is_dir(path):
+        return path
+    else:
+        raise argparse.ArgumentTypeError(f"{path} is not a valid path")
+
+parsed_args = parse_arguments()
+Path.mkdir(parsed_args.path / 'old', exist_ok=True)
+
+for inwheel in parsed_args.path.glob("*.whl"):
+    zipdir = Path(f"{inwheel}.unzip")
+    # shutil.unpack_archive(inwheel,zipdir,'zip') # Disabled, because shutil (and ZipFile) don't preserve filemodes
+    subprocess.check_call(f"unzip {inwheel} -d {zipdir}",shell=True)
+
+    arborn = list(zipdir.glob("**/_arbor.cpython*.so"))[0]
+    libxml2n = list(zipdir.glob("**/libxml2*.so*"))[0]
+    subprocess.check_call(f"patchelf --set-rpath '$ORIGIN/../arbor.libs' {arborn}",shell=True)
+    subprocess.check_call(f"patchelf --set-rpath '$ORIGIN' {libxml2n}",shell=True)
+
+    # TODO? correct checksum/bytecounts in *.dist-info/RECORD.
+    # So far, Python does not report mismatches
+
+    outwheel = Path(shutil.make_archive(inwheel, 'zip', zipdir))
+    Path.rename(inwheel, parsed_args.path / 'old' / inwheel.name)
+    Path.rename(outwheel, parsed_args.path / inwheel.name)
+    shutil.rmtree(zipdir)
+
+if not parsed_args.keepold:
+    shutil.rmtree(parsed_args.path / 'old')
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index aaac5cf4e6368d6be3f74f71d89cb1beb058c30f..0000000000000000000000000000000000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[metadata]
-description-file = python/readme.md
diff --git a/setup.py b/setup.py
index 094837d4a275871fbadcf549229fd241320fbaa2..30cf7453df0d52f53b40c36ee839c0a3be718904 100644
--- a/setup.py
+++ b/setup.py
@@ -1,249 +1,58 @@
-import os
-import sys
-import setuptools
-import pathlib
-from setuptools import Extension
-from setuptools.command.build_ext import build_ext
-from setuptools.command.install import install
-import subprocess
-try:
-    from wheel.bdist_wheel import bdist_wheel
-    WHEEL_INSTALLED = True
-except:
-    #wheel package not installed.
-    WHEEL_INSTALLED = False
-    pass
-
-# Singleton class that holds the settings configured using command line
-# options. This information has to be stored in a singleton so that it
-# can be passed between different stages of the build, and because pip
-# has strange behavior between different versions.
-class CL_opt:
-    instance = None
-    def __init__(self):
-        if not CL_opt.instance:
-            CL_opt.instance = {'mpi': False,
-                               'gpu': 'none',
-                               'vec': False,
-                               'arch': 'none',
-                               'neuroml': True,
-                               'bundled': True,
-                               'makejobs': 2}
-
-    def settings(self):
-        return CL_opt.instance
-
-def cl_opt():
-    return CL_opt().settings()
-
-# extend user_options the same way for all Command()s
-user_options_ = [
-        ('mpi',   None, 'enable mpi support (requires MPI library)'),
-        ('gpu=',  None, 'enable nvidia cuda support (requires cudaruntime and nvcc) or amd hip support. Supported values: '
-                        'none, cuda, cuda-clang, hip'),
-        ('vec',   None, 'enable vectorization'),
-        ('arch=', None, 'cpu architecture, e.g. haswell, skylake, armv8.2-a+sve, znver2 (default native).'),
-        ('neuroml', None, 'enable parsing neuroml morphologies in Arbor (requires libxml)'),
-        ('sysdeps', None, 'don\'t use bundled 3rd party C++ dependencies (pybind11 and json). This flag forces use of dependencies installed on the system.'),
-        ('makejobs=', None, 'the amount of jobs to run `make` with.')
-    ]
-
-# VERSION is in the same path as setup.py
-here = os.path.abspath(os.path.dirname(__file__))
-with open(os.path.join(here, 'VERSION')) as version_file:
-    version_ = version_file.read().strip()
-
+from pathlib import Path
+from sys import executable as python
+from skbuild import setup
+import os,platform
+
+# Hard-coded options; scikit-build does not expose build options directly.
+# Override by passing CMake flags through pip, one per option, e.g.:
+# pip install ./arbor --install-option="-DARB_USE_BUNDLED_LIBS=ON" --install-option="-DARB_WITH_MPI=ON" --install-option="-DARB_GPU=cuda"
+with_mpi   = False
+with_gpu   = 'none'
+with_vec   = False
+arch       = 'none'
+with_nml   = True
+use_libs   = True
+build_type = 'Release' # ok even for debugging, as we always compile with debug symbols
+
+# Find our dir; *should* be the arbor checkout
+here = Path(__file__).resolve().parent
+# Read version file
+with open(here / 'VERSION') as fd:
+    arbor_version = fd.read().strip()
 # Get the contents of the readme
-with open(os.path.join(here, 'python/readme.md'), encoding='utf-8') as f:
-    long_description = f.read()
-
-def check_cmake():
-    try:
-        out = subprocess.check_output(['cmake', '--version'])
-        return True
-    except OSError:
-        return False
-
-
-class _command_template:
-    """
-    Override a setuptools-like command to augment the command line options.
-    Needs to appear before the command class in the class's argument list for
-    correct MRO.
-
-    Examples
-    --------
-
-    .. code-block: python
-
-      class install_command(_command_template, install):
-          pass
-
-
-      class complex_command(_command_template, mixin1, install):
-          def initialize_options(self):
-              # Both here and in `mixin1`, a `super` call is required
-              super().initialize_options()
-              # ...
-
-    """
-    def __init_subclass__(cls, **kwargs):
-        super().__init_subclass__(**kwargs)
-        cls.user_options = super().user_options + user_options_
-
-
-    def initialize_options(self):
-        super().initialize_options()
-        self.mpi  = None
-        self.gpu  = None
-        self.arch = None
-        self.vec  = None
-        self.neuroml = None
-        self.sysdeps = None
-        self.makejobs = 2
-
-    def finalize_options(self):
-        super().finalize_options()
-        try:
-            self.makejobs = int(self.makejobs)
-        except ValueError:
-            err = True
-        else:
-            err = False
-        if err or self.makejobs < 1:
-            raise AssertionError('makejobs must be a strictly positive integer')
-
-    def run(self):
-        # The options are stored in global variables:
-        opt = cl_opt()
-        #   mpi  : build with MPI support (boolean).
-        opt['mpi']  = self.mpi is not None
-        #   gpu  : compile for AMD/NVIDIA GPUs and choose compiler (string).
-        opt['gpu']  = "none" if self.gpu is None else self.gpu
-        #   vec  : generate SIMD vectorized kernels for CPU micro-architecture (boolean).
-        opt['vec']  = self.vec is not None
-        #   arch : target CPU micro-architecture (string).
-        opt['arch'] = 'none' if self.arch is None else self.arch
-        #   neuroml : compile with neuroml support for morphologies.
-        opt['neuroml'] = self.neuroml is not None
-        #   bundled : use bundled/git-submoduled 3rd party libraries.
-        #             By default use bundled libs.
-        opt['bundled'] = self.sysdeps is None
-        #   makejobs : specify amount of jobs.
-        #              By default 2.
-        opt['makejobs'] = int(self.makejobs)
-
-        super().run()
-
-
-class install_command(_command_template, install):
-    pass
-
-if WHEEL_INSTALLED:
-    class bdist_wheel_command(_command_template, bdist_wheel):
-        pass
-
-
-class cmake_extension(Extension):
-    def __init__(self, name):
-        Extension.__init__(self, name, sources=[])
-
-
-class cmake_build(build_ext):
-    def run(self):
-        if not check_cmake():
-            raise RuntimeError('CMake is not available. CMake 3.12 is required.')
-
-        # The path where CMake will be configured and Arbor will be built.
-        build_directory = os.path.abspath(self.build_temp)
-        # The path where the package will be copied after building.
-        lib_directory = os.path.abspath(self.build_lib)
-        # The path where the Python package will be compiled.
-        source_path = build_directory + '/python/arbor'
-        # Where to copy the package after it is built, so that whatever the next phase is
-        # can copy it into the target 'prefix' path.
-        dest_path = lib_directory + '/arbor'
-
-        opt = cl_opt()
-        cmake_args = [
-            '-DARB_WITH_PYTHON=on',
-            '-DPYTHON_EXECUTABLE=' + sys.executable,
-            '-DARB_WITH_MPI={}'.format( 'on' if opt['mpi'] else 'off'),
-            '-DARB_VECTORIZE={}'.format('on' if opt['vec'] else 'off'),
-            '-DARB_ARCH={}'.format(opt['arch']),
-            '-DARB_GPU={}'.format(opt['gpu']),
-            '-DARB_WITH_NEUROML={}'.format( 'on' if opt['neuroml'] else 'off'),
-            '-DARB_USE_BUNDLED_LIBS={}'.format('on' if opt['bundled'] else 'off'),
-            '-DCMAKE_BUILD_TYPE=Release' # we compile with debug symbols in release mode.
-        ]
-
-        print('-'*5, 'command line arguments: {}'.format(opt))
-        print('-'*5, 'cmake arguments: {}'.format(cmake_args))
-
-        build_args = ['--config', 'Release']
-
-        # Assuming Makefiles
-        build_args += ['--', f'-j{opt["makejobs"]}']
-
-        env = os.environ.copy()
-        env['CXXFLAGS'] = '{}'.format(env.get('CXXFLAGS', ''))
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-
-        # CMakeLists.txt is in the same directory as this setup.py file
-        cmake_list_dir = os.path.abspath(os.path.dirname(__file__))
-        print('-'*20, 'Configure CMake')
-        subprocess.check_call(['cmake', cmake_list_dir] + cmake_args,
-                              cwd=self.build_temp, env=env)
-
-        print('-'*20, 'Build')
-        cmake_cmd = ['cmake', '--build', '.'] + build_args
-        subprocess.check_call(cmake_cmd,
-                              cwd=self.build_temp)
-
-        # Copy from build path to some other place from whence it will later be installed.
-        # ... or something like that
-        # ... setuptools is an enigma monkey patched on a mystery
-        if not os.path.exists(dest_path):
-            os.makedirs(dest_path, exist_ok=True)
-        self.copy_tree(source_path, dest_path)
-
-setuptools.setup(
-    name='arbor',
-    version=version_,
-    python_requires='>=3.6',
-
-    install_requires=['numpy'],
-    setup_requires=[],
-    zip_safe=False,
-    ext_modules=[cmake_extension('arbor')],
-    cmdclass={
-        'build_ext':   cmake_build,
-        'install':     install_command,
-        'bdist_wheel': bdist_wheel_command,
-    } if WHEEL_INSTALLED else {
-        'build_ext':   cmake_build,
-        'install':     install_command,
-    },
-
-    author='The Arbor dev team.',
-    url='https://github.com/arbor-sim/arbor',
-    description='High performance simulation of networks of multicompartment neurons.',
-    long_description=long_description,
-    long_description_content_type='text/markdown',
-    classifiers=[
-        'Development Status :: 5 - Production/Stable',
-        'Intended Audience :: Science/Research',
-        'License :: OSI Approved :: BSD License',
-        'Programming Language :: Python :: 3.6',
-        'Programming Language :: Python :: 3.7',
-        'Programming Language :: Python :: 3.8',
-        'Programming Language :: Python :: 3.9',
-        'Programming Language :: C++',
-    ],
-    project_urls={
-        'Source': 'https://github.com/arbor-sim/arbor',
-        'Documentation': 'https://docs.arbor-sim.org',
-        'Bug Reports': 'https://github.com/arbor-sim/arbor/issues',
-    },
-)
+with open(here / 'python' / 'readme.md', encoding='utf-8') as fd:
+    long_description = fd.read()
+
+setup(name='arbor',
+      version=arbor_version,
+      python_requires='>=3.6',
+      install_requires=['numpy'],
+      setup_requires=[],
+      zip_safe=False,
+      packages=['arbor'],
+      cmake_args=['-DARB_WITH_PYTHON=on',
+                  f'-DPYTHON_EXECUTABLE={python}',
+                  f'-DARB_WITH_MPI={with_mpi}',
+                  f'-DARB_VECTORIZE={with_vec}',
+                  f'-DARB_ARCH={arch}',
+                  f'-DARB_GPU={with_gpu}',
+                  f'-DARB_WITH_NEUROML={with_nml}',
+                  f'-DARB_USE_BUNDLED_LIBS={use_libs}',
+                  f'-DCMAKE_BUILD_TYPE={build_type}',],
+      author='The Arbor dev team.',
+      url='https://arbor-sim.org',
+      description='High performance simulation of networks of multicompartment neurons.',
+      long_description=long_description,
+      long_description_content_type='text/markdown',
+      classifiers=['Development Status :: 5 - Production/Stable',
+                   'Intended Audience :: Science/Research',
+                   'License :: OSI Approved :: BSD License',
+                   'Programming Language :: Python :: 3.6',
+                   'Programming Language :: Python :: 3.7',
+                   'Programming Language :: Python :: 3.8',
+                   'Programming Language :: Python :: 3.9',
+                   'Programming Language :: Python :: 3.10',
+                   'Programming Language :: C++',],
+      project_urls={'Source': 'https://github.com/arbor-sim/arbor',
+                    'Documentation': 'https://docs.arbor-sim.org',
+                    'Bug Reports': 'https://github.com/arbor-sim/arbor/issues',},)
diff --git a/test/unit/CMakeLists.txt b/test/unit/CMakeLists.txt
index ef278313305f2c2534eeddf944ff485c044e5646..d7d1954a6a834e21afe130f3a8f56c5e3a0189de 100644
--- a/test/unit/CMakeLists.txt
+++ b/test/unit/CMakeLists.txt
@@ -219,7 +219,8 @@ if(${CMAKE_POSITION_INDEPENDENT_CODE})
     NAME dummy
     SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/dummy"
     OUTPUT "CAT_DUMMY_SOURCES"
-    MECHS dummy
+    MOD dummy
+    CXX
     PREFIX "${PROJECT_SOURCE_DIR}/mechanisms"
     CXX_FLAGS_TARGET ${ARB_CXX_FLAGS_TARGET_FULL}
     STANDALONE ON
diff --git a/test/unit/test_mechcat.cpp b/test/unit/test_mechcat.cpp
index 05077ca2a1ba2cfadeac6b325d670d1f129ac97b..431dffb08d339dc74f3fbb866f5a9d1043b08f55 100644
--- a/test/unit/test_mechcat.cpp
+++ b/test/unit/test_mechcat.cpp
@@ -286,7 +286,6 @@ TEST(mechcat, loading) {
     EXPECT_NO_THROW(cat = &load_catalogue(LIBDIR "/dummy-catalogue.so"));
     ASSERT_NE(cat, nullptr);
     EXPECT_EQ(std::vector<std::string>{"dummy"}, cat->mechanism_names());
-
 }
 #endif