From 81096351971fd6dc0ed514b45d7bcb649170d98e Mon Sep 17 00:00:00 2001
From: Thorsten Hater <24411438+thorstenhater@users.noreply.github.com>
Date: Thu, 27 Oct 2022 11:00:47 +0200
Subject: [PATCH] Add a plethora of config options to a-b-c. (#1958)
Closes #1861
Closes #1783
- arbor-build-catalogue (a-b-c) no longer needs to be in a fixed location
- nor is the configuration for a-b-c fixed statically at build time
- instead, we rely on `arbor.config()` to read the relevant default values
- each value can be overridden, if desired
- added many more values to the configuration
  - gpu type and arch
  - paths and prefix
  - CXX
- report the default settings for easier diagnostics
- implement a fallback for the prefix if it does not exist, in particular for skbuild installs
In essence, you can now use a-b-c as a standalone tool, as long as you have a properly
configured py-arbor installation.
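
To make the effect concrete, here is a minimal sketch (not the script itself, just the
resolution logic it now follows) of how defaults are derived from `arbor.config()`, using
the keys this patch adds to `python/config.cpp`:
```python
# Sketch of the default resolution now performed by arbor-build-catalogue.
# The keys ("prefix", "binary_path", "lib_path", "data_path", "CXX") are
# those added to python/config.cpp by this patch.
import sys
from pathlib import Path

import arbor as A

config = A.config()
prefix = Path(config["prefix"])
if not prefix.exists():
    # e.g. a stale _skbuild prefix: fall back to the location of the
    # installed arbor package (<prefix>/lib/pythonX.Y/site-packages/arbor).
    prefix = Path(A.__path__[0]).parent.parent.parent.parent
    print(f"Warning: falling back to '{prefix}'.", file=sys.stderr)

modcc = prefix / config["binary_path"] / "modcc"
cmake_dir = prefix / config["lib_path"] / "cmake" / "arbor"
data_dir = prefix / config["data_path"] / "arbor"
cxx = config["CXX"]
```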
Example output after removing `_skbuild`:
```
Warning: prefix '/Users/hater/src/arbor/_skbuild/macosx-11.0-x86_64-3.10/cmake-install' does not exist, falling back to '/Users/hater/src/arbor/.direnv/python-3.10.6'.
usage: arbor-build-catalogue catalogue_name mod_source_dir
Generate dynamic catalogue and build it into a shared object.
positional arguments:
name Catalogue name.
modpfx Directory name where *.mod files live.
options:
--raw raw [raw ...]
Advanced: Raw mechanisms as C++ files. Per <name> the files <name>.hpp, <name>_cpu.cpp (if CPU
is enabled) must be present in the target directory and with GPU support also <name>_gpu.cpp and
<name>_gpu.cu.
-v, --verbose Verbose.
-q, --quiet Less output.
--cpu CPU Enable CPU support.
--debug [path] Don't clean up the generated temp cpp code. Can be a target path for the generated code.
--gpu gpu Enable GPU support
--gpu-arch gpu_arch
Enable GPU support; default=-
--cxx cxx Use this C++ compiler; default=/usr/local/opt/llvm/bin/clang++.
--prefix prefix Arbor's install prefix; default=/Users/hater/src/arbor/.direnv/python-3.10.6.
--bin bin Look here for Arbor utils like modcc; relative to prefix, default=bin.
--lib lib Look here for Arbor's CMake config; relative to prefix, default=lib.
--data data Look here for Arbor supplementals like generate_catalogue; relative to prefix, default=lib
-h, --help Display this help and exit.
```
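For reference, the relevant part of the config dictionary after this patch looks roughly as
follows; the values shown in the comments are illustrative, and `gpu` is `None` when Arbor
was built without GPU support:
```python
import arbor

cfg = arbor.config()
# New/changed entries introduced by this patch (comment values are illustrative):
cfg["gpu"]          # "cuda", "cuda-clang", "hip", or None
cfg["prefix"]       # install prefix; default for a-b-c's --prefix
cfg["binary_path"]  # e.g. "bin"; where modcc lives, relative to prefix
cfg["lib_path"]     # e.g. "lib"; Arbor's CMake config, relative to prefix
cfg["data_path"]    # where BuildModules.cmake etc. live, relative to prefix
cfg["CXX"]          # compiler used to build Arbor; default for --cxx
```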
---
.github/workflows/lint.yml | 6 +-
CMakeLists.txt | 9 +-
arbor/include/CMakeLists.txt | 9 +
python/CMakeLists.txt | 8 +
python/config.cpp | 27 ++-
python/context.cpp | 4 +-
python/test/fixtures.py | 3 +-
.../test/unit/test_domain_decompositions.py | 4 +-
.../test_domain_decompositions.py | 2 +-
...ild-catalogue.in => arbor-build-catalogue} | 161 +++++++++++-------
10 files changed, 151 insertions(+), 82 deletions(-)
rename scripts/{build-catalogue.in => arbor-build-catalogue} (63%)
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 4d109e4b..743f145d 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -26,8 +26,8 @@ jobs:
- name: Python Formatting
uses: psf/black@stable
with:
- options: --check
- src: scripts/build-catalogue.in .
+ options: --check --extend-exclude '/(ext|doc/scripts/.*_theme|doc/scripts/inputs.py)'
+ src: .
- name: Python analysis
run: |
- flake8 scripts/build-catalogue.in .
+ flake8 .
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 0773d6a1..41675d28 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -177,12 +177,6 @@ set(CMAKE_CXX_EXTENSIONS OFF)
# Data and internal scripts go here
set(ARB_INSTALL_DATADIR ${CMAKE_INSTALL_DATAROOTDIR}/arbor)
-# Derived paths for arbor-build-catalogue
-get_filename_component(absolute_full_bindir ${CMAKE_INSTALL_BINDIR} REALPATH)
-get_filename_component(absolute_full_datarootdir ${CMAKE_INSTALL_DATAROOTDIR} REALPATH)
-get_filename_component(absolute_full_libdir ${CMAKE_INSTALL_LIBDIR} REALPATH)
-file(RELATIVE_PATH ARB_REL_DATADIR ${absolute_full_bindir} ${absolute_full_datarootdir}/arbor)
-file(RELATIVE_PATH ARB_REL_PACKAGEDIR ${absolute_full_bindir} ${absolute_full_libdir}/cmake/arbor)
# Interface library `arbor-config-defs` collects configure-time defines
# for arbor, arborenv, arborio, of the form ARB_HAVE_XXX. These
@@ -227,8 +221,7 @@ install(TARGETS arborio-public-deps EXPORT arborio-targets)
# Add scripts and supporting CMake for setting up external catalogues
-configure_file(scripts/build-catalogue.in ${CMAKE_CURRENT_BINARY_DIR}/arbor-build-catalogue @ONLY)
-install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/arbor-build-catalogue DESTINATION ${CMAKE_INSTALL_BINDIR})
+install(PROGRAMS scripts/arbor-build-catalogue DESTINATION ${CMAKE_INSTALL_BINDIR})
install(FILES mechanisms/BuildModules.cmake DESTINATION ${ARB_INSTALL_DATADIR})
# External libraries in `ext` sub-directory: json, tinyopt and randon123.
# Creates interface libraries `ext-json`, `ext-tinyopt` and `ext-random123`
diff --git a/arbor/include/CMakeLists.txt b/arbor/include/CMakeLists.txt
index 1cfc13b0..461b7d54 100644
--- a/arbor/include/CMakeLists.txt
+++ b/arbor/include/CMakeLists.txt
@@ -53,6 +53,15 @@ endif()
if(ARB_VECTORIZE)
list(APPEND arb_features VECTORIZE)
endif()
+if(ARB_WITH_NVCC)
+ list(APPEND arb_features NVCC)
+endif()
+if(ARB_WITH_CUDA_CLANG)
+ list(APPEND arb_features CUDA_CLANG)
+endif()
+if(ARB_WITH_HIP_CLANG)
+ list(APPEND arb_features HIP)
+endif()
string(TOUPPER "${CMAKE_BUILD_TYPE}" arb_config_str)
diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt
index 61f789ee..678bfe06 100644
--- a/python/CMakeLists.txt
+++ b/python/CMakeLists.txt
@@ -1,3 +1,5 @@
+include(GNUInstallDirs)
+
set(PYBIND11_CPP_STANDARD -std=c++17)
if(ARB_USE_BUNDLED_PYBIND11)
@@ -41,6 +43,12 @@ set(pyarb_source
env.cpp
)
+set_property(SOURCE config.cpp PROPERTY COMPILE_DEFINITIONS ARB_BINARY="${CMAKE_INSTALL_BINDIR}" APPEND)
+set_property(SOURCE config.cpp PROPERTY COMPILE_DEFINITIONS ARB_LIB="${CMAKE_INSTALL_LIBDIR}" APPEND)
+set_property(SOURCE config.cpp PROPERTY COMPILE_DEFINITIONS ARB_DATA="${CMAKE_INSTALL_DATAROOTDIR}" APPEND)
+set_property(SOURCE config.cpp PROPERTY COMPILE_DEFINITIONS ARB_CXX_COMPILER="${CMAKE_CXX_COMPILER}" APPEND)
+set_property(SOURCE config.cpp PROPERTY COMPILE_DEFINITIONS ARB_PREFIX="${CMAKE_INSTALL_PREFIX}" APPEND)
+
# compile the pyarb sources into an object library that will be
# use by both the Python wrapper target (pyarb) and for the
# unit tests of the C++ components in the Python wrapper.
diff --git a/python/config.cpp b/python/config.cpp
index 6f8ae58e..62d604ee 100644
--- a/python/config.cpp
+++ b/python/config.cpp
@@ -24,10 +24,17 @@ pybind11::dict config() {
#else
dict[pybind11::str("mpi4py")] = pybind11::bool_(false);
#endif
-#ifdef ARB_GPU_ENABLED
- dict[pybind11::str("gpu")] = pybind11::bool_(true);
-#else
- dict[pybind11::str("gpu")] = pybind11::bool_(false);
+#ifdef ARB_NVCC_ENABLED
+ dict[pybind11::str("gpu")] = pybind11::str("cuda");
+#endif
+#ifdef ARB_CUDA_CLANG_ENABLED
+ dict[pybind11::str("gpu")] = pybind11::str("cuda-clang");
+#endif
+#ifdef ARB_HIP_ENABLED
+ dict[pybind11::str("gpu")] = pybind11::str("hip");
+#endif
+#ifndef ARB_GPU_ENABLED
+ dict[pybind11::str("gpu")] = pybind11::none();
#endif
#ifdef ARB_VECTORIZE_ENABLED
dict[pybind11::str("vectorize")] = pybind11::bool_(true);
@@ -49,9 +56,15 @@ pybind11::dict config() {
#else
dict[pybind11::str("bundled")] = pybind11::bool_(false);
#endif
- dict[pybind11::str("version")] = pybind11::str(ARB_VERSION);
- dict[pybind11::str("source")] = pybind11::str(ARB_SOURCE_ID);
- dict[pybind11::str("arch")] = pybind11::str(ARB_ARCH);
+
+ dict[pybind11::str("version")] = pybind11::str(ARB_VERSION);
+ dict[pybind11::str("source")] = pybind11::str(ARB_SOURCE_ID);
+ dict[pybind11::str("arch")] = pybind11::str(ARB_ARCH);
+ dict[pybind11::str("prefix")] = pybind11::str(ARB_PREFIX);
+ dict[pybind11::str("binary_path")] = pybind11::str(ARB_BINARY);
+ dict[pybind11::str("lib_path")] = pybind11::str(ARB_LIB);
+ dict[pybind11::str("data_path")] = pybind11::str(ARB_DATA);
+ dict[pybind11::str("CXX")] = pybind11::str(ARB_CXX_COMPILER);
{
#define mk_tok(x) #x
#define mk_ver(M, m, p) mk_tok(M) "." mk_tok(m) "." mk_tok(p)
diff --git a/python/context.cpp b/python/context.cpp
index 2f0e6ffd..a2f3da57 100644
--- a/python/context.cpp
+++ b/python/context.cpp
@@ -132,7 +132,7 @@ void register_contexts(pybind11::module& m) {
"threads"_a=1, "gpu_id"_a=pybind11::none(), "mpi"_a=pybind11::none(),
"Construct a distributed context with arguments:\n"
" threads: The number of threads available locally for execution. Must be set to 1 at minimum. 1 by default.\n"
- " gpu_id: The identifier of the GPU to use, None by default. Only available if arbor.__config__['gpu']==True.\n"
+ " gpu_id: The identifier of the GPU to use, None by default. Only available if arbor.__config__['gpu']!=\"none\".\n"
" mpi: The MPI communicator, None by default. Only available if arbor.__config__['mpi']==True.\n")
.def(pybind11::init(
[](std::string threads, pybind11::object gpu, pybind11::object mpi){
@@ -146,7 +146,7 @@ void register_contexts(pybind11::module& m) {
"threads"_a, "gpu_id"_a=pybind11::none(), "mpi"_a=pybind11::none(),
"Construct a distributed context with arguments:\n"
" threads: A string option describing the number of threads. Currently, only \"avail_threads\" is supported.\n"
- " gpu_id: The identifier of the GPU to use, None by default. Only available if arbor.__config__['gpu']==True.\n"
+ " gpu_id: The identifier of the GPU to use, None by default. Only available if arbor.__config__['gpu']!=\"none\".\n"
" mpi: The MPI communicator, None by default. Only available if arbor.__config__['mpi']==True.\n")
.def(pybind11::init(
[](proc_allocation_shim alloc, pybind11::object mpi){
diff --git a/python/test/fixtures.py b/python/test/fixtures.py
index 19f88ae9..4fbb3514 100644
--- a/python/test/fixtures.py
+++ b/python/test/fixtures.py
@@ -86,10 +86,11 @@ def _build_cat_local(name, path):
["arbor-build-catalogue", name, str(path)],
check=True,
stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
)
except subprocess.CalledProcessError as e:
raise _BuildCatError(
- "Tests can't build catalogues:\n" + e.stderr.decode()
+ f"Tests can't build catalogue '{name}' from '{path}':\n{e.stderr.decode()}\n\n{e.stdout.decode()}"
) from None
diff --git a/python/test/unit/test_domain_decompositions.py b/python/test/unit/test_domain_decompositions.py
index b08ef2b1..28f3cda9 100644
--- a/python/test/unit/test_domain_decompositions.py
+++ b/python/test/unit/test_domain_decompositions.py
@@ -76,7 +76,7 @@ class TestDomain_Decompositions(unittest.TestCase):
self.assertEqual(grp.kind, arb.cell_kind.cable)
# 1 cpu core, 1 gpu; assumes all cells will be placed on gpu in a single cell group
- @unittest.skipIf(gpu_enabled is False, "GPU not enabled")
+ @unittest.skipIf(not gpu_enabled, "GPU not enabled")
def test_domain_decomposition_homogenous_GPU(self):
n_cells = 10
recipe = homo_recipe(n_cells)
@@ -139,7 +139,7 @@ class TestDomain_Decompositions(unittest.TestCase):
self.assertEqual(k, recipe.cell_kind(gid))
# 1 cpu core, 1 gpu; assumes cable cells will be placed on gpu in a single cell group; spike cells are on cpu in cell groups of size 1
- @unittest.skipIf(gpu_enabled is False, "GPU not enabled")
+ @unittest.skipIf(not gpu_enabled, "GPU not enabled")
def test_domain_decomposition_heterogenous_GPU(self):
n_cells = 10
recipe = hetero_recipe(n_cells)
diff --git a/python/test/unit_distributed/test_domain_decompositions.py b/python/test/unit_distributed/test_domain_decompositions.py
index b8fbbfb8..5535f02f 100644
--- a/python/test/unit_distributed/test_domain_decompositions.py
+++ b/python/test/unit_distributed/test_domain_decompositions.py
@@ -207,7 +207,7 @@ class TestDomain_Decompositions_Distributed(unittest.TestCase):
self.assertEqual(grp.kind, arb.cell_kind.cable)
# 1 node with 1 cpu core, 1 gpu; assumes all cells will be placed on gpu in a single cell group
- @unittest.skipIf(gpu_enabled is False, "GPU not enabled")
+ @unittest.skipIf(not gpu_enabled, "GPU not enabled")
def test_domain_decomposition_homogenous_GPU(self):
if mpi_enabled:
diff --git a/scripts/build-catalogue.in b/scripts/arbor-build-catalogue
similarity index 63%
rename from scripts/build-catalogue.in
rename to scripts/arbor-build-catalogue
index 6deea74e..1bf7b0e3 100755
--- a/scripts/build-catalogue.in
+++ b/scripts/arbor-build-catalogue
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
+import arbor as A
import subprocess as sp
import sys
from tempfile import mkdtemp
@@ -10,32 +11,24 @@ import argparse
import re
-def parse_arguments():
- def append_slash(s):
- return s + "/" if s and not s.endswith("/") else s
-
- class ConciseHelpFormatter(argparse.HelpFormatter):
- def __init__(self, **kwargs):
- super(ConciseHelpFormatter, self).__init__(max_help_position=20, **kwargs)
-
- def _format_action_invocation(self, action):
- if not action.option_strings:
- return super(ConciseHelpFormatter, self)._format_action_invocation(
- action
- )
- else:
- optstr = ", ".join(action.option_strings)
- if action.nargs == 0:
- return optstr
- else:
- return optstr + " " + self._format_args(action, action.dest.upper())
+config = A.config()
+prefix = Path(config['prefix'])
+if not prefix.exists():
+ try:
+ # Example <>/lib/python3.10/site-packages/arbor
+ altern = Path(A.__path__[0]).parent.parent.parent.parent
+ print(f"Warning: prefix '{prefix}' does not exist, falling back to '{altern}'.", file=sys.stderr)
+ prefix = altern
+ except:
+ print(f"Error: Neither prefix '{prefix}' nor fallback '{altern}' exist; giving up.", file=sys.stderr)
+ exit(-1)
+def parse_arguments():
parser = argparse.ArgumentParser(
description="Generate dynamic catalogue and build it into a shared object.",
usage="%(prog)s catalogue_name mod_source_dir",
add_help=False,
- formatter_class=ConciseHelpFormatter,
- )
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("name", metavar="name", type=str, help="Catalogue name.")
@@ -45,10 +38,9 @@ def parse_arguments():
nargs="+",
default=[],
type=str,
- help="""Advanced: Raw mechanisms as C++ files. Per <name> the
-files <name>.hpp, <name>_cpu.cpp must be present
-in the target directory and with GPU support
-also <name>_gpu.cpp and <name>_gpu.cu (if not given -C).""",
+ help="""Raw C++ mechanisms; per <name> the files <name>.hpp,
+<name>_cpu.cpp (if CPU enabled), <name>_gpu.cpp and <name>_gpu.cu (if GPU
+enabled) must be present in the target directory.""",
)
parser.add_argument(
@@ -62,26 +54,61 @@ also <name>_gpu.cpp and <name>_gpu.cu (if not given -C).""",
parser.add_argument("-q", "--quiet", action="store_true", help="Less output.")
+ parser.add_argument("--cpu",
+ default=True,
+ help="Enable CPU support.")
+
+ parser.add_argument(
+ "--debug",
+ nargs="?",
+ metavar="path",
+ const=True,
+ default=None,
+ help="Don't clean up the generated temp cpp code."
+ +" Can be a target path for the generated code.",
+ )
+
parser.add_argument(
- "-g",
"--gpu",
metavar="gpu",
- help="Enable GPU support, valid options: cuda|hip|cuda-clang.",
+ default=config["gpu"] if config["gpu"] else "none",
+ choices=["none", "cuda", "hip", "cuda-clang"],
+ help=f"Enable GPU support",
)
parser.add_argument(
- "-C", "--no-cpu", action="store_true", help="Disable CPU support."
+ "--cxx",
+ metavar="cxx",
+ default=config["CXX"],
+ help='Use this C++ compiler.',
)
parser.add_argument(
- "-d",
- "--debug",
- nargs="?",
- metavar="path",
- const=True,
- default=False,
- help="Don't clean up the generated temp cpp code."
- + " Can be a target path for the generated code.",
+ "--prefix",
+ metavar="prefix",
+ default=prefix,
+ help="Arbor's install prefix.",
+ )
+
+ parser.add_argument(
+ "--bin",
+ metavar="bin",
+ default=config["binary_path"],
+ help="Look here for Arbor utils like modcc; relative to prefix.",
+ )
+
+ parser.add_argument(
+ "--lib",
+ metavar="lib",
+ default=config["lib_path"],
+ help="Look here for Arbor's CMake config; relative to prefix.",
+ )
+
+ parser.add_argument(
+ "--data",
+ metavar="data",
+ default=config["data_path"],
+ help="Look here for Arbor supplementals like generate_catalogue; relative to prefix.",
)
parser.add_argument(
@@ -102,37 +129,52 @@ verbose = args["verbose"] and not quiet
debug = args["debug"]
raw = args["raw"]
gpu = args["gpu"]
-cpu = not args["no_cpu"]
+cpu = args["cpu"]
-if gpu:
- if gpu == "cuda":
- gpu_support = """
+if gpu == "cuda":
+ gpu_support = f"""
+include(FindCUDAToolkit)
add_compile_definitions(ARB_CUDA)
add_compile_definitions(ARB_HAVE_GPU)
+find_package(CUDAToolkit)
+enable_language(CUDA)
+set(CMAKE_CUDA_STANDARD 14)
+set(CMAKE_CUDA_HOST_COMPILER {args["cxx"]})
+"""
+elif gpu == "cuda-clang":
+ print("CUDA-Clang support is currently considered experimental only.")
+ gpu_support = f"""
+add_compile_definitions(ARB_CUDA)
+add_compile_definitions(ARB_HAVE_GPU)
+find_package(CUDAToolkit)
enable_language(CUDA)
-set(CMAKE_CUDA_HOST_COMPILER @CMAKE_CXX_COMPILER@)
-set(CMAKE_CUDA_ARCHITECTURES @CMAKE_CUDA_ARCHITECTURES@)
+set(CMAKE_CUDA_STANDARD 14)
+add_compile_options(-xcuda --cuda-gpu-arch=sm_60 --cuda-gpu-arch=sm_70 --cuda-gpu-arch=sm_80 --cuda-path=${CUDA_TOOLKIT_ROOT_DIR}))
"""
- else:
- print(
- f"Unsupported GPU target: {gpu}. If you need support for HIP or Clang-CUDA, please check here: https://github.com/arbor-sim/arbor/issues/1783"
- )
- exit(-1)
-else:
+elif gpu == "hip":
+ print("HIP support is currently considered experimental only.")
+ gpu_support = f"""
+add_compile_definitions(ARB_HIP)
+add_compile_definitions(ARB_HAVE_GPU)
+add_compile_options(-xhip --amdgpu-target=gfx906 --amdgpu-target=gfx900)
+"""
+elif gpu == "none":
gpu_support = """
# GPU: Disabled
"""
+else:
+ print(f"Internal Error: Unknown GPU type: {gpu}", file=sys.stderr)
+ exit(-1)
-this_path = Path(__file__).parent
-data_path = (this_path / "@ARB_REL_DATADIR@").resolve()
-pack_path = (this_path / "@ARB_REL_PACKAGEDIR@").resolve()
-exec_path = this_path.resolve()
+bindir = Path(args["prefix"]) / args["bin"]
+datdir = Path(args["prefix"]) / args["data"] / "arbor"
+pakdir = Path(args["prefix"]) / args["lib"] / "cmake" / "arbor"
for path in [
- exec_path / "modcc",
- data_path / "BuildModules.cmake",
- pack_path / "arbor-config.cmake",
+ bindir / "modcc",
+ datdir / "BuildModules.cmake",
+ pakdir / "arbor-config.cmake",
]:
if not path.exists():
print(f"Could not find required tool: {path}. Please check your installation.")
@@ -141,8 +183,11 @@ for path in [
cmake = f"""
cmake_minimum_required(VERSION 3.9)
project({name}-cat LANGUAGES CXX)
+set(CMAKE_CXX_STANDARD 17)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+set(CMAKE_CXX_EXTENSIONS OFF)
-set(arbor_DIR {pack_path})
+set(arbor_DIR {pakdir})
find_package(arbor REQUIRED)
{gpu_support}
set(CMAKE_BUILD_TYPE release)
@@ -152,7 +197,7 @@ set(CMAKE_CXX_FLAGS ${{ARB_CXX_FLAGS}})
include(BuildModules.cmake)
set(ARB_WITH_EXTERNAL_MODCC true)
-find_program(modcc NAMES modcc PATHS {exec_path})
+find_program(modcc NAMES modcc PATHS {bindir})
make_catalogue_standalone(
NAME {name}
@@ -210,7 +255,7 @@ with TemporaryDirectory() as tmp:
os.chdir(tmp / "build")
with open(tmp / "CMakeLists.txt", "w") as fd:
fd.write(cmake)
- shutil.copy2(f"{data_path}/BuildModules.cmake", tmp)
+ shutil.copy2(f"{datdir}/BuildModules.cmake", tmp)
out = tmp / "build" / "generated" / name
os.makedirs(out, exist_ok=True)
--
GitLab