diff --git a/PyNutil/io/atlas_loader.py b/PyNutil/io/atlas_loader.py
index 21ac5d2ceedbdddbb4bbd7b6d3b38f62814e1672..0a58f18d917d792a2a464bd9e8cd68c977c2a1fa 100644
--- a/PyNutil/io/atlas_loader.py
+++ b/PyNutil/io/atlas_loader.py
@@ -40,7 +40,7 @@ def load_atlas_data(atlas_name):
     atlas_volume = process_atlas_volume(atlas.annotation)
     hemi_map = process_atlas_volume(atlas.hemispheres)
     print("atlas labels loaded ✅")
-    return atlas_volume,hemi_map, atlas_labels
+    return atlas_volume, hemi_map, atlas_labels
 
 
 def process_atlas_volume(vol):
diff --git a/PyNutil/io/file_operations.py b/PyNutil/io/file_operations.py
index 5c0287437b27eaf67381b0de24cf597b411df70f..3d67649f89b59612866447f0f8d22546cf3a2783 100644
--- a/PyNutil/io/file_operations.py
+++ b/PyNutil/io/file_operations.py
@@ -105,7 +105,6 @@ def save_analysis_output(
         "alignment_json": alignment_json,
         "colour": colour,
         "custom_region_path": custom_region_path,
-
     }
     pixel_points,
     centroids,
@@ -120,15 +119,15 @@ def save_analysis_output(
     segmentation_filenames,
     atlas_labels,
     output_folder,
     segmentation_folder=None,
     alignment_json=None,
     colour=None,
     atlas_name=None,
     custom_region_path=None,
     atlas_path=None,
     label_path=None,
     settings_file=None,
     prepend=None,
     # Add atlas information to settings
     if atlas_name:
         settings_dict["atlas_name"] = atlas_name
diff --git a/PyNutil/io/read_and_write.py b/PyNutil/io/read_and_write.py
index c33ab17ef27f836fa6e6eebff313021bd6844d3a..8113b62d468eb93617577d487dc0ef211c0ac6f0 100644
--- a/PyNutil/io/read_and_write.py
+++ b/PyNutil/io/read_and_write.py
@@ -307,17 +307,19 @@ def write_points_to_meshview(points, point_names, hemi_label, filename, info_fil
         A table with region IDs, names, and color data (r, g, b) for each region.
     """
     if not (hemi_label == None).all():
-        split_fn_left = filename.split('/')
+        split_fn_left = filename.split("/")
         split_fn_left[-1] = "left_hemisphere_" + split_fn_left[-1]
         outname_left = os.sep.join(split_fn_left)
-        left_region_dict = create_region_dict(points[hemi_label==1], point_names[hemi_label==1])
+        left_region_dict = create_region_dict(
+            points[hemi_label == 1], point_names[hemi_label == 1]
+        )
         write_points(left_region_dict, outname_left, info_file)
-        split_fn_right = filename.split('/')
+        split_fn_right = filename.split("/")
         split_fn_right[-1] = "right_hemisphere_" + split_fn_right[-1]
         outname_right = os.sep.join(split_fn_right)
-        right_region_dict = create_region_dict(points[hemi_label==2], point_names[hemi_label==2])
+        right_region_dict = create_region_dict(
+            points[hemi_label == 2], point_names[hemi_label == 2]
+        )
         write_points(right_region_dict, outname_right, info_file)
     region_dict = create_region_dict(points, point_names)
     write_points(region_dict, filename, info_file)
-
-
diff --git a/PyNutil/main.py b/PyNutil/main.py
index 4bee1933678308c3dbe7ab9efa53e657053087cf..72d65e3eaa05678b0588adc25ce33ba41f344fb3 100644
--- a/PyNutil/main.py
+++ b/PyNutil/main.py
@@ -9,6 +9,7 @@ from .io.file_operations import save_analysis_output
 from .io.read_and_write import open_custom_region_file
 from .processing.coordinate_extraction import folder_to_atlas_space
 
+
 class PyNutil:
     """
     A class to perform brain-wide quantification and spatial analysis of serial section images.
@@ -130,7 +131,9 @@ class PyNutil:
                 "When atlas_path and label_path are not specified, atlas_name must be specified."
             )
 
-    def get_coordinates(self, non_linear=True, object_cutoff=0, use_flat=False, apply_damage_mask=True):
+    def get_coordinates(
+        self, non_linear=True, object_cutoff=0, use_flat=False, apply_damage_mask=True
+    ):
         """
         Retrieves pixel and centroid coordinates from segmentation data,
         applies atlas-space transformations, and optionally uses a damage
@@ -169,7 +172,7 @@ class PyNutil:
                 self.atlas_volume,
                 self.hemi_map,
                 use_flat,
-                apply_damage_mask
+                apply_damage_mask,
             )
             self.apply_damage_mask = apply_damage_mask
             if self.custom_regions_dict is not None:
@@ -219,7 +222,7 @@ class PyNutil:
                 self.centroids_hemi_labels,
                 self.per_point_undamaged,
                 self.per_centroid_undamaged,
-                self.apply_damage_mask
+                self.apply_damage_mask,
             )
             if self.custom_regions_dict is not None:
                 self.custom_label_df, self.label_df = apply_custom_regions(
diff --git a/PyNutil/processing/coordinate_extraction.py b/PyNutil/processing/coordinate_extraction.py
index 6a44490209333712a3086bdd56b00e367360b3a9..dd4bbebf0cb7839c45da467e290fa9aa66b86b4d 100644
--- a/PyNutil/processing/coordinate_extraction.py
+++ b/PyNutil/processing/coordinate_extraction.py
@@ -114,7 +114,7 @@ def folder_to_atlas_space(
     atlas_volume=None,
     hemi_map=None,
     use_flat=False,
-    apply_damage_mask=True
+    apply_damage_mask=True,
 ):
     """
     Processes all segmentation files in a folder, mapping each one to atlas space.
@@ -221,7 +221,6 @@ def folder_to_atlas_space(
     )
 
 
-
 def create_threads(
     segmentations,
     slices,
@@ -389,7 +388,9 @@ def get_region_areas(
         (seg_width, seg_height),
         atlas_labels,
     )
-    region_areas = flat_to_dataframe(atlas_map, damage_mask, hemi_mask, (seg_width, seg_height))
+    region_areas = flat_to_dataframe(
+        atlas_map, damage_mask, hemi_mask, (seg_width, seg_height)
+    )
     return region_areas, atlas_map
 
 
@@ -473,7 +474,7 @@ def segmentation_to_atlas_space(
     )
     atlas_map = rescale_image(atlas_map, (reg_height, reg_width))
     y_scale, x_scale = transform_to_registration(
-        seg_width, seg_height, reg_width,reg_height
+        seg_width, seg_height, reg_width, reg_height
     )
     centroids, points = None, None
     scaled_centroidsX, scaled_centroidsY, scaled_x, scaled_y = None, None, None, None
@@ -524,7 +525,6 @@ def segmentation_to_atlas_space(
         per_point_hemi = [None] * len(scaled_x)
         per_centroid_hemi = [None] * len(scaled_centroidsX)
 
-
     per_point_labels = per_point_labels[per_point_undamaged]
     per_centroid_labels = per_centroid_labels[per_centroid_undamaged]
 
@@ -560,7 +560,10 @@ def segmentation_to_atlas_space(
         per_point_undamaged if points is not None else []
     )
     points_hemi_labels[index] = np.array(per_point_hemi if points is not None else [])
-    centroids_hemi_labels[index] = np.array(per_centroid_hemi if points is not None else [])
+    centroids_hemi_labels[index] = np.array(
+        per_centroid_hemi if points is not None else []
+    )
+
 
 def get_triangulation(slice_dict, reg_width, reg_height, non_linear):
     """
diff --git a/PyNutil/processing/counting_and_load.py b/PyNutil/processing/counting_and_load.py
index e7b71077780e3162fcbe578f75a80986277b7430..6914bc41eff4921d0530a9e8cce3d5f3ec566240 100644
--- a/PyNutil/processing/counting_and_load.py
+++ b/PyNutil/processing/counting_and_load.py
@@ -5,6 +5,7 @@ import cv2
 from .generate_target_slice import generate_target_slice
 from .visualign_deformations import transform_vec
 
+
 def create_base_counts_dict(with_hemisphere=False, with_damage=False):
     """
     Creates and returns a base dictionary structure for tracking counts.
@@ -30,8 +31,8 @@ def create_base_counts_dict(with_hemisphere=False, with_damage=False):
             "undamaged_object_count": [],
             "damaged_object_count": [],
             "undamaged_pixel_count": [],
-            "damaged_pixel_counts": []
-            }
+            "damaged_pixel_counts": [],
+        }
         counts.update(damage_fields)
     if with_hemisphere:
         hemisphere_fields = {
@@ -39,7 +40,7 @@ def create_base_counts_dict(with_hemisphere=False, with_damage=False):
             "right_hemi_pixel_count": [],
             "left_hemi_object_count": [],
             "right_hemi_object_count": [],
-            }
+        }
         counts.update(hemisphere_fields)
     if with_damage and with_hemisphere:
         damage_hemisphere_fields = {
@@ -51,10 +52,11 @@ def create_base_counts_dict(with_hemisphere=False, with_damage=False):
             "left_hemi_damaged_object_count": [],
             "right_hemi_undamaged_object_count": [],
             "right_hemi_damaged_object_count": [],
-            }
+        }
         counts.update(damage_hemisphere_fields)
     return counts
 
+
 # related to counting_and_load
 def pixel_count_per_region(
     labels_dict_points,
@@ -64,7 +66,7 @@ def pixel_count_per_region(
     current_points_hemi,
     current_centroids_hemi,
     df_label_colours,
-    with_damage=False
+    with_damage=False,
 ):
     """
     Tally object counts by region, optionally tracking damage and hemispheres.
@@ -83,33 +85,75 @@ def pixel_count_per_region(
         DataFrame: Summed counts per region.
     """
     with_hemi = None not in current_points_hemi
-    counts_per_label = create_base_counts_dict(with_hemisphere=with_hemi, with_damage=with_damage)
+    counts_per_label = create_base_counts_dict(
+        with_hemisphere=with_hemi, with_damage=with_damage
+    )
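+    # Hemisphere label arrays use 1 for the left hemisphere and 2 for the right; the *_undamaged arrays are boolean masks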
 
     if with_hemi and with_damage:
-        left_hemi_counted_labels_points_undamaged, left_hemi_label_counts_points_undamaged = np.unique(
-            labels_dict_points[current_points_undamaged & (current_points_hemi == 1)], return_counts=True
-            )
-        left_hemi_counted_labels_points_damaged, left_hemi_label_counts_points_damaged = np.unique(
-            labels_dict_points[~current_points_undamaged & (current_points_hemi == 1)], return_counts=True
-            )
-        left_hemi_counted_labels_centroids_undamaged, left_hemi_label_counts_centroids_undamaged = np.unique(
-            labeled_dict_centroids[current_centroids_undamaged & (current_centroids_hemi == 1)], return_counts=True
-            )
-        left_hemi_counted_labels_centroids_damaged, left_hemi_label_counts_centroids_damaged = np.unique(
-            labeled_dict_centroids[~current_centroids_undamaged & (current_centroids_hemi == 1)], return_counts=True
-            )
-        right_hemi_counted_labels_points_undamaged, right_hemi_label_counts_points_undamaged = np.unique(
-            labels_dict_points[current_points_undamaged & (current_points_hemi == 2)], return_counts=True
-            )
-        right_hemi_counted_labels_points_damaged, right_hemi_label_counts_points_damaged = np.unique(
-            labels_dict_points[~current_points_undamaged & (current_points_hemi == 2)], return_counts=True
-            )
-        right_hemi_counted_labels_centroids_undamaged, right_hemi_label_counts_centroids_undamaged = np.unique(
-            labeled_dict_centroids[current_centroids_undamaged & (current_centroids_hemi == 2)], return_counts=True
-            )
-        right_hemi_counted_labels_centroids_damaged, right_hemi_label_counts_centroids_damaged = np.unique(
-            labeled_dict_centroids[~current_centroids_undamaged & (current_centroids_hemi == 2)], return_counts=True
-            )
+        (
+            left_hemi_counted_labels_points_undamaged,
+            left_hemi_label_counts_points_undamaged,
+        ) = np.unique(
+            labels_dict_points[current_points_undamaged & (current_points_hemi == 1)],
+            return_counts=True,
+        )
+        (
+            left_hemi_counted_labels_points_damaged,
+            left_hemi_label_counts_points_damaged,
+        ) = np.unique(
+            labels_dict_points[~current_points_undamaged & (current_points_hemi == 1)],
+            return_counts=True,
+        )
+        (
+            left_hemi_counted_labels_centroids_undamaged,
+            left_hemi_label_counts_centroids_undamaged,
+        ) = np.unique(
+            labeled_dict_centroids[
+                current_centroids_undamaged & (current_centroids_hemi == 1)
+            ],
+            return_counts=True,
+        )
+        (
+            left_hemi_counted_labels_centroids_damaged,
+            left_hemi_label_counts_centroids_damaged,
+        ) = np.unique(
+            labeled_dict_centroids[
+                ~current_centroids_undamaged & (current_centroids_hemi == 1)
+            ],
+            return_counts=True,
+        )
+        (
+            right_hemi_counted_labels_points_undamaged,
+            right_hemi_label_counts_points_undamaged,
+        ) = np.unique(
+            labels_dict_points[current_points_undamaged & (current_points_hemi == 2)],
+            return_counts=True,
+        )
+        (
+            right_hemi_counted_labels_points_damaged,
+            right_hemi_label_counts_points_damaged,
+        ) = np.unique(
+            labels_dict_points[~current_points_undamaged & (current_points_hemi == 2)],
+            return_counts=True,
+        )
+        (
+            right_hemi_counted_labels_centroids_undamaged,
+            right_hemi_label_counts_centroids_undamaged,
+        ) = np.unique(
+            labeled_dict_centroids[
+                current_centroids_undamaged & (current_centroids_hemi == 2)
+            ],
+            return_counts=True,
+        )
+        (
+            right_hemi_counted_labels_centroids_damaged,
+            right_hemi_label_counts_centroids_damaged,
+        ) = np.unique(
+            labeled_dict_centroids[
+                ~current_centroids_undamaged & (current_centroids_hemi == 2)
+            ],
+            return_counts=True,
+        )
         for index, row in df_label_colours.iterrows():
             # Left hemisphere pixel counts
             if row["idx"] in left_hemi_counted_labels_points_undamaged:
@@ -172,7 +216,17 @@ def pixel_count_per_region(
                 r_clcd = 0
 
             # Skip regions with no counts in any category
-            if l_clcd == l_clcu == l_clpd == l_clpu == r_clcd == r_clcu == r_clpd == r_clpu == 0:
+            if (
+                l_clcd
+                == l_clcu
+                == l_clpd
+                == l_clpu
+                == r_clcd
+                == r_clcu
+                == r_clpd
+                == r_clpu
+                == 0
+            ):
                 continue
 
             # Calculate combined counts
@@ -214,17 +268,19 @@ def pixel_count_per_region(
 
     elif with_damage and (not with_hemi):
         counted_labels_points_undamaged, label_counts_points_undamaged = np.unique(
-        labels_dict_points[current_points_undamaged], return_counts=True
-            )
+            labels_dict_points[current_points_undamaged], return_counts=True
+        )
         counted_labels_points_damaged, label_counts_points_damaged = np.unique(
             labels_dict_points[~current_points_undamaged], return_counts=True
+        )
+        counted_labels_centroids_undamaged, label_counts_centroids_undamaged = (
+            np.unique(
+                labeled_dict_centroids[current_centroids_undamaged], return_counts=True
             )
-        counted_labels_centroids_undamaged, label_counts_centroids_undamaged = np.unique(
-            labeled_dict_centroids[current_centroids_undamaged], return_counts=True
-            )
+        )
         counted_labels_centroids_damaged, label_counts_centroids_damaged = np.unique(
             labeled_dict_centroids[~current_centroids_undamaged], return_counts=True
-            )
+        )
         for index, row in df_label_colours.iterrows():
             if row["idx"] in counted_labels_points_undamaged:
                 clpu = label_counts_points_undamaged[
@@ -268,34 +324,50 @@ def pixel_count_per_region(
         left_hemi_counted_labels_points, left_hemi_label_counts_points = np.unique(
             labels_dict_points[current_points_hemi == 1], return_counts=True
         )
-        left_hemi_counted_labels_centroids, left_hemi_label_counts_centroids = np.unique(
-            labeled_dict_centroids[current_centroids_hemi == 1], return_counts=True
+        left_hemi_counted_labels_centroids, left_hemi_label_counts_centroids = (
+            np.unique(
+                labeled_dict_centroids[current_centroids_hemi == 1], return_counts=True
+            )
         )
         right_hemi_counted_labels_points, right_hemi_label_counts_points = np.unique(
             labels_dict_points[current_points_hemi == 2], return_counts=True
         )
-        right_hemi_counted_labels_centroids, right_hemi_label_counts_centroids = np.unique(
-            labeled_dict_centroids[current_centroids_hemi == 2], return_counts=True
+        right_hemi_counted_labels_centroids, right_hemi_label_counts_centroids = (
+            np.unique(
+                labeled_dict_centroids[current_centroids_hemi == 2], return_counts=True
+            )
         )
 
         for index, row in df_label_colours.iterrows():
             # Left hemisphere
             l_clp = (
-                left_hemi_label_counts_points[left_hemi_counted_labels_points == row["idx"]][0]
-                if row["idx"] in left_hemi_counted_labels_points else 0
+                left_hemi_label_counts_points[
+                    left_hemi_counted_labels_points == row["idx"]
+                ][0]
+                if row["idx"] in left_hemi_counted_labels_points
+                else 0
             )
             l_clc = (
-                left_hemi_label_counts_centroids[left_hemi_counted_labels_centroids == row["idx"]][0]
-                if row["idx"] in left_hemi_counted_labels_centroids else 0
+                left_hemi_label_counts_centroids[
+                    left_hemi_counted_labels_centroids == row["idx"]
+                ][0]
+                if row["idx"] in left_hemi_counted_labels_centroids
+                else 0
             )
             # Right hemisphere
             r_clp = (
-                right_hemi_label_counts_points[right_hemi_counted_labels_points == row["idx"]][0]
-                if row["idx"] in right_hemi_counted_labels_points else 0
+                right_hemi_label_counts_points[
+                    right_hemi_counted_labels_points == row["idx"]
+                ][0]
+                if row["idx"] in right_hemi_counted_labels_points
+                else 0
             )
             r_clc = (
-                right_hemi_label_counts_centroids[right_hemi_counted_labels_centroids == row["idx"]][0]
-                if row["idx"] in right_hemi_counted_labels_centroids else 0
+                right_hemi_label_counts_centroids[
+                    right_hemi_counted_labels_centroids == row["idx"]
+                ][0]
+                if row["idx"] in right_hemi_counted_labels_centroids
+                else 0
             )
 
             # Skip empty counts
@@ -316,17 +388,23 @@ def pixel_count_per_region(
             counts_per_label["right_hemi_object_count"].append(r_clc)
 
     else:
-        counted_labels_points, label_counts_points = np.unique(labels_dict_points, return_counts=True)
-        counted_labels_centroids, label_counts_centroids = np.unique(labeled_dict_centroids, return_counts=True)
+        counted_labels_points, label_counts_points = np.unique(
+            labels_dict_points, return_counts=True
+        )
+        counted_labels_centroids, label_counts_centroids = np.unique(
+            labeled_dict_centroids, return_counts=True
+        )
 
         for index, row in df_label_colours.iterrows():
             clp = (
                 label_counts_points[counted_labels_points == row["idx"]][0]
-                if row["idx"] in counted_labels_points else 0
+                if row["idx"] in counted_labels_points
+                else 0
             )
             clc = (
                 label_counts_centroids[counted_labels_centroids == row["idx"]][0]
-                if row["idx"] in counted_labels_centroids else 0
+                if row["idx"] in counted_labels_centroids
+                else 0
             )
             if clp == 0 and clc == 0:
                 continue
@@ -342,7 +420,6 @@ def pixel_count_per_region(
     return df_counts_per_label
 
 
-
 def read_flat_file(file):
     """
     Reads a flat file and produces an image array.
@@ -550,18 +627,22 @@ def flat_to_dataframe(image, damage_mask, hemi_mask, rescaleXY=None):
             (0, 1, "damaged_region_area"),
         ]
     else:
-        combos = [(None, None, "region_area")]  # compute for entire image with no filtering
+        combos = [
+            (None, None, "region_area")
+        ]  # compute for entire image with no filtering
 
     # Count pixels for each combo
     for hemi_val, damage_val, col_name in combos:
         mask = np.ones_like(image, dtype=bool)
         if hemi_mask is not None:
-            mask &= (hemi_mask == hemi_val)
+            mask &= hemi_mask == hemi_val
         if damage_mask is not None:
-            mask &= (damage_mask == damage_val)
+            mask &= damage_mask == damage_val
         combo_df = count_pixels_per_label(image[mask], scale_factor)
         combo_df = combo_df.rename(columns={"region_area": col_name})
-        df_area_per_label = pd.merge(df_area_per_label, combo_df, on="idx", how="outer").fillna(0)
+        df_area_per_label = pd.merge(
+            df_area_per_label, combo_df, on="idx", how="outer"
+        ).fillna(0)
 
     # If both masks exist, compute additional columns
     if (hemi_mask is not None) and (damage_mask is not None):
diff --git a/PyNutil/processing/data_analysis.py b/PyNutil/processing/data_analysis.py
index bbe1ba469af1e1c37f31a7b19443629052702cdc..2c1eeb44dcb3f4fa21665fed698f1aadcdf6777f 100644
--- a/PyNutil/processing/data_analysis.py
+++ b/PyNutil/processing/data_analysis.py
@@ -71,18 +71,33 @@ def apply_custom_regions(df, custom_regions_dict):
 
     # Define all possible columns to aggregate
     possible_columns = [
-        "pixel_count", "undamaged_pixel_count", "damaged_pixel_counts",
-        "region_area", "undamaged_region_area", "damaged_region_area",
-        "object_count", "undamaged_object_count", "damaged_object_count",
-        "left_hemi_pixel_count", "left_hemi_undamaged_pixel_count",
-        "left_hemi_damaged_pixel_count", "left_hemi_region_area",
-        "left_hemi_undamaged_region_area", "left_hemi_damaged_region_area",
-        "left_hemi_object_count", "left_hemi_undamaged_object_count",
-        "left_hemi_damaged_object_count", "right_hemi_pixel_count",
-        "right_hemi_undamaged_pixel_count", "right_hemi_damaged_pixel_count",
-        "right_hemi_region_area", "right_hemi_undamaged_region_area",
-        "right_hemi_damaged_region_area", "right_hemi_object_count",
-        "right_hemi_undamaged_object_count", "right_hemi_damaged_object_count"
+        "pixel_count",
+        "undamaged_pixel_count",
+        "damaged_pixel_counts",
+        "region_area",
+        "undamaged_region_area",
+        "damaged_region_area",
+        "object_count",
+        "undamaged_object_count",
+        "damaged_object_count",
+        "left_hemi_pixel_count",
+        "left_hemi_undamaged_pixel_count",
+        "left_hemi_damaged_pixel_count",
+        "left_hemi_region_area",
+        "left_hemi_undamaged_region_area",
+        "left_hemi_damaged_region_area",
+        "left_hemi_object_count",
+        "left_hemi_undamaged_object_count",
+        "left_hemi_damaged_object_count",
+        "right_hemi_pixel_count",
+        "right_hemi_undamaged_pixel_count",
+        "right_hemi_damaged_pixel_count",
+        "right_hemi_region_area",
+        "right_hemi_undamaged_region_area",
+        "right_hemi_damaged_region_area",
+        "right_hemi_object_count",
+        "right_hemi_undamaged_object_count",
+        "right_hemi_damaged_object_count",
     ]
 
     # Only include columns that actually exist in the DataFrame
@@ -102,7 +117,9 @@ def apply_custom_regions(df, custom_regions_dict):
 
     # Calculate area fractions only if required columns exist
     if "pixel_count" in grouped_df and "region_area" in grouped_df:
-        grouped_df["area_fraction"] = grouped_df["pixel_count"] / grouped_df["region_area"]
+        grouped_df["area_fraction"] = (
+            grouped_df["pixel_count"] / grouped_df["region_area"]
+        )
 
     if "undamaged_pixel_count" in grouped_df and "undamaged_region_area" in grouped_df:
         grouped_df["undamaged_area_fraction"] = (
@@ -110,21 +127,34 @@ def apply_custom_regions(df, custom_regions_dict):
         )
 
     if "left_hemi_pixel_count" in grouped_df and "left_hemi_region_area" in grouped_df:
-        grouped_df["left_hemi_area_fraction"] = grouped_df["left_hemi_pixel_count"] / grouped_df["left_hemi_region_area"]
+        grouped_df["left_hemi_area_fraction"] = (
+            grouped_df["left_hemi_pixel_count"] / grouped_df["left_hemi_region_area"]
+        )
 
-    if "right_hemi_pixel_count" in grouped_df and "right_hemi_region_area" in grouped_df:
-        grouped_df["right_hemi_area_fraction"] = grouped_df["right_hemi_pixel_count"] / grouped_df["right_hemi_region_area"]
+    if (
+        "right_hemi_pixel_count" in grouped_df
+        and "right_hemi_region_area" in grouped_df
+    ):
+        grouped_df["right_hemi_area_fraction"] = (
+            grouped_df["right_hemi_pixel_count"] / grouped_df["right_hemi_region_area"]
+        )
 
-    if ("left_hemi_undamaged_pixel_count" in grouped_df and
-        "left_hemi_undamaged_region_area" in grouped_df):
+    if (
+        "left_hemi_undamaged_pixel_count" in grouped_df
+        and "left_hemi_undamaged_region_area" in grouped_df
+    ):
         grouped_df["left_hemi_undamaged_area_fraction"] = (
-            grouped_df["left_hemi_undamaged_pixel_count"] / grouped_df["left_hemi_undamaged_region_area"]
+            grouped_df["left_hemi_undamaged_pixel_count"]
+            / grouped_df["left_hemi_undamaged_region_area"]
         )
 
-    if ("right_hemi_undamaged_pixel_count" in grouped_df and
-        "right_hemi_undamaged_region_area" in grouped_df):
+    if (
+        "right_hemi_undamaged_pixel_count" in grouped_df
+        and "right_hemi_undamaged_region_area" in grouped_df
+    ):
         grouped_df["right_hemi_undamaged_area_fraction"] = (
-            grouped_df["right_hemi_undamaged_pixel_count"] / grouped_df["right_hemi_undamaged_region_area"]
+            grouped_df["right_hemi_undamaged_pixel_count"]
+            / grouped_df["right_hemi_undamaged_region_area"]
         )
 
     common_columns = [col for col in df.columns if col in grouped_df.columns]
@@ -146,7 +176,7 @@ def quantify_labeled_points(
     centroids_hemi_labels,
     per_point_undamaged,
     per_centroid_undamaged,
-    apply_damage_mask
+    apply_damage_mask,
 ):
     """
     Aggregates labeled points into a summary table.
@@ -169,7 +199,7 @@ def quantify_labeled_points(
         per_centroid_undamaged,
         points_hemi_labels,
         centroids_hemi_labels,
-        apply_damage_mask
+        apply_damage_mask,
     )
     label_df = _combine_slice_reports(per_section_df, atlas_labels)
     if not apply_damage_mask:
@@ -190,7 +220,7 @@ def _quantify_per_section(
     per_centroid_undamaged,
     points_hemi_labels,
     centroids_hemi_labels,
-    with_damage=False
+    with_damage=False,
 ):
     """
     Quantifies labeled points per section.
@@ -225,7 +255,7 @@ def _quantify_per_section(
             current_points_hemi,
             current_centroids_hemi,
             atlas_labels,
-            with_damage
+            with_damage,
         )
         current_df_new = _merge_dataframes(current_df, ra, atlas_labels)
         per_section_df.append(current_df_new)
@@ -248,22 +278,33 @@ def _merge_dataframes(current_df, ra, atlas_labels):
         DataFrame: Merged DataFrame.
     """
     cols_to_use = ra.columns.difference(atlas_labels.columns)
-    all_region_df = atlas_labels.merge(ra[["idx",*cols_to_use]], on="idx", how="left")
+    all_region_df = atlas_labels.merge(ra[["idx", *cols_to_use]], on="idx", how="left")
     cols_to_use = current_df.columns.difference(all_region_df.columns)
     current_df_new = all_region_df.merge(
         current_df[["idx", *cols_to_use]], on="idx", how="left"
     )
-    if "pixel_count" in current_df_new.columns and "region_area" in current_df_new.columns:
+    if (
+        "pixel_count" in current_df_new.columns
+        and "region_area" in current_df_new.columns
+    ):
         current_df_new["area_fraction"] = (
             current_df_new["pixel_count"] / current_df_new["region_area"]
         )
-    if "left_hemi_pixel_count" in current_df_new.columns and "left_hemi_region_area" in current_df_new.columns:
+    if (
+        "left_hemi_pixel_count" in current_df_new.columns
+        and "left_hemi_region_area" in current_df_new.columns
+    ):
         current_df_new["left_hemi_area_fraction"] = (
-            current_df_new["left_hemi_pixel_count"] / current_df_new["left_hemi_region_area"]
+            current_df_new["left_hemi_pixel_count"]
+            / current_df_new["left_hemi_region_area"]
         )
-    if "right_hemi_pixel_count" in current_df_new.columns and "right_hemi_region_area" in current_df_new.columns:
+    if (
+        "right_hemi_pixel_count" in current_df_new.columns
+        and "right_hemi_region_area" in current_df_new.columns
+    ):
         current_df_new["right_hemi_area_fraction"] = (
-            current_df_new["right_hemi_pixel_count"] / current_df_new["right_hemi_region_area"]
+            current_df_new["right_hemi_pixel_count"]
+            / current_df_new["right_hemi_region_area"]
         )
     current_df_new.fillna(0, inplace=True)
     return current_df_new
@@ -289,18 +330,24 @@ def _combine_slice_reports(per_section_df, atlas_labels):
     )
     label_df["area_fraction"] = label_df["pixel_count"] / label_df["region_area"]
     if "left_hemi_pixel_count" in label_df:
-        label_df["left_hemi_area_fraction"] = label_df["left_hemi_pixel_count"] / label_df["left_hemi_region_area"]
-        label_df["right_hemi_area_fraction"] = label_df["right_hemi_pixel_count"] / label_df["right_hemi_region_area"]
+        label_df["left_hemi_area_fraction"] = (
+            label_df["left_hemi_pixel_count"] / label_df["left_hemi_region_area"]
+        )
+        label_df["right_hemi_area_fraction"] = (
+            label_df["right_hemi_pixel_count"] / label_df["right_hemi_region_area"]
+        )
     if "undamaged_region_area" in label_df:
         label_df["undamaged_area_fraction"] = (
             label_df["undamaged_pixel_count"] / label_df["undamaged_region_area"]
         )
     if ("left_hemi_pixel_count" in label_df) and ("undamaged_region_area" in label_df):
         label_df["left_hemi_undamaged_area_fraction"] = (
-            label_df["left_hemi_undamaged_pixel_count"] / label_df["left_hemi_undamaged_region_area"]
+            label_df["left_hemi_undamaged_pixel_count"]
+            / label_df["left_hemi_undamaged_region_area"]
         )
         label_df["right_hemi_undamaged_area_fraction"] = (
-            label_df["right_hemi_undamaged_pixel_count"] / label_df["right_hemi_undamaged_region_area"]
+            label_df["right_hemi_undamaged_pixel_count"]
+            / label_df["right_hemi_undamaged_region_area"]
         )
     label_df.fillna(0, inplace=True)
     label_df = label_df.set_index("idx")
diff --git a/PyNutil/processing/utils.py b/PyNutil/processing/utils.py
index 697ee6af4aeca5f862a09c038f45fde31810670b..a73a6f7fa36d7c98f20c7b151b63b34e83e5bed3 100644
--- a/PyNutil/processing/utils.py
+++ b/PyNutil/processing/utils.py
@@ -239,15 +239,23 @@ def process_results(
     centroids_len = [
         len(centroids) if None not in centroids else 0 for centroids in centroids_list
     ]
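+    # Drop per-section results that are empty or contain None values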
-    points_list = [points for points in points_list if (None not in points) and (len(points)!=0)]
+    points_list = [
+        points for points in points_list if (None not in points) and (len(points) != 0)
+    ]
     centroids_list = [
-        centroids for centroids in centroids_list if (None not in centroids)and (len(centroids!=0))
+        centroids
+        for centroids in centroids_list
+        if (None not in centroids) and (len(centroids) != 0)
+    ]
+    points_labels = [pl for pl in points_labels if (None not in pl) and len(pl) != 0]
+    centroids_labels = [
+        cl for cl in centroids_labels if (None not in cl) and len(cl) != 0
+    ]
+    points_undamaged_list = [
+        pul for pul in points_undamaged_list if (None not in pul) and len(pul) != 0
     ]
-    points_labels = [pl for pl in points_labels if (None not in pl) and len(pl)!=0]
-    centroids_labels = [cl for cl in centroids_labels if (None not in cl) and len(cl)!=0]
-    points_undamaged_list = [pul for pul in points_undamaged_list if (None not in pul) and len(pul)!=0]
     centroids_undamaged_list = [
-        cul for cul in centroids_undamaged_list if (None not in cul) and len(cul)!=0
+        cul for cul in centroids_undamaged_list if (None not in cul) and len(cul) != 0
     ]
 
     if len(points_list) == 0:
diff --git a/gui/PyNutilGUI.py b/gui/PyNutilGUI.py
index d63dea5e913e6dc0fa03c43f37e2afb2d749f1f6..c0fcc2b04d5a09dbdabe1f9088b7bc88de42c508 100644
--- a/gui/PyNutilGUI.py
+++ b/gui/PyNutilGUI.py
@@ -201,7 +201,9 @@ class AnalysisWorker(QThread):
                 print("Analysis cancelled")
                 return
 
-            pnt.get_coordinates(object_cutoff=0, apply_damage_mask=self.arguments["apply_damage_mask"])
+            pnt.get_coordinates(
+                object_cutoff=0, apply_damage_mask=self.arguments["apply_damage_mask"]
+            )
 
             # Check if cancelled before continuing
             if self.cancelled:
@@ -476,7 +478,9 @@ class PyNutilGUI(QMainWindow):
         damage_markers_label = QLabel("Include Damage Quantification:")
         self.include_damage_markers_checkbox = QCheckBox()
         self.include_damage_markers_checkbox.setChecked(False)
-        self.include_damage_markers_checkbox.stateChanged.connect(self.update_damage_markers_flag)
+        self.include_damage_markers_checkbox.stateChanged.connect(
+            self.update_damage_markers_flag
+        )
 
         damage_markers_layout.addWidget(damage_markers_label)
         damage_markers_layout.addWidget(self.include_damage_markers_checkbox)
diff --git a/tests/test_quantification.py b/tests/test_quantification.py
index 1f5acd8b6939c604072a025785492c0249ef59dd..c603bd69939b7d50ca30ddc8111aec16bb9cc773 100644
--- a/tests/test_quantification.py
+++ b/tests/test_quantification.py
@@ -41,8 +41,13 @@ class TestQuantification(unittest.TestCase):
         pnt.save_analysis(save_path)
 
 
-test_case_files = ["brainglobe_atlas.json", "brainglobe_atlas_damage.json", "custom_atlas.json"]
+test_case_files = [
+    "brainglobe_atlas.json",
+    "brainglobe_atlas_damage.json",
+    "custom_atlas.json",
+]
 for test_case_file in test_case_files:
+
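+    # Binding test_case_file as a default argument freezes the current file for each generated test method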
     def test_method(self, test_case_file=test_case_file):
         self.run_test_case(test_case_file)