From 10061f03b9eae07fe3ba34d326f639fb04c8d644 Mon Sep 17 00:00:00 2001
From: Harry Carey <harry.carey95@gmail.com>
Date: Wed, 4 Oct 2023 19:12:40 +0200
Subject: [PATCH] Fix failure on sections with zero segmented pixels or when the
 minimum object size is set too high

---
 PyNutil/coordinate_extraction.py  | 61 +++++++++++++++++++++++--------
 PyNutil/counting_and_load.py      |  3 +-
 PyNutil/main.py                   |  4 ++
 PyNutil/visualign_deformations.py | 18 +++++----
 4 files changed, 62 insertions(+), 24 deletions(-)

diff --git a/PyNutil/coordinate_extraction.py b/PyNutil/coordinate_extraction.py
index 359835a..6079f2c 100644
--- a/PyNutil/coordinate_extraction.py
+++ b/PyNutil/coordinate_extraction.py
@@ -153,9 +153,21 @@ def folder_to_atlas_space(
     # Order segmentations and section_numbers
     # segmentations = [x for _,x in sorted(zip(section_numbers,segmentations))]
     # section_numbers.sort()
-    points_list = [None] * len(segmentations)
-    centroids_list = [None] * len(segmentations)
-    region_areas_list = [None] * len(segmentations)
+    points_list = [np.array([])] * len(segmentations)
+    centroids_list = [np.array([])] * len(segmentations)
+    region_areas_list = [
+        pd.DataFrame({
+            "idx": [],
+            "name": [],
+            "r": [],
+            "g": [],
+            "b": [],
+            "region_area": [],
+            "pixel_count": [],
+            "object_count": [],
+            "area_fraction": []
+        })
+    ] * len(segmentations)
     threads = []
     for segmentation_path, index in zip(segmentations, range(len(segmentations))):
         seg_nr = int(number_sections([segmentation_path])[0])
@@ -197,11 +209,12 @@ def folder_to_atlas_space(
     # Flatten points_list
     
 
-    points_len = [len(points) for points in points_list]
-    centroids_len = [len(centroids) for centroids in centroids_list]
+    points_len = [len(points) if points is not None else 0 for points in points_list]
+    centroids_len = [len(centroids) if centroids is not None else 0 for centroids in centroids_list]
     points = np.concatenate(points_list)
     centroids = np.concatenate(centroids_list)
 
+
     return (
         np.array(points),
         np.array(centroids),
@@ -274,14 +287,21 @@ def segmentation_to_atlas_space(
 
     if non_linear:
         if "markers" in slice:
+            
             # This creates a triangulation using the reg width
             triangulation = triangulate(reg_width, reg_height, slice["markers"])
             if method in ["per_pixel", "all"]:
-                new_x, new_y = transform_vec(triangulation, scaled_x, scaled_y)
+                if scaled_x is not None:
+                    new_x, new_y = transform_vec(triangulation, scaled_x, scaled_y)
+                else:
+                    new_x, new_y = scaled_x, scaled_y
             if method in ["per_object", "all"]:
-                centroids_new_x, centroids_new_y = transform_vec(
-                    triangulation, scaled_centroidsX, scaled_centroidsY
-                )
+                if centroids is not None:
+                    centroids_new_x, centroids_new_y = transform_vec(
+                        triangulation, scaled_centroidsX, scaled_centroidsY
+                    )
+                else:
+                    centroids_new_x, centroids_new_y = scaled_centroidsX, scaled_centroidsY
         else:
             print(
                 f"No markers found for {slice['filename']}, result for section will be linear."
@@ -297,13 +317,20 @@ def segmentation_to_atlas_space(
             centroids_new_x, centroids_new_y = scaled_centroidsX, scaled_centroidsY
     # Scale U by Uxyz/RegWidth and V by Vxyz/RegHeight
     if method in ["per_pixel", "all"]:
-        points = transform_to_atlas_space(
-            slice["anchoring"], new_y, new_x, reg_height, reg_width
-        )
+        if new_x is not None:
+            points = transform_to_atlas_space(
+                slice["anchoring"], new_y, new_x, reg_height, reg_width
+            )
+        else:
+            points = np.array([])
     if method in ["per_object", "all"]:
-        centroids = transform_to_atlas_space(
-            slice["anchoring"], centroids_new_y, centroids_new_x, reg_height, reg_width
-        )
+        if centroids_new_x is not None:
+            centroids = transform_to_atlas_space(
+                slice["anchoring"], centroids_new_y, centroids_new_x, reg_height, reg_width
+            )
+        else:
+            centroids = np.array([])
+
     print(
         f"Finished and points len is: {len(points)} and centroids len is: {len(centroids)}"
     )
@@ -321,6 +348,8 @@ def get_centroids(segmentation, pixel_id, y_scale, x_scale, object_cutoff=0):
 
     print(f"using pixel id {pixel_id}")
     print(f"Found {len(centroids)} objects in the segmentation")
+    if len(centroids) == 0:
+        return None, None, None
     centroidsX = centroids[:, 1]
     centroidsY = centroids[:, 0]
     scaled_centroidsY, scaled_centroidsX = scale_positions(
@@ -331,6 +360,8 @@ def get_centroids(segmentation, pixel_id, y_scale, x_scale, object_cutoff=0):
 
 def get_scaled_pixels(segmentation, pixel_id, y_scale, x_scale):
     id_pixels = find_matching_pixels(segmentation, pixel_id)
+    if len(id_pixels[0]) == 0:
+        return None, None
     # Scale the seg coordinates to reg/seg
     scaled_y, scaled_x = scale_positions(id_pixels[0], id_pixels[1], y_scale, x_scale)
     return scaled_y, scaled_x
diff --git a/PyNutil/counting_and_load.py b/PyNutil/counting_and_load.py
index 22dda97..9dce6ec 100644
--- a/PyNutil/counting_and_load.py
+++ b/PyNutil/counting_and_load.py
@@ -106,8 +106,7 @@ def pixel_count_per_region(
 
         new_rows.append(row)
 
-    df_counts_per_label_name = pd.DataFrame(new_rows)
-    
+    df_counts_per_label_name = pd.DataFrame(new_rows, columns=["idx", "name", "pixel_count", "object_count", "r", "g", "b"])
     #Task for Sharon:
     #If you can get the areas per region from the flat file here 
     #you can then use those areas to calculate the load per region here
diff --git a/PyNutil/main.py b/PyNutil/main.py
index 4d19a3b..6277a1f 100644
--- a/PyNutil/main.py
+++ b/PyNutil/main.py
@@ -219,6 +219,8 @@ class PyNutil:
         per_section_df = []
         current_centroids = None
         current_points = None
+        print("region areas list")
+        print(self.region_areas_list)
         for pl,cl,ra in zip(self.points_len, self.centroids_len, self.region_areas_list):
             if hasattr(self, "centroids"):
                 current_centroids = labeled_points_centroids[prev_cl : prev_cl + cl]
@@ -227,6 +229,7 @@ class PyNutil:
             current_df = pixel_count_per_region(
                 current_points, current_centroids, self.atlas_labels
             )
+            print("current df", current_df)
 
 # create the df for section report and all report
 # pixel_count_per_region returns a df with idx, pixel count, name and RGB.
@@ -239,6 +242,7 @@ class PyNutil:
             Remove duplicate columns
             Calculate and add area_fraction to new column in the df.  
             """
+            
             all_region_df = self.atlas_labels.merge(ra, on = 'idx', how='left')           
             current_df_new = all_region_df.merge(current_df, on= 'idx', how= 'left', suffixes= (None,"_y")).drop(columns=["a","VIS", "MSH", "name_y","r_y","g_y","b_y"])
             current_df_new["area_fraction"] = current_df_new["pixel_count"] / current_df_new["region_area"]
diff --git a/PyNutil/visualign_deformations.py b/PyNutil/visualign_deformations.py
index 5441b5e..7208623 100644
--- a/PyNutil/visualign_deformations.py
+++ b/PyNutil/visualign_deformations.py
@@ -6,17 +6,16 @@ import numpy as np
 def triangulate(w, h, markers):
     vertices = [
         [-0.1 * w, -0.1 * h, -0.1 * w, -0.1 * h],
-        [1.1 * w, -0.1 * h, 1.1 * w, -0.1 * h],
-        [-0.1 * w, 1.1 * h, -0.1 * w, 1.1 * h],
-        [1.1 * w, 1.1 * h, 1.1 * w, 1.1 * h],
+        [ 1.1 * w, -0.1 * h,  1.1 * w, -0.1 * h],
+        [-0.1 * w,  1.1 * h, -0.1 * w,  1.1 * h],
+        [ 1.1 * w,  1.1 * h,  1.1 * w,  1.1 * h],
     ]
-    #    vertices = [[0, 0, 0, 0],
-    #                [w, 0, w, 0],
-    #                [0, h, 0, h],
-    #                [w, h, w, h]]
     edges = [0] * ((len(markers) + 4) * (len(markers) + 4 - 1) // 2)
     triangles = [Triangle(0, 1, 2, vertices, edges), Triangle(1, 2, 3, vertices, edges)]
     edges[0] = edges[1] = edges[4] = edges[5] = 2
+    # Remove duplicate markers; duplicate points can produce degenerate triangles
+    markers = [list(m) for m in {tuple(m) for m in markers}]
+
     for marker in markers:
         x, y = marker[2:4]
         found = False
@@ -155,6 +154,8 @@ class Triangle:
         self.decomp = inv3x3(
             [[bx - ax, by - ay, 0], [cx - ax, cy - ay, 0], [ax, ay, 1]]
         )
+        if self.decomp is None:
+            print("Warning: triangle decomposition failed (degenerate triangle)")
         a2 = distsquare(bx, by, cx, cy)
         b2 = distsquare(ax, ay, cx, cy)
         c2 = distsquare(ax, ay, bx, by)
@@ -189,6 +190,7 @@ class Triangle:
 
     # xy: 2-dimensional array with one xy-pair per row
     def inforward_vec(self, x, y, xPrime, yPrime):
+
         uv1 = rowmul3_vec(x, y, self.forwarddecomp)
         # also compute the next step, since it uses the parameters of this triangle
         ok = (
@@ -210,6 +212,8 @@ class Triangle:
         )
 
     def intriangle_vec(self, x, y, xPrime, yPrime):
+        if self.decomp is None:
+            print("Warning: triangle decomposition failed (degenerate triangle)")
         uv1 = rowmul3_vec(x, y, self.decomp)
         # also compute the next step, since it uses the parameters of this triangle
         ok = (
-- 
GitLab