Commit 21742f22 authored by Harry Carey

added object counts to the output csv

parent af63b2d6
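
In brief: folder_to_atlas_space now returns object centroids and per-section lengths alongside the pixel points, pixel_count_per_region gains an object_count column next to pixel_count, and PyNutil.quantify_coordinates labels both pixels and centroids. A purely hypothetical illustration of the resulting per-region counts table (only the column names come from this diff; the region ids and numbers are invented):

import pandas as pd

# hypothetical counts table; column names match the diff, values are made up
df_counts_per_label = pd.DataFrame(
    {"idx": [385, 672], "pixel_count": [1532, 804], "object_count": [12, 7]}
)
print(df_counts_per_label)  # prints something like:
#    idx  pixel_count  object_count
# 0  385         1532            12
# 1  672          804             7
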
@@ -72,7 +72,6 @@ def transform_to_atlas_space(anchoring, y, x, reg_height, reg_width):
# Scale X and Y to between 0 and 1 using the registration width and height
y_scale = y / reg_height
x_scale = x / reg_width
# print("width: ", reg_width, " height: ", reg_height, " Xmax: ", np.max(x), " Ymax: ", np.max(y), " Xscale: ", np.max(x_scale), " Yscale: ", np.max(y_scale))
xyz_v = np.array([y_scale * v[0], y_scale * v[1], y_scale * v[2]])
xyz_u = np.array([x_scale * u[0], x_scale * u[1], x_scale * u[2]])
o = np.reshape(o, (3, 1))
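
The hunk above only removes a debug print; for context, the surrounding function maps scaled section coordinates into atlas space using the QuickNII-style anchoring vector. A rough sketch of that linear mapping, assuming the anchoring layout is [ox, oy, oz, ux, uy, uz, vx, vy, vz] (an assumption, not shown in this diff):

import numpy as np

def to_atlas_sketch(anchoring, y, x, reg_height, reg_width):
    # assumed layout: origin o, width vector u, height vector v (three floats each)
    o, u, v = (np.array(anchoring[i:i + 3], dtype=float) for i in (0, 3, 6))
    x_scale = np.asarray(x) / reg_width    # fraction across the section width
    y_scale = np.asarray(y) / reg_height   # fraction down the section height
    # each pixel maps to origin + (fraction of width) * u + (fraction of height) * v
    return o[:, None] + u[:, None] * x_scale + v[:, None] * y_scale  # shape (3, n)
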
@@ -102,6 +101,7 @@ def folder_to_atlas_space(
# segmentations = [x for _,x in sorted(zip(section_numbers,segmentations))]
# section_numbers.sort()
points_list = [None] * len(segmentations)
centroids_list = [None] * len(segmentations)
threads = []
for segmentation_path, index in zip(segmentations, range(len(segmentations))):
seg_nr = int(number_sections([segmentation_path])[0])
@@ -115,6 +115,7 @@ def folder_to_atlas_space(
pixel_id,
non_linear,
points_list,
centroids_list,
index,
method,
),
@@ -126,18 +127,22 @@ def folder_to_atlas_space(
# Wait for threads to finish
[t.join() for t in threads]
# Flatten points_list
points_len = [len(points) for points in points_list]
centroids_len = [len(centroids) for centroids in centroids_list]
points = [item for sublist in points_list for item in sublist]
return np.array(points)
centroids_list = [item for sublist in centroids_list for item in sublist]
return np.array(points), np.array(centroids_list), points_len, centroids_len
# related to coordinate extraction
# This function computes atlas-space coordinates (and object centroids) for one segmentation
def segmentation_to_atlas_space(
slice,
segmentation_path,
pixel_id="auto",
non_linear=True,
points_list=None,
centroids_list=None,
index=None,
method="per_pixel",
):
@@ -162,38 +167,65 @@ def segmentation_to_atlas_space(
y_scale, x_scale = transform_to_registration(
seg_height, seg_width, reg_height, reg_width
)
centroids, points = None, None
if method in ["per_object", "all"]:
# This function returns the centroids, area and coordinates of all the objects in the segmentation
# Right now we only use centroids
binary_seg = segmentation == pixel_id
binary_seg = np.all(binary_seg, axis=2)
centroids, area, coords = get_centroids_and_area(binary_seg, pixel_cut_off=0)
print("Number of objects: ", len(centroids))
# print(centroids)
if method in ["per_pixel", "all"]:
id_pixels = find_matching_pixels(segmentation, pixel_id)
# Scale the seg coordinates to reg/seg
scaled_y, scaled_x = scale_positions(
id_pixels[0], id_pixels[1], y_scale, x_scale
centroids, scaled_centroidsX, scaled_centroidsY = get_centroids(
segmentation, pixel_id, y_scale, x_scale
)
print("Number of objects: ", len(scaled_centroidsY))
if method in ["per_pixel", "all"]:
scaled_y, scaled_x = get_scaled_pixels(segmentation, pixel_id, y_scale, x_scale)
if non_linear:
if "markers" in slice:
# This creates a triangulation using the reg width
triangulation = triangulate(reg_width, reg_height, slice["markers"])
new_x, new_y = transform_vec(triangulation, scaled_x, scaled_y)
if method in ["per_pixel", "all"]:
new_x, new_y = transform_vec(triangulation, scaled_x, scaled_y)
if method in ["per_object", "all"]:
centroids_new_x, centroids_new_y = transform_vec(
triangulation, scaled_centroidsX, scaled_centroidsY
)
else:
print(
f"No markers found for {slice['filename']}, result for section will be linear."
)
new_x, new_y = scaled_x, scaled_y
if method in ["per_pixel", "all"]:
new_x, new_y = scaled_x, scaled_y
if method in ["per_object", "all"]:
centroids_new_x, centroids_new_y = scaled_centroidsX, scaled_centroidsY
else:
new_x, new_y = scaled_x, scaled_y
if method in ["per_pixel", "all"]:
new_x, new_y = scaled_x, scaled_y
if method in ["per_object", "all"]:
centroids_new_x, centroids_new_y = scaled_centroidsX, scaled_centroidsY
# Scale U by Uxyz/RegWidth and V by Vxyz/RegHeight
points = transform_to_atlas_space(
slice["anchoring"], new_y, new_x, reg_height, reg_width
)
# points = points.reshape(-1)
if method in ["per_pixel", "all"]:
points = transform_to_atlas_space(
slice["anchoring"], new_y, new_x, reg_height, reg_width
)
if method in ["per_object", "all"]:
centroids = transform_to_atlas_space(
slice["anchoring"], centroids_new_y, centroids_new_x, reg_height, reg_width
)
points_list[index] = np.array(points)
centroids_list[index] = np.array(centroids)
def get_centroids(segmentation, pixel_id, y_scale, x_scale):
binary_seg = segmentation == pixel_id
binary_seg = np.all(binary_seg, axis=2)
centroids, area, coords = get_centroids_and_area(binary_seg, pixel_cut_off=0)
centroidsY = centroids[:, 1]
centroidsX = centroids[:, 0]
scaled_centroidsY, scaled_centroidsX = scale_positions(
centroidsY, centroidsX, y_scale, x_scale
)
return centroids, scaled_centroidsX, scaled_centroidsY
def get_scaled_pixels(segmentation, pixel_id, y_scale, x_scale):
id_pixels = find_matching_pixels(segmentation, pixel_id)
# Scale the seg coordinates to reg/seg
scaled_y, scaled_x = scale_positions(id_pixels[0], id_pixels[1], y_scale, x_scale)
return scaled_y, scaled_x
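
The per-object path above delegates connected-component analysis to get_centroids_and_area, which is not part of this diff. A minimal sketch of what that helper plausibly computes, written with scikit-image (an assumption about the implementation, not the project's actual code):

import numpy as np
from skimage import measure

def centroids_and_area_sketch(binary_seg, pixel_cut_off=0):
    # label connected components of the boolean mask, drop objects at or below
    # the cut-off, and report each object's centroid, area and pixel coordinates
    labels = measure.label(binary_seg)
    props = [p for p in measure.regionprops(labels) if p.area > pixel_cut_off]
    centroids = np.array([p.centroid for p in props])  # one (row, col) per object
    area = np.array([p.area for p in props])
    coords = [p.coords for p in props]
    return centroids, area, coords

With something like this in place, get_centroids only has to rescale the centroid coordinates into registration space, mirroring what get_scaled_pixels does for individual pixels.
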
@@ -23,15 +23,46 @@ def label_points(points, label_volume, scale_factor=1):
# related to counting_and_load
def pixel_count_per_region(labels_dict, df_label_colours):
def pixel_count_per_region(
labels_dict_points, labeled_dict_centroids, df_label_colours
):
"""Function for counting no. of pixels per region and writing to CSV based on
a dictionary with the region as the key and the points as the value."""
counted_labels, label_counts = np.unique(labels_dict, return_counts=True)
# Which regions have pixels, and how many pixels are there per region
counts_per_label = list(zip(counted_labels, label_counts))
# Create a list of unique regions and pixel counts per region
df_counts_per_label = pd.DataFrame(counts_per_label, columns=["idx", "pixel_count"])
if labels_dict_points is not None and labeled_dict_centroids is not None:
counted_labels_points, label_counts_points = np.unique(
labels_dict_points, return_counts=True
)
counted_labels_centroids, label_counts_centroids = np.unique(
labeled_dict_centroids, return_counts=True
)
# Which regions have pixels, and how many pixels are there per region
counts_per_label = list(
zip(counted_labels_points, label_counts_points, label_counts_centroids)
)
# Create a list of unique regions and pixel counts per region
df_counts_per_label = pd.DataFrame(
counts_per_label, columns=["idx", "pixel_count", "object_count"]
)
elif labels_dict_points is None and labeled_dict_centroids is not None:
counted_labels_centroids, label_counts_centroids = np.unique(
labeled_dict_centroids, return_counts=True
)
# Which regions have pixels, and how many pixels are there per region
counts_per_label = list(zip(counted_labels_centroids, label_counts_centroids))
# Create a list of unique regions and pixel counts per region
df_counts_per_label = pd.DataFrame(
counts_per_label, columns=["idx", "object_count"]
)
elif labels_dict_points is not None and labeled_dict_centroids is None:
counted_labels_points, label_counts_points = np.unique(
labels_dict_points, return_counts=True
)
# Which regions have pixels, and how many pixels are there per region
counts_per_label = list(zip(counted_labels_points, label_counts_points))
# Create a list of unique regions and pixel counts per region
df_counts_per_label = pd.DataFrame(
counts_per_label, columns=["idx", "pixel_count"]
)
# Create a pandas df with regions and pixel counts
# df_label_colours = pd.read_csv(label_colours, sep=",")
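
One thing to watch in the branch that handles both inputs: zip(counted_labels_points, label_counts_points, label_counts_centroids) pairs the two count arrays positionally, which only lines up if np.unique returns exactly the same region ids for points and centroids. A hedged alternative sketch (not part of this commit) that aligns the counts by region id instead:

import numpy as np
import pandas as pd

def counts_by_region(labeled_points, labeled_centroids):
    ids_p, n_p = np.unique(labeled_points, return_counts=True)
    ids_c, n_c = np.unique(labeled_centroids, return_counts=True)
    pixels = pd.DataFrame({"idx": ids_p, "pixel_count": n_p})
    objects = pd.DataFrame({"idx": ids_c, "object_count": n_c})
    # outer merge keeps regions that only have pixels or only have objects
    return pixels.merge(objects, on="idx", how="outer").fillna(0)
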
@@ -157,7 +157,7 @@ class PyNutil:
f"method {method} not recognised, valid methods are: per_pixel, per_object, or all"
)
print("extracting coordinates")
pixel_points = folder_to_atlas_space(
pixel_points, centroids, points_len, centroids_len = folder_to_atlas_space(
self.segmentation_folder,
self.alignment_json,
pixel_id=self.colour,
@@ -165,8 +165,11 @@ class PyNutil:
method=method,
)
self.pixel_points = pixel_points
self.centroids = centroids
# points_len and centroids_len record how many points were extracted from each section.
# This will be used later to split the data into per-section files.
self.points_len = points_len
self.centroids_len = centroids_len
def quantify_coordinates(self):
"""Quantifies the pixel coordinates by region.
@@ -177,20 +180,29 @@ class PyNutil:
If the pixel coordinates have not been extracted.
"""
if not hasattr(self, "pixel_points"):
if not hasattr(self, "pixel_points") and not hasattr(self, "centroids"):
raise ValueError(
"Please run get_coordinates before running quantify_coordinates"
)
print("quantifying coordinates")
labeled_points = label_points(self.pixel_points, self.atlas_volume, scale_factor=1)
labeled_points_centroids = None
labeled_points = None
if hasattr(self, "centroids"):
labeled_points_centroids = label_points(
self.centroids, self.atlas_volume, scale_factor=1
)
if hasattr(self, "pixel_points"):
labeled_points = label_points(
self.pixel_points, self.atlas_volume, scale_factor=1
)
self.label_df = pixel_count_per_region(labeled_points, self.atlas_labels)
self.label_df = pixel_count_per_region(
labeled_points, labeled_points_centroids, self.atlas_labels
)
self.labeled_points = labeled_points
print("quantification complete ✅")
def save_analysis(self, output_folder):
"""Saves the pixel coordinates and pixel counts to different files in the specified
output folder.
@@ -226,4 +238,4 @@ class PyNutil:
output_folder + "/pixels_meshview.json",
self.atlas_labels,
)
print("analysis saved ✅")
\ No newline at end of file
print("analysis saved ✅")