Commit 9b9862c4 authored by Sharon Yates

Restructure and cleanup code, OOP

parent ef46b825
@@ -29,6 +29,8 @@ def getCentroidsAndArea(Segmentation, pixelCutOff=0):
 def transformToRegistration(SegHeight, SegWidth, RegHeight, RegWidth):
     """this function returns the scaling factors to transform the segmentation to the registration space"""
     Yscale = RegHeight/SegHeight
@@ -94,14 +96,19 @@ def SegmentationToAtlasSpace(slice, SegmentationPath, pixelID='auto', nonLinear=
     RegWidth = slice["width"]
     #this calculates reg/seg
     Yscale , Xscale = transformToRegistration(SegHeight,SegWidth, RegHeight,RegWidth)
-    #this creates a triangulation using the reg width
-    triangulation = triangulate(RegWidth, RegHeight, slice["markers"])
     #scale the seg coordinates to reg/seg
     scaledY,scaledX = scalePositions(ID_pixels[0], ID_pixels[1], Yscale, Xscale)
     if nonLinear:
-        newX, newY = transform_vec(triangulation, scaledX, scaledY)
+        if "markers" in slice:
+            #this creates a triangulation using the reg width
+            triangulation = triangulate(RegWidth, RegHeight, slice["markers"])
+            newX, newY = transform_vec(triangulation, scaledX, scaledY)
+        else:
+            print(f"no markers found for " + slice["filename"])
+            newX, newY = scaledX, scaledY
     else:
         newX, newY = scaledX, scaledY
     #scale U by Uxyz/RegWidth and V by Vxyz/RegHeight
     points = transformToAtlasSpace(slice['anchoring'], newY, newX, RegHeight, RegWidth)
     # points = points.reshape(-1)
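For readers skimming the hunk above, here is a minimal standalone sketch of the fallback it introduces: a triangulation is only built when the VisuAlign slice actually carries non-linear "markers"; otherwise the linearly scaled coordinates pass through unchanged. The helper name and signature are illustrative assumptions rather than repository code; triangulate and transform_vec are passed in as arguments so the snippet stays self-contained.

def warp_if_markers(slice, scaledX, scaledY, triangulate, transform_vec):
    # sketch only (assumption): mirrors the fallback added in this commit
    if "markers" in slice:
        # non-linear markers exist, so build the triangulation and warp the coordinates
        triangulation = triangulate(slice["width"], slice["height"], slice["markers"])
        return transform_vec(triangulation, scaledX, scaledY)
    # no markers: report it and keep the linearly scaled coordinates
    print("no markers found for " + slice["filename"])
    return scaledX, scaledY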
@@ -113,7 +120,7 @@ def FolderToAtlasSpace(folder, QUINT_alignment, pixelID=[0, 0, 0], nonLinear=True):
     slices = loadVisuAlignJson(QUINT_alignment)
     points = []
     segmentationFileTypes = [".png", ".tif", ".tiff", ".jpg", ".jpeg"]
-    Segmentations = [file for file in glob(folder + "*") if any([file.endswith(type) for type in segmentationFileTypes])]
+    Segmentations = [file for file in glob(folder + "/*") if any([file.endswith(type) for type in segmentationFileTypes])]
     SectionNumbers = number_sections(Segmentations)
     #order segmentations and sectionNumbers
     # Segmentations = [x for _,x in sorted(zip(SectionNumbers,Segmentations))]
@@ -125,12 +132,13 @@ def FolderToAtlasSpace(folder, QUINT_alignment, pixelID=[0, 0, 0], nonLinear=True):
         ##this converts the segmentation to a point cloud
         points.extend(SegmentationToAtlasSpace(current_slice, SegmentationPath, pixelID, nonLinear))
     return np.array(points)

 def FolderToAtlasSpaceMultiThreaded(folder, QUINT_alignment, pixelID=[0, 0, 0], nonLinear=True):
     "apply Segmentation to atlas space to all segmentations in a folder"
     slices = loadVisuAlignJson(QUINT_alignment)
     segmentationFileTypes = [".png", ".tif", ".tiff", ".jpg", ".jpeg"]
-    Segmentations = [file for file in glob(folder + "*") if any([file.endswith(type) for type in segmentationFileTypes])]
+    Segmentations = [file for file in glob(folder + "/*") if any([file.endswith(type) for type in segmentationFileTypes])]
     SectionNumbers = number_sections(Segmentations)
     #order segmentations and sectionNumbers
     # Segmentations = [x for _,x in sorted(zip(SectionNumbers,Segmentations))]
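The glob fix in both hunks assumes the folder argument is given without a trailing separator. As a hedged alternative, here is a sketch that is indifferent to trailing slashes and extension case (illustrative only, not the committed code):

import os
from glob import glob

SEGMENTATION_FILE_TYPES = (".png", ".tif", ".tiff", ".jpg", ".jpeg")

def find_segmentations(folder):
    # os.path.join tolerates folders given with or without a trailing separator
    pattern = os.path.join(folder, "*")
    # str.endswith accepts a tuple, so no inner any() is needed
    return [f for f in glob(pattern) if f.lower().endswith(SEGMENTATION_FILE_TYPES)]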
@@ -172,12 +180,17 @@ def SegmentationToAtlasSpaceMultiThreaded(slice, SegmentationPath, pixelID='auto'
     RegWidth = slice["width"]
     #this calculates reg/seg
     Yscale , Xscale = transformToRegistration(SegHeight,SegWidth, RegHeight,RegWidth)
-    #this creates a triangulation using the reg width
-    triangulation = triangulate(RegWidth, RegHeight, slice["markers"])
     #scale the seg coordinates to reg/seg
     scaledY,scaledX = scalePositions(ID_pixels[0], ID_pixels[1], Yscale, Xscale)
     if nonLinear:
-        newX, newY = transform_vec(triangulation, scaledX, scaledY)
+        if "markers" in slice:
+            #this creates a triangulation using the reg width
+            triangulation = triangulate(RegWidth, RegHeight, slice["markers"])
+            newX, newY = transform_vec(triangulation, scaledX, scaledY)
+        else:
+            print(f"no markers found for " + slice["filename"])
+            newX, newY = scaledX, scaledY
     else:
         newX, newY = scaledX, scaledY
     #scale U by Uxyz/RegWidth and V by Vxyz/RegHeight
......
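The actual thread dispatch inside FolderToAtlasSpaceMultiThreaded lives in the collapsed part of this diff. Purely to illustrate the pattern it names (an assumption about shape, not the repository's implementation), per-section work could be fanned out over a pool like this, with process_one standing in for a hypothetical per-section worker:

from concurrent.futures import ThreadPoolExecutor

import numpy as np

def folder_to_atlas_space_threaded(slices, segmentation_paths, process_one, max_workers=8):
    # process_one(slice, path) -> (N, 3) array of atlas-space points for one section (hypothetical worker)
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        futures = [pool.submit(process_one, s, p) for s, p in zip(slices, segmentation_paths)]
        results = [f.result() for f in futures]
    # stack the per-section point arrays into a single (N, 3) array
    return np.concatenate(results) if results else np.empty((0, 3))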
import PyNutil
#define parameters
#specify location of segmentation folder
segmentation_folder = r"blabla/blabla"
#specify location of json file
json_file = r"blabla/blabla.json"
#specify colour to quantify
colour = [255, 255, 255]
#specify output location
output_path = r"blabla/blabla/output"
quantifier = PyNutil(segmentation_folder,
json_file,
colour,
output_path)
quantifier.build_quantifier()
#define your mask as either a png, or a qcalign damage map
#this mask will be applied to all
quantifier.load_mask(mask_path=r"blablabla/")
#load a custom region file
quantifier.load_custom_regions(custom_region_json=r"blablabla/")
#run coordinate extraction
#ideally extract coordinates per section and whole brain
points = quantifier.get_coordinates()
quantifier.save_coordinates()
objects = quantifier.get_objects()
loads = quantifier.get_loads()
quantifier.save_segmentation_atlas_overlays()
\ No newline at end of file
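load_mask is only named in the example above; as a loudly hedged illustration of what applying such a mask to a segmentation could mean (assumed semantics, not the library's behaviour), a binary mask might simply zero out excluded pixels before coordinate extraction:

import numpy as np

def apply_mask(segmentation, mask):
    # assumption: pixels where the mask is zero (e.g. damaged tissue in a QCAlign damage map) are excluded
    masked = segmentation.copy()
    masked[mask == 0] = 0
    return masked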
@@ -8,24 +8,31 @@ import csv
 from datetime import datetime

+#import json and use it to define volume_path, segmentation_folder, alignment_json, label_path, colour, allen_colours
 #import our function for converting a folder of segmentations to points
 from PyNutil import FolderToAtlasSpace, labelPoints, WritePointsToMeshview, FolderToAtlasSpaceMultiThreaded

+label_path = "../annotation_volumes//allen2022_colours.csv"
+colour = [255, 0, 255]
 volume_path = "../annotation_volumes//annotation_10_reoriented.nrrd"
 data, header = nrrd.read(volume_path)
+points_json_path = "../outputs/points.json"
+segmentation_folder = "../test_data/oneSection15"
+alignment_json = "../test_data/C68_nonlinear_no_markers.json"
+allen_colours = "../annotation_volumes//allen2022_colours.csv"
+counts_per_label_name = "../outputs/counts_per_allenID.csv"

 startTime = datetime.now()
-segmentation_folder = "../test_data/tTA_2877_NOP/"
-alignment_json = "../test_data/tTA_2877_NOP_horizontal_final_2017.json"
 #now we can use our function to convert the folder of segmentations to points
-points = FolderToAtlasSpaceMultiThreaded(segmentation_folder,alignment_json, pixelID=[0, 0, 255], nonLinear=True)
+points = FolderToAtlasSpaceMultiThreaded(segmentation_folder,alignment_json, pixelID=colour, nonLinear=True)
 time_taken = datetime.now() - startTime
 print(f"Folder to atlas took: {time_taken}")
 #first we need to find the label file for the volume
-label_path = "../annotation_volumes//allen2022_colours.csv"
 #then the path to the volume
 #read the label files
@@ -34,7 +41,7 @@ label_df = pd.read_csv(label_path)
 #now we can get the labels for each point
 labels = labelPoints(points, data, scale_factor=2.5)
 #save points to a meshview json
-WritePointsToMeshview(points, labels,"../outputs/points.json", label_df)
+WritePointsToMeshview(points, labels,points_json_path, label_df)
 #Task:
 # Make a pandas dataframe
@@ -48,7 +55,6 @@ counts_per_label = list(zip(counted_labels,label_counts))
 df_counts_per_label = pd.DataFrame(counts_per_label, columns=["allenID","pixel count"])
-allen_colours = "../annotation_volumes//allen2022_colours.csv"
 df_allen_colours =pd.read_csv(allen_colours, sep=",")
 df_allen_colours
@@ -74,7 +80,7 @@ df_counts_per_label_name = pd.DataFrame(new_rows)
 df_counts_per_label_name
 # write to csv file
-df_counts_per_label_name.to_csv("../outputs/counts_per_allenID.csv", sep=";", na_rep='', index= False)
+df_counts_per_label_name.to_csv(counts_per_label_name, sep=";", na_rep='', index= False)
 #r = df_allen_colours["r"]
 #g = df_allen_colours["g"]
......
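The hunks above turn the labelled points into a per-region table and write it out. A condensed sketch of that step (the wrapper function and its name are illustrative; the operations are the ones visible in the diff):

import numpy as np
import pandas as pd

def counts_per_allen_id(labels, out_csv):
    # count how many labelled points fall in each atlas ID
    counted_labels, label_counts = np.unique(labels, return_counts=True)
    df = pd.DataFrame(list(zip(counted_labels, label_counts)),
                      columns=["allenID", "pixel count"])
    # write the per-region counts with the same separator the script uses
    df.to_csv(out_csv, sep=";", na_rep="", index=False)
    return df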
class PyNutil:
    def __init__(self, segmentation_folder, json_file, colour, output_path) -> None:
        self.segmentation_folder = segmentation_folder
        self.json_file = json_file
        self.colour = colour
        self.output_path = output_path

    def build_quantifier(self):
        #do all the expensive computations
        pass
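A hedged sketch of how this skeleton might wrap the existing functional pipeline, using only names that appear elsewhere in this commit (FolderToAtlasSpace, labelPoints, WritePointsToMeshview, nrrd.read, pd.read_csv). The method bodies, the scale_factor, and the reuse of output_path as the MeshView JSON path are assumptions, not the committed implementation:

import nrrd
import pandas as pd
from PyNutil import FolderToAtlasSpace, labelPoints, WritePointsToMeshview

class PyNutilSketch:
    """Illustrative only: mirrors the draft API used in the example script above."""
    def __init__(self, segmentation_folder, json_file, colour, output_path) -> None:
        self.segmentation_folder = segmentation_folder
        self.json_file = json_file
        self.colour = colour
        self.output_path = output_path

    def build_quantifier(self, volume_path, label_path):
        # load the atlas volume and label table once, up front (the expensive part)
        self.atlas_volume, self.atlas_header = nrrd.read(volume_path)
        self.label_df = pd.read_csv(label_path)

    def get_coordinates(self, nonLinear=True):
        # convert every segmentation in the folder to atlas-space points
        self.points = FolderToAtlasSpace(
            self.segmentation_folder, self.json_file,
            pixelID=self.colour, nonLinear=nonLinear)
        return self.points

    def save_coordinates(self):
        # label each point and write a MeshView-compatible JSON to the output path
        labels = labelPoints(self.points, self.atlas_volume, scale_factor=2.5)
        WritePointsToMeshview(self.points, labels, self.output_path, self.label_df)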