Skip to content
Snippets Groups Projects
Commit 98869b93 authored by Harry Carey's avatar Harry Carey
Browse files

Made the multithreaded functions the default and removed the non-multithreaded versions

parent 11892cb6
No related branches found
No related tags found
No related merge requests found
......@@ -79,72 +79,8 @@ def transformToAtlasSpace(anchoring, Y, X, RegHeight, RegWidth):
return (O + XYZU + XYZV).T
# related to coordinate extraction
def SegmentationToAtlasSpace(slice, SegmentationPath, pixelID="auto", nonLinear=True):
    """Convert a single segmentation image to atlas-space coordinates.

    Applies, in order: the scaling from segmentation resolution to
    registration resolution, the optional non-linear VisuAlign marker
    deformation, and the affine anchoring transform.

    :param slice: alignment entry for this section (uses "height", "width",
        "anchoring" and optionally "markers" / "filename" keys)
    :param SegmentationPath: path to the segmentation image file
    :param pixelID: RGB value of the segmentation label, or "auto" to detect
        the single non-background colour
    :param nonLinear: apply the marker deformation when markers are present
    :return: array of atlas-space points
    """
    Segmentation = cv2.imread(SegmentationPath)
    if pixelID == "auto":
        # remove the background (pure white) from the segmentation
        SegmentationNoBackGround = Segmentation[~np.all(Segmentation == 255, axis=2)]
        # np.unique(..., axis=0) yields the distinct colours in a defined
        # order; the previous np.vstack over a set of tuples relied on
        # arbitrary set ordering and is deprecated input for np.vstack.
        pixelID = np.unique(SegmentationNoBackGround.reshape(-1, 3), axis=0)
        # currently only works for a single label
        pixelID = pixelID[0]
    ID_pixels = findMatchingPixels(Segmentation, pixelID)
    # transform pixels to registration space (the registered image and
    # segmentation have different dimensions)
    SegHeight = Segmentation.shape[0]
    SegWidth = Segmentation.shape[1]
    RegHeight = slice["height"]
    RegWidth = slice["width"]
    # this calculates reg/seg
    Yscale, Xscale = transformToRegistration(SegHeight, SegWidth, RegHeight, RegWidth)
    scaledY, scaledX = scalePositions(ID_pixels[0], ID_pixels[1], Yscale, Xscale)
    if nonLinear and "markers" in slice:
        # this creates a triangulation using the reg width
        triangulation = triangulate(RegWidth, RegHeight, slice["markers"])
        newX, newY = transform_vec(triangulation, scaledX, scaledY)
    else:
        if nonLinear:
            print(f"no markers found for {slice['filename']}, result for section will be linear")
        newX, newY = scaledX, scaledY
    # scale U by Uxyz/RegWidth and V by Vxyz/RegHeight
    points = transformToAtlasSpace(slice["anchoring"], newY, newX, RegHeight, RegWidth)
    return np.array(points)
# related to coordinate extraction
def FolderToAtlasSpace(folder, QUINT_alignment, pixelID=(0, 0, 0), nonLinear=True):
    """Apply SegmentationToAtlasSpace to all segmentations in a folder.

    :param folder: directory containing the segmentation images
    :param QUINT_alignment: path to the VisuAlign/QUINT alignment JSON
    :param pixelID: RGB value of the segmentation label (immutable tuple
        default replaces the previous mutable-list default)
    :param nonLinear: forwarded to SegmentationToAtlasSpace
    :return: array of atlas-space points pooled across all sections
    """
    slices = loadVisuAlignJson(QUINT_alignment)
    points = []
    # str.endswith accepts a tuple of suffixes, so no per-file list is needed
    segmentationFileTypes = (".png", ".tif", ".tiff", ".jpg", ".jpeg")
    Segmentations = [
        file for file in glob(folder + "/*") if file.endswith(segmentationFileTypes)
    ]
    for SegmentationPath in Segmentations:
        # match each segmentation to its alignment entry by section number
        seg_nr = int(number_sections([SegmentationPath])[0])
        current_slice_index = np.where([s["nr"] == seg_nr for s in slices])
        current_slice = slices[current_slice_index[0][0]]
        # this converts the segmentation to a point cloud; extend (not append)
        # pools all sections into one flat list of points
        points.extend(
            SegmentationToAtlasSpace(current_slice, SegmentationPath, pixelID, nonLinear)
        )
    return np.array(points)
......@@ -152,7 +88,7 @@ def FolderToAtlasSpace(folder, QUINT_alignment, pixelID=[0, 0, 0], nonLinear=Tru
# related to coordinate extraction
# this function returns an array of points
def FolderToAtlasSpaceMultiThreaded(
def FolderToAtlasSpace(
folder, QUINT_alignment, pixelID=[0, 0, 0], nonLinear=True
):
"apply Segmentation to atlas space to all segmentations in a folder"
......@@ -175,7 +111,7 @@ def FolderToAtlasSpaceMultiThreaded(
current_slice_index = np.where([s["nr"] == seg_nr for s in slices])
current_slice = slices[current_slice_index[0][0]]
x = threading.Thread(
target=SegmentationToAtlasSpaceMultiThreaded,
target=SegmentationToAtlasSpace,
args=(
current_slice,
SegmentationPath,
......@@ -198,7 +134,7 @@ def FolderToAtlasSpaceMultiThreaded(
# related to coordinate extraction
# this function returns an array of points
def SegmentationToAtlasSpaceMultiThreaded(
def SegmentationToAtlasSpace(
slice, SegmentationPath, pixelID="auto", nonLinear=True, pointsList=None, index=None
):
"""combines many functions to convert a segmentation to atlas space. It takes care
......
......@@ -23,7 +23,7 @@ def labelPoints(points, label_volume, scale_factor=1):
# related to counting_and_load
def PixelCountPerRegion(labelsDict, label_colours):
def PixelCountPerRegion(labelsDict, df_label_colours):
"""Function for counting no. of pixels per region and writing to CSV based on
a dictionary with the region as the key and the points as the value,"""
counted_labels, label_counts = np.unique(labelsDict, return_counts=True)
......@@ -34,7 +34,7 @@ def PixelCountPerRegion(labelsDict, label_colours):
df_counts_per_label = pd.DataFrame(counts_per_label, columns=["idx", "pixel_count"])
# create a pandas df with regions and pixel counts
df_label_colours = pd.read_csv(label_colours, sep=",")
# df_label_colours = pd.read_csv(label_colours, sep=",")
# find colours corresponding to each region ID and add to the pandas dataframe
# look up name, r, g, b in df_allen_colours in df_counts_per_label based on "idx"
......
from .metadata import metadata_loader
from .read_and_write import readAtlasVolume
from .coordinate_extraction import FolderToAtlasSpaceMultiThreaded
from .read_and_write import readAtlasVolume, WritePointsToMeshview
from .coordinate_extraction import FolderToAtlasSpace
from .counting_and_load import labelPoints, PixelCountPerRegion
import json
import pandas as pd
from datetime import datetime
class PyNutil:
def __init__(
......@@ -46,8 +49,15 @@ class PyNutil:
atlas_root_path = self.config["annotation_volume_directory"]
current_atlas_path = self.config["annotation_volumes"][self.atlas]["volume"]
print("loading atlas volume")
startTime = datetime.now()
self.atlas_volume = readAtlasVolume(atlas_root_path + current_atlas_path)
print("atlas volume loaded")
time_taken = datetime.now() - startTime
print(f"atlas volume loaded in: {time_taken}")
atlas_label_path = self.config["annotation_volumes"][self.atlas]["labels"]
print("loading atlas labels")
self.atlas_labels = pd.read_csv(atlas_root_path + atlas_label_path)
print("atlas labels loaded")
def get_coordinates(self, nonLinear=True, method="all"):
if not hasattr(self, "atlas_volume"):
......@@ -64,4 +74,31 @@ class PyNutil:
self.points = points
def quantify_coordinates(self):
    """Label every extracted point with its atlas region and build the
    per-region pixel-count table (stored on self.label_df)."""
    if not hasattr(self, "points"):
        raise ValueError("Please run get_coordinates before running quantify_coordinates")
    print("quantifying coordinates")
    # look up the atlas region ID underneath each extracted point
    region_ids = labelPoints(self.points, self.atlas_volume, scale_factor=2.5)
    self.labeled_points = region_ids
    # aggregate the labelled points into a per-region count dataframe
    self.label_df = PixelCountPerRegion(region_ids, self.atlas_labels)
    print("quantification complete")
def save_analysis(self, output_folder):
    """Write the analysis results into output_folder.

    Saves the per-region counts as counts.csv and the labelled point cloud
    as a Meshview JSON. If quantify_coordinates has not been run, nothing
    is written and the user is told how to proceed.

    :param output_folder: existing directory to write the output files into
    :raises ValueError: if get_coordinates has not been run yet
    """
    if not hasattr(self, "points"):
        raise ValueError("Please run get_coordinates before running save_analysis")
    if not hasattr(self, "label_df"):
        # Bug fix: the original also wrote self.label_df.to_csv(...) here,
        # i.e. it accessed exactly the attribute this branch just found to
        # be missing, which raised AttributeError.
        # TODO(review): saving the raw coordinates without labels is not
        # implemented yet; this branch currently only warns.
        print("no quantification found so we will only save the coordinates")
        print("if you want to save the quantification please run quantify_coordinates")
    else:
        self.label_df.to_csv(
            output_folder + '/counts.csv', sep=";", na_rep="", index=False
        )
        WritePointsToMeshview(
            self.points,
            self.labeled_points,
            output_folder + '/pixels_meshview.json',
            self.atlas_labels
        )
......@@ -53,7 +53,7 @@ def WritePointsToMeshview(points, pointNames, filename, infoFile):
regionDict = createRegionDict(points, pointNames)
WritePoints(regionDict, filename, infoFile)
# I think this might not need to be its own function :)
def SaveDataframeasCSV(df_to_save, output_csv):
    """Write *df_to_save* to *output_csv* as semicolon-separated values,
    with empty strings for missing values and without the index column."""
    csv_options = {"sep": ";", "na_rep": "", "index": False}
    df_to_save.to_csv(output_csv, **csv_options)
......@@ -111,4 +111,4 @@ def FilesinDirectory(directory):
def readAtlasVolume(atlas_volume_path):
    """Read an atlas annotation volume from a NRRD file.

    :param atlas_volume_path: path to the .nrrd file
    :return: the voxel data array; the NRRD header is discarded
    """
    # the stale duplicate `return data, header` left by the rename diff was
    # removed: the visible caller unpacks a single value (the volume only)
    data, header = nrrd.read(atlas_volume_path)
    return data
......@@ -7,3 +7,6 @@ pnt.build_quantifier()
# extract atlas-space coordinates for every segmentation section
pnt.get_coordinates()
# label each point with its atlas region and count pixels per region
pnt.quantify_coordinates()
# write the counts CSV and Meshview point cloud to the output folder
pnt.save_analysis('outputs/test4_2017')
\ No newline at end of file
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment