diff --git a/PyNutil/PyNutil.py b/PyNutil/PyNutil.py
index 42853a0e64948012e4fdbc95b4af69ad222861da..89ef7476fed2554baa176a4397be4bfdbf4d3246 100644
--- a/PyNutil/PyNutil.py
+++ b/PyNutil/PyNutil.py
@@ -7,6 +7,7 @@ from glob import glob
 from tqdm import tqdm
 import cv2
 from skimage import measure
+import threading
 
 def getCentroidsAndArea(Segmentation, pixelCutOff=0):
     """this function returns the center coordinate of each object in the segmentation.
@@ -124,6 +125,66 @@ def FolderToAtlasSpace(folder, QUINT_alignment, pixelID=[0, 0, 0], nonLinear=Tru
         ##this converts the segmentation to a point cloud
         points.extend(SegmentationToAtlasSpace(current_slice, SegmentationPath, pixelID, nonLinear))
     return np.array(points)
+def FolderToAtlasSpaceMultiThreaded(folder, QUINT_alignment, pixelID=[0, 0, 0], nonLinear=True):
+    "apply Segmentation to atlas space to all segmentations in a folder"
+    slices = loadVisuAlignJson(QUINT_alignment)
+
+    segmentationFileTypes = [".png", ".tif", ".tiff", ".jpg", ".jpeg"]
+    Segmentations = [file for file in glob(folder + "*") if any(file.endswith(fileType) for fileType in segmentationFileTypes)]
+    SectionNumbers = number_sections(Segmentations)
+    #no need to sort Segmentations by SectionNumbers here: each thread writes its result
+    #into its own slot of pointsList, so the output order matches the input order
+    pointsList = [None] * len(Segmentations)
+    threads = []
+    for index, SegmentationPath in enumerate(Segmentations):
+        seg_nr = int(number_sections([SegmentationPath])[0])
+        current_slice_index = np.where([s["nr"]==seg_nr for s in slices])
+        current_slice = slices[current_slice_index[0][0]]
+        #each thread converts one segmentation to a point cloud and writes it to pointsList[index]
+        x = threading.Thread(target=SegmentationToAtlasSpaceMultiThreaded, args=(current_slice, SegmentationPath, pixelID, nonLinear, pointsList, index))
+        threads.append(x)
+    #start all threads, then wait for them to finish
+    for t in threads:
+        t.start()
+    for t in threads:
+        t.join()
+    # flatten pointsList
+    points = [item for sublist in pointsList for item in sublist]
+    return np.array(points)
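+
+
+#The same per-section fan-out can also be written with concurrent.futures, which caps the number
+#of worker threads instead of starting one thread per segmentation. The function below is only a
+#minimal illustrative sketch (FolderToAtlasSpaceThreadPool and max_workers are names chosen here,
+#not an existing API); it reuses the helpers called above (loadVisuAlignJson, number_sections,
+#SegmentationToAtlasSpace).
+def FolderToAtlasSpaceThreadPool(folder, QUINT_alignment, pixelID=[0, 0, 0], nonLinear=True, max_workers=8):
+    from concurrent.futures import ThreadPoolExecutor
+    slices = loadVisuAlignJson(QUINT_alignment)
+    segmentationFileTypes = [".png", ".tif", ".tiff", ".jpg", ".jpeg"]
+    Segmentations = [file for file in glob(folder + "*") if any(file.endswith(fileType) for fileType in segmentationFileTypes)]
+    def process(SegmentationPath):
+        #find the registration slice whose section number matches this segmentation file
+        seg_nr = int(number_sections([SegmentationPath])[0])
+        current_slice = slices[np.where([s["nr"] == seg_nr for s in slices])[0][0]]
+        #convert this single segmentation to a point cloud in atlas space
+        return SegmentationToAtlasSpace(current_slice, SegmentationPath, pixelID, nonLinear)
+    with ThreadPoolExecutor(max_workers=max_workers) as pool:
+        pointsList = list(pool.map(process, Segmentations))
+    #flatten the per-section point lists into one array
+    return np.array([item for sublist in pointsList for item in sublist])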
+
+
+def SegmentationToAtlasSpaceMultiThreaded(slice, SegmentationPath, pixelID='auto', nonLinear=True, pointsList=None, index=None):
+    """combines many functions to convert a segmentation to atlas space. It takes care
+    of deformations"""
+    Segmentation = cv2.imread(SegmentationPath)
+
+    if pixelID == 'auto':
+        #remove the background from the segmentation
+        SegmentationNoBackGround = Segmentation[~np.all(Segmentation==255, axis=2)]
+        pixelID = np.unique(SegmentationNoBackGround.reshape(-1, 3), axis=0) #unique non-background colours
+        #currently only works for a single label
+        pixelID = pixelID[0]
+    ID_pixels = findMatchingPixels(Segmentation, pixelID)
+    #transform pixels to registration space (the registered image and segmentation have different dimensions)
+    SegHeight = Segmentation.shape[0]
+    SegWidth  = Segmentation.shape[1]
+    RegHeight = slice["height"]
+    RegWidth  = slice["width"]
+    #scale factors from segmentation space to registration space (reg / seg)
+    Yscale, Xscale = transformToRegistration(SegHeight, SegWidth, RegHeight, RegWidth)
+    #triangulate the registration space using the VisuAlign markers
+    triangulation = triangulate(RegWidth, RegHeight, slice["markers"])
+    #scale the segmentation coordinates into registration space
+    scaledY, scaledX = scalePositions(ID_pixels[0], ID_pixels[1], Yscale, Xscale)
+    if nonLinear:
+        newX, newY = transform_vec(triangulation, scaledX, scaledY)
+    else:
+        newX, newY = scaledX, scaledY
+    #scale U by Uxyz/RegWidth and V by Vxyz/RegHeight
+    points = transformToAtlasSpace(slice['anchoring'], newY, newX, RegHeight, RegWidth)
+    #store this section's points in the shared list; each thread writes only to its own index,
+    #so no locking is needed
+    pointsList[index] = np.array(points)
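+
+#Minimal usage sketch for a single section (illustrative only; the file names below are
+#placeholders, not paths from this repository):
+# slices = loadVisuAlignJson("alignment.json")
+# pointsList = [None]
+# SegmentationToAtlasSpaceMultiThreaded(slices[0], "segmentation.png", [255, 0, 0], True, pointsList, 0)
+# sectionPoints = pointsList[0]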
+
 def createRegionDict(points, regions):
     """points is a list of points and regions is an id for each point"""
     regionDict = {region:points[regions==region].flatten().tolist() for region in np.unique(regions)}
diff --git a/PyNutil/folder_of_segmentations_to_meshview.py b/PyNutil/folder_of_segmentations_to_meshview.py
index 6505920a5c4df96e5aa7ea3922794457ddbd97e0..7abbfd81d261ba2a638c3d9f5782c3bd7fbc91b0 100644
--- a/PyNutil/folder_of_segmentations_to_meshview.py
+++ b/PyNutil/folder_of_segmentations_to_meshview.py
@@ -16,10 +16,15 @@ data, header = nrrd.read(volume_path)
 
 startTime = datetime.now()
 
-segmentation_folder = "../test_data/oneSection15/"
-alignment_json = "../test_data/C68_nonlinear.json"
+segmentation_folder = "../test_data/ext-d000033_PVMouseExtraction_pub-Nutil_Quantifier_analysis-81264-Input_dir/"
+alignment_json = "../test_data/PVMouse_81264_nonlin.json"
 #now we can use our function to convert the folder of segmentations to points
-points = FolderToAtlasSpace(segmentation_folder,alignment_json, pixelID=[255, 0, 255], nonLinear=True)
+points = FolderToAtlasSpace(segmentation_folder,alignment_json, pixelID=[255, 0, 0], nonLinear=True)
+
+
+time_taken = datetime.now() - startTime
+
+print(f"Folder to atlas took: {time_taken}")
 #first we need to find the label file for the volume
 label_path = "../annotation_volumes//allen2022_colours.csv"
 #then the path to the volume
diff --git a/PyNutil/folder_of_segmentations_to_meshview_multithreaded.py b/PyNutil/folder_of_segmentations_to_meshview_multithreaded.py
new file mode 100644
index 0000000000000000000000000000000000000000..273c163a144cd342a882f7ae0b60b771a1e76478
--- /dev/null
+++ b/PyNutil/folder_of_segmentations_to_meshview_multithreaded.py
@@ -0,0 +1,88 @@
+
+#pandas is used for working with csv files
+import pandas as pd
+#nrrd just lets us open nrrd files
+import nrrd
+import numpy as np
+
+from datetime import datetime
+
+#import our function for converting a folder of segmentations to points
+from PyNutil import FolderToAtlasSpace, labelPoints, WritePointsToMeshview, FolderToAtlasSpaceMultiThreaded
+
+volume_path = "../annotation_volumes//annotation_10_reoriented.nrrd"
+data, header = nrrd.read(volume_path)
+
+startTime = datetime.now()
+
+segmentation_folder = "../test_data/ext-d000033_PVMouseExtraction_pub-Nutil_Quantifier_analysis-81264-Input_dir/"
+alignment_json = "../test_data/PVMouse_81264_nonlin.json"
+#now we can use our function to convert the folder of segmentations to points
+points = FolderToAtlasSpaceMultiThreaded(segmentation_folder,alignment_json, pixelID=[255, 0, 0], nonLinear=True)
+
+time_taken = datetime.now() - startTime
+
+print(f"Folder to atlas took: {time_taken}")
+#first we need the label file for the annotation volume
+label_path = "../annotation_volumes//allen2022_colours.csv"
+
+#read the label file
+label_df = pd.read_csv(label_path)
+#the annotation volume itself was already read into "data" above
+#now we can get the label for each point
+labels = labelPoints(points, data, scale_factor=2.5)
+#save points to a meshview json
+WritePointsToMeshview(points, labels,"../outputs/points.json", label_df)
+
+#Task:
+# Make a pandas dataframe
+# Column 1: counted_labels
+# Column 2: label_counts
+# Column 3: region_name (looked up in allen2022_colours.csv, together with its RGB values)
+# Save the dataframe to the outputs folder as CSV
+# Next step: turn this block into reusable functions.
+counted_labels, label_counts = np.unique(labels, return_counts=True)
+counts_per_label = list(zip(counted_labels,label_counts))
+
+df_counts_per_label = pd.DataFrame(counts_per_label, columns=["allenID","pixel count"])
+
+allen_colours = "../annotation_volumes//allen2022_colours.csv"
+
+df_allen_colours = pd.read_csv(allen_colours, sep=",")
+
+#look up name, r, g and b for each "allenID" in df_counts_per_label from df_allen_colours
+new_rows = []
+for index, row in df_counts_per_label.iterrows():
+    mask = df_allen_colours["allenID"] == row["allenID"] 
+    current_region_row = df_allen_colours[mask]
+    current_region_name = current_region_row["name"].values
+    current_region_red = current_region_row["r"].values
+    current_region_green = current_region_row["g"].values
+    current_region_blue = current_region_row["b"].values
+
+    row["name"]  = current_region_name[0]
+    row["r"] = current_region_red[0]
+    row["g"] = current_region_green[0]
+    row["b"] = current_region_blue[0]
+    
+    new_rows.append(row)
+
+df_counts_per_label_name = pd.DataFrame(new_rows)
+
+# write to csv file
+df_counts_per_label_name.to_csv("../outputs/counts_per_allenID.csv", sep=";", na_rep='', index=False)
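+
+#The same name/r/g/b lookup can be done without iterating over rows: a left merge on "allenID"
+#pulls the columns in from the colour table in one step. A minimal sketch, assuming the column
+#names used above (uncomment to use it instead of the loop):
+# df_counts_per_label_name = df_counts_per_label.merge(
+#     df_allen_colours[["allenID", "name", "r", "g", "b"]], on="allenID", how="left")
+# df_counts_per_label_name.to_csv("../outputs/counts_per_allenID.csv", sep=";", na_rep='', index=False)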
+
+
+#we haven't added it here yet, but a useful next step would be to quantify the number of cells per label.
+
+time_taken = datetime.now() - startTime
+
+print(f"time taken was: {time_taken}")
\ No newline at end of file