Commit 14db4a6e authored by Maximilian Schmidt

Implement switch because of neuronal density data issue, add Note and switch in figures folder

parent 9e16e985
1 merge request: !1 Add all necessary files for the multi-area model
@@ -8,237 +8,243 @@ from plotfuncs import create_fig
# (new version shown; the previous version was identical except that the
#  plotting code was not wrapped in the NEURON_DENSITIES_AVAILABLE guard)

from scipy import stats

colors = [myblue2, myblue, myyellow, myred2, myred]

NEURON_DENSITIES_AVAILABLE = False

if NEURON_DENSITIES_AVAILABLE:
    with open(os.path.join(datapath, 'viscortex_processed_data.json'), 'r') as f:
        proc = json.load(f)
    neuron_densities = proc['neuronal_densities']
    architecture_completed = proc['architecture_completed']

    categories = {}
    for ii in np.arange(0, 9, 1):
        categories[ii] = []
    for area in architecture_completed:
        categories[architecture_completed[area]].append(area)

    with open(os.path.join(datapath, 'viscortex_raw_data.json'), 'r') as f:
        raw = json.load(f)
    thicknesses = raw['laminar_thicknesses']
    total_thickness_data = raw['total_thickness_data']

    # calculate average relative layer thicknesses for each area
    frac_of_total = {}
    for area in list(thicknesses.keys()):
        area_dict_total = {}
        for layer in list(thicknesses[area].keys()):
            area_dict_total[layer] = np.array(
                thicknesses[area][layer]) / np.array(thicknesses[area]['total'])
            # if layer thickness is zero, it makes up 0% of the total,
            # even if the total is unknown
            if 0 in np.array(thicknesses[area][layer]):
                if np.isscalar(thicknesses[area][layer]):
                    area_dict_total[layer] = 0
                else:
                    indices = np.where(np.array(
                        thicknesses[area][layer]) == 0)[0]
                    for i in indices:
                        area_dict_total[layer][i] = 0
        frac_of_total[area] = area_dict_total

    total = {}
    for area in list(thicknesses.keys()):
        totals = thicknesses[area]['total']
        if not np.isscalar(totals):
            if sum(np.isfinite(totals)):
                total[area] = np.nansum(totals) / sum(np.isfinite(totals))
            else:
                total[area] = np.nan
        else:
            total[area] = totals

    # Create arrays
    frac1_of_total = np.zeros(len(area_list))
    frac23_of_total = np.zeros(len(area_list))
    frac4_of_total = np.zeros(len(area_list))
    frac5_of_total = np.zeros(len(area_list))
    frac6_of_total = np.zeros(len(area_list))

    for i, area in enumerate(area_list):
        temp = frac_of_total[area]['1']
        if not np.isscalar(temp):
            if sum(np.isfinite(temp)):
                frac1_of_total[i] = np.nansum(temp) / sum(np.isfinite(temp))
            else:
                frac1_of_total[i] = np.nan
        else:
            frac1_of_total[i] = temp

        temp = frac_of_total[area]['23']
        if not np.isscalar(temp):
            if sum(np.isfinite(temp)):
                frac23_of_total[i] = np.nansum(temp) / sum(np.isfinite(temp))
            else:
                frac23_of_total[i] = np.nan
        else:
            frac23_of_total[i] = temp

        temp = frac_of_total[area]['4']
        if not np.isscalar(temp):
            if sum(np.isfinite(temp)):
                frac4_of_total[i] = np.nansum(temp) / sum(np.isfinite(temp))
            else:
                frac4_of_total[i] = np.nan
        else:
            frac4_of_total[i] = temp

        temp = frac_of_total[area]['5']
        if not np.isscalar(temp):
            if sum(np.isfinite(temp)):
                frac5_of_total[i] = np.nansum(temp) / sum(np.isfinite(temp))
            else:
                frac5_of_total[i] = np.nan
        else:
            frac5_of_total[i] = temp

        temp = frac_of_total[area]['6']
        if not np.isscalar(temp):
            if sum(np.isfinite(temp)):
                frac6_of_total[i] = np.nansum(temp) / sum(np.isfinite(temp))
            else:
                frac6_of_total[i] = np.nan
        else:
            frac6_of_total[i] = temp

    total_array = np.zeros(len(area_list))
    for i, area in enumerate(area_list):
        total_array[i] = total[area]

    architecture_array = np.zeros(len(area_list))
    log_density_array = np.zeros(len(area_list))
    for i, area in enumerate(area_list):
        architecture_array[i] = architecture_completed[area]
        log_density_array[i] = np.log10(neuron_densities[area]['overall'])

    # ########################################################################
    scale = 1.0
    width = 7.5
    n_horz_panels = 3.
    n_vert_panels = 1.
    panel_factory = create_fig(1, scale, width, n_horz_panels,
                               n_vert_panels, hoffset=0.06, voffset=0.19,
                               height_sup=.2)

    axes = {}
    axes['A'] = panel_factory.new_panel(0, 0, r'A', label_position=(-0.2, 1.2))
    axes['B'] = panel_factory.new_panel(1, 0, r'B', label_position=(-0.2, 1.2))
    axes['C'] = panel_factory.new_panel(2, 0, r'C', label_position=(-0.2, 1.2))

    labels = ['A', 'B', 'C']
    for label in labels:
        axes[label].spines['right'].set_color('none')
        axes[label].spines['top'].set_color('none')
        axes[label].yaxis.set_ticks_position("left")
        axes[label].xaxis.set_ticks_position("bottom")

    ax = axes['A']
    x = np.arange(1, 9, 1)
    y = np.array([])
    for cat in x:
        y = np.append(y, proc['category_density'][str(cat)]['overall'])

    layers = ['6', '5', '4', '23', '1'][::-1]
    layer_labels = ['6', '5', '4', '2/3', '1'][::-1]
    rho = {'1': np.array([]), '23': np.array([]), '4': np.array([]),
           '5': np.array([]), '6': np.array([])}

    # Define a prototype area for each category for which we do not have
    # laminar data, so that its laminar thicknesses are computed from the
    # regression
    prototype = {'2': 'TH', '4': 'AITd', '5': 'CITd',
                 '6': 'V3A', '7': 'V2', '8': 'V1'}

    for l in layers:
        if l != '1':
            for cat in x:
                if cat not in [1, 3]:
                    rho[l] = np.append(
                        rho[l],
                        proc['category_density'][str(cat)][l] *
                        proc['laminar_thicknesses'][prototype[str(cat)]][l])
                else:
                    rho[l] = np.append(rho[l], np.nan)
        else:
            rho[l] = np.zeros(8)

    bottom = np.zeros(8)
    for l in layers:
        bottom += rho[l]

    for ii, l in enumerate(layers):
        print(l, rho[l][4], bottom[4])
        bottom -= rho[l]
        print("LAYER", l, bottom)
        ax.bar(x - 0.4, rho[l], bottom=bottom,
               color=colors[ii], label='L' + layer_labels[ii],
               edgecolor='k')

    ax.set_xlabel('Architectural type', labelpad=0.3)
    ax.set_ylabel(r'Neuron density ($10^4$/mm$^2$)')

    yticklocs = [50000, 100000, 150000, 200000]
    ytickslabels = ['5', '10', '15', '20']
    ax.set_yticks(yticklocs)
    ax.set_yticklabels(ytickslabels)
    ax.set_ylim((0., 200000.))
    ax.set_xlim((-0.5, 9))
    ax.set_xticks(np.arange(2, 9, 1) - 0.4)
    ax.set_xticklabels([r'${}$'.format(ii) for ii in list(range(2, 9))])
    ax.legend(loc=(0.035, 0.45), edgecolor='k')

    ##################################################
    barbas_array = np.zeros(len(area_list))
    for i, area in enumerate(area_list):
        barbas_array[i] = total_thickness_data[area] / 1000.

    gradient, intercept, r_value, p_value, std_err = stats.linregress(
        log_density_array[np.isfinite(barbas_array)],
        barbas_array[np.isfinite(barbas_array)])
    print('total thicknesses from Barbas lab vs log. densities:')
    print('gradient: ', gradient)
    print('intercept: ', intercept)
    print('r-value: ', r_value)
    print('p-value: ', p_value)

    ax = axes['B']
    ax.plot(log_density_array, barbas_array, '.', ms=6, color='k')
    line = gradient * log_density_array + intercept
    ax.plot(log_density_array, line, '-', linewidth=1.5, color='k')
    ax.set_xlabel('Log neuron density', labelpad=0.3)
    ax.set_ylabel('Total thickness (mm)')
    ax.set_xticks([4.7, 5.0])
    ax.set_yticks(np.arange(1., 3., 0.5))

    ############################################################
    ax = axes['C']
    print('fractions of total thickness vs log. densities')
    layers = ['1', '23', '4', '5', '6']
    for i, data in enumerate([frac1_of_total, frac23_of_total,
                              frac4_of_total, frac5_of_total,
                              frac6_of_total]):
        ax.plot(log_density_array, data, '.', c=colors[i], ms=6)
        gradient, intercept, r_value, p_value, std_err = stats.linregress(
            log_density_array[np.isfinite(data)], data[np.isfinite(data)])
        print('r: ', r_value, ', p-value: ', p_value)
        line = gradient * log_density_array + intercept
        ax.plot(log_density_array, line, '-', linewidth=2.0, c=colors[i])

    ax.set_xlabel('Log neuron density', labelpad=0.3)
    ax.set_ylabel('Proportion of \n total thickness')
    ax.set_xlim((4.6, 5.3))
    ax.set_xticks([4.7, 5.0])
    ax.set_yticks(np.arange(0., 0.7, 0.2))

    pl.savefig('Fig2_anatomy.eps', dpi=600)
else:
    print("Figure 2 can currently not be produced because "
          "we cannot publish the underlying raw data.")
@@ -4,7 +4,9 @@ This folder contains the scripts to reproduce all figures of Schmidt M, Bakker R
 Please note: Figures 2, 5, and 8 show slight deviations from the published figures in the paper. Published Figures 2 and 5 are missing a few data points; this error slipped in during the review process. Importantly, the presented fits are identical in the (correct) figures in this repository and in the manuscript. These deviations thus do not affect the scientific conclusions.
-Note that the placement of areas in Figure 7 will deviate from the published figure, because their location depends on the force-directed algorithm implemented in `igraph`, and `python-igraph` does not allow manual setting of the random seed for the algorithm. This is a purely visual issue and does not affect the scientific content.
+Please note that the placement of areas in Figure 7 will deviate from the published figure, because their location depends on the force-directed algorithm implemented in `igraph`, and `python-igraph` does not allow manual setting of the random seed for the algorithm. This is a purely visual issue and does not affect the scientific content.
+
+Please note that, since we currently cannot publish the neuronal density data, Figure 2 can currently not be produced and executing it throws an error.
 If snakemake is installed, the figures can be produced by executing
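Should the density data become publishable later, Figure 2 can presumably be re-enabled by flipping the switch that this commit introduces at the top of the figure script:

    # in the Figure 2 script, once the raw density data can be shipped:
    NEURON_DENSITIES_AVAILABLE = True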
@@ -28,9 +28,8 @@ import pprint
 from copy import deepcopy
 from nested_dict import nested_dict
 from itertools import product
-# from .. import default_params as defpar
-from ..default_params import network_params, nested_update
-from .VisualCortex_Data import process_raw_data
+from multiarea_model.default_params import network_params, nested_update
+from multiarea_model.data_multiarea.VisualCortex_Data import process_raw_data


 def compute_Model_params(out_label='', mode='default'):
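(For context on the import change above: relative imports such as `from ..default_params import ...` only resolve when the module is loaded as part of the `multiarea_model` package, whereas the absolute `from multiarea_model.default_params import ...` form also works whenever the repository root is on `sys.path`, e.g. when scripts import the package directly from the repository — presumably the motivation for the change.)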
# Neuronal densities from the Barbas lab. Overall densities (1st column) are presented in Table 4 of Hilgetag, C. C., Medalla, M., Beul, S. F., & Barbas, H. (2016). The primate connectome in context: Principles of connections of the cortical visual system. NeuroImage, 134, 685–702. https://doi.org/10.1016/j.neuroimage.2016.04.017
V1 161365.313353296 19427.7514309175 153894.970495772 11237.0785022221 179088.958685915 19483.3411458588 139272.680066401 28642.7198414341
V2 97618.8004262742 6318.4209238154 92913.7502053809 9168.7044437395 180488.12639129 15599.6221210207 78810.1380340474 5788.3313934117
V4 71236.5276327589 7947.590257791 67154.5446596756 5803.6225683117 167188.428539576 26194.6145324879 56752.2320613314 10570.4766493826
MT 65991.5887115086 6996.5656738783 67044.2266724167 5773.2854729385 121372.429209652 5958.4564687061 50507.6146086331 9182.4918445723
TEO 63271.1025249173 7263.1015279643 56057.260207707 6168.8769518766 153600.74824572 26717.1840533565 57071.8248578393 6982.2666533817
V3A 61381.5412392137 6202.963723809 58305.5035933036 4035.9619344902 109843.719889781 22376.7397541261 50647.3572613343 5259.7123097559
LIP 52134.5731946242 4491.9762527831 50703.2775227163 4603.3939842453 90334.8968775757 8221.417134307 43926.0361437937 4960.2985281281
Opt/DP 48015.1995568109 5932.0962970323 45758.1365355312 4678.0990750539 86467.4698480542 12137.3808972281 41725.5532649263 6548.0941769156
TF 46083.8963535136 2697.6987375072 44164.6081343695 492.0978572682 88810.1652976761 2904.792305899 40653.2604320568 8573.9510135437
FEF 44977.7691582594 1147.4075861589 45284.2708812591 3025.5401968487 68733.783030714 1368.1732105009 39382.0621828807 1175.1024421689
Te1 38839.7460239597 4018.9831766332 36312.629019575 3798.0977648742 84833.0161404169 20492.3782565057 36379.5587038022 6385.8472725441
area 46v 38026.8136272429 4185.5590572311 34625.8382448048 3028.1656500057 59774.256849191 9644.0789780673 36192.0308636869 4420.3808841888
7a 36229.994135607 2214.8748387887 35064.7353890857 2945.3862132917 64711.115816133 8419.739423376 31239.53369249 1451.8383374224
TH 33196.1450004882 4032.662630167 32778.5191185925 5269.5723764642 0 0 33814.8407473548 2994.3319115364
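A minimal sketch of how a whitespace-separated table like the one above can be loaded, assuming it is saved as `neuronal_densities.txt` (hypothetical file name). Note that the `area 46v` row contains a space in the area name, so rows are split from the right:

    # Hypothetical loader sketch; file name and dict layout are assumptions.
    densities = {}
    with open('neuronal_densities.txt') as f:
        for line in f:
            if not line.strip() or line.startswith('#'):
                continue                          # skip blanks and the comment header
            name, *values = line.rsplit(None, 8)  # last 8 fields are numeric
            densities[name] = [float(v) for v in values]

    print(densities['V1'][0])   # overall density of V1: 161365.3...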
# Neuronal density data obtained with Nissl staining, provided by Helen Barbas (personal communication)
V1,8,173360,1.24
V2,7,111730,1.46
V3,7,—,—
VP,7,—,—
TEO,6,78523,2.13
V3A,6,76696,1.66
V4,6,86223,1.89
V4t,6,—,—
V5/MT,6,81153,1.96
VOT,6,—,—
A8,5,60837,2.21
CIT,5,—,—
DP,5,—,—
FEF,5,—,—
LIPd,5,61087,2.3
LIPv,5,69275,2.3
MST,5,—,—
PIP,5,—,—
PIT,5,—,—
PO,5,—,—
TEc,5,—,—
TF,5,61906,1.62
VIP,5,—,—
A36,4,50074,2.93
A46v,4,52720,1.86
A7a,4,52379,2.68
AIT,4,—,—
FST,4,—,—
STP,4,—,—
TEr,4,54902,2.63
A11 rostral,3,53874,1.62
A12 lateral,3,—,—
A12 rostral,3,50570,2.25
MVisPro,2,—,—
OPro,2,40136,2.5
TH,2,49446,1.87
VPolePro,2,—,—
OPAll/OPro,1.5,39607,—
A28,1,40825,2.05
A35,1,40313,1.6
ER,1,—,—
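The em dashes mark missing measurements. A minimal sketch for reading this comma-separated table into NaN-padded floats, assuming it is stored as `nissl_densities.csv` (hypothetical file name):

    import csv
    import math

    def to_float(field):
        # map the em-dash placeholder for missing data to NaN
        return math.nan if field.strip() == '—' else float(field)

    data = {}
    with open('nissl_densities.csv') as f:
        for row in csv.reader(f):
            if not row or row[0].startswith('#'):
                continue                          # skip the comment header
            data[row[0]] = [to_float(v) for v in row[1:]]

    print(data['V1'])   # [8.0, 173360.0, 1.24]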