diff --git a/actions/longitudinal-comparisons-gridcells/attributes.yaml b/actions/longitudinal-comparisons-gridcells/attributes.yaml new file mode 100644 index 000000000..2437ece8d --- /dev/null +++ b/actions/longitudinal-comparisons-gridcells/attributes.yaml @@ -0,0 +1,4 @@ +registered: '2019-10-14T09:38:27' +data: + notebook: 20_longitudinal_comparisons_gridcells.ipynb + html: 20_longitudinal_comparisons_gridcells.html diff --git a/actions/longitudinal-comparisons-gridcells/data/20_longitudinal_comparisons_gridcells.html b/actions/longitudinal-comparisons-gridcells/data/20_longitudinal_comparisons_gridcells.html new file mode 100644 index 000000000..2064aa720 --- /dev/null +++ b/actions/longitudinal-comparisons-gridcells/data/20_longitudinal_comparisons_gridcells.html @@ -0,0 +1,16255 @@ + + +
+ +%load_ext autoreload
+%autoreload 2
+
import os
+import pathlib
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib import colors
+import seaborn as sns
+import re
+import shutil
+import pandas as pd
+import scipy.stats
+
+import exdir
+import expipe
+from distutils.dir_util import copy_tree
+import septum_mec
+import spatial_maps as sp
+import head_direction.head as head
+import septum_mec.analysis.data_processing as dp
+import septum_mec.analysis.registration
+from septum_mec.analysis.plotting import violinplot
+
+from spike_statistics.core import permutation_resampling
+
+from tqdm import tqdm_notebook as tqdm
+from tqdm._tqdm_notebook import tqdm_notebook
+tqdm_notebook.pandas()
+
# Open the expipe project and prepare the output directory tree for this
# analysis (figures + statistics live under output/longitudinal-comparisons-gridcells/).
project_path = dp.project_path()
project = expipe.get_project(project_path)
actions = project.actions

output_path = pathlib.Path("output") / "longitudinal-comparisons-gridcells"
(output_path / "statistics").mkdir(exist_ok=True, parents=True)
(output_path / "figures").mkdir(exist_ok=True, parents=True)

# Load the per-session / per-unit identification tables and join them with
# the precomputed firing statistics.  'action' is the session identifier.
statistics_action = actions['calculate-statistics']
identification_action = actions['identify-neurons']
sessions = pd.read_csv(identification_action.data_path('sessions'))
units = pd.read_csv(identification_action.data_path('units'))
session_units = pd.merge(sessions, units, on='action')
statistics_results = pd.read_csv(statistics_action.data_path('results'))
# Left merge keeps units that have no computed statistics (NaN-filled rows).
statistics = pd.merge(session_units, statistics_results, how='left')
statistics.head()
+
# Append stimulus-response statistics; the merge keys are inferred by
# pandas from the shared columns (presumably action/channel_group/unit_name
# — TODO confirm against the results CSV header).
stim_response_action = actions['stimulus-response']
stim_response_results = pd.read_csv(stim_response_action.data_path('results'))

statistics = pd.merge(statistics, stim_response_results, how='left')

print('N cells:',statistics.shape[0])

# 95th-percentile shuffling thresholds, used below to classify grid cells.
shuffling = actions['shuffling']
quantiles_95 = pd.read_csv(shuffling.data_path('quantiles_95'))
quantiles_95.head()
+
# Attach the shuffling thresholds; statistic columns that exist in both
# frames get a "_threshold" suffix (e.g. gridness_threshold).
action_columns = ['action', 'channel_group', 'unit_name']
data = pd.merge(statistics, quantiles_95, on=action_columns, suffixes=("", "_threshold"))

# Spatial specificity: log10 ratio of in-field to out-of-field mean rate.
data['specificity'] = np.log10(data['in_field_mean_rate'] / data['out_field_mean_rate'])

data.head()

data.groupby('stimulated').count()['action']

# Action ids look like '<entity>-<date>-<session>' (e.g. '1849-060319-3');
# extract the date component for grouping sessions by recording day.
data['date'] = data.apply(lambda row: row.action.split('-')[1], axis=1)

# A unit counts as a grid cell when both its gridness and information rate
# exceed the corresponding shuffling thresholds.
query = 'gridness > gridness_threshold and information_rate > information_rate_threshold'
sessions_above_threshold = data.query(query)
print("Number of gridcells", len(sessions_above_threshold))
print("Number of animals", len(sessions_above_threshold.groupby(['entity'])))
+
# Statistics columns of interest (not all are used further down this file).
columns = [
    'average_rate', 'gridness', 'sparsity', 'selectivity', 'information_specificity',
    'max_rate', 'information_rate', 'interspike_interval_cv',
    'in_field_mean_rate', 'out_field_mean_rate',
    'burst_event_ratio', 'specificity', 'speed_score'
]

# "Once a grid cell": keep every session of any unit that passed the
# grid-cell criterion in at least one session.
once_a_gridcell = data[data.unit_id.isin(sessions_above_threshold.unit_id)]

# Session groups; 'i'/'ii' presumably mark the first/second
# baseline-stimulation pair within a day — TODO confirm with data source.
baseline_i = once_a_gridcell.query('baseline and i')
stimulated_11 = once_a_gridcell.query('stimulated and frequency==11 and stim_location=="ms" and i')

baseline_ii = once_a_gridcell.query('baseline and ii')
stimulated_30 = once_a_gridcell.query('stimulated and frequency==30 and stim_location=="ms" and ii')

print("Number of gridcells in baseline i sessions", len(baseline_i))
print("Number of gridcells in stimulated 11Hz ms sessions", len(stimulated_11))

print("Number of gridcells in baseline ii sessions", len(baseline_ii))
print("Number of gridcells in stimulated 30Hz ms sessions", len(stimulated_30))
+
# Analysis parameters.
# BUG FIX: the original lines ended with stray commas
# (`max_speed = 1,`) which silently turned these scalars into 1-tuples —
# any downstream comparison like `speed < max_speed` would raise or
# behave unexpectedly.
max_speed = 1.0   # m/s, only used for speed score
min_speed = 0.02  # m/s, only used for speed score
position_sampling_rate = 100  # Hz, for interpolation
position_low_pass_frequency = 6  # Hz, for low-pass filtering of position

box_size = [1.0, 1.0]  # arena extent in metres
bin_size = 0.02        # rate-map bin size in metres
smoothing_low = 0.03   # rate-map smoothing (used throughout this file)
smoothing_high = 0.06  # alternative smoothing (unused here)
+
# Shared loader for tracking, spike trains and rate maps.  Note that
# max_speed/min_speed are NOT passed here — they are only used for the
# speed score elsewhere.
data_loader = dp.Data(
    position_sampling_rate=position_sampling_rate,
    position_low_pass_frequency=position_low_pass_frequency,
    box_size=box_size, bin_size=bin_size
)
+
neuron_ids = once_a_gridcell.unit_id.unique()

# For every unit tracked across sessions, compare rate maps between three
# session pairs: index 0 = (baseline I, baseline II),
# index 1 = (baseline I, stim I), index 2 = (baseline II, stim II).
results_corr = [[], [], []]       # pixel-wise rate-map correlations per pair
results_gridness = [[], [], []]   # (gridness_1, gridness_2) tuples per pair
results_unit_name = [[], [], []]  # "<action>_<group>_<unit>" id strings per pair
results_unit_id = [[], [], []]    # compact sequential neuron numbers per pair
results_id_map = {}               # unit_id (uuid) -> sequential number (1-based)
nuid = 0
for nid in neuron_ids:
    unit_sessions = once_a_gridcell.query(f'unit_id=="{nid}"')
    base_i = unit_sessions.query("baseline and i")
    base_ii = unit_sessions.query("baseline and ii")
    base = unit_sessions.query("baseline")  # NOTE: unused below
    stim_i = unit_sessions.query("stimulated and i")
    stim_ii = unit_sessions.query("stimulated and ii")
    dfs = [(base_i, base_ii), (base_i, stim_i), (base_ii, stim_ii)]
    for i, pair in enumerate(dfs):
        # zip truncates to the shorter side, so a pair contributes nothing
        # when either session group is empty for this unit.
        for (_, row_1), (_, row_2) in zip(pair[0].iterrows(), pair[1].iterrows()):
            rate_map_1 = data_loader.rate_map(
                row_1['action'], row_1['channel_group'], row_1['unit_name'], smoothing_low)
            rate_map_2 = data_loader.rate_map(
                row_2['action'], row_2['channel_group'], row_2['unit_name'], smoothing_low)
            # Pearson correlation between the flattened rate maps.
            results_corr[i].append(np.corrcoef(rate_map_1.ravel(), rate_map_2.ravel())[0,1])
            results_gridness[i].append((row_1.gridness, row_2.gridness))
            results_unit_name[i].append((
                f'{row_1.action}_{row_1.channel_group}_{row_1.unit_name}',
                f'{row_2.action}_{row_2.channel_group}_{row_2.unit_name}'))
            # Both rows must refer to the same tracked unit.
            assert row_1.unit_id == row_2.unit_id
            uid = row_2.unit_id
            if uid not in results_id_map:
                nuid += 1
                results_id_map[uid] = nuid
            results_unit_id[i].append(results_id_map[uid])
+
def session_id(row):
    """Map a session row to its ordinal slot within a recording day.

    Returns 0 for baseline I, 1 for stimulation I, 2 for baseline II and
    3 for stimulation II; raises ValueError for any other combination of
    the boolean flags.
    """
    if row.i:
        if row.baseline:
            return 0
        if row.stimulated:
            return 1
    elif row.ii:
        if row.baseline:
            return 2
        if row.stimulated:
            return 3
    raise ValueError('what')
+
# `once_a_gridcell` is a filtered slice of `data`; copy it before adding
# a column so the assignment is well-defined (the original triggered
# pandas' SettingWithCopyWarning and could silently fail to write).
once_a_gridcell = once_a_gridcell.copy()
once_a_gridcell['session_id'] = once_a_gridcell.apply(session_id, axis=1)
+
# Figure defaults for the plots below; plt.rc('axes', titlesize=12) is
# folded into the single rcParams update.
plt.rcParams.update({
    'axes.titlesize': 12,
    'font.size': 12,
    'figure.figsize': (6, 4),
    'figure.dpi': 150,
})
+
# Units excluded from the summary plots below (by uuid).
# exclude = [i for i, n in results_id_map.items() if n in [25,41]]
exclude = [
    '751d2de8-faf1-4048-82db-34cbd64a7c1d',
    '9f6eb181-321a-4ef7-8e2d-870bac6ceb37'
]

# One figure per tracked unit: a row per recording date, four columns for
# the session slots (baseline I, stim I, baseline II, stim II) showing
# smoothed rate maps.  Saved as PNG and SVG under output/figures/.
for unit_id, id_num in results_id_map.items():
    # NOTE: rebinds the module-level name `sessions` (the CSV loaded
    # earlier) — harmless here since that frame is not used again.
    sessions = once_a_gridcell.query(f'unit_id=="{unit_id}"')
    n_action = sessions.date.nunique()
    fig, axs = plt.subplots(n_action, 4, sharey=True, sharex=True, figsize=(8, n_action*4))
    sns.despine(left=True, bottom=True)
    fig.suptitle(f'Neuron {id_num}')
    if n_action == 1:
        # subplots() returns a 1-D axes array for a single row; wrap it so
        # the zip below always iterates rows.
        axs = [axs]
    waxs = None  # placeholder for the (disabled) waveform figure below
    for ax, (date, rows) in zip(axs, sessions.groupby('date')):
        entity = rows.iloc[0].entity
        ax[0].set_ylabel(f'{entity}-{date}')
        for _, row in rows.iterrows():
            action_id = row['action']
            channel_id = row['channel_group']
            unit_name = row['unit_name']
            rate_map = data_loader.rate_map(action_id, channel_id, unit_name, smoothing_low)
            idx = row.session_id  # column = session slot (0..3)
            ax[idx].imshow(rate_map)
            # Title shows gridness and peak rate for this session.
            ax[idx].set_title(f'{row.gridness:.2f} {row.max_rate:.2f}')
            ax[idx].set_yticklabels([])
            ax[idx].set_xticklabels([])
    plt.tight_layout()
    fig.savefig(output_path / 'figures' / f'neuron_{id_num}_rate_map.png', bbox_inches='tight')
    fig.savefig(output_path / 'figures' / f'neuron_{id_num}_rate_map.svg', bbox_inches='tight')

    # waveforms (disabled)
#     template = data_loader.template(action_id, channel_id, unit_name)
#     if waxs is None:
#         wfig, waxs = plt.subplots(1, template.data.shape[0], sharey=True, sharex=True)
#     for i, wax in enumerate(waxs):
#         wax.plot(template.data[i,:])
#         if i > 0:
#             ax.set_yticklabels([])
+
from scipy.interpolate import interp1d


# One figure per tracked unit: the animal's trajectory (grey) with spike
# locations (red dots) overlaid, one column per session slot.  Spike
# positions are obtained by linearly interpolating the tracking at spike
# times.
for unit_id, id_num in results_id_map.items():
    sessions = once_a_gridcell.query(f'unit_id=="{unit_id}"')
    n_action = sessions.date.nunique()
    fig, axs = plt.subplots(n_action, 4, sharey=True, sharex=True, figsize=(8, n_action*4))
#     sns.despine(left=True, bottom=True)
    fig.suptitle(f'Neuron {id_num}')
    if n_action == 1:
        axs = [axs]
    waxs = None
    for ax, (date, rows) in zip(axs, sessions.groupby('date')):
        entity = rows.iloc[0].entity
        ax[0].set_ylabel(f'{entity}-{date}')
        for _, row in rows.iterrows():
            action_id = row['action']
            channel_id = row['channel_group']
            unit_name = row['unit_name']
            idx = row.session_id
            x, y, t, speed = map(data_loader.tracking(action_id).get, ['x', 'y', 't', 'v'])
            ax[idx].plot(x, y, 'k', alpha=0.3)
            spike_times = data_loader.spike_train(action_id, channel_id, unit_name)
            # Clip spikes to the tracked interval so interp1d never
            # extrapolates outside [min(t), max(t)].
            spike_times = spike_times[(spike_times > min(t)) & (spike_times < max(t))]
            x_spike = interp1d(t, x)(spike_times)
            y_spike = interp1d(t, y)(spike_times)
            ax[idx].set_xticks([])
            ax[idx].set_yticks([])
            ax[idx].scatter(x_spike, y_spike, marker='.', color=(0.7, 0.2, 0.2), s=1.5)
            ax[idx].set_title(f'{row.session}')
            ax[idx].set_yticklabels([])
            ax[idx].set_xticklabels([])
        for a in ax:
            a.set_aspect(1)  # square axes: arena is 1 m x 1 m
    plt.tight_layout()
    fig.savefig(output_path / 'figures' / f'neuron_{id_num}_spike_map.png', bbox_inches='tight')
    fig.savefig(output_path / 'figures' / f'neuron_{id_num}_spike_map.svg', bbox_inches='tight')
+
# Figure defaults, repeated so this cell can be re-run independently;
# plt.rc('axes', titlesize=12) is folded into the rcParams update.
plt.rcParams.update({
    'axes.titlesize': 12,
    'font.size': 12,
    'figure.figsize': (6, 4),
    'figure.dpi': 150,
})
# Colour per comparison pair (colour-brewer Dark2).
cmap = ['#1b9e77','#d95f02','#7570b3']
+
msize = 9
# FIX: the original rebuilt `[results_id_map[i] for i in exclude]` on
# every inner-loop iteration, shadowing the loop variable `i`.  Hoist the
# loop-invariant lookup into a set once.
excluded_ids = {results_id_map[uid] for uid in exclude}

# Absolute gridness difference per neuron, one colour per comparison pair.
fig = plt.figure()
ticks = []
for i, pairs in enumerate(results_gridness):
    for j, pair in enumerate(pairs):
        if results_unit_id[i][j] in excluded_ids:
            continue
        plt.plot(
            results_unit_id[i][j], abs(np.diff(pair)),
            color=cmap[i], marker='.', ls='none', markersize=msize)
# Faint vertical guide per neuron index.
for l in range(nuid):
    plt.axvline(l, color='k', lw=.1, alpha=.5)

from matplotlib.lines import Line2D

labels = ['Baseline I vs baseline II', 'Baseline I vs stim I', 'Baseline II vs stim II']
# Proxy artists for the legend (the data was plotted point by point).
custom_lines = [
    Line2D([],[], color=cmap[i], marker='.', ls='none', label=label, markersize=msize)
    for i, label in enumerate(labels)
]
plt.ylabel('Absolute difference in gridness')
plt.xlabel('Neuron')
plt.legend(handles=custom_lines, bbox_to_anchor=(1.04,1), borderaxespad=0, frameon=False)
fig.savefig(output_path / 'figures' / 'neuron_gridness.png', bbox_inches='tight')
fig.savefig(output_path / 'figures' / 'neuron_gridness.svg', bbox_inches='tight')
+
# Gridness in the second session of each pair plotted against the
# baseline gridness; the dashed diagonal marks "no change".
fig = plt.figure()
labels = ['Baseline I vs baseline II', 'Baseline I vs stim I', 'Baseline II vs stim II']
# FIX: hoist the loop-invariant excluded-id lookup (originally rebuilt per
# iteration while shadowing the loop variable `i`).
excluded_ids = {results_id_map[uid] for uid in exclude}
for i, pairs in enumerate(results_gridness):
    for j, pair in enumerate(pairs):
        if results_unit_id[i][j] in excluded_ids:
            continue
        plt.plot(*pair, color=cmap[i], marker='.', ls='none', markersize=msize)
# Proxy artists for the legend.
custom_lines = [
    Line2D([],[], color=cmap[i], marker='.', ls='none', label=label, markersize=msize)
    for i, label in enumerate(labels)
]
plt.legend(handles=custom_lines, bbox_to_anchor=(1.04,1), borderaxespad=0, frameon=False)
plt.ylabel('Gridness')
plt.xlabel('Baseline gridness')
lim = [-.7, 1.35]
plt.ylim(lim)
plt.xlim(lim)
plt.plot(lim, lim, '--k', alpha=.5, lw=1)
fig.savefig(output_path / 'figures' / 'baseline_gridness_vs_other.png', bbox_inches='tight')
fig.savefig(output_path / 'figures' / 'baseline_gridness_vs_other.svg', bbox_inches='tight')
+
# "Stick" plot: one line segment per neuron per comparison pair, from the
# first session's gridness to the second's, coloured by the change angle.
fig = plt.figure()
import matplotlib
cNorm = matplotlib.colors.Normalize(vmin=-np.pi/2, vmax=np.pi/2)
scalarMap = plt.cm.ScalarMappable(norm=cNorm, cmap=plt.cm.Blues)

# FIX: hoist the loop-invariant excluded-id lookup (originally rebuilt per
# iteration while shadowing the loop variable `i`).
excluded_ids = {results_id_map[uid] for uid in exclude}
ticks = []
for i, pairs in enumerate(results_gridness):
    for j, pair in enumerate(pairs):
        if results_unit_id[i][j] in excluded_ids:
            continue
        # Slope angle of the segment; 0.9 is the horizontal run used for
        # normalisation (the x-extent of each stick is 0.8).
        angle = float(np.arctan(np.diff(pair) / 0.9))
        color = scalarMap.to_rgba(angle)
        tick = (i, i+.8)
        plt.plot(tick, pair, marker='.', color=color)
        ticks.append(tick)
plt.xticks(
    [t for tick in ticks for t in tick],
    ['Baseline I', 'Baseline II', 'Baseline I', 'Stimulation I', 'Baseline II', 'Stimulation II'],
    rotation=-45, ha='left'
)
plt.ylabel('Gridness')
fig.savefig(output_path / 'figures' / 'stickplot_gridness.png', bbox_inches='tight')
fig.savefig(output_path / 'figures' / 'stickplot_gridness.svg', bbox_inches='tight')
+
# Violin plots of absolute gridness differences, one violin per
# comparison pair.
fig = plt.figure()
ticks = [0,0.6,1.2]

# FIX: hoist the loop-invariant excluded-id lookup (originally rebuilt per
# iteration while shadowing the loop variable `i`).
excluded_ids = {results_id_map[uid] for uid in exclude}
diff_res = [[], [], []]
for i, pairs in enumerate(results_gridness):
    for j, pair in enumerate(pairs):
        if results_unit_id[i][j] in excluded_ids:
            continue
        diff_res[i].append(abs(np.diff(pair)))
violins = plt.violinplot(
    diff_res, ticks, showmedians=True, showextrema=False, points=1000, bw_method=.2)


# Style the median/extrema bars black (only keys actually present).
for category in ['cbars', 'cmins', 'cmaxes', 'cmedians']:
    if category in violins:
        violins[category].set_color(['k', 'k'])
        violins[category].set_linewidth(2.0)

# NOTE: rebinds the module-level name `colors` (imported from matplotlib
# at the top of the file); the value is unused below but kept for
# compatibility with any later cells.
colors = plt.cm.Paired(np.linspace(0,1,12))

for pc, c in zip(violins['bodies'], cmap):
    pc.set_facecolor(c)
    pc.set_edgecolor(c)

plt.xticks(ticks, ['baseline', 'stim i', 'stim ii'])
plt.ylabel('Absolute difference in gridness')

plt.gca().spines['top'].set_visible(False)
plt.gca().spines['right'].set_visible(False)
fig.savefig(output_path / 'figures' / 'violins_gridness_difference.png', bbox_inches='tight')
fig.savefig(output_path / 'figures' / 'violins_gridness_difference.svg', bbox_inches='tight')
+
# Register the outputs with the expipe project: copy everything under
# output/ into the action's data directory, then store this notebook on
# the action.
# NOTE(review): distutils is deprecated (removed in Python 3.12) —
# consider shutil.copytree(..., dirs_exist_ok=True) when upgrading.
action = project.require_action("longitudinal-comparisons-gridcells")

copy_tree(output_path, str(action.data_path()))

septum_mec.analysis.registration.store_notebook(action, "20_longitudinal_comparisons_gridcells.ipynb")
+
+
\n", + " | action | \n", + "baseline | \n", + "entity | \n", + "frequency | \n", + "i | \n", + "ii | \n", + "session | \n", + "stim_location | \n", + "stimulated | \n", + "tag | \n", + "... | \n", + "burst_event_ratio | \n", + "bursty_spike_ratio | \n", + "gridness | \n", + "border_score | \n", + "information_rate | \n", + "information_specificity | \n", + "head_mean_ang | \n", + "head_mean_vec_len | \n", + "spacing | \n", + "orientation | \n", + "
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | \n", + "1849-060319-3 | \n", + "True | \n", + "1849 | \n", + "NaN | \n", + "False | \n", + "True | \n", + "3 | \n", + "NaN | \n", + "False | \n", + "baseline ii | \n", + "... | \n", + "0.397921 | \n", + "0.676486 | \n", + "-0.459487 | \n", + "0.078474 | \n", + "0.965845 | \n", + "0.309723 | \n", + "5.788704 | \n", + "0.043321 | \n", + "0.624971 | \n", + "22.067900 | \n", + "
1 | \n", + "1849-060319-3 | \n", + "True | \n", + "1849 | \n", + "NaN | \n", + "False | \n", + "True | \n", + "3 | \n", + "NaN | \n", + "False | \n", + "baseline ii | \n", + "... | \n", + "0.146481 | \n", + "0.277121 | \n", + "-0.615405 | \n", + "0.311180 | \n", + "0.191375 | \n", + "0.032266 | \n", + "1.821598 | \n", + "0.014624 | \n", + "0.753333 | \n", + "0.000000 | \n", + "
2 | \n", + "1849-060319-3 | \n", + "True | \n", + "1849 | \n", + "NaN | \n", + "False | \n", + "True | \n", + "3 | \n", + "NaN | \n", + "False | \n", + "baseline ii | \n", + "... | \n", + "0.373466 | \n", + "0.658748 | \n", + "-0.527711 | \n", + "0.131660 | \n", + "3.833587 | \n", + "0.336590 | \n", + "4.407614 | \n", + "0.121115 | \n", + "0.542877 | \n", + "27.758541 | \n", + "
3 | \n", + "1849-060319-3 | \n", + "True | \n", + "1849 | \n", + "NaN | \n", + "False | \n", + "True | \n", + "3 | \n", + "NaN | \n", + "False | \n", + "baseline ii | \n", + "... | \n", + "0.097464 | \n", + "0.196189 | \n", + "-0.641543 | \n", + "0.274989 | \n", + "0.153740 | \n", + "0.068626 | \n", + "6.128601 | \n", + "0.099223 | \n", + "0.484916 | \n", + "11.309932 | \n", + "
4 | \n", + "1849-060319-3 | \n", + "True | \n", + "1849 | \n", + "NaN | \n", + "False | \n", + "True | \n", + "3 | \n", + "NaN | \n", + "False | \n", + "baseline ii | \n", + "... | \n", + "0.248036 | \n", + "0.461250 | \n", + "-0.085292 | \n", + "0.198676 | \n", + "0.526720 | \n", + "0.033667 | \n", + "1.602362 | \n", + "0.051825 | \n", + "0.646571 | \n", + "0.000000 | \n", + "
5 rows × 34 columns
\n", + "\n", + " | border_score | \n", + "gridness | \n", + "head_mean_ang | \n", + "head_mean_vec_len | \n", + "information_rate | \n", + "speed_score | \n", + "action | \n", + "channel_group | \n", + "unit_name | \n", + "
---|---|---|---|---|---|---|---|---|---|
0 | \n", + "0.348023 | \n", + "0.275109 | \n", + "3.012689 | \n", + "0.086792 | \n", + "0.707197 | \n", + "0.149071 | \n", + "1833-010719-1 | \n", + "0.0 | \n", + "127.0 | \n", + "
1 | \n", + "0.362380 | \n", + "0.166475 | \n", + "3.133138 | \n", + "0.037271 | \n", + "0.482486 | \n", + "0.132212 | \n", + "1833-010719-1 | \n", + "0.0 | \n", + "161.0 | \n", + "
2 | \n", + "0.367498 | \n", + "0.266865 | \n", + "5.586395 | \n", + "0.182843 | \n", + "0.271188 | \n", + "0.062821 | \n", + "1833-010719-1 | \n", + "0.0 | \n", + "191.0 | \n", + "
3 | \n", + "0.331942 | \n", + "0.312155 | \n", + "5.955767 | \n", + "0.090786 | \n", + "0.354018 | \n", + "0.052009 | \n", + "1833-010719-1 | \n", + "0.0 | \n", + "223.0 | \n", + "
4 | \n", + "0.325842 | \n", + "0.180495 | \n", + "5.262721 | \n", + "0.103584 | \n", + "0.210427 | \n", + "0.094041 | \n", + "1833-010719-1 | \n", + "0.0 | \n", + "225.0 | \n", + "
\n", + " | action | \n", + "baseline | \n", + "entity | \n", + "frequency | \n", + "i | \n", + "ii | \n", + "session | \n", + "stim_location | \n", + "stimulated | \n", + "tag | \n", + "... | \n", + "p_e_peak | \n", + "t_i_peak | \n", + "p_i_peak | \n", + "border_score_threshold | \n", + "gridness_threshold | \n", + "head_mean_ang_threshold | \n", + "head_mean_vec_len_threshold | \n", + "information_rate_threshold | \n", + "speed_score_threshold | \n", + "specificity | \n", + "
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | \n", + "1849-060319-3 | \n", + "True | \n", + "1849 | \n", + "NaN | \n", + "False | \n", + "True | \n", + "3 | \n", + "NaN | \n", + "False | \n", + "baseline ii | \n", + "... | \n", + "NaN | \n", + "NaN | \n", + "NaN | \n", + "0.332548 | \n", + "0.229073 | \n", + "6.029431 | \n", + "0.205362 | \n", + "1.115825 | \n", + "0.066736 | \n", + "0.445206 | \n", + "
1 | \n", + "1849-060319-3 | \n", + "True | \n", + "1849 | \n", + "NaN | \n", + "False | \n", + "True | \n", + "3 | \n", + "NaN | \n", + "False | \n", + "baseline ii | \n", + "... | \n", + "NaN | \n", + "NaN | \n", + "NaN | \n", + "0.354830 | \n", + "0.089333 | \n", + "6.120055 | \n", + "0.073566 | \n", + "0.223237 | \n", + "0.052594 | \n", + "0.097485 | \n", + "
2 | \n", + "1849-060319-3 | \n", + "True | \n", + "1849 | \n", + "NaN | \n", + "False | \n", + "True | \n", + "3 | \n", + "NaN | \n", + "False | \n", + "baseline ii | \n", + "... | \n", + "NaN | \n", + "NaN | \n", + "NaN | \n", + "0.264610 | \n", + "-0.121081 | \n", + "5.759406 | \n", + "0.150827 | \n", + "4.964984 | \n", + "0.027120 | \n", + "0.393687 | \n", + "
3 | \n", + "1849-060319-3 | \n", + "True | \n", + "1849 | \n", + "NaN | \n", + "False | \n", + "True | \n", + "3 | \n", + "NaN | \n", + "False | \n", + "baseline ii | \n", + "... | \n", + "NaN | \n", + "NaN | \n", + "NaN | \n", + "0.344280 | \n", + "0.215829 | \n", + "6.033364 | \n", + "0.110495 | \n", + "0.239996 | \n", + "0.054074 | \n", + "0.262612 | \n", + "
4 | \n", + "1849-060319-3 | \n", + "True | \n", + "1849 | \n", + "NaN | \n", + "False | \n", + "True | \n", + "3 | \n", + "NaN | \n", + "False | \n", + "baseline ii | \n", + "... | \n", + "NaN | \n", + "NaN | \n", + "NaN | \n", + "0.342799 | \n", + "0.218967 | \n", + "5.768170 | \n", + "0.054762 | \n", + "0.524990 | \n", + "0.144702 | \n", + "0.133677 | \n", + "
5 rows × 45 columns
\n", + "