%load_ext autoreload
%autoreload 2
import os
import expipe
import pathlib
import numpy as np
import spatial_maps.stats as stats
import septum_mec
import septum_mec.analysis.data_processing as dp
import septum_mec.analysis.registration
import head_direction.head as head
import spatial_maps as sp
import speed_cells.speed as spd
import re
import joblib
import multiprocessing
import shutil
import psutil
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
import seaborn as sns
from distutils.dir_util import copy_tree
from neo import SpikeTrain
import scipy
from tqdm.notebook import tqdm
tqdm.pandas()
from spike_statistics.core import permutation_resampling
from spikewaveform.core import calculate_waveform_features_from_template, cluster_waveform_features
from septum_mec.analysis.plotting import violinplot
%matplotlib inline
plt.rc('axes', titlesize=12)
plt.rcParams.update({
    'font.size': 12,
    'figure.figsize': (6, 4),
    'figure.dpi': 150
})
output_path = pathlib.Path("output") / "stimulus-response"
output_path.mkdir(exist_ok=True, parents=True)
(output_path / "statistics").mkdir(exist_ok=True)
(output_path / "figures").mkdir(exist_ok=True)
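# Load the expipe project and the result tables from the preceding actions
# (unit identification, stimulus response, shuffling, statistics, waveform
# analysis) and merge them into one row per unit per recording session.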
data_loader = dp.Data()
actions = data_loader.actions
project = data_loader.project
identification_action = actions['identify-neurons']
sessions = pd.read_csv(identification_action.data_path('sessions'))
units = pd.read_csv(identification_action.data_path('units'))
session_units = pd.merge(sessions, units, on='action')
stim_action = actions['stimulus-response']
stim_results = pd.read_csv(stim_action.data_path('results'))
# stim_results has stale unit ids but is correct on (action, unit_name, channel_group),
# so drop unit_id and merge on those columns instead
stim_results = stim_results.drop('unit_id', axis=1)
statistics_action = actions['calculate-statistics']
shuffling = actions['shuffling']
statistics_results = pd.read_csv(statistics_action.data_path('results'))
statistics_results = session_units.merge(statistics_results, how='left')
quantiles_95 = pd.read_csv(shuffling.data_path('quantiles_95'))
action_columns = ['action', 'channel_group', 'unit_name']
data = pd.merge(statistics_results, quantiles_95, on=action_columns, suffixes=("", "_threshold"))
data['unit_day'] = data.apply(lambda x: str(x.unit_idnum) + '_' + x.action.split('-')[1], axis=1)
data = data.merge(stim_results, how='left')
waveform_action = actions['waveform-analysis']
waveform_results = pd.read_csv(waveform_action.data_path('results')).drop('template', axis=1)
data = data.merge(waveform_results, how='left')
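# Plot settings: colors, legend labels and dataframe queries for the 11 Hz
# and 30 Hz stimulation sessions.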
colors = ['#d95f02','#e7298a']
labels = ['11 Hz', '30 Hz']
queries = ['frequency==11', 'frequency==30']
data.bs = data.bs.astype(bool)
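# A unit counts as a grid cell when both its gridness and its spatial
# information rate exceed the corresponding 95th-percentile shuffling
# thresholds.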
grid_query = 'gridness > gridness_threshold and information_rate > information_rate_threshold'
gridcell_sessions = data.query(grid_query)
print("Number of gridcells", len(gridcell_sessions))
# print("Number of animals", len(gridcell_sessions.groupby(['entity'])))
data['gridcell'] = data.index.isin(gridcell_sessions.index)
data.query('baseline and gridcell')
density = True
cumulative = True
histtype = 'step'
lw = 2
bins = {
    't_i_peak': None,
    't_e_peak': None,
    'p_i_peak': None,
    'p_e_peak': None,
}
xlabel = {
    't_i_peak': 's',
    't_e_peak': 's',
    'p_i_peak': 'prob',
    'p_e_peak': 'prob',
}
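# Cumulative histograms of stimulus-response peak times (t_e_peak, t_i_peak)
# and peak probabilities (p_e_peak, p_i_peak) for grid cells and 'not bs'
# units, compared between the two stimulation frequencies. bins=None falls
# back to matplotlib's default binning.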
for cell_type in ['gridcell', 'not bs']:
    for key in bins:
        fig = plt.figure(figsize=(3.5, 2.2))
        plt.suptitle(key + ' ' + cell_type)
        legend_lines = []
        for color, query, label in zip(colors, queries, labels):
            data.query(query + ' and ' + cell_type)[key].hist(
                bins=bins[key], density=density, cumulative=cumulative, lw=lw,
                histtype=histtype, color=color)
            legend_lines.append(matplotlib.lines.Line2D(
                [0], [0], color=color, lw=lw, label=label))
        plt.xlabel(xlabel[key])
        plt.legend(
            handles=legend_lines,
            bbox_to_anchor=(1.04, 1), borderaxespad=0, frameon=False)
        plt.tight_layout()
        plt.grid(False)
        # plt.xlim(-0.05, bins[key].max() - bins[key].max()*0.02)
        sns.despine()
        figname = f'histogram-{key}-{cell_type}'.replace(' ', '-')
        fig.savefig(
            output_path / 'figures' / f'{figname}.png',
            bbox_inches='tight', transparent=True)
        fig.savefig(
            output_path / 'figures' / f'{figname}.svg',
            bbox_inches='tight', transparent=True)
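# Peri-stimulus response curves: load the PSTH matrix and its time axis
# (presumably written by an earlier step of this analysis) and plot
# bootstrapped mean responses per group and stimulation frequency
# (GC = grid cells, NS = 'not bs' units).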
from septum_mec.analysis.plotting import plot_bootstrap_timeseries
psth = pd.read_feather(output_path / 'data' / 'psth.feather')
times = pd.read_feather(output_path / 'data' / 'times.feather')
times = times.T.iloc[0].values
cs = ['#d95f02', '#e7298a', '#993404', '#980043']
lb = ['GC 11 Hz', 'GC 30 Hz', 'NS 11 Hz', 'NS 30 Hz']
fig, axs = plt.subplots(1, 2, sharex=True, sharey=True, figsize=(5,2))
ii = 0
for cell_type, ls in zip(['gridcell', 'not bs'], ['-', '--']):
    for i, (ax, query) in enumerate(zip(axs.ravel(), queries)):
        selection = [
            f'{r.action}_{r.channel_group}_{r.unit_name}'
            for _, r in data.query(query + ' and ' + cell_type).iterrows()]
        values = psth.loc[:, selection].dropna(axis=1).to_numpy()
        plot_bootstrap_timeseries(times, values, ax=ax, lw=2, label=lb[ii], color=cs[ii], ls=ls)
        # ax.set_title(titles[i])
        ax.set_xlabel('Time (s)')
        ax.legend(frameon=False)
        ii += 1
axs[0].set_ylabel('Probability density')
sns.despine()
plt.xlim(0, 0.029)
figname = 'response-probability'
fig.savefig(
    output_path / 'figures' / f'{figname}.png',
    bbox_inches='tight', transparent=True)
fig.savefig(
    output_path / 'figures' / f'{figname}.svg',
    bbox_inches='tight', transparent=True)
action = project.require_action("stimulus-response")
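# Presumably, output_path is then copied into this action's data directory
# and the notebook registered for provenance (hence the copy_tree and
# septum_mec.analysis.registration imports above), e.g.:
# copy_tree(output_path, str(action.data_path()))  # assumed expipe usage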