%load_ext autoreload
%autoreload 2
import os
import expipe
import pathlib
import numpy as np
import spatial_maps.stats as stats
import septum_mec
import septum_mec.analysis.data_processing as dp
import septum_mec.analysis.registration
import head_direction.head as head
import spatial_maps as sp
import speed_cells.speed as spd
import re
import joblib
import multiprocessing
import shutil
import psutil
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
from neo import SpikeTrain
import scipy.stats
import seaborn as sns
from tqdm.notebook import tqdm_notebook as tqdm
tqdm.pandas()
from spike_statistics.core import permutation_resampling
from spikewaveform.core import calculate_waveform_features_from_template, cluster_waveform_features
from septum_mec.analysis.plotting import violinplot, despine
%matplotlib inline
plt.rc('axes', titlesize=12)
plt.rcParams.update({
'font.size': 12,
'figure.figsize': (6, 4),
'figure.dpi': 150
})
output_path = pathlib.Path("output") / "stimulus-lfp-response-mec"
(output_path / "statistics").mkdir(exist_ok=True, parents=True)
(output_path / "figures").mkdir(exist_ok=True, parents=True)
output_path.mkdir(exist_ok=True)
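# Load the expipe project and its registered actions through the project-specific data loader.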
data_loader = dp.Data()
actions = data_loader.actions
project = data_loader.project
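# Curated session list from the identify-neurons action, merged with the precomputed
# LFP response metrics from the stimulus-lfp-response action.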
identify_neurons = actions['identify-neurons']
sessions = pd.read_csv(identify_neurons.data_path('sessions'))
lfp_action = actions['stimulus-lfp-response']
lfp_results = pd.read_csv(lfp_action.data_path('results'))
lfp_results = pd.merge(sessions, lfp_results, how='left')
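# Label each session by recording side: channel groups 0-3 versus 4-7 (presumably the two hemispheres).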
def action_group(row):
a = int(row.channel_group in [0,1,2,3])
return f'{row.action}-{a}'
lfp_results['action_side_a'] = lfp_results.apply(action_group, axis=1)
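# Normalise the stimulation peak power by theta energy and drop sessions where stim_location
# is 'ms' (presumably medial septum stimulation), keeping the MEC-stimulation sessions this notebook targets.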
lfp_results['stim_strength'] = lfp_results['stim_p_max'] / lfp_results['theta_energy']
lfp_results = lfp_results.query('stim_location!="ms"')
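# For each hemisphere keep only the channel group with the strongest stimulus response and best signal-to-noise.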
lfp_results_hemisphere = lfp_results.sort_values(
by=['action_side_a', 'stim_strength', 'signal_to_noise'], ascending=[True, False, False]
).drop_duplicates(subset='action_side_a', keep='first')
lfp_results_hemisphere.loc[:,['action_side_a','channel_group', 'signal_to_noise', 'stim_strength']].head()
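# Colours, labels, and dataframe queries for the four conditions: the two baselines and the 11 Hz / 30 Hz stimulation sessions.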
colors = ['#1b9e77','#d95f02','#7570b3','#e7298a']
labels = ['Baseline I', '11 Hz', 'Baseline II', '30 Hz']
queries = ['baseline and Hz11', 'frequency==11', 'baseline and Hz30', 'frequency==30']
density = True
cumulative = True
histtype = 'step'
lw = 2
bins = {
'theta_energy': np.arange(0, .7, .03),
'theta_peak': np.arange(0, .7, .03),
'theta_freq': np.arange(4, 10, .5),
'theta_half_width': np.arange(0, 15, .5)
}
xlabel = {
'theta_energy': 'Theta energy (dB)',
'theta_peak': 'Peak PSD (dB/Hz)',
    'theta_freq': 'Theta frequency (Hz)',
    'theta_half_width': 'Theta half width (Hz)',
}
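# Cumulative histograms of the theta-band measures per condition; the values are also
# collected in the results dict for the statistics further down.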
results = {}
for key in bins:
results[key] = pd.DataFrame()
fig = plt.figure(figsize=(3.5,2))
plt.suptitle(key)
legend_lines = []
for color, query, label in zip(colors, queries, labels):
values = lfp_results_hemisphere.query(query)[key]
results[key] = pd.concat([results[key], values.rename(label).reset_index(drop=True)], axis=1)
values.hist(
bins=bins[key], density=density, cumulative=cumulative, lw=lw,
histtype=histtype, color=color)
legend_lines.append(matplotlib.lines.Line2D([0], [0], color=color, lw=lw, label=label))
plt.legend(
handles=legend_lines,
bbox_to_anchor=(1.04,1), borderaxespad=0, frameon=False)
plt.tight_layout()
plt.grid(False)
plt.xlim(-0.05, bins[key].max() - bins[key].max()*0.025)
despine()
plt.xlabel(xlabel[key])
figname = f'lfp-psd-histogram-{key}'
fig.savefig(
output_path / 'figures' / f'{figname}.png',
bbox_inches='tight', transparent=True)
fig.savefig(
output_path / 'figures' / f'{figname}.svg',
bbox_inches='tight', transparent=True)
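# Repeat the cumulative-histogram analysis for the stimulation-band measures, using the
# stimulation sessions only (11 Hz and 30 Hz).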
density = True
cumulative = True
histtype = 'step'
lw = 2
bins = {
'stim_energy': np.arange(0, .7, .01),
'stim_half_width': np.arange(0, 10, .5),
'stim_p_max': np.arange(0, 4, .01),
'stim_strength': np.arange(0, 160, 1)
}
xlabel = {
'stim_energy': 'Energy (dB)',
    'stim_half_width': 'Half width (Hz)',
'stim_p_max': 'Peak PSD (dB/Hz)',
'stim_strength': 'Ratio',
}
for key in bins:
results[key] = pd.DataFrame()
fig = plt.figure(figsize=(3.2,2))
plt.suptitle(key)
legend_lines = []
for color, query, label in zip(colors[1::2], queries[1::2], labels[1::2]):
values = lfp_results_hemisphere.query(query)[key]
results[key] = pd.concat([results[key], values.rename(label).reset_index(drop=True)], axis=1)
values.hist(
bins=bins[key], density=density, cumulative=cumulative, lw=lw,
histtype=histtype, color=color)
legend_lines.append(matplotlib.lines.Line2D([0], [0], color=color, lw=lw, label=label))
plt.legend(
handles=legend_lines,
bbox_to_anchor=(1.04,1), borderaxespad=0, frameon=False)
plt.tight_layout()
plt.grid(False)
plt.xlim(-0.05, bins[key].max() - bins[key].max()*0.02)
despine()
plt.xlabel(xlabel[key])
figname = f'lfp-psd-histogram-{key}'
fig.savefig(
output_path / 'figures' / f'{figname}.png',
bbox_inches='tight', transparent=True)
fig.savefig(
output_path / 'figures' / f'{figname}.svg',
bbox_inches='tight', transparent=True)
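# Statistics helpers: mean ± SEM with sample size, Mann-Whitney U test, and a
# permutation-resampling test on the median.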
def summarize(data):
return "{:.2f} ± {:.2f} ({})".format(data.mean(), data.sem(), sum(~np.isnan(data)))
def MWU(df, keys):
'''
Mann Whitney U
'''
Uvalue, pvalue = scipy.stats.mannwhitneyu(
df[keys[0]].dropna(),
df[keys[1]].dropna(),
alternative='two-sided')
return "{:.2f}, {:.3f}".format(Uvalue, pvalue)
def PRS(df, keys):
'''
Permutation ReSampling
'''
pvalue, observed_diff, diffs = permutation_resampling(
df[keys[0]].dropna(),
df[keys[1]].dropna(), statistic=np.median)
return "{:.2f}, {:.3f}".format(observed_diff, pvalue)
def rename(name):
return name.replace("_field", "-field").replace("_", " ").capitalize()
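# Assemble the summary table: one column per measure, with the pairwise test results appended as extra rows.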
stat = pd.DataFrame()
for key, df in results.items():
Key = rename(key)
stat[Key] = df.agg(summarize)
for i, c1 in enumerate(df.columns):
for c2 in df.columns[i+1:]:
stat.loc[f'MWU {c1} {c2}', Key] = MWU(df, [c1, c2])
stat.loc[f'PRS {c1} {c2}', Key] = PRS(df, [c1, c2])
stat
stat.to_latex(output_path / "statistics" / "statistics.tex")
stat.to_csv(output_path / "statistics" / "statistics.csv")
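# Load the precomputed power spectral densities and the frequency axis written by the stimulus-lfp-response action.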
psd = pd.read_feather(pathlib.Path("output") / "stimulus-lfp-response" / 'data' / 'psd.feather')
freqs = pd.read_feather(pathlib.Path("output") / "stimulus-lfp-response" / 'data' / 'freqs.feather')
from septum_mec.analysis.plotting import plot_bootstrap_timeseries
freq = freqs.T.iloc[0].values
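# Keep frequencies below 49 Hz, presumably to stay clear of 50 Hz line noise.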
mask = (freq < 49)
fig, axs = plt.subplots(1, 2, sharex=True, sharey=True, figsize=(5,2))
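# Repeating each axis maps the four conditions onto two panels, so each baseline shares a panel with its stimulation condition.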
axs = axs.repeat(2)
for i, (ax, query) in enumerate(zip(axs.ravel(), queries)):
selection = [
f'{r.action}_{r.channel_group}'
for i, r in lfp_results_hemisphere.query(query).iterrows()]
values = psd.loc[mask, selection].to_numpy()
values = 10 * np.log10(values)
plot_bootstrap_timeseries(freq[mask], values, ax=ax, lw=1, label=labels[i], color=colors[i])
    ax.set_xlabel('Frequency (Hz)')
ax.legend(frameon=False)
axs[0].set_ylabel('PSD (dB/Hz)')
axs[0].set_ylim(-31, 1)
despine()
figname = 'lfp-psd'
fig.savefig(
output_path / 'figures' / f'{figname}.png',
bbox_inches='tight', transparent=True)
fig.savefig(
output_path / 'figures' / f'{figname}.svg',
bbox_inches='tight', transparent=True)
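# Register the output directory as an expipe action and store this notebook alongside the data.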
action = project.require_action("stimulus-lfp-response-mec")
shutil.copytree(output_path, str(action.data_path()), dirs_exist_ok=True)
septum_mec.analysis.registration.store_notebook(action, "20_stimulus-lfp-response-mec.ipynb")