%load_ext autoreload
%autoreload 2
import os
import expipe
import pathlib
import numpy as np
import spatial_maps.stats as stats
import septum_mec.analysis.data_processing as dp
from septum_mec.analysis.registration import store_notebook
import head_direction.head as head
import spatial_maps as sp
import pnnmec.registration
import speed_cells.speed as spd
import re
import joblib
import multiprocessing
import shutil
import psutil
import pandas as pd
import matplotlib.pyplot as plt
import pnnmec
import scipy.ndimage.measurements
import quantities as pq
import exdir
from tqdm import tqdm_notebook as tqdm
from septum_mec.analysis.trackunitmulticomparison import TrackMultipleSessions
import networkx as nx
from nxpd import draw
from datetime import timedelta
%matplotlib inline
project_path = dp.project_path()
project = expipe.get_project(project_path)
actions = project.actions
identify_neurons = project.require_action('identify-neurons')
actions['1833-010719-2'].attributes
actions['1833-010719-1'].attributes
data_loader = dp.Data()
sessions = []
for action in actions.values():
    if action.type != 'Recording':
        continue
    action_data_path = pathlib.Path(action.data_path('main'))
    processing = exdir.File(action_data_path)['processing']
    if 'electrophysiology' not in processing:
        continue
    elphys = processing['electrophysiology']
    if 'spikesorting' not in elphys:
        continue
    tags = [t.lower() for t in action.tags]
    freq = np.nan
    stimulated = False
    control = False
    stim_times = data_loader.stim_times(action.id)
    if stim_times is not None:
        stimulated = True
        # stimulation frequency estimated from the mean inter-pulse interval
        freq = round(1 / np.mean(np.diff(stim_times)))
    tag = ""
    stim_location = ""
    tag_i = [i for i, t in enumerate(tags) if 'baseline' in t or 'stim' in t]
    if len(tag_i) == 1:
        tag = tags[tag_i[0]]
        if 'stim' in tag:
            stim_location = tag.split('-')[-1]
        elif 'baseline' in tag:
            control = True
    sessions.append({
        'tag': tag,
        'action': action.id,
        'stimulated': stimulated,
        'control': control,
        'frequency': freq,
        'session': int(action.id.split('-')[-1]),
        'stim_location': stim_location,
        'entity': int(action.entities[0]),
    })
sessions = pd.DataFrame(sessions)
sessions.query('stimulated and not control')
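# Quick illustration (made-up numbers, not project data) of the frequency estimate
# used above: a pulse train with a constant 0.1 s inter-pulse interval gives
# round(1 / mean(diff)) = 10 Hz.
_example_stim_times = np.arange(0, 10, 0.1)
round(1 / np.mean(np.diff(_example_stim_times)))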
output = pathlib.Path('output/identify_neurons_weighted')
output.mkdir(parents=True, exist_ok=True)
max_dissimilarity = .035
for entity in sessions.entity.unique():
    unit_matching = TrackMultipleSessions(
        actions, list(sessions.query(f'entity=={entity}').action),
        progress_bar=tqdm, verbose=False, data_path=output / f'{entity}-graphs'
    )
    unit_matching.do_matching()
    unit_matching.make_graphs_from_matches()
    # save graphs with all dissimilarities for later use
    unit_matching.save_graphs()
    # cut off edges with dissimilarity above the threshold
    unit_matching.threshold_dissimilarity(max_dissimilarity)
    unit_matching.remove_edges_with_duplicate_actions()
    unit_matching.identify_units()
    units = []
    for ch, group in unit_matching.identified_units.items():
        for unit_id, val in group.items():
            for action_id, orig_unit_ids in val['original_unit_ids'].items():
                units.extend([
                    {
                        'unit_name': name,
                        'unit_id': unit_id,
                        'action_id': action_id,
                        'channel_group': ch,
                        'max_dissimilarity': max_dissimilarity
                    }
                    for name in orig_unit_ids])
    pd.DataFrame(units).to_csv(output / f'{entity}-units.csv', index=False)
sessions.to_csv(output / 'sessions.csv', index=False)
unique_units = pd.concat([
    pd.read_csv(p)
    for p in output.iterdir()
    if p.name.endswith('-units.csv')])  # per-entity tables only, not unique_units.csv
unique_units.to_csv(output / 'unique_units.csv', index=False)
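# Illustrative sanity check: after matching, each original unit (action, channel
# group, unit name) should appear under exactly one identified unit id.
duplicates = unique_units.duplicated(subset=['action_id', 'channel_group', 'unit_name'])
print(f'duplicate unit assignments: {duplicates.sum()}')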
unit_comp = TrackMultipleSessions(actions, data_path=output / '1833-graphs')
unit_comp.load_graphs()
# unit_comp._compute_timedelta()
# unit_comp.save_graphs()
unit_comp.threshold_dissimilarity(0.08)
# unit_comp.threshold_timedelta(timedelta(days=10))
unit_comp.remove_edges_with_duplicate_actions()
unit_comp.identify_units()
unit_comp.plot_matches('template', chan_group=6, step_color=False)
plt.tight_layout()
[d['weight'] for _,_, d in unit_comp.graphs[6].edges(data=True)]
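# Illustrative follow-up (not part of the pipeline): histogram the remaining edge
# weights (waveform dissimilarities) in channel group 6 and mark the matching cutoff
# max_dissimilarity used above. Edges above 0.08 were already removed by
# threshold_dissimilarity in the cells above.
weights = [d['weight'] for _, _, d in unit_comp.graphs[6].edges(data=True)]
plt.figure()
plt.hist(weights, bins=30)
plt.axvline(max_dissimilarity, color='k', linestyle='--', label='max_dissimilarity')
plt.xlabel('dissimilarity')
plt.ylabel('edge count')
plt.legend()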
cmp = TrackMultipleSessions(actions)
from septum_mec.analysis.track_units_tools import plot_waveform, dissimilarity, dissimilarity_weighted
from matplotlib import gridspec
fig = plt.figure(figsize=(16, 9))
gs = gridspec.GridSpec(1, 1)
wf1 = cmp.load_waveforms('1833-050619-3', 143, 6)
wf2 = cmp.load_waveforms('1833-200619-3', 126, 6)
axs = plot_waveform(wf1, fig, gs[0])
plot_waveform(wf2, fig, gs[0], axs=axs)
d00 = dissimilarity(wf1.mean(), wf2.mean())
d00
d10 = dissimilarity_weighted(wf1, wf2)
d10
fig = plt.figure(figsize=(16, 9))
gs = gridspec.GridSpec(1, 1)
wf3 = cmp.load_waveforms('1833-050619-3', 143, 6)
wf4 = cmp.load_waveforms('1833-060619-1', 170, 6)
axs = plot_waveform(wf3, fig, gs[0])
plot_waveform(wf4, fig, gs[0], axs=axs)
d01 = dissimilarity(wf3.mean(), wf4.mean())
d01
d11 = dissimilarity_weighted(wf3, wf4)
d11
d00 / d01
d10 / d11
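# Illustrative only: a simple scale-normalized L1 distance between mean waveforms,
# to build intuition for what a waveform "dissimilarity" measures. The pipeline uses
# dissimilarity / dissimilarity_weighted from septum_mec.analysis.track_units_tools,
# whose definitions may differ from this sketch.
def simple_dissimilarity(template_a, template_b):
    a = np.asarray(template_a, dtype=float).ravel()
    b = np.asarray(template_b, dtype=float).ravel()
    a = a / np.max(np.abs(a))  # normalize away amplitude differences
    b = b / np.max(np.abs(b))
    return np.mean(np.abs(a - b))

simple_dissimilarity(wf1.mean(), wf2.mean()), simple_dissimilarity(wf3.mean(), wf4.mean())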
t = abs(actions['1833-260619-2'].datetime - actions['1833-050619-3'].datetime)
t > timedelta(days=15)
identify_neurons.data['sessions'] = 'sessions.csv'
identify_neurons.data['units'] = 'units.csv'
sessions.to_csv(identify_neurons.data_path('sessions'), index=False)
unique_units.to_csv(identify_neurons.data_path('units'), index=False)
store_notebook(identify_neurons, "00-identify-neurons.ipynb")