%load_ext autoreload
%autoreload 2
import os
import expipe
import pathlib
import numpy as np
import spatial_maps.stats as stats
import septum_mec.analysis.data_processing as dp
import head_direction.head as head
import spatial_maps as sp
import septum_mec.analysis.registration
import speed_cells.speed as spd
import septum_mec.analysis.spikes as spikes
import re
import joblib
import multiprocessing
import shutil
import psutil
import pandas as pd
import matplotlib.pyplot as plt
import septum_mec
import scipy.ndimage.measurements
from distutils.dir_util import copy_tree
from spike_statistics.core import theta_mod_idx
from tqdm import tqdm_notebook as tqdm
from tqdm._tqdm_notebook import tqdm_notebook
tqdm_notebook.pandas()
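# Analysis parameters; these are also stored in the expipe 'parameters' module at the end of the notebook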
max_speed = 1  # m/s, only used for speed score
min_speed = 0.02  # m/s, only used for speed score
position_sampling_rate = 100 # for interpolation
position_low_pass_frequency = 6 # for low pass filtering of position
box_size = [1.0, 1.0]
bin_size = 0.02
smoothing_low = 0.03
smoothing_high = 0.06
stim_mask = True
baseline_duration = 600
project_path = dp.project_path()
project = expipe.get_project(project_path)
actions = project.actions
identify_neurons = actions['identify-neurons']
units = pd.read_csv(identify_neurons.data_path('units'))
units.head()
%matplotlib inline
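# Number of identified units per action (session)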
units.groupby('action').count().unit_name.hist()
data_loader = dp.Data(
    position_sampling_rate=position_sampling_rate,
    position_low_pass_frequency=position_low_pass_frequency,
    box_size=box_size, bin_size=bin_size, stim_mask=stim_mask, baseline_duration=baseline_duration
)
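# dp.Data provides per-action tracking, occupancy maps, occupancy probability and spike trains,
# which are all used by process() below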
first_row = units[units['action'] == '1849-060319-3'].iloc[0]
#first_row = sessions.iloc[50]
def process(row):
    action_id = row['action']
    channel_id = row['channel_group']
    unit_id = row['unit_name']

    # common values for all units == faster calculations
    x, y, t, speed = map(data_loader.tracking(action_id).get, ['x', 'y', 't', 'v'])
    # ang, ang_t = map(data_loader.head_direction(action_id).get, ['a', 't'])
    occupancy_map = data_loader.occupancy(action_id)
    xbins, ybins = data_loader.spatial_bins
    box_size_, bin_size_ = data_loader.box_size_, data_loader.bin_size_
    prob_dist = data_loader.prob_dist(action_id)

    # smooth_low_occupancy_map = sp.maps.smooth_map(
    #     occupancy_map, bin_size=bin_size_, smoothing=smoothing_low)
    smooth_high_occupancy_map = sp.maps.smooth_map(
        occupancy_map, bin_size=bin_size_, smoothing=smoothing_high)

    spike_times = data_loader.spike_train(action_id, channel_id, unit_id)
    if len(spike_times) == 0:
        # no spikes for this unit: return NaN so the merge below still keeps the row
        result = pd.Series({
            'spatial_average_rate': np.nan
        })
        return result

    # common
    spike_map = sp.maps._spike_map(x, y, t, spike_times, xbins, ybins)
    # smooth_low_spike_map = sp.maps.smooth_map(spike_map, bin_size=bin_size_, smoothing=smoothing_low)
    smooth_high_spike_map = sp.maps.smooth_map(spike_map, bin_size=bin_size_, smoothing=smoothing_high)
    # smooth_low_rate_map = smooth_low_spike_map / smooth_low_occupancy_map
    smooth_high_rate_map = smooth_high_spike_map / smooth_high_occupancy_map

    # occupancy-weighted mean firing rate: sum over bins of rate(x, y) * P(x, y),
    # treating unvisited (NaN) bins as zero
    tmp_rate_map = smooth_high_rate_map.copy()
    tmp_rate_map[np.isnan(tmp_rate_map)] = 0
    avg_rate = np.sum(np.ravel(tmp_rate_map * prob_dist))

    result = pd.Series({
        'spatial_average_rate': avg_rate
    })
    return result
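# Quick check of the pipeline on a single unit before applying it to every row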
process(first_row)
results = units.merge(
    units.progress_apply(process, axis=1),
    left_index=True, right_index=True)
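# Optional sanity check (not part of the original pipeline): distribution of the new statistic
results.spatial_average_rate.hist()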
output_path = pathlib.Path("output") / "calculate-statistics-extra"
output_path.mkdir(parents=True, exist_ok=True)
results.to_csv(output_path / "results.csv", index=False)
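# Register the results, the parameters and the notebook with expipe so the analysis can be traced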
statistics_action = project.require_action("calculate-statistics-extra")
statistics_action.data["results"] = "results.csv"
copy_tree(output_path, str(statistics_action.data_path()))
statistics_action.modules['parameters'] = {
    'max_speed': max_speed,
    'min_speed': min_speed,
    'position_sampling_rate': position_sampling_rate,
    'position_low_pass_frequency': position_low_pass_frequency,
    'box_size': box_size,
    'bin_size': bin_size,
    'smoothing_low': smoothing_low,
    'smoothing_high': smoothing_high,
    'stim_mask': stim_mask,
    'baseline_duration': baseline_duration
}
septum_mec.analysis.registration.store_notebook(statistics_action, "10_calculate_statistics_extra.ipynb")