import numpy as np
import pandas as pd
import scipy.ndimage

# NOTE: the analysis modules used below (sp, stats, spikes, spd, head) and the
# session-level names (data_loader, smoothing_low, smoothing_high, min_speed,
# max_speed, first_row) are assumed to be defined earlier in the notebook,
# e.g. `import spatial_maps as sp`.

def process(row):
action_id = row['action']
channel_id = row['channel_group']
unit_id = row['unit_name']
    # quantities shared by all units in this action -- loaded once for speed
x, y, t, speed = map(data_loader.tracking(action_id).get, ['x', 'y', 't', 'v'])
ang, ang_t = map(data_loader.head_direction(action_id).get, ['a', 't'])
occupancy_map = data_loader.occupancy(action_id)
xbins, ybins = data_loader.spatial_bins
    box_size, bin_size = data_loader.box_size_, data_loader.bin_size_
    prob_dist = data_loader.prob_dist(action_id)
    smooth_low_occupancy_map = sp.maps.smooth_map(occupancy_map, bin_size=bin_size, smoothing=smoothing_low)
    smooth_high_occupancy_map = sp.maps.smooth_map(occupancy_map, bin_size=bin_size, smoothing=smoothing_high)
spike_times = data_loader.spike_train(action_id, channel_id, unit_id)
    # spike map, shared by both smoothing levels
    spike_map = sp.maps._spike_map(x, y, t, spike_times, xbins, ybins)
    smooth_low_spike_map = sp.maps.smooth_map(spike_map, bin_size=bin_size, smoothing=smoothing_low)
    smooth_high_spike_map = sp.maps.smooth_map(spike_map, bin_size=bin_size, smoothing=smoothing_high)
smooth_low_rate_map = smooth_low_spike_map / smooth_low_occupancy_map
smooth_high_rate_map = smooth_high_spike_map / smooth_high_occupancy_map
    # find fields with the Laplace method
    fields_laplace = sp.separate_fields_by_laplace(smooth_high_rate_map)
    fields = fields_laplace.copy()  # pruned below against the Ismakov method
    # area (in number of bins) of each labelled field; label 0 is the background
    fields_areas = scipy.ndimage.sum(
        np.ones_like(fields), fields, index=np.arange(fields.max() + 1))
    fields_area = fields_areas[fields]
    fields[fields_area < 9.0] = 0  # discard fields smaller than 9 bins
    # find fields with the Ismakov method
    fields_ismakov, radius = sp.separate_fields_by_distance(smooth_high_rate_map)
    fields_ismakov_real = fields_ismakov * bin_size  # peak coordinates in physical units
approved_fields = []
    # keep only the fields detected by both methods
    for point in fields_ismakov:
        field_id = fields[tuple(point)]
        approved_fields.append(field_id)
    for field_id in np.arange(1, fields.max() + 1):
        if field_id not in approved_fields:
            fields[fields == field_id] = 0
    # per-unit firing statistics
average_rate = len(spike_times) / (t.max() - t.min())
max_rate = smooth_low_rate_map.max()
    out_field_mean_rate = smooth_low_rate_map[fields == 0].mean()
    in_field_mean_rate = smooth_low_rate_map[fields != 0].mean()
    max_field_mean_rate = smooth_low_rate_map[fields == 1].mean()  # label 1 assumed to be the strongest field
interspike_interval = np.diff(spike_times)
interspike_interval_cv = interspike_interval.std() / interspike_interval.mean()
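    # grid structure: spacing and orientation from the autocorrelogram of the
    # high-smoothed rate map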
autocorrelogram = sp.autocorrelation(smooth_high_rate_map)
peaks = sp.fields.find_peaks(autocorrelogram)
real_peaks = peaks * bin_size
autocorrelogram_box_size = box_size[0] * autocorrelogram.shape[0] / smooth_high_rate_map.shape[0]
spacing, orientation = sp.spacing_and_orientation(real_peaks, autocorrelogram_box_size)
    orientation *= 180 / np.pi  # radians to degrees
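    # map-based selectivity and information measures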
selectivity = stats.selectivity(smooth_low_rate_map, prob_dist)
sparsity = stats.sparsity(smooth_low_rate_map, prob_dist)
gridness = sp.gridness(smooth_high_rate_map)
border_score = sp.border_score(smooth_high_rate_map, fields_laplace)
information_rate = stats.information_rate(smooth_high_rate_map, prob_dist)
information_spec = stats.information_specificity(smooth_high_rate_map, prob_dist)
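    # burst statistics; threshold is the maximum intra-burst interspike
    # interval (0.01, presumably seconds, i.e. 10 ms)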
single_spikes, bursts, bursty_spikes = spikes.find_bursts(spike_times, threshold=0.01)
burst_event_ratio = np.sum(bursts) / (np.sum(single_spikes) + np.sum(bursts))
bursty_spike_ratio = np.sum(bursty_spikes) / (np.sum(bursty_spikes) + np.sum(single_spikes))
mean_spikes_per_burst = np.sum(bursty_spikes) / np.sum(bursts)
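    # speed modulation of firing rate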
speed_score = spd.speed_correlation(
speed, t, spike_times, min_speed=min_speed, max_speed=max_speed)
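    # head-direction tuning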
ang_bin, ang_rate = head.head_direction_rate(spike_times, ang, ang_t)
head_mean_ang, head_mean_vec_len = head.head_direction_score(ang_bin, ang_rate)
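    # collect all statistics into a named Series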
result = pd.Series({
'average_rate': average_rate,
'speed_score': speed_score,
'out_field_mean_rate': out_field_mean_rate,
'in_field_mean_rate': in_field_mean_rate,
'max_field_mean_rate': max_field_mean_rate,
'max_rate': max_rate,
'sparsity': sparsity,
'selectivity': selectivity,
'interspike_interval_cv': float(interspike_interval_cv),
'burst_event_ratio': burst_event_ratio,
'bursty_spike_ratio': bursty_spike_ratio,
'gridness': gridness,
'border_score': border_score,
'information_rate': information_rate,
'information_specificity': information_spec,
'head_mean_ang': head_mean_ang,
'head_mean_vec_len': head_mean_vec_len,
'spacing': spacing,
'orientation': orientation
})
return result
# run the pipeline on a single example row
process(first_row)
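
# A minimal sketch of batch use, assuming the full set of units sits in a
# pandas DataFrame `units` whose rows carry the 'action', 'channel_group' and
# 'unit_name' columns read at the top of `process`. The name `units` (and the
# output path) are hypothetical, not defined in this snippet:
#
#     results = units.apply(process, axis=1)
#     results.to_csv('unit_statistics.csv')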