diff --git a/actions/lfp_speed_stim/attributes.yaml b/actions/lfp_speed_stim/attributes.yaml new file mode 100644 index 000000000..5e3955fc8 --- /dev/null +++ b/actions/lfp_speed_stim/attributes.yaml @@ -0,0 +1,7 @@ +registered: '2020-08-28T10:17:36' +data: + results: results.exdir + notebook: 20_lfp_speed.ipynb + html: 20_lfp_speed.html + figures: figures + statistics: statistics diff --git a/actions/lfp_speed_stim/data/10_lfp_speed_stim.html b/actions/lfp_speed_stim/data/10_lfp_speed_stim.html new file mode 100644 index 000000000..1509c4622 --- /dev/null +++ b/actions/lfp_speed_stim/data/10_lfp_speed_stim.html @@ -0,0 +1,13614 @@ + + +
+ +%load_ext autoreload
+%autoreload 2
+
import os
+import pathlib
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib import colors
+import seaborn as sns
+import re
+import shutil
+import pandas as pd
+import scipy.stats
+
+import exdir
+import expipe
+from distutils.dir_util import copy_tree
+import septum_mec
+import spatial_maps as sp
+import head_direction.head as head
+import septum_mec.analysis.data_processing as dp
+import septum_mec.analysis.registration
+from septum_mec.analysis.plotting import violinplot, despine
+from spatial_maps.fields import (
+ find_peaks, calculate_field_centers, separate_fields_by_laplace,
+ map_pass_to_unit_circle, calculate_field_centers, distance_to_edge_function,
+ which_field, compute_crossings)
+from phase_precession import cl_corr
+from spike_statistics.core import permutation_resampling
+import matplotlib.mlab as mlab
+import scipy.signal as ss
+from scipy.interpolate import interp1d
+from septum_mec.analysis.plotting import regplot
+from skimage import measure
+from tqdm.notebook import tqdm_notebook as tqdm
+tqdm.pandas()
+
+import pycwt
+
max_speed = 1 # m/s only used for speed score
+min_speed = 0.02 # m/s only used for speed score
+position_sampling_rate = 1000 # for interpolation
+position_low_pass_frequency = 6 # for low pass filtering of position
+
+box_size = [1.0, 1.0]
+bin_size = 0.02
+
+speed_binsize = 0.02
+
+stim_mask = True
+baseline_duration = 600
+
data_loader = dp.Data(
+ position_sampling_rate=position_sampling_rate,
+ position_low_pass_frequency=position_low_pass_frequency,
+ box_size=box_size, bin_size=bin_size,
+ stim_mask=stim_mask, baseline_duration=baseline_duration
+)
+
project_path = dp.project_path()
+project = expipe.get_project(project_path)
+actions = project.actions
+
+output_path = pathlib.Path("output") / "lfp-speed-stim"
+(output_path / "statistics").mkdir(exist_ok=True, parents=True)
+(output_path / "figures").mkdir(exist_ok=True, parents=True)
+
identify_neurons = actions['identify-neurons']
+sessions = pd.read_csv(identify_neurons.data_path('sessions'))
+
channel_groups = []
+for i, row in sessions.iterrows():
+ for ch in range(8):
+ row['channel_group'] = ch
+ channel_groups.append(row.to_dict())
+
sessions = pd.DataFrame(channel_groups)
+
def signaltonoise(a, axis=0, ddof=0):
    """Return mean / std along `axis`, with 0 wherever the std is zero.

    Mirrors the old `scipy.stats.signaltonoise`, which was removed from scipy.
    """
    arr = np.asanyarray(a)
    mean = arr.mean(axis)
    std = arr.std(axis=axis, ddof=ddof)
    # Guard the zero-variance case instead of propagating inf/nan.
    return np.where(std == 0, 0, mean / std)
+
+
def remove_artifacts(anas, spikes=None, width=500, threshold=2, sampling_rate=None, fillval=0):
    """Blank out windows of an analog signal around supra-threshold artifacts.

    Parameters
    ----------
    anas : array-like, shape (n_samples,) or (n_samples, n_channels)
        Analog signal(s). If `sampling_rate` is None, must expose
        `.sampling_rate.magnitude` (e.g. a neo/exdir AnalogSignal).
    spikes : array-like of spike times in seconds, optional
        Spikes that fall inside a removed window are dropped.
    width : int
        Half-width of the removed window, in samples.
    threshold : float
        Absolute amplitude above which a sample counts as an artifact.
    sampling_rate : float, optional
        Sampling rate in Hz; read from `anas` when omitted.
    fillval : float
        Value written into the removed windows.

    Returns
    -------
    (anas, times) when `spikes` is None, else (anas, times, spikes).
    """
    sampling_rate = sampling_rate or anas.sampling_rate.magnitude
    times = np.arange(anas.shape[0]) / sampling_rate
    anas = np.array(anas)
    if anas.ndim == 1:
        anas = np.reshape(anas, (anas.size, 1))
    assert len(times) == anas.shape[0]
    nchan = anas.shape[1]
    if spikes is not None:
        spikes = np.array(spikes)
    for ch in range(nchan):
        idxs, = np.where(abs(anas[:, ch]) > threshold)
        for idx in idxs:
            # BUG FIX: clamp the window start at 0. A negative `idx - width`
            # previously wrapped around, picking t0 from the END of the
            # recording and producing an empty fill slice, so artifacts near
            # the start of the signal were never blanked.
            start = max(idx - width, 0)
            stop = min(idx + width, len(times) - 1)
            if spikes is not None:
                t0 = times[start]
                t1 = times[stop]
                mask = (spikes > t0) & (spikes < t1)
                spikes = spikes[~mask]
            anas[start:idx + width, ch] = fillval
    if spikes is not None:
        spikes = spikes[spikes <= times[-1]]
        return anas, times, spikes
    else:
        return anas, times
+
def find_theta_peak(p, f, f1, f2):
    """Return (frequency, power) of the largest local PSD peak within (f1, f2).

    Parameters
    ----------
    p, f : arrays
        Power spectrum and matching frequency axis.
    f1, f2 : float
        Exclusive band edges in Hz.

    Returns
    -------
    (nan, nan) when the spectrum is all-NaN or no local maximum lies in band.
    """
    if np.all(np.isnan(p)):
        return np.nan, np.nan
    mask = (f > f1) & (f < f2)
    p_m = p[mask]
    f_m = f[mask]
    peaks = find_peaks(p_m)
    # BUG FIX: argmax over an empty peak list raised ValueError; report
    # "no peak" instead so callers can rank channels with NaN-aware logic.
    if len(peaks) == 0:
        return np.nan, np.nan
    idx = np.argmax(p_m[peaks])
    return f_m[peaks[idx]], p_m[peaks[idx]]
+
def zscore(a):
    """Standardize `a` to zero mean and unit standard deviation."""
    mu = a.mean()
    sigma = a.std()
    return (a - mu) / sigma
+
def compute_stim_freq(action_id):
    """Mean stimulation frequency (Hz) of `action_id`, or None for
    sessions without stimulation times."""
    stim_times = data_loader.stim_times(action_id)
    if stim_times is None:
        return None
    intervals = np.diff(np.array(stim_times))
    return 1 / np.mean(intervals)
+
output = exdir.File(output_path / 'results')
+
+mother = pycwt.Morlet(80)
+NFFT = 2056
+
def process(row):
    """Compute speed modulation of theta-band LFP around the stimulation
    frequency for one session/channel-group row and persist it to `output`.

    Reads LFP and tracking via the module-level `data_loader`, picks the
    channel with the strongest theta (6-10 Hz) PSD peak, runs a Morlet
    wavelet transform in a +/-2 Hz band around the stimulation frequency,
    and stores per-sample theta frequency/power, speed-binned means, and
    speed-correlation scores.

    Returns None; results are written as an exdir group named
    '<action>-<channel_group>'.
    """
    name = row['action'] + '-' + str(row['channel_group'])
    # IMPROVEMENT: resume check moved up — previously the expensive LFP
    # load, artifact removal, and per-channel PSDs ran before discovering
    # the entry already existed.
    if name in output:
        return
    stim_freq = compute_stim_freq(row['action'])
    if stim_freq is None:  # not a stimulation session
        return

    # Analyze a narrow band centered on the stimulation frequency.
    flim = [stim_freq - 2, stim_freq + 2]

    lfp = data_loader.lfp(row.action, row.channel_group)
    sample_rate = lfp.sampling_rate.magnitude
    sampling_period = 1 / sample_rate
    x, y, t, speed = map(data_loader.tracking(row.action).get, ['x', 'y', 't', 'v'])
    cleaned_lfp, times = remove_artifacts(lfp)
    # Resample tracking speed onto the LFP time base.
    speed = interp1d(t, speed, bounds_error=False, fill_value='extrapolate')(times)

    # Select the channel with the strongest theta (6-10 Hz) PSD peak.
    peak_amp = {}
    for i, ch in enumerate(cleaned_lfp.T):
        pxx, freqs = mlab.psd(ch, Fs=sample_rate, NFFT=4000)
        f, p = find_theta_peak(pxx, freqs, 6, 10)
        peak_amp[i] = p

    theta_channel = max(peak_amp, key=peak_amp.get)
    signal = zscore(cleaned_lfp[:, theta_channel])

    results = output.require_group(name)
    freqs = np.arange(*flim, .1)
    wave, scales, freqs, coi, fft, fftfreqs = pycwt.cwt(
        signal, sampling_period, freqs=freqs, wavelet=mother)

    power = np.abs(wave) ** 2
    # Rectify the power spectrum by scale, per Liu et al. (2007).
    power /= scales[:, None]

    # Per-sample dominant frequency and mean power across the band.
    theta_freq = freqs[np.argmax(power, axis=0)]
    theta_power = np.mean(power, axis=0)

    # Bin instantaneous theta frequency/power by running speed.
    speed_bins = np.arange(min_speed, max_speed + speed_binsize, speed_binsize)
    ia = np.digitize(speed, bins=speed_bins, right=True)
    mean_freq = np.zeros_like(speed_bins)
    mean_power = np.zeros_like(speed_bins)
    for i in range(len(speed_bins)):
        mean_freq[i] = np.mean(theta_freq[ia == i])
        mean_power[i] = np.mean(theta_power[ia == i])

    # Speed scores: Pearson correlation of speed with theta freq / power.
    freq_score = np.corrcoef(speed, theta_freq)[0, 1]
    power_score = np.corrcoef(speed, theta_power)[0, 1]

    results.attrs = {
        'freq_score': float(freq_score),
        'sample_rate': float(sample_rate),
        'power_score': float(power_score),
        'action': row['action'],
        'channel_group': int(row['channel_group']),
        'max_speed': max_speed,
        'min_speed': min_speed,
        'position_low_pass_frequency': position_low_pass_frequency
    }

    results.create_dataset('wavelet_power', data=power)
    results.create_dataset('wavelet_freqs', data=freqs)
    results.create_dataset('theta_freq', data=theta_freq)
    results.create_dataset('theta_power', data=theta_power)
    results.create_dataset('speed', data=speed)
    results.create_dataset('mean_freq', data=mean_freq)
    results.create_dataset('mean_power', data=mean_power)
    results.create_dataset('speed_bins', data=speed_bins)
+
sessions.progress_apply(process, axis=1);
+
action = project.require_action("lfp_speed_stim")
+
action.data["results"] = "results.exdir"
+copy_tree(output_path, str(action.data_path()))
+
septum_mec.analysis.registration.store_notebook(action, "10_lfp_speed_stim.ipynb")
+
+
%load_ext autoreload
+%autoreload 2
+
import os
+import pathlib
+import numpy as np
+import matplotlib.pyplot as plt
+import seaborn as sns
+import re
+import shutil
+import pandas as pd
+import scipy.stats
+
+import exdir
+import expipe
+from distutils.dir_util import copy_tree
+import septum_mec
+import septum_mec.analysis.data_processing as dp
+import septum_mec.analysis.registration
+from septum_mec.analysis.plotting import despine, plot_bootstrap_timeseries, violinplot, savefig
+from phase_precession import cl_corr
+from spike_statistics.core import permutation_resampling_test
+import matplotlib.mlab as mlab
+import scipy.signal as ss
+from scipy.interpolate import interp1d
+from skimage import measure
+from tqdm.notebook import tqdm_notebook as tqdm
+tqdm.pandas()
+import scipy.signal as ss
+
+
+from tqdm.notebook import tqdm_notebook as tqdm
+tqdm.pandas()
+
+import pycwt
+
plt.rcParams['figure.dpi'] = 150
+figsize_violin = (1.7, 3)
+figsize_speed = (4, 3)
+plt.rc('axes', titlesize=10)
+
project_path = dp.project_path()
+project = expipe.get_project(project_path)
+actions = project.actions
+
+output_path = pathlib.Path("output") / "lfp_speed"
+(output_path / "statistics").mkdir(exist_ok=True, parents=True)
+(output_path / "figures").mkdir(exist_ok=True, parents=True)
+
data_action = actions['lfp_speed']
+output = exdir.File(
+ data_action.data_path('results'),
+ plugins = [exdir.plugins.git_lfs, exdir.plugins.quantities])
+
+ignore = ['wavelet_power', 'wavelet_freqs', 'signal']
+results = []
+for group in output.values():
+ d = group.attrs.to_dict()
+ d.update({k: np.array(v.data) for k, v in group.items() if k not in ignore})
+ results.append(d)
+results = pd.DataFrame(results)
+
results.head()
+
identify_neurons = actions['identify-neurons']
+sessions = pd.read_csv(identify_neurons.data_path('sessions'))
+
results = results.merge(sessions, on='action')
+
results.head()
+
results = results.query('stim_location!="mecl" and stim_location!="mecr"')
+
def action_group(row):
    """Label the hemisphere side of a row: '<action>-1' for channel
    groups 0-3, '<action>-0' otherwise."""
    side = 1 if row.channel_group in [0, 1, 2, 3] else 0
    return f'{row.action}-{side}'
+results['action_side_a'] = results.apply(action_group, axis=1)
+
lfp_results_hemisphere = results.sort_values(
+ by=['action_side_a', 'channel_group'], ascending=[True, False]
+).drop_duplicates(subset='action_side_a', keep='first')
+lfp_results_hemisphere.loc[:,['action_side_a','channel_group', 'min_speed']].head()
+
colors = ['#1b9e77','#d95f02','#7570b3','#e7298a']
+labels = ['Baseline I', '11 Hz', 'Baseline II', '30 Hz']
+# Hz11 means that the baseline session was indeed before an 11 Hz session
+queries = ['baseline and i and Hz11', 'frequency==11', 'baseline and ii and Hz30', 'frequency==30']
+
def make_entity_date_side(row):
    """Collapse 'entity-date-session-side' into 'entity-date-side' by
    dropping the session component."""
    parts = row.action_side_a.split('-')
    parts.pop(2)
    return '-'.join(parts)
+
lfp_results_hemisphere['entity_date_side'] = lfp_results_hemisphere.apply(make_entity_date_side, axis=1)
+
from functools import reduce
+
keys = [
+ 'freq_score',
+ 'power_score',
+ 'speed_bins',
+ 'mean_freq',
+ 'mean_power'
+]
+
+results = {}
+for key in keys:
+ results[key] = list()
+ for query, label in zip(queries, labels):
+ values = lfp_results_hemisphere.query(query).loc[:,['entity_date_side', key]]
+ results[key].append(values.rename({key: label}, axis=1))
+
+for key, val in results.items():
+ df = reduce(lambda left,right: pd.merge(left, right, on='entity_date_side', how='outer'), val)
+ results[key] = df.drop('entity_date_side', axis=1)
+
vss = [
+ ['Baseline I', '11 Hz'],
+ ['Baseline I', 'Baseline II'],
+ ['Baseline II', '30 Hz'],
+ ['11 Hz', '30 Hz'],
+]
+
ylabel = {
+ 'freq_score': 'Frequency score',
+ 'power_score': 'Power score'
+}
+
for stuff in ['freq_score', 'power_score']:
+ for vs in vss:
+ base, stim = results[stuff][vs].dropna().values.T
+ plt.figure(figsize=figsize_violin)
+ plt.ylabel(ylabel[stuff])
+ violinplot(base, stim, colors=[colors[labels.index(l)] for l in vs], xticks=vs)
+ plt.ylim(-0.35, 0.5)
+ plt.yticks([-0.25, 0, 0.25, 0.5])
+ despine()
+ fig_path = output_path / "figures" / f"{stuff}_{' '.join(vs)}".replace(' ', '_')
+ savefig(fig_path)
+
+
def plot_speed(results, stuff, colors, labels, filename=None, show_raw=False, ylim=None):
    """Plot bootstrap mean timeseries of `stuff` versus running speed for
    two conditions.

    Parameters
    ----------
    results : dict of DataFrames
        Keyed by measure name; columns are condition labels.
    stuff : str
        Measure to plot, e.g. 'mean_freq' or 'mean_power'.
    colors, labels : two-element sequences
        Color and column label for each of the two conditions.
    filename : str, optional
        When given, the figure is saved under output_path / 'figures'.
    show_raw : bool
        Also show per-session raw traces in a separate two-panel figure.
    ylim : tuple, optional
        Y-axis limits for the bootstrap figure.
    """
    base, stim = results[stuff][labels].dropna().values.T
    base_bins, stim_bins = results['speed_bins'][labels].dropna().values.T

    # Stack the object-array of per-session vectors into 2-D arrays.
    base = np.array([s for s in base])
    stim = np.array([s for s in stim])

    if show_raw:
        fig, axs = plt.subplots(1, 2, sharey=True, figsize=figsize_speed)

        for b, h in zip(base_bins, base):
            axs[1].plot(b, h)
        axs[1].set_xlim(0.1, 1)
        axs[1].set_title(labels[0])

        for b, h in zip(stim_bins, stim):
            axs[0].plot(b, h)
        axs[0].set_xlim(0.1, 1)
        axs[0].set_title(labels[1])

    fig, ax = plt.subplots(1, 1, figsize=figsize_speed)
    plot_bootstrap_timeseries(base_bins[0], base.T, ax=ax, label=labels[0], color=colors[0])
    plot_bootstrap_timeseries(stim_bins[0], stim.T, ax=ax, label=labels[1], color=colors[1])

    plt.xlim(0, 0.9)
    plt.gca().spines['top'].set_visible(False)
    plt.gca().spines['right'].set_visible(False)
    plt.legend(frameon=False)
    if ylim is not None:
        plt.ylim(ylim)
    despine()
    if filename is not None:
        # BUG FIX: the `filename` argument was ignored — a corrupted literal
        # placeholder was saved instead, so every call overwrote one file.
        savefig(output_path / "figures" / f"{filename}")
+
plot_speed(results, 'mean_freq',
+ colors[:2], labels[:2], filename='lfp_speed_freq_11', ylim=(7.3, 8.3))
+
plot_speed(results, 'mean_freq',
+ colors[2:], labels[2:], filename='lfp_speed_freq_30', ylim=(7.3, 8.3))
+
plot_speed(
+ results, 'mean_freq',
+ colors=[colors[0], colors[2]], labels=[labels[0], labels[2]], filename='lfp_speed_freq_baseline', ylim=(7.3, 8.3))
+
plot_speed(
+ results, 'mean_freq',
+ colors=[colors[1], colors[3]], labels=[labels[1], labels[3]], filename='lfp_speed_freq_stim', ylim=(7.3, 8.3))
+
plot_speed(
+ results, 'mean_power',
+ colors[:2], labels[:2], filename='lfp_speed_power_11', ylim=(5, 35))
+
plot_speed(
+ results, 'mean_power',
+ colors[2:], labels[2:], filename='lfp_speed_power_30', ylim=(5, 35))
+
plot_speed(
+ results, 'mean_power',
+ colors=[colors[0], colors[2]], labels=[labels[0], labels[2]], filename='lfp_speed_power_baseline', ylim=(5, 35))
+
plot_speed(
+ results, 'mean_power',
+ colors=[colors[1], colors[3]], labels=[labels[1], labels[3]], filename='lfp_speed_power_stim', ylim=(5, 35))
+
from septum_mec.analysis.statistics import make_statistics_table
+
stat, _ = make_statistics_table(
+ {k:v for k, v in results.items() if k in ['power_score', 'freq_score']},
+ labels, lmm_test=False, wilcoxon_test=True, use_weighted_stats=False, normality_test=True)
+stat
+
stat.to_latex(output_path / "statistics" / f"statistics.tex")
+stat.to_csv(output_path / "statistics" / f"statistics.csv")
+
for key, result in results.items():
+ result.to_latex(output_path / "statistics" / f"values_{key}.tex")
+ result.to_csv(output_path / "statistics" / f"values_{key}.csv")
+
action = project.actions["lfp_speed"]
+
outdata = {
+ "figures": "figures",
+ "statistics": "statistics"
+}
+
+for key, value in outdata.items():
+ action.data[key] = value
+ data_path = action.data_path(key)
+ data_path.parent.mkdir(exist_ok=True, parents=True)
+ source = output_path / value
+ if source.is_file():
+ shutil.copy(source, data_path)
+ else:
+ copy_tree(str(source), str(data_path))
+
septum_mec.analysis.registration.store_notebook(action, "20_lfp_speed.ipynb")
+
+
+
\n", + " | freq_score | \n", + "sample_rate | \n", + "power_score | \n", + "action | \n", + "channel_group | \n", + "max_speed | \n", + "min_speed | \n", + "position_low_pass_frequency | \n", + "mean_freq | \n", + "mean_power | \n", + "speed | \n", + "speed_bins | \n", + "theta_freq | \n", + "theta_power | \n", + "
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | \n", + "0.191729 | \n", + "1000.0 | \n", + "0.432532 | \n", + "1833-010719-1 | \n", + "0 | \n", + "1 | \n", + "0.02 | \n", + "6 | \n", + "[7.154332133229601, 7.106500202042717, 7.13862... | \n", + "[18.005621200653046, 18.66435212100411, 20.504... | \n", + "[0.02795137493203615, 0.0283076211590443, 0.02... | \n", + "[0.02, 0.04, 0.06, 0.08, 0.1, 0.12000000000000... | \n", + "[6.799999999999997, 6.799999999999997, 6.79999... | \n", + "[3.990633076071412, 3.992883430179942, 3.99513... | \n", + "
1 | \n", + "0.255882 | \n", + "1000.0 | \n", + "0.434938 | \n", + "1833-010719-1 | \n", + "1 | \n", + "1 | \n", + "0.02 | \n", + "6 | \n", + "[7.035831237674811, 7.05973079549096, 7.120455... | \n", + "[16.966011451769536, 17.60417640800431, 19.452... | \n", + "[0.02795137493203615, 0.0283076211590443, 0.02... | \n", + "[0.02, 0.04, 0.06, 0.08, 0.1, 0.12000000000000... | \n", + "[6.799999999999997, 6.799999999999997, 6.79999... | \n", + "[3.649171825378523, 3.6511305369806806, 3.6530... | \n", + "
2 | \n", + "0.169116 | \n", + "1000.0 | \n", + "0.338942 | \n", + "1833-010719-1 | \n", + "2 | \n", + "1 | \n", + "0.02 | \n", + "6 | \n", + "[7.156957284750235, 7.121730043055997, 7.17760... | \n", + "[14.747162413722597, 15.548073560884317, 16.81... | \n", + "[0.02795137493203615, 0.0283076211590443, 0.02... | \n", + "[0.02, 0.04, 0.06, 0.08, 0.1, 0.12000000000000... | \n", + "[6.799999999999997, 6.799999999999997, 6.79999... | \n", + "[3.069575227276876, 3.0713927350182493, 3.0732... | \n", + "
3 | \n", + "0.071480 | \n", + "1000.0 | \n", + "0.141405 | \n", + "1833-010719-1 | \n", + "3 | \n", + "1 | \n", + "0.02 | \n", + "6 | \n", + "[7.256682286107137, 7.237350035531646, 7.27254... | \n", + "[13.017027147293039, 12.651121743582284, 13.91... | \n", + "[0.02795137493203615, 0.0283076211590443, 0.02... | \n", + "[0.02, 0.04, 0.06, 0.08, 0.1, 0.12000000000000... | \n", + "[6.399999999999999, 6.399999999999999, 6.39999... | \n", + "[1.9508693636836856, 1.9523977795413874, 1.953... | \n", + "
4 | \n", + "0.216792 | \n", + "1000.0 | \n", + "-0.012191 | \n", + "1833-010719-1 | \n", + "4 | \n", + "1 | \n", + "0.02 | \n", + "6 | \n", + "[7.095817125902336, 7.050223640391819, 7.12869... | \n", + "[32.456068185302364, 23.01562486642484, 21.395... | \n", + "[0.02795137493203615, 0.0283076211590443, 0.02... | \n", + "[0.02, 0.04, 0.06, 0.08, 0.1, 0.12000000000000... | \n", + "[6.399999999999999, 6.399999999999999, 6.39999... | \n", + "[1.2545438245339104, 1.2553897239251606, 1.256... | \n", + "
\n", + " | freq_score | \n", + "sample_rate | \n", + "power_score | \n", + "action | \n", + "channel_group | \n", + "max_speed | \n", + "min_speed | \n", + "position_low_pass_frequency | \n", + "mean_freq | \n", + "mean_power | \n", + "... | \n", + "i | \n", + "ii | \n", + "session | \n", + "stim_location | \n", + "stimulated | \n", + "tag | \n", + "date | \n", + "entity_date | \n", + "Hz11 | \n", + "Hz30 | \n", + "
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | \n", + "0.191729 | \n", + "1000.0 | \n", + "0.432532 | \n", + "1833-010719-1 | \n", + "0 | \n", + "1 | \n", + "0.02 | \n", + "6 | \n", + "[7.154332133229601, 7.106500202042717, 7.13862... | \n", + "[18.005621200653046, 18.66435212100411, 20.504... | \n", + "... | \n", + "True | \n", + "False | \n", + "1 | \n", + "NaN | \n", + "False | \n", + "baseline i | \n", + "10719 | \n", + "1833-010719 | \n", + "True | \n", + "False | \n", + "
1 | \n", + "0.255882 | \n", + "1000.0 | \n", + "0.434938 | \n", + "1833-010719-1 | \n", + "1 | \n", + "1 | \n", + "0.02 | \n", + "6 | \n", + "[7.035831237674811, 7.05973079549096, 7.120455... | \n", + "[16.966011451769536, 17.60417640800431, 19.452... | \n", + "... | \n", + "True | \n", + "False | \n", + "1 | \n", + "NaN | \n", + "False | \n", + "baseline i | \n", + "10719 | \n", + "1833-010719 | \n", + "True | \n", + "False | \n", + "
2 | \n", + "0.169116 | \n", + "1000.0 | \n", + "0.338942 | \n", + "1833-010719-1 | \n", + "2 | \n", + "1 | \n", + "0.02 | \n", + "6 | \n", + "[7.156957284750235, 7.121730043055997, 7.17760... | \n", + "[14.747162413722597, 15.548073560884317, 16.81... | \n", + "... | \n", + "True | \n", + "False | \n", + "1 | \n", + "NaN | \n", + "False | \n", + "baseline i | \n", + "10719 | \n", + "1833-010719 | \n", + "True | \n", + "False | \n", + "
3 | \n", + "0.071480 | \n", + "1000.0 | \n", + "0.141405 | \n", + "1833-010719-1 | \n", + "3 | \n", + "1 | \n", + "0.02 | \n", + "6 | \n", + "[7.256682286107137, 7.237350035531646, 7.27254... | \n", + "[13.017027147293039, 12.651121743582284, 13.91... | \n", + "... | \n", + "True | \n", + "False | \n", + "1 | \n", + "NaN | \n", + "False | \n", + "baseline i | \n", + "10719 | \n", + "1833-010719 | \n", + "True | \n", + "False | \n", + "
4 | \n", + "0.216792 | \n", + "1000.0 | \n", + "-0.012191 | \n", + "1833-010719-1 | \n", + "4 | \n", + "1 | \n", + "0.02 | \n", + "6 | \n", + "[7.095817125902336, 7.050223640391819, 7.12869... | \n", + "[32.456068185302364, 23.01562486642484, 21.395... | \n", + "... | \n", + "True | \n", + "False | \n", + "1 | \n", + "NaN | \n", + "False | \n", + "baseline i | \n", + "10719 | \n", + "1833-010719 | \n", + "True | \n", + "False | \n", + "
5 rows × 27 columns
\n", + "\n", + " | action_side_a | \n", + "channel_group | \n", + "min_speed | \n", + "
---|---|---|---|
7 | \n", + "1833-010719-1-0 | \n", + "7 | \n", + "0.02 | \n", + "
3 | \n", + "1833-010719-1-1 | \n", + "3 | \n", + "0.02 | \n", + "
15 | \n", + "1833-010719-2-0 | \n", + "7 | \n", + "0.02 | \n", + "
11 | \n", + "1833-010719-2-1 | \n", + "3 | \n", + "0.02 | \n", + "
23 | \n", + "1833-020719-1-0 | \n", + "7 | \n", + "0.02 | \n", + "
\n", + " | Freq score | \n", + "Power score | \n", + "
---|---|---|
Baseline I | \n", + "1.8e-01 ± 1.7e-02 (46) | \n", + "1.6e-01 ± 2.2e-02 (46) | \n", + "
Normality Baseline I | \n", + "3.4e+00, 1.8e-01 | \n", + "2.5e+00, 2.8e-01 | \n", + "
11 Hz | \n", + "-6.4e-03 ± 1.1e-02 (44) | \n", + "-2.2e-02 ± 3.0e-02 (44) | \n", + "
Normality 11 Hz | \n", + "1.1e+01, 4.0e-03 | \n", + "2.0e+01, 5.5e-05 | \n", + "
Baseline II | \n", + "2.2e-01 ± 2.0e-02 (32) | \n", + "1.1e-01 ± 1.8e-02 (32) | \n", + "
Normality Baseline II | \n", + "2.0e+00, 3.6e-01 | \n", + "1.2e+00, 5.5e-01 | \n", + "
30 Hz | \n", + "1.1e-02 ± 1.4e-02 (34) | \n", + "5.3e-02 ± 2.3e-02 (34) | \n", + "
Normality 30 Hz | \n", + "2.8e+00, 2.4e-01 | \n", + "2.0e+01, 3.7e-05 | \n", + "
Wilcoxon Baseline I - 11 Hz | \n", + "1.6e+01, 2.3e-08, (44) | \n", + "1.1e+02, 7.0e-06, (44) | \n", + "
Wilcoxon Baseline I - Baseline II | \n", + "1.8e+02, 1.2e-01, (32) | \n", + "1.7e+02, 7.3e-02, (32) | \n", + "
Wilcoxon Baseline I - 30 Hz | \n", + "7.0e+00, 1.5e-06, (32) | \n", + "1.3e+02, 1.0e-02, (32) | \n", + "
Wilcoxon 11 Hz - Baseline II | \n", + "3.0e+00, 1.1e-06, (32) | \n", + "9.2e+01, 1.3e-03, (32) | \n", + "
Wilcoxon 11 Hz - 30 Hz | \n", + "2.2e+02, 3.8e-01, (32) | \n", + "1.6e+02, 5.9e-02, (32) | \n", + "
Wilcoxon Baseline II - 30 Hz | \n", + "9.0e+00, 1.9e-06, (32) | \n", + "1.5e+02, 3.6e-02, (32) | \n", + "