multielectrode_grasp_i140703_001_cutting_script_TS_ON_to_GO_ON.py

from neo.utils import get_events
import quantities as pq
from scipy.io import savemat
import numpy as np
from pathlib import Path

# from the multielectrode_grasp G-Node GIN repository
# used version from branch 'master':
# commit "placeholder-hash"
# TODO: update commit after 'elephant-data' PR-'enh/neo09-2' is merged into master
import reachgraspio.reachgraspio as rg


def main():
    # load & read dataset for i140703-001
    multielectrode_grasp_path = "/home/kramer/datasets/multielectrode_grasp/" \
                                "datasets"
    # TODO: before committing, use THIS generic path to multielectrode_grasp:
    # multielectrode_grasp_path = "GNode-GIN/multielectrode_grasp/datasets"
    session_name = Path(multielectrode_grasp_path, "i140703-001")
    session = rg.ReachGraspIO(str(session_name),
                              odml_directory=multielectrode_grasp_path)
    # channels to slice from
    channels = [1, 2, 3]
    # read the block in lazy mode (for the updated reachgraspio)
    data_block = session.read_block(lazy=True, correct_filter_shifts=True)
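    # With lazy=True the signals and events come back as neo proxy objects,
    # so the full-length recordings are not loaded here; the time_slice calls
    # further down pull only the requested windows into memory.
    # A quick, commented-out way to see what the block contains:
    # for seg in data_block.segments:
    #     print(seg.name, [ev.name for ev in seg.events])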
    # get the TrialStart (TS-ON) events of the correct trials
    correct_trial_start_events = get_events(
        data_block, name='TrialEvents', trial_event_labels='TS-ON',
        performance_in_trial=session.performance_codes['correct_trial'])[0]
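    # The slices below run from TS-ON over a fixed 2.102 s window, which the
    # output file names label as TS_ON_to_GO_ON. A minimal, commented-out
    # sketch (untested) of how the corresponding GO-ON events could be
    # fetched for cross-checking:
    # correct_trial_go_events = get_events(
    #     data_block, name='TrialEvents', trial_event_labels='GO-ON',
    #     performance_in_trial=session.performance_codes['correct_trial'])[0]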
    # create time slices of length 2.102 s starting at each TS-ON event
    neural_signals = data_block.filter(targdict={'neural_signal': True})[0]
    neural_signals = [neural_signals.time_slice(i, i + 2.102 * pq.s)
                      for i in correct_trial_start_events]
    # reset t_start of every slice to 0 s
    neural_signals = [neural_signals[i].time_shift(-neural_signals[i].times[0])
                      for i in range(len(neural_signals))]
    times = neural_signals[0].times[:2100]
    srate = neural_signals[0].sampling_rate
    # transpose each slice from (samples x channels) to (channels x samples),
    # so that stacking one channel over trials below gives trials x samples
    neural_signals = [neural_signals[i].transpose()
                      for i in range(len(neural_signals))]
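    # Optional sanity check (commented-out sketch): after the transpose,
    # every trial slice should provide at least the 2100 samples kept below
    # along its second axis.
    # assert all(sig.shape[1] >= 2100 for sig in neural_signals)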
    # create for each channel a dictionary with the fields
    # 'lfp_matrix', 'time' and 'sf' and save it as a *.mat file
    dataset_4_path = "../../../../dataset-4"
    channel_dicts = {}
    fname_dict = {}
    for i in channels:
        ch_id = i
        channel_dicts[f"mdic_{ch_id}"] = {
            "lfp_matrix": np.array(
                [neural_signals[j][i - 1][:2100].squeeze()
                 # python indexing!: the first channel has index 0 -> i-1 needed
                 for j in range(len(neural_signals))]),
            "time": times,
            "sf": srate}
        fname_dict[f"fname{ch_id}"] = \
            f"i140703-001_ch0{ch_id}_slice_TS_ON_to_GO_ON_correct_trials.mat"
        savemat(Path(dataset_4_path, fname_dict[f"fname{ch_id}"]),
                channel_dicts[f"mdic_{ch_id}"])
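    # Verification sketch (commented out): the saved files can be read back
    # with scipy.io.loadmat and should contain the keys written above.
    # from scipy.io import loadmat
    # check = loadmat(str(Path(dataset_4_path, fname_dict["fname1"])))
    # print(check["lfp_matrix"].shape)  # expected: (number of trials, 2100)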
    fft1 = np.fft.rfft(channel_dicts["mdic_1"]['lfp_matrix'])
    fft2 = np.fft.rfft(channel_dicts["mdic_2"]['lfp_matrix'])
    window_length = np.shape(channel_dicts["mdic_1"]['lfp_matrix'])[1]
    freq = np.fft.rfftfreq(window_length, d=1. / srate)
    cs = fft1 * np.conjugate(fft2)
    cs_dict = {"cross_spectrum_matrix": cs, "frequencies": freq}
    cs_fname = "i140703-001_cross_spectrum_of_channel_1_and_2_of_slice_" \
               "TS_ON_to_GO_ON_correct_trials.mat"
    savemat(Path(dataset_4_path, cs_fname), cs_dict)
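    # Note: each row of cs is the single-trial cross-spectrum
    # fft1[trial] * conj(fft2[trial]) between channel 1 and channel 2; a
    # trial-averaged estimate (not saved here) could be obtained with, e.g.:
    # cs_mean = cs.mean(axis=0)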


if __name__ == '__main__':
    main()