- from neo.utils import get_events
- import quantities as pq
- from scipy.io import savemat
- import numpy as np
- from pathlib import Path
- # from multielectrode_grasp G-Node Gin repository
- # used version from branch 'master':
- # commit "placeholder-hash"
- # TODO: update commit after 'elephant-data' PR-'enh/neo09-2' is merged into master
- import reachgraspio.reachgraspio as rg
def main():
    """Export LFP trial slices of session i140703-001 as MATLAB files.

    Reads the reach-to-grasp session, cuts a 2.1 s window starting at each
    TS-ON event of every correct trial, saves one ``.mat`` file per channel
    (fields ``lfp_matrix`` (trials x samples), ``time``, ``sf``) and finally
    the cross spectrum of channels 1 and 2.  All output goes to
    ``../../../../dataset-4``.
    """
    # Load & read the dataset for session i140703-001.
    multielectrode_grasp_path = "/home/kramer/datasets/multielectrode_grasp/" \
                                "datasets"
    # TODO: before committing use THIS generic path to multielectrode_grasp:
    # multielectrode_grasp_path = "GNode-GIN/multielectrode_grasp/datasets"
    session_name = Path(multielectrode_grasp_path, "i140703-001")
    session = rg.ReachGraspIO(str(session_name),
                              odml_directory=multielectrode_grasp_path)

    # Channels to slice from.
    channels = [1, 2, 3]
    # Read block in lazy mode (supported by the updated reachgraspio).
    data_block = session.read_block(lazy=True, correct_filter_shifts=True)
    # Get the TrialStart (TS-ON) events of the correct trials.
    correct_trial_start_events = get_events(
        data_block, name='TrialEvents', trial_event_labels='TS-ON',
        performance_in_trial=session.performance_codes['correct_trial'])[0]
    # Create time slices of length 2.1 s starting at each TS-ON event.
    neural_signals = data_block.filter(targdict={'neural_signal': True})[0]
    neural_signals = [neural_signals.time_slice(t_start, t_start + 2.102 * pq.s)
                      for t_start in correct_trial_start_events]
    # Reset t_start of every slice to 0 s.
    neural_signals = [sig.time_shift(-sig.times[0]) for sig in neural_signals]
    times = neural_signals[0].times[:2100]
    srate = neural_signals[0].sampling_rate
    # Transpose each slice so that axis 0 indexes channels.
    neural_signals = [sig.transpose() for sig in neural_signals]

    # For each channel build a dict with the fields 'lfp_matrix', 'time'
    # and 'sf' and save it as a *.mat file.
    dataset_4_path = "../../../../dataset-4"
    channel_dicts = {}
    fname_dict = {}
    for ch_id in channels:
        channel_dicts[f"mdic_{ch_id}"] = {
            # Python indexing!: first channel has index 0 -> ch_id - 1 needed.
            "lfp_matrix": np.array(
                [sig[ch_id - 1][:2100].squeeze() for sig in neural_signals]),
            "time": times,
            "sf": srate}
        # ":02d" reproduces the historical zero-padding ("ch01") for ids 1-9
        # and, unlike the former hard-coded "ch0", stays correct for ids >= 10.
        fname_dict[f"fname{ch_id}"] = (
            f"i140703-001_ch{ch_id:02d}_slice_TS_ON_to_GO_ON"
            f"_correct_trials.mat")
        savemat(Path(dataset_4_path, fname_dict[f"fname{ch_id}"]),
                channel_dicts[f"mdic_{ch_id}"])

    # Cross spectrum of channels 1 and 2: CS = FFT(ch1) * conj(FFT(ch2)).
    fft1 = np.fft.rfft(channel_dicts["mdic_1"]['lfp_matrix'])
    fft2 = np.fft.rfft(channel_dicts["mdic_2"]['lfp_matrix'])
    window_length = np.shape(channel_dicts["mdic_1"]['lfp_matrix'])[1]
    freq = np.fft.rfftfreq(window_length, d=1. / srate)
    cs = fft1 * np.conjugate(fft2)
    cs_dict = {"cross_spectrum_matrix": cs, "frequencies": freq}
    # NOTE(review): fixed typo in the output filename ("corect" -> "correct");
    # downstream consumers of the old misspelled name must be updated.
    cs_fname = "i140703-001_cross_spectrum_of_channel_1_and_2_of_slice_" \
               "TS_ON_to_GO_ON_correct_trials.mat"
    savemat(Path(dataset_4_path, cs_fname), cs_dict)
# Run the export only when executed as a script, not when imported.
if __name__ == '__main__':
    main()