""" This script loads a complete session in the blackrock format and converts it to a single nix file. For sessions without online recorded LFP (i.e., 1000Hz neural signal in the ns2 files), this LFP is generated offline by filtering and downsampling. Two versions of the nix files are saved: one containing the complete data, and a smaller one containing everything except the AnalogSignal object with the raw data records. """ import os import numpy as np import quantities as pq import neo from neo.test.tools import (assert_same_sub_schema, assert_same_annotations, assert_same_array_annotations) from elephant.signal_processing import butter from reachgraspio import reachgraspio ##### INITIAL SETUP ############ # Choose which session you want to convert into a nix file session = "i140703-001" session = "l101210-001" # Input data. i.e., original Blackrock files and odML dataset_dir = '../datasets_blackrock' session_path = f'{dataset_dir}/{session}' odml_dir = os.path.join('..', 'datasets_blackrock') # Output for the nix files nix_dataset_dir = '../datasets_nix' nix_session_path = f'{nix_dataset_dir}/{session}' ##### LOAD BLACKROCK FILES ############ session = reachgraspio.ReachGraspIO( filename=session_path, odml_directory=odml_dir, verbose=False) block = session.read_block(lazy=False, load_waveforms=True) ##### CREATE LFP IF MISSING ############ # Here, we construct one offline filtered LFP from each ns5 (monkey L) or ns6 # (monkey N) raw recording trace. For monkey N, this filtered LFP can be # compared to the LFPs in the ns2 file (note that monkey L contains only # behavioral signals in the ns2 file). Also, we assign telling names to each # Neo AnalogSignal, which is used for plotting later on in this script. # this factor was heuristically determined as an approximate shift introduced # by the online filtering. Here, the offline filtering does not introduce a # noticeable shift, so we set it to zero. time_shift_factor = 0*pq.ms # Identify neuronal signals and provide labels for plotting filtered_anasig = None raw_anasig = None for anasig in block.segments[0].analogsignals: # skip non-neuronal signals if not anasig.annotations['neural_signal']: continue # Identify nsx source of signals in this AnalogSignal object if 'nsx' in anasig.annotations: nsx = anasig.annotations['nsx'] else: nsx = np.unique(anasig.array_annotations['nsx']) assert len(nsx) == 1, 'Different nsx sources in AnalogSignal' nsx = nsx[0] if nsx == 2: # AnalogSignal is LFP from ns2 filtered_anasig = anasig elif nsx in [5, 6]: # AnalogSignal is raw signal from ns5 or ns6 raw_anasig = anasig # Does the file contain a filtered signal, i.e., an LFP? if filtered_anasig is None: print("Filtering raw time series to obtain LFP") # Filtering must be done channel by channel for memory reasons (requires approx. 32 GB RAM) for f in range(raw_anasig.shape[1]): filtered_signal = butter( raw_anasig[:, f], highpass_freq=None, lowpass_freq=250 * pq.Hz, filter_function='sosfiltfilt', order=4) # Downsampling 30-fold here to get to 1000Hz from 30kHz # Note: For filters that may introduce a time shift, here would be the # place to correct for this shift using: # [...] .time_shift(time_shift_factor) downsampled_signal=filtered_signal.downsample(30) # First run? 

##### CREATE LFP IF MISSING ############

# Here, we construct an offline filtered LFP from the ns5 (monkey L) or ns6
# (monkey N) raw recording trace. For monkey N, this filtered LFP can be
# compared to the LFPs in the ns2 file (note that for monkey L the ns2 file
# contains only behavioral signals). In addition, we assign descriptive names
# to each Neo AnalogSignal, which can be used for plotting later on.

# This factor was heuristically determined as the approximate shift introduced
# by the online filtering. Here, the offline filtering does not introduce a
# noticeable shift, so we set it to zero.
time_shift_factor = 0 * pq.ms

# Identify neuronal signals and provide labels for plotting
filtered_anasig = None
raw_anasig = None
for anasig in block.segments[0].analogsignals:
    # Skip non-neuronal signals
    if not anasig.annotations['neural_signal']:
        continue

    # Identify the nsx source of the signals in this AnalogSignal object
    if 'nsx' in anasig.annotations:
        nsx = anasig.annotations['nsx']
    else:
        nsx = np.unique(anasig.array_annotations['nsx'])
        assert len(nsx) == 1, 'Different nsx sources in AnalogSignal'
        nsx = nsx[0]

    if nsx == 2:
        # AnalogSignal is the LFP from ns2
        filtered_anasig = anasig
    elif nsx in [5, 6]:
        # AnalogSignal is the raw signal from ns5 or ns6
        raw_anasig = anasig

# Does the file contain a filtered signal, i.e., an LFP?
if filtered_anasig is None:
    print("Filtering raw time series to obtain LFP")

    # Filtering must be done channel by channel for memory reasons
    # (requires approx. 32 GB RAM)
    for f in range(raw_anasig.shape[1]):
        filtered_signal = butter(
            raw_anasig[:, f],
            highpass_freq=None,
            lowpass_freq=250 * pq.Hz,
            filter_function='sosfiltfilt',
            order=4)

        # Downsample 30-fold to get from 30 kHz to 1000 Hz
        # Note: For filters that may introduce a time shift, here would be the
        # place to correct for this shift using:
        # [...] .time_shift(time_shift_factor)
        downsampled_signal = filtered_signal.downsample(30)

        # First run? Create a new AnalogSignal collecting all channels
        if f == 0:
            offline_filtered_anasig = neo.AnalogSignal(
                np.zeros((downsampled_signal.shape[0], raw_anasig.shape[1]),
                         dtype=np.float32) * downsampled_signal.units,
                t_start=downsampled_signal.t_start,
                sampling_rate=downsampled_signal.sampling_rate)

            # Add missing annotations (we decided not to set nsx=2, since this
            # signal is not contained in the original files)
            offline_filtered_anasig.annotate(
                neural_signal=True,
                filter_shift_correction=time_shift_factor,
                nsx=-1,
                stream_id='',
            )

            # All array annotations of the raw signal also apply to the
            # filtered signal
            offline_filtered_anasig.array_annotate(
                **raw_anasig.array_annotations)

        offline_filtered_anasig[:, f] = downsampled_signal

    if 'nsx' in anasig.annotations:
        nsx = anasig.annotations['nsx']
    else:
        nsx = anasig.array_annotations["nsx"][0]

    offline_filtered_anasig.name = "NeuralTimeSeriesDownsampled"
    offline_filtered_anasig.description = "Downsampled continuous neuronal " \
                                          "recordings, where the downsampling " \
                                          "was performed off-line during " \
                                          "post-processing"

    offline_filtered_group = neo.Group(
        name="offline",
        stream_id='')
    offline_filtered_group.analogsignals.append(offline_filtered_anasig)

    # Attach the offline filtered LFP to the segment of data and the groups
    block.segments[0].analogsignals.insert(0, offline_filtered_anasig)
    block.groups.insert(0, offline_filtered_group)
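
# Optional sanity check (an illustrative sketch, assuming the raw signal is
# sampled at 30 kHz as noted above): if the LFP was computed offline, the
# 30-fold downsampling should yield a 1 kHz signal, matching the resolution
# of an online-recorded ns2 LFP.
if filtered_anasig is None:
    lfp_rate = offline_filtered_anasig.sampling_rate.rescale(pq.Hz)
    assert np.isclose(float(lfp_rate), 1000.0), \
        f"Unexpected offline LFP sampling rate: {lfp_rate}"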

##### MISC FIXES ###################

# For lilou, the behavior channels were not given descriptive names in the
# recording setup. Here, we hard code these handwritten annotations to make
# both recordings more similar.
block.segments[0].analogsignals[1].array_annotate(
    channel_names=['GFpr1', 'GFside1', 'GFpr2', 'GFside2', 'LF', 'Displ'])

# Unify group names of ns2 for the Nikos session
if session == "i140703-001":
    block.groups[0].name = 'nsx2'
    block.groups[1].name = 'nsx2'


##### SAVE NIX FILE ###################

nix_filename = nix_session_path + '.nix'
if os.path.exists(nix_filename):
    print('Nix file already exists and will not be overwritten.')
else:
    with neo.NixIO(nix_filename) as io:
        print(f'Saving nix file at {nix_filename}')
        io.write_block(block)


##### VALIDATION OF FILE CONTENT ######

with neo.NixIO(nix_filename, mode='ro') as io:
    blocks = io.read_all_blocks()
    assert len(blocks) == 1
    block_new = blocks[0]

    for seg_old, seg_new in zip(block.segments, block_new.segments):
        for anasig_old, anasig_new in zip(seg_old.analogsignals,
                                          seg_new.analogsignals):
            # Ignore differences in the file_origin attribute
            anasig_old.file_origin = anasig_new.file_origin
            assert_same_sub_schema(anasig_old, anasig_new)
            assert_same_annotations(anasig_old, anasig_new)
            assert_same_array_annotations(anasig_old, anasig_new)
        print('AnalogSignals are equivalent.')

        for st_old, st_new in zip(seg_old.spiketrains, seg_new.spiketrains):
            # Ignore differences in the file_origin attribute
            st_old.file_origin = st_new.file_origin
            assert_same_sub_schema(st_old, st_new)
            assert_same_annotations(st_old, st_new)
            assert_same_array_annotations(st_old, st_new)
        print('Spiketrains are equivalent.')

        for ev_old, ev_new in zip(seg_old.events, seg_new.events):
            # Ignore differences in the file_origin attribute
            ev_old.file_origin = ev_new.file_origin
            # Ignore list vs. array type changes of this annotation
            if 'color_codes' in ev_old.annotations:
                ev_old.annotations['color_codes'] = list(
                    ev_old.annotations['color_codes'])
            assert_same_sub_schema(ev_old, ev_new)
            assert_same_annotations(ev_old, ev_new)
            assert_same_array_annotations(ev_old, ev_new)
        print('Events are equivalent.')

        for ep_old, ep_new in zip(seg_old.epochs, seg_new.epochs):
            # Ignore differences in the file_origin attribute
            ep_old.file_origin = ep_new.file_origin
            assert_same_sub_schema(ep_old, ep_new)
            assert_same_annotations(ep_old, ep_new)
            assert_same_array_annotations(ep_old, ep_new)
        print('Epochs are equivalent.')


##### SAVE SMALL NIX FILE ###################

# Remove the raw signal from the block
del block.segments[0].analogsignals[2]
del block.groups[2]

nix_filename = nix_session_path + '_no_raw.nix'
if os.path.exists(nix_filename):
    print('Nix file already exists and will not be overwritten.')
else:
    with neo.NixIO(nix_filename) as io:
        print(f'Saving nix file at {nix_filename}')
        io.write_block(block)
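
# Optional final check (an illustrative sketch only): compare the on-disk
# sizes of the two saved nix files; the reduced file should be noticeably
# smaller, since it lacks the raw AnalogSignal.
for suffix in ('.nix', '_no_raw.nix'):
    path = nix_session_path + suffix
    if os.path.exists(path):
        print(f'{path}: {os.path.getsize(path) / 1e9:.2f} GB')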