convert_to_nix.py

  1. """
  2. This script loads a complete session in the blackrock format and converts it to
  3. a single nix file.
  4. For sessions without online recorded LFP (i.e., 1000Hz neural
  5. signal in the ns2 files), this LFP is generated offline by filtering and
  6. downsampling.
  7. Two versions of the nix files are saved: one containing the complete data, and
  8. a smaller one containing everything except the AnalogSignal object with the
  9. raw data records.
  10. """
import os

import numpy as np
import quantities as pq

import neo
from neo.test.tools import (assert_same_sub_schema,
                            assert_same_annotations,
                            assert_same_array_annotations)
from elephant.signal_processing import butter

from reachgraspio import reachgraspio
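
# Note: reachgraspio provides the dataset-specific ReachGraspIO class used
# below to load the Blackrock files together with their odML metadata;
# elephant's butter() implements the Butterworth filter used to create the
# offline LFP.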

##### INITIAL SETUP ############

# Choose which session you want to convert into a nix file
# session = "i140703-001"
session = "l101210-001"

# Input data, i.e., original Blackrock files and odML
dataset_dir = '../datasets_blackrock'
session_path = f'{dataset_dir}/{session}'
odml_dir = os.path.join('..', 'datasets_blackrock')

# Output directory for the nix files
nix_dataset_dir = '../datasets_nix'
nix_session_path = f'{nix_dataset_dir}/{session}'
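
# Note: the output directory '../datasets_nix' is assumed to exist already;
# it is not created by this script.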

##### LOAD BLACKROCK FILES ############
session_io = reachgraspio.ReachGraspIO(
    filename=session_path,
    odml_directory=odml_dir,
    verbose=False)

block = session_io.read_block(lazy=False, load_waveforms=True)
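
# Quick overview of the loaded data (optional sanity output; the exact counts
# depend on the session): the Block is expected to contain a single Segment
# holding the AnalogSignals, SpikeTrains, Events and Epochs validated below.
print(f"Loaded {len(block.segments)} segment(s); the first segment contains "
      f"{len(block.segments[0].analogsignals)} AnalogSignals and "
      f"{len(block.segments[0].spiketrains)} SpikeTrains.")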

##### CREATE LFP IF MISSING ############
# Here, we construct one offline filtered LFP from each ns5 (monkey L) or ns6
# (monkey N) raw recording trace. For monkey N, this filtered LFP can be
# compared to the LFPs in the ns2 file (note that monkey L contains only
# behavioral signals in the ns2 file). Also, we assign descriptive names to
# each Neo AnalogSignal, which are used for plotting later on.

# This factor was heuristically determined as an approximate shift introduced
# by the online filtering. Here, the offline filtering does not introduce a
# noticeable shift, so we set it to zero.
time_shift_factor = 0 * pq.ms

# Identify neuronal signals and provide labels for plotting
filtered_anasig = None
raw_anasig = None
for anasig in block.segments[0].analogsignals:
    # skip non-neuronal signals
    if not anasig.annotations['neural_signal']:
        continue

    # Identify nsx source of signals in this AnalogSignal object
    if 'nsx' in anasig.annotations:
        nsx = anasig.annotations['nsx']
    else:
        nsx = np.unique(anasig.array_annotations['nsx'])
        assert len(nsx) == 1, 'Different nsx sources in AnalogSignal'
        nsx = nsx[0]

    if nsx == 2:
        # AnalogSignal is LFP from ns2
        filtered_anasig = anasig
    elif nsx in [5, 6]:
        # AnalogSignal is raw signal from ns5 or ns6
        raw_anasig = anasig

# Does the file contain a filtered signal, i.e., an LFP?
if filtered_anasig is None:
    print("Filtering raw time series to obtain LFP")

    # Filtering must be done channel by channel for memory reasons
    # (requires approx. 32 GB RAM)
    for f in range(raw_anasig.shape[1]):
        filtered_signal = butter(
            raw_anasig[:, f],
            highpass_freq=None,
            lowpass_freq=250 * pq.Hz,
            filter_function='sosfiltfilt',
            order=4)
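
        # Note: 'sosfiltfilt' applies the filter forward and backward, so the
        # low-pass filtering itself is zero-phase; this is why
        # time_shift_factor is left at zero above.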
        # Downsampling 30-fold here to get to 1000Hz from 30kHz
        # Note: For filters that may introduce a time shift, here would be the
        # place to correct for this shift using:
        # [...] .time_shift(time_shift_factor)
        downsampled_signal = filtered_signal.downsample(30)

        # First run? Create a new AnalogSignal
        if f == 0:
            offline_filtered_anasig = neo.AnalogSignal(
                np.zeros(
                    (downsampled_signal.shape[0], raw_anasig.shape[1]),
                    dtype=np.float32) * downsampled_signal.units,
                t_start=downsampled_signal.t_start,
                sampling_rate=downsampled_signal.sampling_rate)

            # add missing annotations (decided not to put nsx=2, since this
            # signal is not contained in the original files)
            offline_filtered_anasig.annotate(
                neural_signal=True,
                filter_shift_correction=time_shift_factor,
                nsx=-1,
                stream_id='',
            )

            # all array annotations of the raw signal also apply to the
            # filtered signal
            offline_filtered_anasig.array_annotate(
                **raw_anasig.array_annotations)

        offline_filtered_anasig[:, f] = downsampled_signal

    if 'nsx' in anasig.annotations:
        nsx = anasig.annotations['nsx']
    else:
        nsx = anasig.array_annotations["nsx"][0]

    offline_filtered_anasig.name = "NeuralTimeSeriesDownsampled"
    offline_filtered_anasig.description = "Downsampled continuous neuronal " \
                                          "recordings, where the downsampling " \
                                          "was performed off-line during " \
                                          "post-processing"

    offline_filtered_group = neo.Group(
        name="offline",
        stream_id='')
    offline_filtered_group.analogsignals.append(offline_filtered_anasig)

    # Attach all offline filtered LFPs to the segment of data and the groups
    block.segments[0].analogsignals.insert(0, offline_filtered_anasig)
    block.groups.insert(0, offline_filtered_group)
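
    # Optional consistency check (sketch; assumes the 30-fold downsampling
    # above): the offline LFP should match the raw signal's channel count and
    # have 1/30 of its sampling rate.
    assert offline_filtered_anasig.shape[1] == raw_anasig.shape[1]
    assert offline_filtered_anasig.sampling_rate == raw_anasig.sampling_rate / 30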

##### MISC FIXES ###################
# For lilou, behavior channels were not set with nice names in the setup.
# Here, we hard code these handwritten annotations to make both recordings
# more similar.
if session == "l101210-001":
    block.segments[0].analogsignals[1].array_annotate(
        channel_names=['GFpr1', 'GFside1', 'GFpr2', 'GFside2', 'LF', 'Displ'])

# Unify group names of ns2 for Nikos session
if session == "i140703-001":
    block.groups[0].name = 'nsx2'
    block.groups[1].name = 'nsx2'

##### SAVE NIX FILE ###################
nix_filename = nix_session_path + '.nix'

if os.path.exists(nix_filename):
    print('Nix file already exists and will not be overwritten.')
else:
    with neo.NixIO(nix_filename) as io:
        print(f'Saving nix file at {nix_filename}')
        io.write_block(block)

##### VALIDATION OF FILE CONTENT ######
with neo.NixIO(nix_filename, mode='ro') as io:
    blocks = io.read_all_blocks()

assert len(blocks) == 1
block_new = blocks[0]

for seg_old, seg_new in zip(block.segments, block_new.segments):
    for anasig_old, anasig_new in zip(seg_old.analogsignals,
                                      seg_new.analogsignals):
        # ignoring differences in the file_origin attribute
        anasig_old.file_origin = anasig_new.file_origin
        assert_same_sub_schema(anasig_old, anasig_new)
        assert_same_annotations(anasig_old, anasig_new)
        assert_same_array_annotations(anasig_old, anasig_new)
    print('AnalogSignals are equivalent.')

    for st_old, st_new in zip(seg_old.spiketrains, seg_new.spiketrains):
        # ignoring differences in the file_origin attribute
        st_old.file_origin = st_new.file_origin
        assert_same_sub_schema(st_old, st_new)
        assert_same_annotations(st_old, st_new)
        assert_same_array_annotations(st_old, st_new)
    print('Spiketrains are equivalent.')

    for ev_old, ev_new in zip(seg_old.events, seg_new.events):
        # ignoring differences in the file_origin attribute
        ev_old.file_origin = ev_new.file_origin
        # ignore list-array type changes
        if 'color_codes' in ev_old.annotations:
            ev_old.annotations['color_codes'] = list(
                ev_old.annotations['color_codes'])
        assert_same_sub_schema(ev_old, ev_new)
        assert_same_annotations(ev_old, ev_new)
        assert_same_array_annotations(ev_old, ev_new)
    print('Events are equivalent.')

    for ep_old, ep_new in zip(seg_old.epochs, seg_new.epochs):
        # ignoring differences in the file_origin attribute
        ep_old.file_origin = ep_new.file_origin
        assert_same_sub_schema(ep_old, ep_new)
        assert_same_annotations(ep_old, ep_new)
        assert_same_array_annotations(ep_old, ep_new)
    print('Epochs are equivalent.')

##### SAVE SMALL NIX FILE ###################
# remove raw signal
del block.segments[0].analogsignals[2]
del block.groups[2]

nix_filename = nix_session_path + '_no_raw.nix'

if os.path.exists(nix_filename):
    print('Nix file already exists and will not be overwritten.')
else:
    with neo.NixIO(nix_filename) as io:
        print(f'Saving nix file at {nix_filename}')
        io.write_block(block)
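
# To continue working with the converted data, the nix files written above can
# be re-opened with neo.NixIO, e.g. (sketch of typical usage):
#
#     with neo.NixIO(nix_session_path + '_no_raw.nix', mode='ro') as io:
#         block = io.read_block()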