convert_to_nix.py

  1. """
  2. This script loads a complete session in the blackrock format and converts it to a single nix file
  3. """
  4. import os
  5. import copy
  6. import numpy as np
  7. import quantities as pq
  8. import neo
  9. from neo.test.tools import (assert_same_sub_schema,
  10. assert_same_annotations,
  11. assert_same_array_annotations)
  12. from elephant.signal_processing import butter
  13. from reachgraspio import reachgraspio
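
# Note: reachgraspio is the dataset-specific Neo IO shipped with this data
# set; this script assumes it is importable (e.g., located next to this file
# or available on the Python path).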

# Choose which session you want to convert into a nix file
# session = "i140703-001"
session = "l101210-001"
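# The session prefix identifies the monkey: the 'l...' session was recorded
# from monkey L (Lilou), the 'i...' session from monkey N (see the notes on
# ns2/ns5/ns6 below).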

# Input data, i.e., original Blackrock files and odML
dataset_dir = '../datasets_blackrock'
session_path = f'{dataset_dir}/{session}'
odml_dir = os.path.join('..', 'datasets_blackrock')

# Output for the nix files
nix_dataset_dir = '../datasets_nix'
nix_session_path = f'{nix_dataset_dir}/{session}'
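# The output directory is assumed to exist already; if needed, it could be
# created here with os.makedirs(nix_dataset_dir, exist_ok=True).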

##### LOAD BLACKROCK FILES ############
session = reachgraspio.ReachGraspIO(
    filename=session_path,
    odml_directory=odml_dir,
    verbose=False)

block = session.read_block(lazy=False, load_waveforms=True)
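# The loaded Block contains the continuous signals, spike trains, events and
# epochs of the session in its segments (this script mainly works on
# block.segments[0]); load_waveforms=True additionally loads spike waveforms.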

# =============================================================================
# Create offline filtered LFP
#
# Here, we construct one offline filtered LFP from each ns5 (monkey L) or ns6
# (monkey N) raw recording trace. For monkey N, this filtered LFP can be
# compared to the LFPs in the ns2 file (note that monkey L contains only
# behavioral signals in the ns2 file). Also, we assign telling names to each
# Neo AnalogSignal, which makes them easier to identify later on (e.g., for
# plotting).
# =============================================================================

# This factor was heuristically determined as an approximate shift introduced
# by the online filtering. Here, the offline filtering does not introduce a
# noticeable shift, so we set it to zero.
time_shift_factor = 0 * pq.ms

# identify neuronal signals and provide labels for plotting
filtered_anasig = None
raw_anasig = None
for anasig in block.segments[0].analogsignals:
    # skip non-neuronal signals
    if not anasig.annotations['neural_signal']:
        continue

    # identify nsx source of signals in this AnalogSignal object
    if 'nsx' in anasig.annotations:
        nsx = anasig.annotations['nsx']
    else:
        nsx = np.unique(anasig.array_annotations['nsx'])
        assert len(nsx) == 1, 'Different nsx sources in AnalogSignal'
        nsx = nsx[0]

    if nsx == 2:
        # AnalogSignal is LFP from ns2
        filtered_anasig = anasig
    elif nsx in [5, 6]:
        # AnalogSignal is raw signal from ns5 or ns6
        raw_anasig = anasig
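
# If no ns2 LFP was found (for monkey L the ns2 file contains only behavioral
# signals), an LFP is created below by low-pass filtering and downsampling the
# raw signal.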
if filtered_anasig is None:
    print("Filtering raw time series to obtain LFP")

    for f in range(raw_anasig.shape[1]):
        # filtering must be done channel by channel for memory reasons
        # (requires approx. 32 GB RAM)
        print(f"Processing channel {f}")
        filtered_signal = butter(
            raw_anasig[:, f],
            highpass_freq=None,
            lowpass_freq=250 * pq.Hz,
            filter_function='sosfiltfilt',
            order=4)
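
        # 'sosfiltfilt' filters forward and backward (zero-phase), so the
        # offline filtering itself introduces no time shift.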
        # For other filters that may introduce a time shift, here would be the
        # place to correct for this shift using:
        # ... .time_shift(time_shift_factor)

        # Downsampling 30-fold here to get to 1000Hz from 30kHz
        downsampled_signal = filtered_signal.downsample(30)

        # first run? Create a new AnalogSignal
        if f == 0:
            offline_filtered_anasig = neo.AnalogSignal(
                np.zeros((downsampled_signal.shape[0], raw_anasig.shape[1]),
                         dtype=np.float32) * downsampled_signal.units,
                t_start=downsampled_signal.t_start,
                sampling_rate=downsampled_signal.sampling_rate)
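
            # The zero-filled signal is preallocated with the downsampled
            # length and the full channel count, so that each filtered channel
            # can be written into column f below.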

            # add missing annotations (decision not to put nsx=2, since this
            # signal is not in the original files)
            offline_filtered_anasig.annotate(
                neural_signal=True,
                filter_shift_correction=time_shift_factor,
                nsx=-1,
                stream_id='',
            )

            # all array annotations of the raw signal also apply to the
            # filtered signal
            # offline_filtered_anasig.array_annotations = copy.copy(
            #     raw_anasig.array_annotations)
            offline_filtered_anasig.array_annotate(
                **raw_anasig.array_annotations)

        offline_filtered_anasig[:, f] = downsampled_signal

    if 'nsx' in anasig.annotations:
        nsx = anasig.annotations['nsx']
    else:
        nsx = anasig.array_annotations["nsx"][0]

    offline_filtered_anasig.name = "NeuralTimeSeriesDownsampled"
    offline_filtered_anasig.description = (
        "Downsampled continuous neuronal recordings, where the downsampling "
        "was performed off-line during post-processing")

    # Attach the offline filtered LFP to the segment of data
    block.segments[0].analogsignals.insert(0, offline_filtered_anasig)

##### MISC FIXES ###################
# For Lilou, the behavior channels were not given descriptive names in the
# recording setup. Here, we hard code these handwritten annotations to make
# both recordings more similar.
block.segments[0].analogsignals[1].array_annotate(
    channel_names=['GFpr1', 'GFside1', 'GFpr2', 'GFside2', 'LF', 'Displ'])
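# (Index 1 above: after the insert at index 0 further up, the first original
# AnalogSignal, which for Lilou is expected to carry these six behavioral
# channels, sits at index 1.)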

##### SAVE NIX FILE ###################
nix_filename = nix_session_path + '.nix'
if os.path.exists(nix_filename):
    print('Nix file already exists and will not be overwritten.')
else:
    with neo.NixIO(nix_filename) as io:
        print(f'Saving nix file at {nix_filename}')
        io.write_block(block)

##### VALIDATION OF FILE CONTENT ######
with neo.NixIO(nix_filename, mode='ro') as io:
    blocks = io.read_all_blocks()
    assert len(blocks) == 1
    block_new = blocks[0]
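
    # Compare each object of the original Block with its re-read counterpart;
    # the assert_same_* helpers from neo.test.tools raise an error on any
    # mismatch in attributes, data, annotations or array annotations.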
    for seg_old, seg_new in zip(block.segments, block_new.segments):
        for anasig_old, anasig_new in zip(seg_old.analogsignals,
                                          seg_new.analogsignals):
            # ignoring differences in the file_origin attribute
            anasig_old.file_origin = anasig_new.file_origin
            assert_same_sub_schema(anasig_old, anasig_new)
            assert_same_annotations(anasig_old, anasig_new)
            assert_same_array_annotations(anasig_old, anasig_new)
        print('AnalogSignals are equivalent.')

        for st_old, st_new in zip(seg_old.spiketrains, seg_new.spiketrains):
            # ignoring differences in the file_origin attribute
            st_old.file_origin = st_new.file_origin
            assert_same_sub_schema(st_old, st_new)
            assert_same_annotations(st_old, st_new)
            assert_same_array_annotations(st_old, st_new)
        print('Spiketrains are equivalent.')

        for ev_old, ev_new in zip(seg_old.events, seg_new.events):
            # ignoring differences in the file_origin attribute
            ev_old.file_origin = ev_new.file_origin
            # ignore list-array type changes
            if 'color_codes' in ev_old.annotations:
                ev_old.annotations['color_codes'] = list(
                    ev_old.annotations['color_codes'])
            assert_same_sub_schema(ev_old, ev_new)
            assert_same_annotations(ev_old, ev_new)
            assert_same_array_annotations(ev_old, ev_new)
        print('Events are equivalent.')

        for ep_old, ep_new in zip(seg_old.epochs, seg_new.epochs):
            # ignoring differences in the file_origin attribute
            ep_old.file_origin = ep_new.file_origin
            assert_same_sub_schema(ep_old, ep_new)
            assert_same_annotations(ep_old, ep_new)
            assert_same_array_annotations(ep_old, ep_new)
        print('Epochs are equivalent.')
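
# A minimal sketch of how downstream scripts might re-load the converted file
# (assuming the same neo version is installed):
#
#     with neo.NixIO(nix_filename, mode='ro') as io:
#         block = io.read_block()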