# neo.py
import glob
import json
import os

import mne
import numpy as np
import pyedflib

from .utils import upsample_events
from settings.config import settings

FINGERMODEL_IDS = settings.FINGERMODEL_IDS
FINGERMODEL_IDS_INVERSE = settings.FINGERMODEL_IDS_INVERSE
CONFIG_INFO = settings.CONFIG_INFO

def raw_loader(data_root, session_paths: dict,
               do_rereference=True,
               upsampled_epoch_length=1.,
               ori_epoch_length=5):
    """Load, preprocess and re-annotate the raw recordings of a set of sessions.

    Params:
        data_root (str): root directory containing the session folders
        session_paths (dict of lists): session folders grouped by finger model
        do_rereference (bool): apply common average rereference or not
        upsampled_epoch_length (None or float): epoch length in seconds after
            upsampling; None disables upsampling
        ori_epoch_length (int, float, dict or 'varied'): original epoch length in
            seconds; a dict maps event codes to per-code durations, 'varied'
            infers the duration of each trial from the event stream
    """
    raws_loaded = load_sessions(data_root, session_paths, do_rereference)
    # process events
    raws = []
    event_id = {}
    for finger_model, raw in raws_loaded:
        fs = raw.info['sfreq']
        events, _ = mne.events_from_annotations(
            raw, event_id={d: int(d) for d in np.unique(raw.annotations.description)})
        event_id = event_id | {FINGERMODEL_IDS_INVERSE[int(d)]: int(d)
                               for d in np.unique(raw.annotations.description)}

        if isinstance(ori_epoch_length, (int, float)):
            trial_duration = ori_epoch_length
        elif ori_epoch_length == 'varied':
            trial_duration = None
        elif isinstance(ori_epoch_length, dict):
            trial_duration = ori_epoch_length
        else:
            raise ValueError(f'Unsupported epoch_length {ori_epoch_length}')

        events = reconstruct_events(events, fs, trial_duration=trial_duration)
        if upsampled_epoch_length is not None:
            events = upsample_events(events, int(fs * upsampled_epoch_length))

        event_desc = {e: FINGERMODEL_IDS_INVERSE[e] for e in np.unique(events[:, 2])}
        annotations = mne.annotations_from_events(events, fs, event_desc)
        raw.set_annotations(annotations)
        raws.append(raw)

    raws = mne.concatenate_raws(raws)
    raws.load_data()
    return raws, event_id
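
# Illustrative shape of raw_loader's return values (the concrete finger-model
# names are assumptions here; the real ones come from settings.FINGERMODEL_IDS):
#   raws     -> one concatenated mne.io.Raw whose annotations mark the
#               reconstructed (and optionally upsampled) trials
#   event_id -> e.g. {'cylinder': 1, 'ball': 2}, mapping finger-model names to
#               their integer event codes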

def preprocessing(raw, do_rereference=True):
    raw.load_data()
    if do_rereference:
        # common average reference
        raw.set_eeg_reference('average')
    # high-pass filter at 1 Hz
    raw = raw.filter(1, None)
    # notch out 50 Hz mains and its harmonics
    raw = raw.notch_filter([50, 100, 150], trans_bandwidth=3, verbose=False)
    return raw

def reconstruct_events(events, fs, trial_duration=5):
    """Reconstruct the individual movement trials from the event sequence.

    Args:
        events (np.ndarray): (onset, duration, code) event array
        fs (float): sampling frequency in Hz
        trial_duration (float, None or dict): fixed trial length in seconds;
            None means the trial length is variable and inferred from the gap to
            the next trial, a dict maps event codes to per-code trial lengths
    """
    # keep only the first event of each run of identical codes (one per trial)
    trials_ind_deduplicated = np.flatnonzero(np.diff(events[:, 2], prepend=0) != 0)
    events_new = events[trials_ind_deduplicated]
    if trial_duration is None:
        # variable length: duration is the distance to the next trial onset
        events_new[:-1, 1] = np.diff(events_new[:, 0])
        events_new[-1, 1] = events[-1, 0] - events_new[-1, 0]
    elif isinstance(trial_duration, dict):
        # per-code fixed durations
        for e in trial_duration.keys():
            events_new[events_new[:, 2] == e, 1] = int(trial_duration[e] * fs)
    else:
        # single fixed duration for all trials
        events_new[:, 1] = int(trial_duration * fs)
    return events_new
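
# Worked example for reconstruct_events (illustrative numbers only): with
# fs=1000 and trial_duration=5, an event stream
#   [[   0, 0, 1],
#    [ 100, 0, 1],
#    [ 200, 0, 2]]
# keeps only the first sample of each run of identical codes and assigns every
# surviving trial a duration of 5 * fs = 5000 samples:
#   [[   0, 5000, 1],
#    [ 200, 5000, 2]]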

def load_sessions(data_root, session_names: dict, do_rereference=True):
    # return raws for the different finger models in an interleaved manner
    raw_cnt = sum(len(session_names[k]) for k in session_names)
    raws = []
    i = 0
    while i < raw_cnt:
        for finger_model in session_names.keys():
            try:
                s = session_names[finger_model].pop(0)
                i += 1
            except IndexError:
                continue

            if glob.glob(os.path.join(data_root, s, 'evt.bdf')):
                # neo format
                raw = load_neuracle(os.path.join(data_root, s))
            else:
                # kraken format
                data_file = glob.glob(os.path.join(data_root, s, '*.bdf'))[0]
                raw = mne.io.read_raw_bdf(data_file)

            # preprocess raw
            raw = preprocessing(raw, do_rereference)
            # append to the result list
            raws.append((finger_model, raw))
    return raws

def load_neuracle(data_dir, data_type='ecog'):
    """Neuracle file loader (data.bdf + evt.bdf + recordInformation.json).

    :param data_dir: root data dir for the experiment
    :param data_type: channel type assigned to every channel (e.g. 'ecog')
    :return:
        raw: mne.io.RawArray with annotations read from evt.bdf (if present)
    """
    f = {
        'data': os.path.join(data_dir, 'data.bdf'),
        'evt': os.path.join(data_dir, 'evt.bdf'),
        'info': os.path.join(data_dir, 'recordInformation.json')
    }
    # read recording metadata
    with open(f['info'], 'r') as json_file:
        record_info = json.load(json_file)
    start_time_point = record_info['DataFileInformations'][0]['BeginTimeStamp']
    sfreq = record_info['SampleRate']

    # read data
    f_data = pyedflib.EdfReader(f['data'])
    ch_names = f_data.getSignalLabels()
    data = np.array([f_data.readSignal(i) for i in range(f_data.signals_in_file)]) * 1e-6  # to Volt
    info = mne.create_info(ch_names, sfreq, [data_type] * len(ch_names))
    raw = mne.io.RawArray(data, info)

    # read events
    try:
        f_evt = pyedflib.EdfReader(f['evt'])
        onset, duration, content = f_evt.readAnnotations()
        onset = np.array(onset) - start_time_point * 1e-3  # correct by the recording start time
        onset = (onset * sfreq).astype(np.int64)
        try:
            content = content.astype(np.int64)  # use the original event codes
        except ValueError:
            # non-numeric markers: map each unique marker string to an integer code
            event_mapping = {c: i for i, c in enumerate(np.unique(content))}
            content = [event_mapping[i] for i in content]
        duration = (np.array(duration) * sfreq).astype(np.int64)
        events = np.stack((onset, duration, content), axis=1)
        annotations = mne.annotations_from_events(events, sfreq)
        raw.set_annotations(annotations)
    except OSError:
        # no event file: return the raw recording without annotations
        pass
    return raw
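

# Minimal usage sketch. The data root, session folders and finger-model keys
# below are hypothetical; the real keys must match settings.FINGERMODEL_IDS.
# Because of the relative import above, run this as a module from within its package.
if __name__ == '__main__':
    example_sessions = {
        'cylinder': ['sub01_session1'],  # hypothetical finger model -> session folders
        'ball': ['sub01_session2'],
    }
    raws, event_id = raw_loader('/path/to/data_root', example_sessions,
                                do_rereference=True,
                                upsampled_epoch_length=1.,
                                ori_epoch_length=5)
    print(raws, event_id)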