Coverage for ibllib/io/raw_data_loaders.py: 88%
450 statements
coverage.py v7.7.0, created at 2025-03-17 15:25 +0000
1"""
2Raw Data Loader functions for PyBpod rig.
4Module contains one loader function per raw datafile.
5"""
6import re
7import json
8import logging
9import wave
10from collections import OrderedDict
11from datetime import datetime
12from pathlib import Path, PureWindowsPath
13from typing import Union
15from dateutil import parser as dateparser
16from packaging import version
17import numpy as np
18import pandas as pd
20from iblutil.io import jsonable
21from ibllib.io.video import assert_valid_label
22from ibllib.time import uncycle_pgts, convert_pgts, date2isostr
24_logger = logging.getLogger(__name__)


def trial_times_to_times(raw_trial):
    """
    Parse and convert all trial timestamps to "absolute" time.
    Float64 seconds from session start.

    0---BpodStart---TrialStart0---------TrialEnd0-----TrialStart1---TrialEnd1...0---ts0---ts1---
    tsN...absTS = tsN + TrialStartN - BpodStart

    Bpod timestamps are in microseconds (µs)
    PyBpod timestamps are in seconds (s)

    :param raw_trial: raw trial data
    :type raw_trial: dict
    :return: trial data with modified timestamps
    :rtype: dict
    """
    ts_bs = raw_trial['behavior_data']['Bpod start timestamp']
    ts_ts = raw_trial['behavior_data']['Trial start timestamp']
    # ts_te = raw_trial['behavior_data']['Trial end timestamp']

    def convert(ts):
        return ts + ts_ts - ts_bs

    converted_events = {}
    for k, v in raw_trial['behavior_data']['Events timestamps'].items():
        converted_events.update({k: [convert(i) for i in v]})
    raw_trial['behavior_data']['Events timestamps'] = converted_events

    converted_states = {}
    for k, v in raw_trial['behavior_data']['States timestamps'].items():
        converted_states.update({k: [[convert(i) for i in x] for x in v]})
    raw_trial['behavior_data']['States timestamps'] = converted_states

    shift = raw_trial['behavior_data']['Bpod start timestamp']
    raw_trial['behavior_data']['Bpod start timestamp'] -= shift
    raw_trial['behavior_data']['Trial start timestamp'] -= shift
    raw_trial['behavior_data']['Trial end timestamp'] -= shift
    assert raw_trial['behavior_data']['Bpod start timestamp'] == 0
    return raw_trial
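
# Example (illustrative, not part of the original module): how the conversion above maps a
# within-trial timestamp to session-absolute seconds. Values below are made up.
#
#   trial = {'behavior_data': {
#       'Bpod start timestamp': 0.0, 'Trial start timestamp': 12.5, 'Trial end timestamp': 14.0,
#       'Events timestamps': {'BNC1High': [0.1]}, 'States timestamps': {'stim_on': [[0.1, 0.2]]}}}
#   trial = trial_times_to_times(trial)
#   # trial['behavior_data']['Events timestamps']['BNC1High'] == [12.6]  (0.1 + 12.5 - 0.0)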


def load_bpod(session_path, task_collection='raw_behavior_data'):
    """
    Load both settings and data from bpod (.json and .jsonable)

    :param session_path: Absolute path of session folder
    :param task_collection: Collection within session path with behavior data
    :return: dict settings and list of dicts data
    """
    return load_settings(session_path, task_collection), load_data(session_path, task_collection)


def load_data(session_path: Union[str, Path], task_collection='raw_behavior_data', time='absolute'):
    """
    Load PyBpod data files (.jsonable).

    Bpod timestamps are in microseconds (µs)
    PyBpod timestamps are in seconds (s)

    :param session_path: Absolute path of session folder
    :type session_path: str, Path
    :return: A list of length ntrials, each trial being a dictionary
    :rtype: list of dicts
    """
    if session_path is None:
        _logger.warning('No data loaded: session_path is None')
        return
    path = Path(session_path).joinpath(task_collection)
    path = next(path.glob('_iblrig_taskData.raw*.jsonable'), None)
    if not path:
        _logger.warning('No data loaded: could not find raw data file')
        return None
    data = jsonable.read(path)
    if time == 'absolute':
        data = [trial_times_to_times(t) for t in data]
    return data
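
# Example (illustrative): load settings and trial data together for a session. The path is a
# placeholder; `load_bpod` simply wraps `load_settings` and `load_data`.
#
#   settings, data = load_bpod('/data/subject/2024-01-01/001')
#   n_trials = len(data) if data else 0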


def load_camera_frameData(session_path, camera: str = 'left', raw: bool = False) -> pd.DataFrame:
    """Loads binary frame data from Bonsai camera recording workflow.

    Args:
        session_path (StrPath): Path to session folder
        camera (str, optional): Load FrameData for a specific camera. Defaults to 'left'.
        raw (bool, optional): Whether to return raw or parsed data. Defaults to False.

    Returns:
        parsed: (raw=False, default)
            pandas.DataFrame: 4 columns: {
                Timestamp,              # float64 (seconds from session start)
                embeddedTimeStamp,      # float64 (seconds from session start)
                embeddedFrameCounter,   # int64 (frame number from session start)
                embeddedGPIOPinState    # object (state of each of the 4 GPIO pins as a
                                        # list of numpy boolean arrays,
                                        # e.g. np.array([True, False, False, False]))
            }
        raw:
            pandas.DataFrame: 4 int64 columns: {
                Timestamp,              # UTC ticks from BehaviorPC
                                        # (100s of ns since midnight 1/1/0001)
                embeddedTimeStamp,      # camera timestamp (needs uncycling and conversion)
                embeddedFrameCounter,   # frame counter (int)
                embeddedGPIOPinState    # GPIO pin state, integer representation of the 4 pins
            }
    """
    camera = assert_valid_label(camera)
    fpath = Path(session_path).joinpath("raw_video_data")
    fpath = next(fpath.glob(f"_iblrig_{camera}Camera.frameData*.bin"), None)
    assert fpath, f"{fpath}\nFile not found: could not find bin file for cam <{camera}>"
    rdata = np.fromfile(fpath, dtype=np.float64)
    assert rdata.size % 4 == 0, "Dimension mismatch: bin file length is not mod 4"
    rows = int(rdata.size / 4)
    data = np.reshape(rdata.astype(np.int64), (rows, 4))
    df_dict = dict.fromkeys(
        ["Timestamp", "embeddedTimeStamp", "embeddedFrameCounter", "embeddedGPIOPinState"]
    )
    df = pd.DataFrame(data, columns=df_dict.keys())
    if raw:
        return df

    df_dict["Timestamp"] = (data[:, 0] - data[0, 0]) / 10_000_000  # in seconds from first frame
    camerats = uncycle_pgts(convert_pgts(data[:, 1]))
    df_dict["embeddedTimeStamp"] = camerats - camerats[0]  # in seconds from first frame
    df_dict["embeddedFrameCounter"] = data[:, 2] - data[0, 2]  # from start
    gpio = (np.right_shift(np.tile(data[:, 3], (4, 1)).T, np.arange(31, 27, -1)) & 0x1) == 1
    df_dict["embeddedGPIOPinState"] = [np.array(x) for x in gpio.tolist()]

    parsed_df = pd.DataFrame.from_dict(df_dict)
    return parsed_df
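
# Example (illustrative): the parsed 'Timestamp' column above divides the raw UTC tick
# difference by 10_000_000 because one tick is 100 ns, so 1e7 ticks make one second. A rough
# frame-rate estimate from the camera clock could then be computed as follows (placeholder path):
#
#   df = load_camera_frameData('/data/subject/2024-01-01/001', camera='left')
#   frame_rate_estimate = 1 / np.median(np.diff(df['embeddedTimeStamp'].values))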


def load_camera_ssv_times(session_path, camera: str):
    """
    Load the bonsai frame and camera timestamps from Camera.timestamps.ssv

    NB: For some sessions the frame times are in the first column, in others the order is reversed.
    NB: If using the new bin file, the bonsai_times are floats in seconds since the first frame.
    :param session_path: Absolute path of session folder
    :param camera: Name of the camera to load, e.g. 'left'
    :return: array of datetimes, array of frame times in seconds
    """
    camera = assert_valid_label(camera)
    video_path = Path(session_path).joinpath('raw_video_data')
    if next(video_path.glob(f'_iblrig_{camera}Camera.frameData*.bin'), None):
        df = load_camera_frameData(session_path, camera=camera)
        return df['Timestamp'].values, df['embeddedTimeStamp'].values

    file = next(video_path.glob(f'_iblrig_{camera.lower()}Camera.timestamps*.ssv'), None)
    if not file:
        file = str(video_path.joinpath(f'_iblrig_{camera.lower()}Camera.timestamps.ssv'))
        raise FileNotFoundError(file + ' not found')
    # NB: Numpy has deprecated support for non-naive timestamps.
    # Converting them is extremely slow: 6000 timestamps takes 0.8615s vs 0.0352s.
    # from datetime import timezone
    # c = {0: lambda x: datetime.fromisoformat(x).astimezone(timezone.utc).replace(tzinfo=None)}

    # Determine the order of the columns by reading one line and testing whether the first value
    # is an integer or not.
    with open(file, 'r') as f:
        line = f.readline()
    type_map = OrderedDict(bonsai='<M8[ns]', camera='<u4')
    try:
        int(line.split(' ')[1])
    except ValueError:
        type_map.move_to_end('bonsai')
    ssv_params = dict(names=type_map.keys(), dtype=','.join(type_map.values()), delimiter=' ')
    ssv_times = np.genfromtxt(file, **ssv_params)  # np.loadtxt is slower for some reason
    bonsai_times = ssv_times['bonsai']
    camera_times = uncycle_pgts(convert_pgts(ssv_times['camera']))
    return bonsai_times, camera_times


def load_embedded_frame_data(session_path, label: str, raw=False):
    """
    Load the embedded frame count and GPIO for a given session.  If the file doesn't exist,
    or is empty, None values are returned.

    :param session_path: Absolute path of session folder
    :param label: The specific video to load, one of ('left', 'right', 'body')
    :param raw: If True the raw data are returned without preprocessing, otherwise the frame
     count is returned starting from 0 and the GPIO is returned as a dict of indices
    :return: The frame count, GPIO
    """
    count = load_camera_frame_count(session_path, label, raw=raw)
    gpio = load_camera_gpio(session_path, label, as_dicts=not raw)
    return count, gpio


def load_camera_frame_count(session_path, label: str, raw=True):
    """
    Load the embedded frame count for a given session.  If the file doesn't exist, or is empty,
    a None value is returned.

    :param session_path: Absolute path of session folder
    :param label: The specific video to load, one of ('left', 'right', 'body')
    :param raw: If True the raw data are returned without preprocessing, otherwise the frame
     count is returned starting from 0
    :return: The frame count
    """
    if session_path is None:
        return

    label = assert_valid_label(label)
    video_path = Path(session_path).joinpath('raw_video_data')
    if next(video_path.glob(f'_iblrig_{label}Camera.frameData*.bin'), None):
        df = load_camera_frameData(session_path, camera=label)
        return df['embeddedFrameCounter'].values

    # Load frame count
    glob = video_path.glob(f'_iblrig_{label}Camera.frame_counter*.bin')
    count_file = next(glob, None)
    count = np.fromfile(count_file, dtype=np.float64).astype(int) if count_file else []
    if len(count) == 0:
        return
    if not raw:
        count -= count[0]  # start from zero
    return count


def load_camera_gpio(session_path, label: str, as_dicts=False):
    """
    Load the GPIO for a given session.  If the file doesn't exist, or is empty, a None value is
    returned.

    The raw binary file contains uint32 values (saved as doubles) where the first 4 bits
    represent the state of each of the 4 GPIO pins.  The array is expanded to an n x 4 array by
    shifting each bit to the end and checking whether it is 0 (low state) or 1 (high state).

    :param session_path: Absolute path of session folder
    :param label: The specific video to load, one of ('left', 'right', 'body')
    :param as_dicts: If False the raw data are returned as a boolean array with shape
     (n_frames, n_pins), otherwise the GPIO is returned as a list of dictionaries with keys
     ('indices', 'polarities').
    :return: An n x 4 boolean array where columns represent the state of GPIO pins 1-4.
     If as_dicts is True, a list of dicts is returned with keys ('indices', 'polarities'),
     or None if the dictionary is empty.
    """
    if session_path is None:
        return
    raw_path = Path(session_path).joinpath('raw_video_data')
    label = assert_valid_label(label)

    # Load pin state
    if next(raw_path.glob(f'_iblrig_{label}Camera.frameData*.bin'), False):
        df = load_camera_frameData(session_path, camera=label, raw=False)
        gpio = np.array([x for x in df['embeddedGPIOPinState'].values])
        if len(gpio) == 0:
            return [None] * 4 if as_dicts else None
    else:
        GPIO_file = next(raw_path.glob(f'_iblrig_{label}Camera.GPIO*.bin'), None)
        # This deals with missing and empty files the same
        gpio = np.fromfile(GPIO_file, dtype=np.float64).astype(np.uint32) if GPIO_file else []
        # Check values make sense (4 pins = 16 possible values)
        if not np.isin(gpio, np.left_shift(np.arange(2 ** 4, dtype=np.uint32), 32 - 4)).all():
            _logger.warning('Unexpected GPIO values; decoding may fail')
        if len(gpio) == 0:
            return [None] * 4 if as_dicts else None
        # 4 pins represented as uint32
        # For each pin, shift its bit to the end and check the bit is set
        gpio = (np.right_shift(np.tile(gpio, (4, 1)).T, np.arange(31, 27, -1)) & 0x1) == 1

    if as_dicts:
        if not gpio.any():
            _logger.error('No GPIO changes')
            return [None] * 4
        # Find state changes for each pin and construct a dict of indices and polarities for each
        edges = np.vstack((gpio[0, :], np.diff(gpio.astype(int), axis=0)))
        # gpio = [(ind := np.where(edges[:, i])[0], edges[ind, i]) for i in range(4)]
        # gpio = [dict(zip(('indices', 'polarities'), x)) for x in gpio_]  # py3.8
        gpio = [{'indices': np.where(edges[:, i])[0],
                 'polarities': edges[edges[:, i] != 0, i]}
                for i in range(4)]
        # Replace empty dicts with None
        gpio = [None if x['indices'].size == 0 else x for x in gpio]

    return gpio
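
# Example (illustrative): the bit arithmetic used above, applied to a single raw GPIO word.
# The pin states live in the top 4 bits of the uint32, so shifting by 31..28 and masking with
# 0x1 recovers pins 1-4. The value here is made up.
#
#   word = np.uint32(0b1010 << 28)
#   pins = (np.right_shift(np.tile(word, (4, 1)).T, np.arange(31, 27, -1)) & 0x1) == 1
#   # pins -> array([[ True, False,  True, False]])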


def _read_settings_json_compatibility_enforced(settings):
    """
    Patch iblrig settings for compatibility across rig versions.

    Parameters
    ----------
    settings : pathlib.Path, dict
        Either a _iblrig_taskSettings.raw.json file path or the loaded settings.

    Returns
    -------
    dict
        The task settings patched for compatibility.
    """
    if isinstance(settings, dict):
        md = settings.copy()
    else:
        with open(settings) as js:
            md = json.load(js)
    if 'IS_MOCK' not in md:
        md['IS_MOCK'] = False
    # Many v < 8 sessions had both version and version tag keys. v > 8 have a version tag.
    # Some sessions have neither key. From v8 onwards we will use IBLRIG_VERSION to test rig
    # version, however some places may still use the version tag.
    if 'IBLRIG_VERSION_TAG' not in md.keys():
        md['IBLRIG_VERSION_TAG'] = md.get('IBLRIG_VERSION', '')
    if 'IBLRIG_VERSION' not in md.keys():
        md['IBLRIG_VERSION'] = md['IBLRIG_VERSION_TAG']
    elif all([md['IBLRIG_VERSION'], md['IBLRIG_VERSION_TAG']]):
        # This may not be an issue; not sure what the intended difference between these keys was
        assert md['IBLRIG_VERSION'] == md['IBLRIG_VERSION_TAG'], 'version and version tag mismatch'
    # Test version can be parsed. If not, log an error and set the version to nothing
    try:
        version.parse(md['IBLRIG_VERSION'] or '0')
    except version.InvalidVersion as ex:
        _logger.error('%s in iblrig settings, this may affect extraction', ex)
        # try a more relaxed version parse
        laxed_parse = re.search(r'^\d+\.\d+\.\d+', md['IBLRIG_VERSION'])
        # Set the tag as the invalid version
        md['IBLRIG_VERSION_TAG'] = md['IBLRIG_VERSION']
        # overwrite version with either successfully parsed one or an empty string
        md['IBLRIG_VERSION'] = laxed_parse.group() if laxed_parse else ''
    if 'device_sound' not in md:
        # sound device must be defined in version 8 and later  # FIXME this assertion will cause tests to break
        assert version.parse(md['IBLRIG_VERSION'] or '0') < version.parse('8.0.0')
        # in v7 we must infer the device from the sampling frequency if SD is None
        if 'sounddevice' in md.get('SD', ''):
            device = 'xonar'
        else:
            freq_map = {192000: 'xonar', 96000: 'harp', 44100: 'sysdefault'}
            device = freq_map.get(md.get('SOUND_SAMPLE_FREQ'), 'unknown')
        md['device_sound'] = {'OUTPUT': device}
    # 2018-12-05 Version 3.2.3 fixes (permanent fixes in IBL_RIG from 3.2.4 on)
    if md['IBLRIG_VERSION'] == '':
        pass
    elif version.parse(md['IBLRIG_VERSION']) >= version.parse('8.0.0'):
        md['SESSION_NUMBER'] = str(md['SESSION_NUMBER']).zfill(3)
        md['PYBPOD_BOARD'] = md['RIG_NAME']
        md['PYBPOD_CREATOR'] = (md['ALYX_USER'], '')
        md['SESSION_DATE'] = md['SESSION_START_TIME'][:10]
        md['SESSION_DATETIME'] = md['SESSION_START_TIME']
    elif version.parse(md['IBLRIG_VERSION']) <= version.parse('3.2.3'):
        if 'LAST_TRIAL_DATA' in md.keys():
            md.pop('LAST_TRIAL_DATA')
        if 'weighings' in md['PYBPOD_SUBJECT_EXTRA'].keys():
            md['PYBPOD_SUBJECT_EXTRA'].pop('weighings')
        if 'water_administration' in md['PYBPOD_SUBJECT_EXTRA'].keys():
            md['PYBPOD_SUBJECT_EXTRA'].pop('water_administration')
        if 'IBLRIG_COMMIT_HASH' not in md.keys():
            md['IBLRIG_COMMIT_HASH'] = 'f9d8905647dbafe1f9bdf78f73b286197ae2647b'
        # parse the date format to Django supported ISO
        dt = dateparser.parse(md['SESSION_DATETIME'])
        md['SESSION_DATETIME'] = date2isostr(dt)
        # add the weight key if it doesn't already exist
        if 'SUBJECT_WEIGHT' not in md:
            md['SUBJECT_WEIGHT'] = None
    return md
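
# Example (illustrative): the relaxed parse above recovers a usable version string from a
# non-PEP 440 value. Purely for demonstration; the input string is made up.
#
#   m = re.search(r'^\d+\.\d+\.\d+', '6.6.1lala')
#   m.group()  # -> '6.6.1'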


def load_settings(session_path: Union[str, Path], task_collection='raw_behavior_data'):
    """
    Load PyBpod Settings files (.json).

    [description]

    :param session_path: Absolute path of session folder
    :type session_path: str, Path
    :return: Settings dictionary
    :rtype: dict
    """
    if session_path is None:
        _logger.warning("No data loaded: session_path is None")
        return
    path = Path(session_path).joinpath(task_collection)
    path = next(path.glob("_iblrig_taskSettings.raw*.json"), None)
    if not path:
        _logger.warning("No data loaded: could not find raw settings file")
        return None
    settings = _read_settings_json_compatibility_enforced(path)
    return settings


def load_stim_position_screen(session_path, task_collection='raw_behavior_data'):
    path = Path(session_path).joinpath(task_collection)
    path = next(path.glob("_iblrig_stimPositionScreen.raw*.csv"), None)

    data = pd.read_csv(path, sep=',', header=None, on_bad_lines='skip')
    data.columns = ['contrast', 'position', 'bns_ts']
    data['bns_ts'] = pd.to_datetime(data['bns_ts'])
    return data


def load_encoder_events(session_path, task_collection='raw_behavior_data', settings=False):
    """
    Load Rotary Encoder (RE) events raw data file.

    Assumes that a folder called "raw_behavior_data" exists in folder.

    Event numbers correspond to the following bpod states:
    1: correct / hide_stim
    2: stim_on
    3: closed_loop
    4: freeze_error / freeze_correct

    >>> data.columns
    >>> ['re_ts',   # Rotary Encoder Timestamp (ms)   'numpy.int64'
         'sm_ev',   # State Machine Event             'numpy.int64'
         'bns_ts']  # Bonsai Timestamp (int)          'pandas.Timestamp'
        # pd.to_datetime(data.bns_ts) to work in datetimes

    :param session_path: [description]
    :type session_path: [type]
    :return: dataframe w/ 3 cols and (ntrials * 3) lines
    :rtype: Pandas.DataFrame
    """
    if session_path is None:
        return
    path = Path(session_path).joinpath(task_collection)
    path = next(path.glob("_iblrig_encoderEvents.raw*.ssv"), None)
    if not settings:
        settings = load_settings(session_path, task_collection=task_collection)
    if settings is None or not settings.get('IBLRIG_VERSION'):
        settings = {'IBLRIG_VERSION': '100.0.0'}
        # auto-detect old files when version is not labeled
        with open(path) as fid:
            line = fid.readline()
        if line.startswith('Event') and 'StateMachine' in line:
            settings = {'IBLRIG_VERSION': '0.0.0'}
    if not path:
        return None
    if version.parse(settings['IBLRIG_VERSION']) >= version.parse('5.0.0'):
        return _load_encoder_events_file_ge5(path)
    else:
        return _load_encoder_events_file_lt5(path)


def _load_encoder_ssv_file(file_path, **kwargs):
    file_path = Path(file_path)
    if file_path.stat().st_size == 0:
        _logger.error(f"{file_path.name} is an empty file. ")
        raise ValueError(f"{file_path.name} is an empty file. ABORT EXTRACTION. ")
    return pd.read_csv(file_path, sep=' ', header=None, on_bad_lines='skip', **kwargs)


def _load_encoder_positions_file_lt5(file_path):
    """
    File loader without the session overhead
    :param file_path:
    :return: dataframe of encoder positions
    """
    data = _load_encoder_ssv_file(file_path,
                                  names=['_', 're_ts', 're_pos', 'bns_ts', '__'],
                                  usecols=['re_ts', 're_pos', 'bns_ts'])
    return _groom_wheel_data_lt5(data, label='_iblrig_encoderPositions.raw.ssv', path=file_path)


def _load_encoder_positions_file_ge5(file_path):
    """
    File loader without the session overhead
    :param file_path:
    :return: dataframe of encoder positions
    """
    data = _load_encoder_ssv_file(file_path,
                                  names=['re_ts', 're_pos', '_'],
                                  usecols=['re_ts', 're_pos'])
    return _groom_wheel_data_ge5(data, label='_iblrig_encoderPositions.raw.ssv', path=file_path)


def _load_encoder_events_file_lt5(file_path):
    """
    File loader without the session overhead
    :param file_path:
    :return: dataframe of encoder events
    """
    data = _load_encoder_ssv_file(file_path,
                                  names=['_', 're_ts', '__', 'sm_ev', 'bns_ts', '___'],
                                  usecols=['re_ts', 'sm_ev', 'bns_ts'])
    return _groom_wheel_data_lt5(data, label='_iblrig_encoderEvents.raw.ssv', path=file_path)


def _load_encoder_events_file_ge5(file_path):
    """
    File loader without the session overhead
    :param file_path:
    :return: dataframe of encoder events
    """
    data = _load_encoder_ssv_file(file_path,
                                  names=['re_ts', 'sm_ev', '_'],
                                  usecols=['re_ts', 'sm_ev'])
    return _groom_wheel_data_ge5(data, label='_iblrig_encoderEvents.raw.ssv', path=file_path)


def load_encoder_positions(session_path, task_collection='raw_behavior_data', settings=False):
    """
    Load Rotary Encoder (RE) positions from raw data file within a session path.

    Assumes that a folder called "raw_behavior_data" exists in folder.
    Positions are RE ticks [-512, 512] == [-180º, 180º]
    0 == trial stim init position
    Positive nums are rightwards movements (mouse) or RE CW (mouse)

    Variable line number, depends on movements.

    Raw datafile Columns:
        Position, RE timestamp, RE Position, Bonsai Timestamp

    Position is always equal to 'Position' so this column was dropped.

    >>> data.columns
    >>> ['re_ts',   # Rotary Encoder Timestamp (ms)    'numpy.int64'
         're_pos',  # Rotary Encoder position (ticks)  'numpy.int64'
         'bns_ts']  # Bonsai Timestamp                 'pandas.Timestamp'
        # pd.to_datetime(data.bns_ts) to work in datetimes

    :param session_path: Absolute path of session folder
    :type session_path: str
    :return: dataframe w/ 3 cols and N positions
    :rtype: Pandas.DataFrame
    """
    if session_path is None:
        return
    path = Path(session_path).joinpath(task_collection)
    path = next(path.glob("_iblrig_encoderPositions.raw*.ssv"), None)
    if not settings:
        settings = load_settings(session_path, task_collection=task_collection)
    if settings is None or not settings.get('IBLRIG_VERSION'):
        settings = {'IBLRIG_VERSION': '100.0.0'}
        # auto-detect old files when version is not labeled
        with open(path) as fid:
            line = fid.readline()
        if line.startswith('Position'):
            settings = {'IBLRIG_VERSION': '0.0.0'}
    if not path:
        _logger.warning("No data loaded: could not find raw encoderPositions file")
        return None
    if version.parse(settings['IBLRIG_VERSION']) >= version.parse('5.0.0'):
        return _load_encoder_positions_file_ge5(path)
    else:
        return _load_encoder_positions_file_lt5(path)
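
# Example (illustrative): converting encoder ticks to degrees, following the docstring above
# where 512 ticks correspond to 180 degrees. The session path is a placeholder.
#
#   positions = load_encoder_positions('/data/subject/2024-01-01/001')
#   degrees = positions['re_pos'].values * 180 / 512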


def load_encoder_trial_info(session_path, task_collection='raw_behavior_data'):
    """
    Load Rotary Encoder trial info from raw data file.

    Assumes that a folder called "raw_behavior_data" exists in folder.

    NOTE: The last trial probably has no data (trial info is sent on trial start
    and data is only saved on trial exit...). max(trialnum) should be N+1 if N
    is the amount of trial data saved.

    Raw datafile Columns:

    >>> data.columns
    >>> ['trial_num',      # Trial Number                     'numpy.int64'
         'stim_pos_init',  # Initial position of visual stim  'numpy.int64'
         'stim_contrast',  # Contrast of visual stimulus      'numpy.float64'
         'stim_freq',      # Frequency of gabor patch         'numpy.float64'
         'stim_angle',     # Angle of Gabor 0 = Vertical      'numpy.float64'
         'stim_gain',      # Wheel gain (mm/º of stim)        'numpy.float64'
         'stim_sigma',     # Size of patch                    'numpy.float64'
         'stim_phase',     # Phase of gabor                   'numpy.float64'
         'bns_ts']         # Bonsai Timestamp                 'pandas.Timestamp'
        # pd.to_datetime(data.bns_ts) to work in datetimes

    :param session_path: Absolute path of session folder
    :type session_path: str
    :return: dataframe w/ 9 cols and ntrials lines
    :rtype: Pandas.DataFrame
    """
    if session_path is None:
        return
    path = Path(session_path).joinpath(task_collection)
    path = next(path.glob("_iblrig_encoderTrialInfo.raw*.ssv"), None)
    if not path:
        return None
    data = pd.read_csv(path, sep=' ', header=None)
    data = data.drop([9], axis=1)
    data.columns = ['trial_num', 'stim_pos_init', 'stim_contrast', 'stim_freq',
                    'stim_angle', 'stim_gain', 'stim_sigma', 'stim_phase', 'bns_ts']
    # return _groom_wheel_data_lt5(data, label='_iblrig_encoderEvents.raw.ssv', path=path)
    return data


def load_ambient_sensor(session_path, task_collection='raw_behavior_data'):
    """
    Load Ambient Sensor data from session.

    Probably could be extracted to DatasetTypes:
    _ibl_trials.temperature_C, _ibl_trials.airPressure_mb,
    _ibl_trials.relativeHumidity
    Returns a list of dicts, one dict per trial.
    dict keys are:
    dict_keys(['Temperature_C', 'AirPressure_mb', 'RelativeHumidity'])

    :param session_path: Absolute path of session folder
    :type session_path: str
    :return: list of dicts
    :rtype: list
    """
    if session_path is None:
        return
    path = Path(session_path).joinpath(task_collection)
    path = next(path.glob("_iblrig_ambientSensorData.raw*.jsonable"), None)
    if not path:
        return None
    data = []
    with open(path, 'r') as f:
        for line in f:
            data.append(json.loads(line))
    return data


def load_mic(session_path, task_collection='raw_behavior_data'):
    """
    Load Microphone wav file to np.array of len nSamples

    :param session_path: Absolute path of session folder
    :type session_path: str
    :return: An array of values of the sound waveform
    :rtype: numpy.array
    """
    if session_path is None:
        return
    path = Path(session_path).joinpath(task_collection)
    path = next(path.glob("_iblrig_micData.raw*.wav"), None)
    if not path:
        return None
    fp = wave.open(path)
    nchan = fp.getnchannels()
    N = fp.getnframes()
    dstr = fp.readframes(N * nchan)
    data = np.frombuffer(dstr, np.int16)
    data = np.reshape(data, (-1, nchan))
    return data


def _clean_wheel_dataframe(data, label, path):
    if np.any(data.isna()):
        _logger.warning(label + ' has missing/incomplete records \n %s', path)
    # first step is to re-interpret as numeric objects if not already done
    for col in data.columns:
        if data[col].dtype == object and col not in ['bns_ts']:
            data[col] = pd.to_numeric(data[col], errors='coerce')
    # then drop Nans and duplicates
    data.dropna(inplace=True)
    data.drop_duplicates(keep='first', inplace=True)
    data.reset_index(inplace=True)
    # handle the clock resets when microseconds exceed uint32 max value
    drop_first = False
    data['re_ts'] = data['re_ts'].astype(np.double, copy=False)
    if any(np.diff(data['re_ts']) < 0):
        ind = np.where(np.diff(data['re_ts']) < 0)[0]
        for i in ind:
            # the first sample may be corrupt, in this case throw away
            if i <= 1:
                drop_first = i
                _logger.warning(label + ' rotary encoder positions timestamps'
                                        ' first sample corrupt ' + str(path))
            # if it's an uint32 wraparound, the diff should be close to 2 ** 32
            elif 32 - np.log2(data['re_ts'][i] - data['re_ts'][i + 1]) < 0.2:
                data.loc[i + 1:, 're_ts'] = data.loc[i + 1:, 're_ts'] + 2 ** 32
            # there is also the case where 2 positions are swapped and need to be swapped back
            elif data['re_ts'][i] > data['re_ts'][i + 1] > data['re_ts'][i - 1]:
                _logger.warning(label + ' rotary encoder timestamps swapped at index: ' +
                                str(i) + ' ' + str(path))
                a, b = data.iloc[i].copy(), data.iloc[i + 1].copy()
                data.iloc[i], data.iloc[i + 1] = b, a
            # if none of those 3 cases apply, raise an error
            else:
                _logger.error(label + ' Rotary encoder timestamps are not sorted.' + str(path))
                data.sort_values('re_ts', inplace=True)
                data.reset_index(inplace=True)
    if drop_first is not False:
        data.drop(data.loc[:drop_first].index, inplace=True)
        data = data.reindex()
    return data
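
# Example (illustrative): the wraparound test above flags a negative time jump whose magnitude
# is close to 2 ** 32 µs (its log2 is within 0.2 of 32), in which case 2 ** 32 is added to all
# later samples. The value here is made up.
#
#   jump = 2 ** 32 - 1000  # a near-full uint32 rollover, in µs
#   32 - np.log2(jump) < 0.2  # -> True, so the clock reset is undone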


def _groom_wheel_data_lt5(data, label='file ', path=''):
    """
    The whole purpose of this function is to account for variability and corruption in
    the wheel position files. There are many possible errors described below, but
    nothing excludes getting new ones.
    """
    data = _clean_wheel_dataframe(data, label, path)
    data.drop(data.loc[data.bns_ts.apply(len) != 33].index, inplace=True)
    # check if the time scale is in ms
    sess_len_sec = (datetime.strptime(data['bns_ts'].iloc[-1][:25], '%Y-%m-%dT%H:%M:%S.%f') -
                    datetime.strptime(data['bns_ts'].iloc[0][:25], '%Y-%m-%dT%H:%M:%S.%f')).seconds
    if data['re_ts'].iloc[-1] / (sess_len_sec + 1e-6) < 1e5:  # should be 1e6 normally
        _logger.warning('Rotary encoder reset logs events in ms instead of us: ' +
                        'RE firmware needs upgrading and wheel velocity is potentially inaccurate')
        data['re_ts'] = data['re_ts'] * 1000
    return data


def _groom_wheel_data_ge5(data, label='file ', path=''):
    """
    The whole purpose of this function is to account for variability and corruption in
    the wheel position files. There are many possible errors described below, but
    nothing excludes getting new ones.
    """
    data = _clean_wheel_dataframe(data, label, path)
    # check if the time scale is in ms
    if (data['re_ts'].iloc[-1] - data['re_ts'].iloc[0]) / 1e6 < 20:
        _logger.warning('Rotary encoder reset logs events in ms instead of us: ' +
                        'RE firmware needs upgrading and wheel velocity is potentially inaccurate')
        data['re_ts'] = data['re_ts'] * 1000
    return data


def sync_trials_robust(t0, t1, diff_threshold=0.001, drift_threshold_ppm=200, max_shift=5,
                       return_index=False):
    """
    Attempts to find matching timestamps in 2 time-series that have an offset, are drifting,
    and are most likely incomplete: sizes don't have to match, some pulses may be missing
    in any series.
    Only works with irregular time series as it relies on the derivative to match sync.
    :param t0:
    :param t1:
    :param diff_threshold:
    :param drift_threshold_ppm: (200)
    :param max_shift: (5)
    :param return_index: (False)
    :return:
    """
    nsync = min(t0.size, t1.size)
    dt0 = np.diff(t0)
    dt1 = np.diff(t1)
    ind = np.zeros_like(dt0) * np.nan
    i0 = 0
    i1 = 0
    cdt = np.nan  # the current time difference between the two series to compute drift
    while i0 < (nsync - 1):
        # look in the next max_shift events the ones whose derivative match
        isearch = np.arange(i1, min(max_shift + i1, dt1.size))
        dec = np.abs(dt0[i0] - dt1[isearch]) < diff_threshold
        # another constraint is to check the dt for the maximum drift
        if ~np.isnan(cdt):
            drift_ppm = np.abs((cdt - (t0[i0] - t1[isearch])) / dt1[isearch]) * 1e6
            dec = np.logical_and(dec, drift_ppm <= drift_threshold_ppm)
        # if one is found
        if np.any(dec):
            ii1 = np.where(dec)[0][0]
            ind[i0] = i1 + ii1
            i1 += ii1 + 1
            cdt = t0[i0 + 1] - t1[i1 + ii1]
        i0 += 1
    it0 = np.where(~np.isnan(ind))[0]
    it1 = ind[it0].astype(int)
    ind0 = np.unique(np.r_[it0, it0 + 1])
    ind1 = np.unique(np.r_[it1, it1 + 1])
    if return_index:
        return t0[ind0], t1[ind1], ind0, ind1
    else:
        return t0[ind0], t1[ind1]
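
# Example (illustrative): match two irregular pulse trains where the second one has a constant
# offset and a missing pulse. The matched outputs have equal length; values are made up.
#
#   t0 = np.array([0.0, 1.1, 2.3, 2.9, 4.2, 5.8])
#   t1 = t0[np.array([0, 1, 3, 4, 5])] + 10.0  # pulse at 2.3 missing, 10 s offset
#   a, b = sync_trials_robust(t0, t1)
#   # a -> [0.0, 1.1, 2.9, 4.2], b -> [10.0, 11.1, 12.9, 14.2]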


def load_bpod_fronts(session_path: str, data: list = False, task_collection: str = 'raw_behavior_data') -> list:
    """load_bpod_fronts
    Loads BNC1 and BNC2 bpod channels times and polarities from session_path

    :param session_path: a valid session_path
    :type session_path: str
    :param data: pre-loaded raw data dict, defaults to False
    :type data: list, optional
    :return: List of dicts BNC1 and BNC2 {"times": np.array, "polarities": np.array}
    :rtype: list
    """
    if not data:
        data = load_data(session_path, task_collection)

    BNC1_fronts = np.array([[np.nan, np.nan]])
    BNC2_fronts = np.array([[np.nan, np.nan]])
    for tr in data:
        BNC1_fronts = np.append(
            BNC1_fronts,
            np.array(
                [
                    [x, 1]
                    for x in tr["behavior_data"]["Events timestamps"].get("BNC1High", [np.nan])
                ]
            ),
            axis=0,
        )
        BNC1_fronts = np.append(
            BNC1_fronts,
            np.array(
                [
                    [x, -1]
                    for x in tr["behavior_data"]["Events timestamps"].get("BNC1Low", [np.nan])
                ]
            ),
            axis=0,
        )
        BNC2_fronts = np.append(
            BNC2_fronts,
            np.array(
                [
                    [x, 1]
                    for x in tr["behavior_data"]["Events timestamps"].get("BNC2High", [np.nan])
                ]
            ),
            axis=0,
        )
        BNC2_fronts = np.append(
            BNC2_fronts,
            np.array(
                [
                    [x, -1]
                    for x in tr["behavior_data"]["Events timestamps"].get("BNC2Low", [np.nan])
                ]
            ),
            axis=0,
        )

    BNC1_fronts = BNC1_fronts[1:, :]
    BNC1_fronts = BNC1_fronts[BNC1_fronts[:, 0].argsort()]
    BNC2_fronts = BNC2_fronts[1:, :]
    BNC2_fronts = BNC2_fronts[BNC2_fronts[:, 0].argsort()]

    BNC1 = {"times": BNC1_fronts[:, 0], "polarities": BNC1_fronts[:, 1]}
    BNC2 = {"times": BNC2_fronts[:, 0], "polarities": BNC2_fronts[:, 1]}

    return [BNC1, BNC2]


def get_port_events(trial: dict, name: str = '') -> list:
    """get_port_events
    Return all event timestamps from bpod raw data trial that match 'name'
    --> looks in trial['behavior_data']['Events timestamps']

    :param trial: raw trial dict
    :type trial: dict
    :param name: name of event, defaults to ''
    :type name: str, optional
    :return: Sorted list of event timestamps
    :rtype: list
    TODO: add polarities?
    """
    out: list = []
    events = trial['behavior_data']['Events timestamps']
    for k in events:
        if name in k:
            out.extend(events[k])
    out = sorted(out)

    return out
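
# Example (illustrative): collect all Port1 events for the first trial of a session. The key
# name is substring-matched, so 'Port1' would capture events such as 'Port1In' and 'Port1Out'.
# The session path is a placeholder.
#
#   trials = load_data('/data/subject/2024-01-01/001')
#   port1_times = get_port_events(trials[0], name='Port1')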


def load_widefield_mmap(session_path, dtype=np.uint16, shape=(540, 640), n_frames=None, mode='r'):
    """
    TODO Document this function

    Parameters
    ----------
    session_path

    Returns
    -------

    """
    filepath = Path(session_path).joinpath('raw_widefield_data').glob('widefield.raw.*.dat')
    filepath = next(filepath, None)
    if not filepath:
        _logger.warning("No data loaded: could not find raw data file")
        return None

    if type(dtype) is str:
        dtype = np.dtype(dtype)

    if n_frames is None:
        # Get the number of samples from the file size
        n_frames = int(filepath.stat().st_size / (np.prod(shape) * dtype.itemsize))

    return np.memmap(str(filepath), mode=mode, dtype=dtype, shape=(int(n_frames), *shape))


def patch_settings(session_path, collection='raw_behavior_data',
                   new_collection=None, subject=None, number=None, date=None):
    """Modify various details in a settings file.

    This function makes it easier to change things like subject name in a settings file, as it
    will modify the subject name in the myriad paths. NB: This saves the settings into the same
    location it was loaded from.

    Parameters
    ----------
    session_path : str, pathlib.Path
        The session path containing the settings file.
    collection : str
        The subfolder containing the settings file.
    new_collection : str
        An optional new subfolder to change in the settings paths.
    subject : str
        An optional new subject name to change in the settings.
    number : str, int
        An optional new number to change in the settings.
    date : str, datetime.date
        An optional date to change in the settings.

    Returns
    -------
    dict
        The modified settings.

    Examples
    --------
    File is in /data/subject/2020-01-01/002/raw_behavior_data. Patch the file then move to new location.
    >>> patch_settings('/data/subject/2020-01-01/002', number='001')
    >>> shutil.move('/data/subject/2020-01-01/002/raw_behavior_data/', '/data/subject/2020-01-01/001/raw_behavior_data/')

    File is moved into new collection within the same session, then patched.
    >>> shutil.move('./subject/2020-01-01/002/raw_task_data_00/', './subject/2020-01-01/002/raw_task_data_01/')
    >>> patch_settings('/data/subject/2020-01-01/002', collection='raw_task_data_01', new_collection='raw_task_data_01')

    Update subject, date and number.
    >>> new_session_path = Path('/data/foobar/2024-02-24/002')
    >>> old_session_path = Path('/data/baz/2024-02-23/001')
    >>> patch_settings(old_session_path, collection='raw_task_data_00',
    ...                subject=new_session_path.parts[-3], date=new_session_path.parts[-2], number=new_session_path.parts[-1])
    >>> shutil.move(old_session_path, new_session_path)
    """
    settings = load_settings(session_path, collection)
    if not settings:
        raise IOError('Settings file not found')

    filename = PureWindowsPath(settings.get('SETTINGS_FILE_PATH', '_iblrig_taskSettings.raw.json')).name
    file_path = Path(session_path).joinpath(collection, filename)

    if subject:
        # Patch subject name
        old_subject = settings['SUBJECT_NAME']
        settings['SUBJECT_NAME'] = subject
        for k in settings.keys():
            if isinstance(settings[k], str):
                settings[k] = settings[k].replace(f'\\Subjects\\{old_subject}', f'\\Subjects\\{subject}')
        if 'SESSION_NAME' in settings:
            settings['SESSION_NAME'] = '\\'.join([subject, *settings['SESSION_NAME'].split('\\')[1:]])
        settings.pop('PYBPOD_SUBJECT_EXTRA', None)  # Get rid of Alyx subject info

    if date:
        # Patch session datetime
        date = str(date)
        old_date = settings['SESSION_DATE']
        settings['SESSION_DATE'] = date
        for k in settings.keys():
            if isinstance(settings[k], str):
                settings[k] = settings[k].replace(
                    f'\\{settings["SUBJECT_NAME"]}\\{old_date}',
                    f'\\{settings["SUBJECT_NAME"]}\\{date}'
                )
        settings['SESSION_DATETIME'] = date + settings['SESSION_DATETIME'][10:]
        if 'SESSION_END_TIME' in settings:
            settings['SESSION_END_TIME'] = date + settings['SESSION_END_TIME'][10:]
        if 'SESSION_START_TIME' in settings:
            settings['SESSION_START_TIME'] = date + settings['SESSION_START_TIME'][10:]

    if number:
        # Patch session number
        old_number = settings['SESSION_NUMBER']
        if isinstance(number, int):
            number = f'{number:03}'
        settings['SESSION_NUMBER'] = number
        for k in settings.keys():
            if isinstance(settings[k], str):
                settings[k] = settings[k].replace(
                    f'\\{settings["SESSION_DATE"]}\\{old_number}',
                    f'\\{settings["SESSION_DATE"]}\\{number}'
                )

    if new_collection:
        if 'SESSION_RAW_DATA_FOLDER' not in settings:
            _logger.warning('SESSION_RAW_DATA_FOLDER key not in settings; collection not updated')
        else:
            old_path = settings['SESSION_RAW_DATA_FOLDER']
            new_path = PureWindowsPath(settings['SESSION_RAW_DATA_FOLDER']).with_name(new_collection)
            for k in settings.keys():
                if isinstance(settings[k], str):
                    settings[k] = settings[k].replace(old_path, str(new_path))
    with open(file_path, 'w') as fp:
        json.dump(settings, fp, indent=' ')
    return settings