Coverage for ibllib/pipes/misc.py: 59%
668 statements
« prev ^ index » next coverage.py v7.3.2, created at 2023-10-11 11:13 +0100
« prev ^ index » next coverage.py v7.3.2, created at 2023-10-11 11:13 +0100
1import ctypes
2import hashlib
3import json
4import os
5import re
6import shutil
7import subprocess
8import sys
9import time
10import logging
11from pathlib import Path
12from typing import Union, List
13from inspect import signature
14import uuid
15import socket
16import traceback
18import spikeglx
19from iblutil.io import hashfile, params
20from iblutil.util import range_str
21from one.alf.files import get_session_path
22from one.alf.spec import is_uuid_string, is_session_path, describe
23from one.api import ONE
25import ibllib.io.flags as flags
26import ibllib.io.raw_data_loaders as raw
27from ibllib.io.misc import delete_empty_folders
28import ibllib.io.session_params as sess_params
# Module-level logger for all transfer/copy helpers in this file
log = logging.getLogger(__name__)

# Maps a device name to the transfer-flag suffix used for that modality
DEVICE_FLAG_MAP = {'neuropixel': 'ephys',
                   'cameras': 'video',
                   'widefield': 'widefield',
                   'sync': 'sync'}
def subjects_data_folder(folder: Path, rglob: bool = False) -> Path:
    """Given a root_data_folder will try to find a 'Subjects' data folder.

    If a Subjects folder is passed it is returned directly.

    :param folder: root data folder in which to look for a 'Subjects' folder
    :param rglob: if True search recursively, otherwise only immediate children
    :return: the path of the (single) 'Subjects' folder found
    :raises ValueError: if no 'Subjects' folder, or more than one, is found
    """
    if not isinstance(folder, Path):
        folder = Path(folder)
    func = folder.rglob if rglob else folder.glob
    # Try to find Subjects folder one level
    if folder.name.lower() != 'subjects':
        # Case-insensitive match on the folder name
        spath = [x for x in func('*') if x.name.lower() == 'subjects']
        if not spath:
            raise ValueError('No "Subjects" folder in children folders')
        elif len(spath) > 1:
            raise ValueError(f'Multiple "Subjects" folder in children folders: {spath}')
        else:
            # glob/rglob yield paths already anchored at `folder`; the previous
            # `folder / spath[0]` only worked because joining an absolute path
            # onto another replaces it — use the match directly instead
            folder = spath[0]
    return folder
def cli_ask_default(prompt: str, default: str):
    """
    Prompt the user for input, display the default option and return user input or default
    :param prompt: String to display to user
    :param default: The default value to return if user doesn't enter anything
    :return: User input or default
    """
    response = input(f'{prompt} [default: {default}]: ')
    return response if response else default
def cli_ask_options(prompt: str, options: list, default_idx: int = 0) -> str:
    """Prompt the user to pick one of `options`, re-prompting until a valid answer is given.

    The default option (returned on empty input) is shown in square brackets.
    """
    choices = [str(o) for o in options]
    display = list(choices)
    if default_idx is not None:
        display[default_idx] = f"[{display[default_idx]}]"
    full_prompt = prompt + " (" + " | ".join(display) + ")> "
    while True:
        answer = input(full_prompt) or str(options[default_idx])
        if answer in choices:
            return answer
def behavior_exists(session_path: str, include_devices=False) -> bool:
    """
    Returns True if the session has a task behaviour folder
    :param session_path:
    :return:
    """
    path = Path(session_path)
    if include_devices and path.joinpath("_devices").exists():
        return True
    # iblrig v7 and earlier used 'raw_behavior_data'; newer rigs use 'raw_task_data_NN'
    has_legacy = path.joinpath("raw_behavior_data").exists()
    return True if has_legacy else any(path.glob('raw_task_data_*'))
def check_transfer(src_session_path, dst_session_path):
    """
    Check all the files in the source directory match those in the destination directory.
    Function will throw assertion errors/exceptions if number of files do not match, file
    names do not match, or if file sizes do not match.

    :param src_session_path: The source directory that was copied
    :param dst_session_path: The copy target directory
    """
    def _sorted_files(root):
        # Recursively collect regular files, sorted so the two listings align
        return sorted(p for p in Path(root).rglob('*') if p.is_file())

    src_files = _sorted_files(src_session_path)
    dst_files = _sorted_files(dst_session_path)
    assert len(src_files) == len(dst_files), 'Not all files transferred'
    for src, dst in zip(src_files, dst_files):
        assert src.name == dst.name, 'file name mismatch'
        assert src.stat().st_size == dst.stat().st_size, 'file size mismatch'
def rename_session(session_path: str, new_subject=None, new_date=None, new_number=None,
                   ask: bool = False) -> Path:
    """Rename a session. Prompts the user for the new subject name, data and number and then moves
    session path to new session path.

    :param session_path: A session path to rename
    :type session_path: str
    :param new_subject: A new subject name, if none provided, the user is prompted for one
    :param new_date: A new session date, if none provided, the user is prompted for one
    :param new_number: A new session number, if none provided, the user is prompted for one
    :param ask: used to ensure prompt input from user, defaults to False
    :type ask: bool
    :return: The renamed session path, or None if the user declined to overwrite an existing one
    :rtype: Path
    """
    session_path = get_session_path(session_path)
    if session_path is None:
        raise ValueError('Session path not valid ALF session folder')
    # ALF session paths end in <subject>/<date>/<number>
    mouse = session_path.parts[-3]
    date = session_path.parts[-2]
    sess = session_path.parts[-1]
    new_mouse = new_subject or mouse
    new_date = new_date or date
    new_sess = new_number or sess
    if ask:
        # NOTE(review): when ask=True the user's answers override any new_* keyword args,
        # even empty answers — confirm this is intended
        new_mouse = input(f"Please insert subject NAME [current value: {mouse}]> ")
        new_date = input(f"Please insert new session DATE [current value: {date}]> ")
        new_sess = input(f"Please insert new session NUMBER [current value: {sess}]> ")
    new_session_path = Path(*session_path.parts[:-3]).joinpath(new_mouse, new_date,
                                                               new_sess.zfill(3))
    assert is_session_path(new_session_path), 'invalid subject, date or number'

    if new_session_path.exists():
        # Target already exists: offer to back it up before overwriting
        ans = input(f'Warning: session path {new_session_path} already exists.\nWould you like to '
                    f'move {new_session_path} to a backup directory? [y/N] ')
        if (ans or 'n').lower() in ['n', 'no']:
            print(f'Manual intervention required, data exists in the following directory: '
                  f'{session_path}')
            return
        if backup_session(new_session_path):
            print(f'Backup was successful, removing directory {new_session_path}...')
            shutil.rmtree(str(new_session_path), ignore_errors=True)
    shutil.move(str(session_path), str(new_session_path))
    print(session_path, "--> renamed to:")
    print(new_session_path)

    return new_session_path
def backup_session(session_path):
    """Copy the contents of a session to a backup folder, likely before the folder is
    removed.

    :param session_path: A session path to be backed up
    :return: True if the directory was backed up, False otherwise
    :rtype: bool
    :raises FileExistsError: if a backup for this exact session already exists
    """
    bk_session_path = Path()
    session_path = Path(session_path)
    if session_path.exists():
        try:
            # Backup mirrors <subject>/<date>/<number> next to the lab's Subjects folder
            bk_session_path = Path(*session_path.parts[:-4]).joinpath(
                "Subjects_backup_renamed_sessions", Path(*session_path.parts[-3:]))
            # exist_ok so that a second session for the same subject/date can be backed up;
            # copytree below still raises FileExistsError if this exact session was already backed up
            bk_session_path.parent.mkdir(parents=True, exist_ok=True)
            print(f"Created path: {bk_session_path.parent}")
            shutil.copytree(session_path, bk_session_path)
            print(f"Copied contents from {session_path} to {bk_session_path}")
            return True
        except FileExistsError:
            log.error(f"A backup session for the given path already exists: {bk_session_path}, "
                      f"manual intervention is necessary.")
            raise
        except shutil.Error as e:
            log.error(f'Some kind of copy error occurred when moving files from {session_path} to '
                      f'{bk_session_path}')
            # Log the caught exception instance (previously the exception class was logged)
            log.error(e)
    else:
        log.error(f"The given session path does not exist: {session_path}")
    return False
def copy_with_check(src, dst, **kwargs):
    """Copy `src` to `dst` unless a file of identical size is already there.

    A pre-existing destination file whose size differs is removed first.
    Returns the destination path.
    """
    target = Path(dst)
    if target.exists():
        if Path(src).stat().st_size == target.stat().st_size:
            # Same size: assume already transferred, skip the copy
            return target
        target.unlink()
    return shutil.copy2(src, target, **kwargs)
def transfer_session_folders(local_sessions: list, remote_subject_folder, subfolder_to_transfer):
    """
    Used to determine which local session folders should be transferred to which remote session folders, will prompt the user
    when necessary.

    Parameters
    ----------
    local_sessions : list
        Required list of local session folder paths to sync to local server.
    remote_subject_folder : str, pathlib.Path
        The remote location of the subject folder (typically pulled from the params).
    subfolder_to_transfer : str
        Which subfolder to sync

    Returns
    -------
    list of tuples
        For each session, a tuple of (source, destination) of attempted file transfers.
    list of bool
        A boolean True/False for success/failure of the transfer.
    """
    transfer_list = []  # list of sessions to transfer
    skip_list = ""  # "list" of sessions to skip and the reason for the skip
    # Iterate through all local sessions in the given list
    for local_session in local_sessions:
        # Set expected remote_session location and perform simple error state checks
        remote_session = remote_subject_folder.joinpath(*local_session.parts[-3:])
        # Skip session if ...
        if subfolder_to_transfer:
            if not local_session.joinpath(subfolder_to_transfer).exists():
                msg = f"{local_session} - skipping session, no '{subfolder_to_transfer}' folder found locally"
                log.warning(msg)
                skip_list += msg + "\n"
                continue
        if not remote_session.parent.exists():
            msg = f"{local_session} - no matching remote session date folder found for the given local session"
            log.info(msg)
            skip_list += msg + "\n"
            continue
        if not behavior_exists(remote_session):
            msg = f"{local_session} - skipping session, no behavior data found in remote folder {remote_session}"
            log.warning(msg)
            skip_list += msg + "\n"
            continue

        # Determine if there are multiple session numbers from the date path
        local_sessions_for_date = get_session_numbers_from_date_path(local_session.parent)
        remote_sessions_for_date = get_session_numbers_from_date_path(remote_session.parent)
        remote_session_pick = None
        if len(local_sessions_for_date) > 1 or len(remote_sessions_for_date) > 1:
            # Ambiguous mapping: show folder sizes and let the user pick the remote session
            # Format folder size output for end user to review
            local_session_numbers_with_size = remote_session_numbers_with_size = ""
            for lsfd in local_sessions_for_date:
                size_in_gb = round(get_directory_size(local_session.parent / lsfd, in_gb=True), 2)
                local_session_numbers_with_size += lsfd + " (" + str(size_in_gb) + " GB)\n"
            for rsfd in remote_sessions_for_date:
                size_in_gb = round(get_directory_size(remote_session.parent / rsfd, in_gb=True), 2)
                remote_session_numbers_with_size += rsfd + " (" + str(size_in_gb) + " GB)\n"
            log.info(f"\n\nThe following local session folder(s) were found on this acquisition PC:\n\n"
                     f"{''.join(local_session_numbers_with_size)}\nThe following remote session folder(s) were found on the "
                     f"server:\n\n{''.join(remote_session_numbers_with_size)}\n")

            def _remote_session_picker(sessions_for_date):
                # Interactive prompt; returns a session number string, 's' to skip or 'e' to exit
                resp = "s"
                resp_invalid = True
                while resp_invalid:  # loop until valid user input
                    resp = input(f"\n\n--- USER INPUT NEEDED ---\nWhich REMOTE session number would you like to transfer your "
                                 f"local session to? Options {range_str(map(int, sessions_for_date))} or "
                                 f"[s]kip/[h]elp/[e]xit> ").strip().lower()
                    if resp == "h":
                        print("An example session filepath:\n")
                        describe("number")  # Explain what a session number is
                        input("Press enter to continue")
                    elif resp == "s" or resp == "e":  # exit loop
                        resp_invalid = False
                    elif len(resp) <= 3:
                        resp_invalid = False if [i for i in sessions_for_date if int(resp) == int(i)] else None
                    else:
                        print("Invalid response. Please try again.")
                return resp

            log.info(f"Evaluation for local session "
                     f"{local_session.parts[-3]}/{local_session.parts[-2]}/{local_session.parts[-1]}...")
            user_response = _remote_session_picker(remote_sessions_for_date)
            if user_response == "s":
                msg = f"{local_session} - Local session skipped due to user input"
                log.info(msg)
                skip_list += msg + "\n"
                continue
            elif user_response == "e":
                log.info("Exiting, no files transferred.")
                return
            else:
                remote_session_pick = remote_session.parent / user_response.zfill(3)

        # Append to the transfer_list
        transfer_tuple = (local_session, remote_session_pick) if remote_session_pick else (local_session, remote_session)
        transfer_list.append(transfer_tuple)
        log.info(f"{transfer_tuple[0]}, {transfer_tuple[1]} - Added to the transfer list")

    # Verify that the number of local transfer_list entries match the number of remote transfer_list entries
    if len(transfer_list) != len(set(dst for _, dst in transfer_list)):
        raise RuntimeError(
            "An invalid combination of sessions were picked; the most likely cause of this error is multiple local "
            "sessions being selected for a single remote session. Please rerun the script."
        )

    # Call rsync/rdiff function for every entry in the transfer list
    success = []
    for src, dst in transfer_list:
        if subfolder_to_transfer:
            success.append(rsync_paths(src / subfolder_to_transfer, dst / subfolder_to_transfer))
        else:
            success.append(rsync_paths(src, dst))
        if not success[-1]:
            log.error("File transfer failed, check log for reason.")

    # Notification to user for any transfers were skipped
    log.warning(f"Video transfers that were not completed:\n\n{skip_list}") if skip_list else log.info("No transfers skipped.")
    return transfer_list, success
def transfer_folder(src: Path, dst: Path, force: bool = False) -> None:
    """Copy a whole folder from `src` to `dst` (legacy).

    Functionality has been replaced by transfer_session_folders function.

    :param src: source folder to copy
    :param dst: destination folder; deleted first when `force` is True
    :param force: when True, remove `dst` and re-copy everything
    """
    print(f"Attempting to copy:\n{src}\n--> {dst}")
    if force:
        print(f"Removing {dst}")
        shutil.rmtree(dst, ignore_errors=True)
    else:
        # If the destination already matches the source, there is nothing to do
        try:
            check_transfer(src, dst)
            print("All files already copied, use force=True to re-copy")
            return
        except AssertionError:
            pass
    print(f"Copying all files:\n{src}\n--> {dst}")
    # rsync_folder(src, dst, '**transfer_me.flag')
    if sys.version_info.minor < 8:
        # dirs_exist_ok kwarg not supported in < 3.8
        shutil.rmtree(dst, ignore_errors=True)
        shutil.copytree(src, dst, copy_function=copy_with_check)
    else:
        shutil.copytree(src, dst, dirs_exist_ok=True, copy_function=copy_with_check)
    # If folder was created delete the src_flag_file
    if check_transfer(src, dst) is None:
        print("All files copied")
    # rdiff-backup --compare /tmp/tmpw9o1zgn0 /tmp/tmp82gg36rm
    # No changes found. Directory matches archive data.
def load_params_dict(params_fname: str) -> dict:
    """Read a named JSON parameter file; return its contents or None if it doesn't exist."""
    params_fpath = Path(params.getfile(params_fname))
    if not params_fpath.exists():
        return None
    with open(params_fpath, "r") as fp:
        return json.load(fp)
def load_videopc_params():
    """Load the video PC parameters, creating them interactively first if absent."""
    parameters = load_params_dict("videopc_params")
    if not parameters:
        create_videopc_params()
        parameters = load_params_dict("videopc_params")
    return parameters
def load_ephyspc_params():
    """Load the ephys PC parameters, creating them interactively first if absent."""
    parameters = load_params_dict("ephyspc_params")
    if not parameters:
        create_ephyspc_params()
        parameters = load_params_dict("ephyspc_params")
    return parameters
def create_basic_transfer_params(param_str='transfer_params', local_data_path=None,
                                 remote_data_path=None, clobber=False, **kwargs):
    """Create some basic parameters common to all acquisition rigs.

    Namely prompt user for the local root data path and the remote (lab server) data path.
    NB: All params stored in uppercase by convention.

    Parameters
    ----------
    param_str : str
        The name of the parameters to load/save.
    local_data_path : str, pathlib.Path
        The local root data path, stored with the DATA_FOLDER_PATH key. If None, user is prompted.
    remote_data_path : str, pathlib.Path, bool
        The local root data path, stored with the REMOTE_DATA_FOLDER_PATH key. If None, user is prompted.
        If False, the REMOTE_DATA_PATH key is not updated or is set to False if clobber = True.
    clobber : bool
        If True, any parameters in existing parameter file not found as keyword args will be removed,
        otherwise the user is prompted for these also.
    **kwargs
        Extra parameters to set. If value is None, the user is prompted.

    Returns
    -------
    dict
        The parameters written to disc.

    Examples
    --------
    Set up basic transfer parameters for modality acquisition PC

    >>> par = create_basic_transfer_params()

    Set up basic transfer paramers without prompting the user

    >>> par = create_basic_transfer_params(
    ...     local_data_path='/iblrig_data/Subjects',
    ...     remote_data_path='/mnt/iblserver.champalimaud.pt/ibldata/Subjects')

    Prompt user for extra parameter using custom prompt (will call function with current default)

    >>> from functools import partial
    >>> par = create_basic_transfer_params(
    ...     custom_arg=partial(cli_ask_default, 'Please enter custom arg value'))

    Set up with no remote path (NB: if not the first time, use clobber=True to save param key)

    >>> par = create_basic_transfer_params(remote_data_path=False)
    """
    # Load any previously saved parameters (empty dict on first run)
    parameters = params.as_dict(params.read(param_str, {})) or {}
    if local_data_path is None:
        local_data_path = parameters.get('DATA_FOLDER_PATH')
        if not local_data_path or clobber:
            local_data_path = cli_ask_default("Where's your LOCAL 'Subjects' data folder?", local_data_path)
    parameters['DATA_FOLDER_PATH'] = local_data_path

    if remote_data_path is None:
        remote_data_path = parameters.get('REMOTE_DATA_FOLDER_PATH')
        if remote_data_path in (None, '') or clobber:
            remote_data_path = cli_ask_default("Where's your REMOTE 'Subjects' data folder?", remote_data_path)
    if remote_data_path is not False:
        parameters['REMOTE_DATA_FOLDER_PATH'] = remote_data_path
    elif 'REMOTE_DATA_FOLDER_PATH' not in parameters or clobber:
        parameters['REMOTE_DATA_FOLDER_PATH'] = False  # Always assume no remote path

    # Deal with extraneous parameters
    for k, v in kwargs.items():
        if callable(v):  # expect function handle with default value as input
            n_pars = len(signature(v).parameters)
            parameters[k.upper()] = v(parameters.get(k.upper())) if n_pars > 0 else v()
        elif v is None:  # generic prompt for key
            parameters[k.upper()] = cli_ask_default(
                f'Enter a value for parameter {k.upper()}', parameters.get(k.upper())
            )
        else:  # assign value to parameter
            parameters[k.upper()] = str(v)

    defined = list(map(str.upper, ('DATA_FOLDER_PATH', 'REMOTE_DATA_FOLDER_PATH', 'TRANSFER_LABEL', *kwargs.keys())))
    if clobber:
        # Delete any parameters in parameter dict that were not passed as keyword args into function
        parameters = {k: v for k, v in parameters.items() if k in defined}
    else:
        # Prompt for any other parameters that weren't passed into function
        for k in filter(lambda x: x not in defined, map(str.upper, parameters.keys())):
            parameters[k] = cli_ask_default(f'Enter a value for parameter {k}', parameters.get(k))

    if 'TRANSFER_LABEL' not in parameters:
        # Unique-per-machine default label: hostname + MAC address integer
        parameters['TRANSFER_LABEL'] = f'{socket.gethostname()}_{uuid.getnode()}'

    # Write parameters
    params.write(param_str, parameters)
    return parameters
def create_videopc_params(force=False, silent=False):
    """Create and save the video PC parameter file.

    :param force: when True, overwrite an existing parameter file
    :param silent: when True, use hard-coded defaults instead of prompting the user
    :return: the parameter dict written to disk, or None if the file already exists
    """
    if Path(params.getfile("videopc_params")).exists() and not force:
        print(f"{params.getfile('videopc_params')} exists already, exiting...")
        print(Path(params.getfile("videopc_params")).exists())
        return
    if silent:
        data_folder_path = r"D:\iblrig_data\Subjects"
        remote_data_folder_path = r"\\iblserver.champalimaud.pt\ibldata\Subjects"
        body_cam_idx = 0
        left_cam_idx = 1
        right_cam_idx = 2
    else:
        # Interactive setup: prompt for paths and the three camera indices
        data_folder_path = cli_ask_default(
            r"Where's your LOCAL 'Subjects' data folder?", r"D:\iblrig_data\Subjects"
        )
        remote_data_folder_path = cli_ask_default(
            r"Where's your REMOTE 'Subjects' data folder?",
            r"\\iblserver.champalimaud.pt\ibldata\Subjects",
        )
        body_cam_idx = cli_ask_default("Please select the index of the BODY camera", "0")
        left_cam_idx = cli_ask_default("Please select the index of the LEFT camera", "1")
        right_cam_idx = cli_ask_default("Please select the index of the RIGHT camera", "2")

    param_dict = {
        "DATA_FOLDER_PATH": data_folder_path,
        "REMOTE_DATA_FOLDER_PATH": remote_data_folder_path,
        "BODY_CAM_IDX": body_cam_idx,
        "LEFT_CAM_IDX": left_cam_idx,
        "RIGHT_CAM_IDX": right_cam_idx,
    }
    params.write("videopc_params", param_dict)
    print(f"Created {params.getfile('videopc_params')}")
    print(param_dict)
    return param_dict
def create_ephyspc_params(force=False, silent=False):
    """Create and save the ephys PC parameter file.

    :param force: when True, overwrite an existing parameter file
    :param silent: when True, use hard-coded defaults instead of prompting the user
    :return: the parameter dict written to disk, or None if the file already exists
    """
    if Path(params.getfile("ephyspc_params")).exists() and not force:
        print(f"{params.getfile('ephyspc_params')} exists already, exiting...")
        print(Path(params.getfile("ephyspc_params")).exists())
        return
    if silent:
        data_folder_path = r"D:\iblrig_data\Subjects"
        remote_data_folder_path = r"\\iblserver.champalimaud.pt\ibldata\Subjects"
        probe_types = {"PROBE_TYPE_00": "3A", "PROBE_TYPE_01": "3B"}
    else:
        # Interactive setup: prompt for paths, probe count and each probe's type
        data_folder_path = cli_ask_default(
            r"Where's your LOCAL 'Subjects' data folder?", r"D:\iblrig_data\Subjects"
        )
        remote_data_folder_path = cli_ask_default(
            r"Where's your REMOTE 'Subjects' data folder?",
            r"\\iblserver.champalimaud.pt\ibldata\Subjects",
        )
        n_probes = int(cli_ask_default("How many probes are you using?", '2'))
        assert 100 > n_probes > 0, 'Please enter number between 1, 99 inclusive'
        probe_types = {}
        for i in range(n_probes):
            probe_types[f'PROBE_TYPE_{i:02}'] = cli_ask_options(
                f"What's the type of PROBE {i:02}?", ["3A", "3B"])
    param_dict = {
        "DATA_FOLDER_PATH": data_folder_path,
        "REMOTE_DATA_FOLDER_PATH": remote_data_folder_path,
        **probe_types
    }
    params.write("ephyspc_params", param_dict)
    print(f"Created {params.getfile('ephyspc_params')}")
    print(param_dict)
    return param_dict
def rdiff_install() -> bool:
    """Ensure the rdiff-backup command is available, installing it if needed.

    For windows:
      * if the rdiff-backup executable does not already exist on the system
      * downloads rdiff-backup zip file
      * copies the executable to the ``C:\\tools`` folder

    For linux/mac:
      * runs a pip install rdiff-backup

    Returns:
        True when install is successful, False when an error is encountered
    """
    if os.name == "nt":
        # ensure tools folder exists
        tools_folder = "C:\\tools\\"
        os.mkdir(tools_folder) if not Path(tools_folder).exists() else None

        rdiff_cmd_loc = tools_folder + "rdiff-backup.exe"
        # NOTE(review): if the executable already exists this branch is skipped and the
        # function implicitly returns None (falsy) — confirm callers treat that as intended
        if not Path(rdiff_cmd_loc).exists():
            import requests
            import zipfile
            from io import BytesIO

            url = "https://github.com/rdiff-backup/rdiff-backup/releases/download/v2.0.5/rdiff-backup-2.0.5.win32exe.zip"
            log.info("Downloading zip file for rdiff-backup.")
            # Download the file by sending the request to the URL, ensure success by status code
            if requests.get(url).status_code == 200:
                log.info("Download complete for rdiff-backup zip file.")
                # extracting the zip file contents
                # NOTE(review): the archive is fetched a second time here — confirm intent
                zipfile = zipfile.ZipFile(BytesIO(requests.get(url).content))
                zipfile.extractall("C:\\Temp")
                rdiff_folder_name = zipfile.namelist()[0]  # attempting a bit of future-proofing
                # move the executable to the C:\tools folder
                shutil.copy("C:\\Temp\\" + rdiff_folder_name + "rdiff-backup.exe", rdiff_cmd_loc)
                shutil.rmtree("C:\\Temp\\" + rdiff_folder_name)  # cleanup temp folder
                try:  # attempt to call the rdiff command
                    subprocess.run([rdiff_cmd_loc, "--version"], check=True)
                except (FileNotFoundError, subprocess.CalledProcessError) as e:
                    log.error("rdiff-backup installation did not complete.\n", e)
                    return False
                return True
            else:
                log.error("Download request status code not 200, something did not go as expected.")
                return False
    else:  # anything not Windows
        try:  # package should not be installed via the requirements.txt to accommodate windows
            subprocess.run(["pip", "install", "rdiff-backup"], check=True)
        except subprocess.CalledProcessError as e:
            log.error("rdiff-backup pip install did not complete.\n", e)
            return False
        return True
def get_directory_size(dir_path: Path, in_gb=False) -> float:
    """
    Recursively sum the sizes of all files under the given directory.

    Args:
        dir_path (Path): path we want to get the total size of
        in_gb (bool): set to True for returned value to be in gigabytes

    Returns:
        float: sum of all files in the given directory path (in bytes by default, in GB if specified)
    """
    total_bytes = 0
    with os.scandir(dir_path) as entries:
        for entry in entries:
            if entry.is_dir():
                # Recurse into subdirectories (always accumulate in bytes)
                total_bytes += get_directory_size(entry.path)
            elif entry.is_file():
                total_bytes += entry.stat().st_size
    return total_bytes / 1024 / 1024 / 1024 if in_gb else total_bytes
def get_session_numbers_from_date_path(date_path: Path) -> list:
    """
    Retrieves session numbers when given a date path

    Args:
        date_path (Path): path to date, i.e. \\\\server\\some_lab\\Subjects\\Date

    Returns:
        (list): Found sessions as a sorted list
    """
    # Session folders are exactly three digits (e.g. '001')
    number_pattern = re.compile(r'^\d{3}$')
    names = {child.name for child in Path(date_path).glob('*')
             if child.is_dir() and number_pattern.match(child.name)}
    return sorted(names)
def rsync_paths(src: Path, dst: Path) -> bool:
    """
    Used to run the rsync algorithm via a rdiff-backup command on the paths contained on the provided source and destination.
    This function relies on the rdiff-backup package and is run from the command line, i.e. subprocess.run(). Full documentation
    can be found here - https://rdiff-backup.net/docs/rdiff-backup.1.html

    Parameters
    ----------
    src : Path
        source path that contains data to be transferred
    dst : Path
        destination path that will receive the transferred data

    Returns
    -------
    bool
        True for success, False for failure

    Raises
    ------
    FileNotFoundError, subprocess.CalledProcessError
    """
    # Set rdiff_cmd_loc based on OS type (assuming C:\tools is not in Windows PATH environ)
    rdiff_cmd_loc = "C:\\tools\\rdiff-backup.exe" if os.name == "nt" else "rdiff-backup"
    try:  # Check if rdiff-backup command is available
        subprocess.run([rdiff_cmd_loc, "--version"], check=True)
    except (FileNotFoundError, subprocess.CalledProcessError) as e:
        if not rdiff_install():  # Attempt to install rdiff
            log.error("rdiff-backup command is unavailable, transfers can not continue.\n", e)
            raise

    log.info("Attempting to transfer data: " + str(src) + " -> " + str(dst))
    WindowsInhibitor().inhibit() if os.name == "nt" else None  # prevent Windows from going to sleep
    try:
        rsync_command = [rdiff_cmd_loc, "--verbosity", str(0),
                         "--create-full-path", "--backup-mode", "--no-acls", "--no-eas",
                         "--no-file-statistics", "--exclude", "**transfer_me.flag",
                         str(src), str(dst)]
        subprocess.run(rsync_command, check=True)
        time.sleep(1)  # give rdiff-backup a second to complete all logging operations
    except (FileNotFoundError, subprocess.CalledProcessError) as e:
        # NOTE(review): FileNotFoundError has no `returncode`/`stderr` attributes, so this
        # handler would itself raise AttributeError for that exception type — confirm intent
        log.error("Transfer failed with code %i.\n", e.returncode)
        if e.stderr:
            log.error(e.stderr)
        return False
    log.info("Validating transfer completed...")
    try:  # Validate the transfers succeeded
        rsync_validate = [rdiff_cmd_loc, "--verify", str(dst)]
        subprocess.run(rsync_validate, check=True)
    except (FileNotFoundError, subprocess.CalledProcessError) as e:
        log.error(f"Validation for destination {dst} failed.\n", e)
        return False
    log.info("Cleaning up rdiff files...")
    # Remove the bookkeeping folder rdiff-backup leaves in the destination
    shutil.rmtree(dst / "rdiff-backup-data")
    WindowsInhibitor().uninhibit() if os.name == 'nt' else None  # allow Windows to go to sleep
    return True
def confirm_ephys_remote_folder(local_folder=False, remote_folder=False, force=False, iblscripts_folder=False,
                                session_path=None):
    """Interactively confirm and transfer local ephys sessions to the remote lab server.

    :param local_folder: The full path to the local Subjects folder
    :param remote_folder: the full path to the remote Subjects folder
    :param force: passed through to transfer_folder (re-copy after removing destination)
    :param iblscripts_folder: path used to locate wiring files; derived from the deploy package if falsy
    :param session_path: optional session path (or list of paths) to transfer; when None, sessions
                         are discovered via transfer_me.flag files under the local folder
    :return:
    """
    # FIXME: session_path can be relative
    pars = load_ephyspc_params()

    if not iblscripts_folder:
        import deploy
        iblscripts_folder = Path(deploy.__file__).parent.parent

    if not local_folder:
        local_folder = pars["DATA_FOLDER_PATH"]
    if not remote_folder:
        remote_folder = pars["REMOTE_DATA_FOLDER_PATH"]
    local_folder = Path(local_folder)
    remote_folder = Path(remote_folder)
    # Check for Subjects folder
    local_folder = subjects_data_folder(local_folder, rglob=True)
    remote_folder = subjects_data_folder(remote_folder, rglob=True)

    log.info(f"local folder: {local_folder}")
    log.info(f"remote folder: {remote_folder}")
    if session_path is None:
        src_session_paths = [x.parent for x in local_folder.rglob("transfer_me.flag")]
    else:
        src_session_paths = session_path if isinstance(session_path, list) else [session_path]

    if not src_session_paths:
        log.info("Nothing to transfer, exiting...")
        return
    for session_path in src_session_paths:
        log.info(f"Found : {session_path}")
    log.info(f"Found: {len(src_session_paths)} sessions to transfer, starting transferring now")

    for session_path in src_session_paths:
        log.info(f"Transferring session: {session_path}")
        # Rename ephys files
        # FIXME: if transfer has failed and wiring file is there renaming will fail!
        rename_ephys_files(str(session_path))
        # Move ephys files
        move_ephys_files(str(session_path))
        # Copy wiring files
        copy_wiring_files(str(session_path), iblscripts_folder)
        try:
            create_alyx_probe_insertions(str(session_path))
        except BaseException:
            log.error(traceback.print_exc())
            log.info("Probe creation failed, please create the probe insertions manually. Continuing transfer...")
        # NOTE(review): the f-string below renders without a space before 'to'
        # (e.g. "Transfer /path/001to /remote ...") — confirm and fix the message
        msg = f"Transfer {session_path }to {remote_folder} with the same name?"
        resp = input(msg + "\n[y]es/[r]ename/[s]kip/[e]xit\n ^\n> ") or "y"
        resp = resp.lower()
        log.info(resp)
        if resp not in ["y", "r", "s", "e", "yes", "rename", "skip", "exit"]:
            # Unrecognised answer: restart the whole confirmation flow
            return confirm_ephys_remote_folder(
                local_folder=local_folder,
                remote_folder=remote_folder,
                force=force,
                iblscripts_folder=iblscripts_folder,
            )
        elif resp == "y" or resp == "yes":
            pass
        elif resp == "r" or resp == "rename":
            session_path = rename_session(session_path)
            if not session_path:
                continue
        elif resp == "s" or resp == "skip":
            continue
        elif resp == "e" or resp == "exit":
            return

        remote_session_path = remote_folder / Path(*session_path.parts[-3:])
        if not behavior_exists(remote_session_path, include_devices=True):
            log.error(f"No behavior folder found in {remote_session_path}: skipping session...")
            return
        # TODO: Check flagfiles on src.and dst + alf dir in session folder then remove
        # Try catch? wher catch condition is force transfer maybe
        transfer_folder(session_path / "raw_ephys_data", remote_session_path / "raw_ephys_data", force=force)
        # if behavior extract_me.flag exists remove it, because of ephys flag
        flag_file = session_path / "transfer_me.flag"
        if flag_file.exists():  # this file only exists for the iblrig v7 and lower
            flag_file.unlink()
        if (remote_session_path / "extract_me.flag").exists():
            (remote_session_path / "extract_me.flag").unlink()
        # Create remote flags
        create_ephys_transfer_done_flag(remote_session_path)
        check_create_raw_session_flag(remote_session_path)
def probe_labels_from_session_path(session_path: Union[str, Path]) -> List[str]:
    """
    Finds ephys probes according to the metadata spikeglx files. Only returns first subfolder
    name under raw_ephys_data folder, ie. raw_ephys_data/probe00/copy_of_probe00 won't be returned
    If there is a NP2.4 probe with several shanks, create several probes
    :param session_path:
    :return: list of strings
    """
    raw_ephys_folder = Path(session_path).joinpath('raw_ephys_data')
    labels = []
    for meta_file in raw_ephys_folder.rglob('*.ap.meta'):
        # only keep meta files sitting directly under raw_ephys_data/<probe_label>
        if meta_file.parents[1] != raw_ephys_folder:
            continue
        probe_name = meta_file.parts[-2]
        meta = spikeglx.read_meta_data(meta_file)
        nshanks = spikeglx._get_nshanks_from_meta(meta)
        if nshanks > 1:
            # NP2.4 multi-shank probe: one label per shank, suffixed a, b, c, ...
            labels.extend(probe_name + 'abcdefghij'[shank] for shank in range(nshanks))
        else:
            labels.append(probe_name)
    return sorted(labels)
def create_alyx_probe_insertions(
    session_path: str,
    force: bool = False,
    one: object = None,
    model: str = None,
    labels: list = None,
):
    """
    Create (or fetch) the Alyx probe insertions for a session.

    For each probe label, an existing insertion is looked up on Alyx; missing ones are
    created with a default NOT_SET qc. Existing insertions are only overwritten when
    `force` is True.

    :param session_path: session path, or a session eid (UUID string)
    :param force: when True, update existing insertions with the newly built record
    :param one: an instance of ONE; a local-mode instance is created when None
    :param model: probe model string (e.g. '3B2'); inferred from spikeglx meta files when None
    :param labels: list of probe labels; inferred from the session folder when None
    :return: list of insertion records (dicts), or None when the session is not on Alyx
    """
    if one is None:
        one = ONE(cache_rest=None, mode='local')
    eid = session_path if is_uuid_string(session_path) else one.path2eid(session_path)
    if eid is None:
        log.warning("Session not found on Alyx: please create session before creating insertions")
        # without a session eid the REST queries below cannot succeed; bail out early
        return
    if model is None:
        probe_model = spikeglx.get_neuropixel_version_from_folder(session_path)
        # Alyx stores the 3B probe model as '3B2'
        pmodel = "3B2" if probe_model == "3B" else probe_model
    else:
        pmodel = model
    labels = labels or probe_labels_from_session_path(session_path)
    # create the qc fields in the json field
    qc_dict = {"qc": "NOT_SET", "extended_qc": {}}

    # create the dictionary
    insertions = []
    for plabel in labels:
        insdict = {"session": eid, "name": plabel, "model": pmodel, "json": qc_dict}
        # search for the corresponding insertion in Alyx
        alyx_insertion = one.alyx.get(f'/insertions?&session={eid}&name={plabel}', clobber=True)
        # if it doesn't exist, create it
        if len(alyx_insertion) == 0:
            alyx_insertion = one.alyx.rest("insertions", "create", data=insdict)
        else:
            iid = alyx_insertion[0]["id"]
            if force:
                alyx_insertion = one.alyx.rest("insertions", "update", id=iid, data=insdict)
            else:
                alyx_insertion = alyx_insertion[0]
        insertions.append(alyx_insertion)
    return insertions
def create_ephys_flags(session_folder: str):
    """
    Create flags for processing an ephys session. Should be called after move_ephys_files
    :param session_folder: A path to an ephys session
    :return:
    """
    session = Path(session_folder)
    # session-level extraction and qc flags
    for flag_name in ("extract_ephys.flag", "raw_ephys_qc.flag"):
        flags.write_flag_file(session.joinpath(flag_name))
    # one spike-sorting flag per probe folder
    for probe_dir in session.joinpath('raw_ephys_data').glob('probe*'):
        flags.write_flag_file(probe_dir.joinpath("spike_sorting.flag"))
def create_ephys_transfer_done_flag(session_folder: str) -> None:
    """Write an ``ephys_data_transferred.flag`` file at the root of `session_folder`."""
    flags.write_flag_file(Path(session_folder) / "ephys_data_transferred.flag")
def create_video_transfer_done_flag(session_folder: str) -> None:
    """Write a ``video_data_transferred.flag`` file at the root of `session_folder`."""
    flags.write_flag_file(Path(session_folder) / "video_data_transferred.flag")
def create_transfer_done_flag(session_folder: str, flag_name: str) -> None:
    """Write a ``<flag_name>_data_transferred.flag`` file at the root of `session_folder`."""
    flags.write_flag_file(Path(session_folder) / f"{flag_name}_data_transferred.flag")
def check_create_raw_session_flag(session_folder: str) -> None:
    """
    Write a ``raw_session.flag`` file once all expected data transfers have completed.

    When an experiment description file is present, the expected devices (neuropixel,
    cameras, widefield, sync) are read from it and the matching
    ``*_data_transferred.flag`` files are checked; otherwise (legacy sessions) the task
    protocol in the settings file decides which flags are required. Once everything has
    been copied the per-device flags are removed and replaced by a single
    ``raw_session.flag``.

    :param session_folder: path to the session folder
    :return: None - writes/removes flag files on disk
    """
    session_path = Path(session_folder)

    # if we have an experiment description file read in whether we expect video, ephys widefield etc, don't do it just based
    # on the task protocol
    experiment_description = sess_params.read_params(session_path)

    def check_status(expected, flag):
        # Returns True when the flag state matches the expectation.
        # NOTE(review): callers only ever pass a device-name string (truthy) for
        # `expected`, so the `expected is False` branch looks unreachable here — confirm
        # before relying on it.
        if expected is not False and flag.exists():
            return True
        if expected is False and not flag.exists():
            return True
        else:
            return False

    if experiment_description is not None:

        # a non-empty _devices folder means per-device stub files are still pending copy
        if any(session_path.joinpath('_devices').glob('*')):
            return

        # Find the devices in the experiment description file
        devices = list()
        for key in DEVICE_FLAG_MAP.keys():
            if experiment_description.get('devices', {}).get(key, None) is not None:
                devices.append(key)
        # In case of widefield the sync also needs to be in it's own folder
        if 'widefield' in devices:
            devices.append('sync')

        expected_flags = [session_path.joinpath(f'{DEVICE_FLAG_MAP[dev]}_data_transferred.flag') for dev in devices]

        expected = []
        flag_files = []
        for dev, fl in zip(devices, expected_flags):
            status = check_status(dev, fl)
            if status:
                flag_files.append(fl)
            expected.append(status)

        # In this case all the copying has completed
        if all(expected):
            # make raw session flag
            flags.write_flag_file(session_path.joinpath("raw_session.flag"))
            # and unlink individual copy flags
            for fl in flag_files:
                fl.unlink()

        return

    # Legacy sessions (no experiment description): decide from the task protocol.
    ephys = session_path.joinpath("ephys_data_transferred.flag")
    video = session_path.joinpath("video_data_transferred.flag")

    sett = raw.load_settings(session_path)
    if sett is None:
        log.error(f"No flag created for {session_path}")
        return

    # behaviour-only protocols only require the video transfer to have completed
    is_biased = True if "biased" in sett["PYBPOD_PROTOCOL"] else False
    is_training = True if "training" in sett["PYBPOD_PROTOCOL"] else False
    is_habituation = True if "habituation" in sett["PYBPOD_PROTOCOL"] else False
    if video.exists() and (is_biased or is_training or is_habituation):
        flags.write_flag_file(session_path.joinpath("raw_session.flag"))
        video.unlink()
    # ephys protocols require both the video and ephys transfers
    if video.exists() and ephys.exists():
        flags.write_flag_file(session_path.joinpath("raw_session.flag"))
        ephys.unlink()
        video.unlink()
def rename_ephys_files(session_folder: str) -> None:
    """rename_ephys_files is system agnostic (3A, 3B1, 3B2).
    Renames all ephys files to Alyx compatible filenames. Uses get_new_filename.

    :param session_folder: Session folder path
    :type session_folder: str
    :return: None - Changes names of files on filesystem
    :rtype: None
    """
    session_path = Path(session_folder)
    # ap/lf are the probe streams, nidq is the 3B breakout-box stream
    for pattern in ("*.ap.*", "*.lf.*", "*.nidq.*"):
        for ephys_file in session_path.rglob(pattern):
            # Ignore wiring files: these are usually created after the file renaming however
            # this function may be called a second time upon failed transfer. The skip is
            # applied to every stream so a re-run never trips over previously copied wirings.
            if 'wiring' in ephys_file.name:
                continue
            new_filename = get_new_filename(ephys_file.name)
            shutil.move(str(ephys_file), str(ephys_file.parent / new_filename))
def get_new_filename(filename: str) -> str:
    """get_new_filename is system agnostic (3A, 3B1, 3B2).
    Gets an alyx compatible filename from any spikeglx ephys file.

    :param filename: Name of an ephys file
    :return: New name for ephys file
    :raises ValueError: if the filename has fewer than three dot-separated parts or
        its stem carries no ``_g<n>_t<n>`` gate/trigger tag
    """
    root = "_spikeglx_ephysData"
    parts = filename.split('.')
    if len(parts) < 3:
        # include the offending filename in the error so failures are diagnosable
        raise ValueError(f'unrecognized filename "{filename}"')
    pattern = r'.*(?P<gt>_g\d+_t\d+)'
    if not (match := re.match(pattern, parts[0])):
        raise ValueError(f'unrecognized filename "{filename}"')
    return '.'.join([root + match.group(1), *parts[1:]])
def move_ephys_files(session_folder: str) -> None:
    """move_ephys_files is system agnostic (3A, 3B1, 3B2).
    Moves all properly named ephys files to appropriate locations for transfer.
    Use rename_ephys_files function before this one.

    :param session_folder: Session folder path
    :type session_folder: str
    :return: None - Moves files on filesystem
    :rtype: None
    """
    session_path = Path(session_folder)
    destination = session_path / "raw_ephys_data"

    for imec_file in session_path.rglob("*.imec*"):
        match = re.match(r'_spikeglx_ephysData_g\d_t\d.imec(\d+).*', imec_file.name)
        if match:
            # 3B system: the probe number is encoded in the file name (probe0x == imecx)
            (number,) = match.groups()
            label = f'probe{number.zfill(2)}'
        else:
            # 3A system: imec files must already live inside a 'probexx' folder
            found = re.search(r'probe\d+', str(imec_file))
            assert found, f'Cannot assign probe number to file {imec_file}'
            label = found.group()
        destination.joinpath(label).mkdir(exist_ok=True)
        shutil.move(imec_file, destination.joinpath(label, imec_file.name))

    # NIDAq files (3B system only) go at the root of raw_ephys_data
    for nidq_file in session_path.rglob("*.nidq.*"):
        shutil.move(str(nidq_file), str(destination / nidq_file.name))
    # Delete all empty folders recursively
    delete_empty_folders(destination, dry=False, recursive=True)
def create_custom_ephys_wirings(iblscripts_folder: str):
    """Copy editable wiring templates into an ``iblscripts_params`` folder next to iblscripts."""
    iblscripts_path = Path(iblscripts_folder)
    PARAMS = load_ephyspc_params()
    probe_set = {v for k, v in PARAMS.items() if k.startswith('PROBE_TYPE')}

    params_path = iblscripts_path.parent / "iblscripts_params"
    params_path.mkdir(parents=True, exist_ok=True)
    wirings_path = iblscripts_path / "deploy" / "ephyspc" / "wirings"
    # one wiring file per configured probe
    probe_types = [(k, v) for k, v in PARAMS.items() if k.startswith('PROBE_TYPE_')]
    for key, probe_type in probe_types:
        probe_label = f'probe{key[-2:]}'
        if probe_type not in ('3A', '3B'):
            raise ValueError(f'Unsupported probe type "{probe_type}"')
        shutil.copy(
            wirings_path / f"{probe_type}.wiring.json", params_path / f"{probe_type}_{probe_label}.wiring.json"
        )
        print(f"Created {probe_type}.wiring.json in {params_path} for {probe_label}")
    # a 3B system additionally needs the nidq breakout-box wiring
    if "3B" in probe_set:
        shutil.copy(wirings_path / "nidq.wiring.json", params_path / "nidq.wiring.json")
        print(f"Created nidq.wiring.json in {params_path}")
    print(f"\nYou can now modify your wiring files from folder {params_path}")
def get_iblscripts_folder():
    """Return the iblscripts repository root, assuming cwd is two levels below it."""
    return str(Path.cwd().parent.parent)
def copy_wiring_files(session_folder, iblscripts_folder):
    """Run after moving files to probe folders"""
    PARAMS = load_ephyspc_params()
    if PARAMS["PROBE_TYPE_00"] != PARAMS["PROBE_TYPE_01"]:
        print("Having different probe types is not supported")
        raise NotImplementedError()
    session_path = Path(session_folder)
    iblscripts_path = Path(iblscripts_folder)
    custom_params_path = iblscripts_path.parent / "iblscripts_params"
    default_wirings_path = iblscripts_path / "deploy" / "ephyspc" / "wirings"
    termination = '.wiring.json'
    # Determine system
    ephys_system = PARAMS["PROBE_TYPE_00"]
    # Custom wirings (if present) take precedence over the defaults shipped with iblscripts
    src_wiring_path = custom_params_path if custom_params_path.exists() else default_wirings_path
    probe_wiring_file_path = src_wiring_path / f"{ephys_system}{termination}"

    if ephys_system == "3B":
        # Copy one nidq wiring file next to each nidq bin file
        for nidq_bin in session_path.rglob("*.nidq.bin"):
            nidq_wiring_name = ".".join(str(nidq_bin.name).split(".")[:-1]) + termination
            shutil.copy(
                str(src_wiring_path / f"nidq{termination}"),
                str(session_path / "raw_ephys_data" / nidq_wiring_name),
            )
    # If system is either (3A OR 3B) copy a wiring file for each ap.bin file
    for ap_bin in session_path.rglob("*.ap.bin"):
        probe_match = re.search(r'probe\d+', str(ap_bin))
        if not probe_match:
            continue
        wiring_name = ".".join(str(ap_bin.name).split(".")[:-2]) + termination
        dst_path = session_path / "raw_ephys_data" / probe_match.group() / wiring_name
        shutil.copy(probe_wiring_file_path, dst_path)
def multi_parts_flags_creation(root_paths: Union[list, str, Path]) -> dict:
    """
    Creates the sequence files to run spike sorting in batches
    A sequence file is a json file with the following fields:
     sha1: a unique hash of the metafiles involved
     probe: a string with the probe name
     index: the index within the sequence
     nrecs: the length of the sequence
     files: a list of files
    :param root_paths: a folder (str or Path) or a list of folders to search for ap.meta files
    :return: dict mapping probe label to the sorted list of meta files
    """
    from one.alf import io as alfio
    # "001/raw_ephys_data/probe00/_spikeglx_ephysData_g0_t0.imec0.ap.meta",
    if isinstance(root_paths, (str, Path)):
        root_paths = [root_paths]
    # coerce every entry to Path: a plain string has no rglob method
    root_paths = [Path(p) for p in root_paths]
    recordings = {}
    for root_path in root_paths:
        for meta_file in root_path.rglob("*.ap.meta"):
            # we want to make sure that the file is just under session_path/raw_ephys_data/{probe_label}
            session_path = alfio.files.get_session_path(meta_file)
            raw_ephys_path = session_path.joinpath('raw_ephys_data')
            if meta_file.parents[1] != raw_ephys_path:
                log.warning(f"{meta_file} is not in a probe directory and will be skipped")
                continue
            # stack the meta-file in the probe label key of the recordings dictionary
            plabel = meta_file.parts[-2]
            recordings.setdefault(plabel, []).append(meta_file)
    # once we have all of the files
    for plabel, meta_files in recordings.items():
        meta_files.sort()
        nrecs = len(meta_files)
        # the identifier of the overarching recording sequence is the hash of hashes of the files
        m = hashlib.sha1()
        for meta_file in meta_files:
            # avoid shadowing the builtin `hash`
            file_hash = hashfile.sha1(meta_file)
            m.update(file_hash.encode())
        # writes the sequence files
        for i, meta_file in enumerate(meta_files):
            sequence_file = meta_file.parent.joinpath(meta_file.name.replace('ap.meta', 'sequence.json'))
            with open(sequence_file, 'w+') as fid:
                json.dump(dict(sha1=m.hexdigest(), probe=plabel, index=i, nrecs=nrecs,
                               files=list(map(str, meta_files))), fid)
            log.info(f"{plabel}: {i}/{nrecs} written sequence file {recordings}")
    return recordings
class WindowsInhibitor:
    """Prevent OS sleep/hibernate in windows; code from:
    https://github.com/h3llrais3r/Deluge-PreventSuspendPlus/blob/master/preventsuspendplus/core.py
    API documentation:
    https://msdn.microsoft.com/en-us/library/windows/desktop/aa373208(v=vs.85).aspx"""
    # execution-state flags passed to kernel32.SetThreadExecutionState
    ES_CONTINUOUS = 0x80000000
    ES_SYSTEM_REQUIRED = 0x00000001

    def __init__(self):
        pass

    def inhibit(self):
        """Tell Windows the system is required, preventing idle sleep."""
        print("Preventing Windows from going to sleep")
        es_flags = WindowsInhibitor.ES_CONTINUOUS | WindowsInhibitor.ES_SYSTEM_REQUIRED
        ctypes.windll.kernel32.SetThreadExecutionState(es_flags)

    def uninhibit(self):
        """Clear the system-required flag, allowing idle sleep again."""
        print("Allowing Windows to go to sleep")
        ctypes.windll.kernel32.SetThreadExecutionState(WindowsInhibitor.ES_CONTINUOUS)