Coverage for ibllib/pipes/base_tasks.py: 95%
239 statements
« prev ^ index » next coverage.py v7.9.1, created at 2025-07-02 18:55 +0100
« prev ^ index » next coverage.py v7.9.1, created at 2025-07-02 18:55 +0100
1"""Abstract base classes for dynamic pipeline tasks."""
2import logging
3from pathlib import Path
5from packaging import version
6from one.webclient import no_cache
7from iblutil.util import flatten, ensure_list
8import matplotlib.image
9from skimage.io import ImageCollection, imread
11from ibllib.pipes.tasks import Task
12import ibllib.io.session_params as sess_params
13from ibllib.qc.base import sign_off_dict, SIGN_OFF_CATEGORIES
14from ibllib.io.raw_daq_loaders import load_timeline_sync_and_chmap
15from ibllib.oneibl.data_handlers import update_collections
17_logger = logging.getLogger(__name__)
class DynamicTask(Task):
    """A dynamic pipeline task.

    Sync-related attributes (collection, label, extension, namespace) are resolved from
    the keyword arguments when given, otherwise from the session's experiment description.
    """

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        self.session_params = self.read_params_file()

        # TODO Which should be default?
        # Each attribute below falls back to the experiment description when not passed in
        self.sync_collection = self.get_sync_collection(kwargs.get('sync_collection', None))
        self.sync = self.get_sync(kwargs.get('sync', None))
        self.sync_ext = self.get_sync_extension(kwargs.get('sync_ext', None))
        self.sync_namespace = self.get_sync_namespace(kwargs.get('sync_namespace', None))

    def get_sync_collection(self, sync_collection=None):
        """Return `sync_collection` if truthy, else the value from the experiment description."""
        if sync_collection:
            return sync_collection
        return sess_params.get_sync_collection(self.session_params)

    def get_sync(self, sync=None):
        """Return `sync` if truthy, else the sync label from the experiment description."""
        if sync:
            return sync
        return sess_params.get_sync_label(self.session_params)

    def get_sync_extension(self, sync_ext=None):
        """Return `sync_ext` if truthy, else the value from the experiment description."""
        if sync_ext:
            return sync_ext
        return sess_params.get_sync_extension(self.session_params)

    def get_sync_namespace(self, sync_namespace=None):
        """Return `sync_namespace` if truthy, else the value from the experiment description."""
        if sync_namespace:
            return sync_namespace
        return sess_params.get_sync_namespace(self.session_params)

    def get_protocol(self, protocol=None, task_collection=None):
        """Return `protocol` if truthy, else the task protocol for `task_collection`."""
        if protocol:
            return protocol
        return sess_params.get_task_protocol(self.session_params, task_collection)

    def get_task_collection(self, collection=None):
        """Return `collection`, or the task collection from the experiment description."""
        if not collection:
            collection = sess_params.get_task_collection(self.session_params)
        # If inferring the collection from the experiment description, assert only one returned
        assert collection is None or isinstance(collection, str) or len(collection) == 1
        return collection

    def get_device_collection(self, device, device_collection=None):
        """Return `device_collection`, or look the device up in the experiment description."""
        if device_collection:
            return device_collection
        return sess_params.get_collections(self.session_params['devices']).get(device)

    def read_params_file(self):
        """Read the session parameters file.

        Returns
        -------
        dict
            The session parameters dictionary, or an empty dictionary if the file does not exist.
        """
        params = sess_params.read_params(self.session_path) if self.session_path else None
        return params or {}
class BehaviourTask(DynamicTask):

    extractor = None
    """ibllib.io.extractors.base.BaseBpodExtractor: A trials extractor object."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)

        self.collection = self.get_task_collection(kwargs.get('collection', None))
        # Task type (protocol)
        self.protocol = self.get_protocol(kwargs.get('protocol', None), task_collection=self.collection)

        self.protocol_number = self.get_protocol_number(kwargs.get('protocol_number'), task_protocol=self.protocol)

        # Do not use kwargs.get('number', None) -- this will return None if number is 0
        parts = ['alf']
        if self.protocol_number is not None:
            parts.append(f'task_{self.protocol_number:02}')
        self.output_collection = '/'.join(parts)

    def get_protocol(self, protocol=None, task_collection=None):
        """
        Return the task protocol name.

        Returns `protocol` unchanged when given. Otherwise the protocol is looked up in the
        experiment description: when `task_collection` is given, the protocol associated with
        that collection is returned; when it is None, a protocol is returned only if a single
        protocol was run.

        Parameters
        ----------
        protocol : str
            A task protocol name. If not None, the same value is returned.
        task_collection : str
            The task collection whose protocol name to return. May be None if only one
            protocol was run.

        Returns
        -------
        str, None
            The task protocol name, or None, if no protocol found.

        Raises
        ------
        ValueError
            For sessions with multiple task protocols, a task collection must be passed.
        """
        if protocol:
            return protocol
        found = sess_params.get_task_protocol(self.session_params, task_collection) or None
        if not isinstance(found, set):
            return found
        if len(found) != 1:
            raise ValueError('Multiple task protocols for session. Task collection must be explicitly defined.')
        return next(iter(found))

    def get_task_collection(self, collection=None):
        """
        Return the task collection.

        Returns `collection` unchanged when given; otherwise it is loaded from the
        experiment description when exactly one protocol was run.

        Parameters
        ----------
        collection : str
            A task collection. If not None, the same value is returned.

        Returns
        -------
        str, None
            The task collection, or None if no task protocols were run.

        Raises
        ------
        AssertionError
            Raised if multiple protocols were run and collection is None, or if the
            experiment description file is improperly formatted.
        """
        collection = collection or sess_params.get_task_collection(self.session_params)
        # If inferring the collection from the experiment description, assert only one returned
        assert collection is None or isinstance(collection, str) or len(collection) == 1
        return collection

    def get_protocol_number(self, number=None, task_protocol=None):
        """
        Return the task protocol number.

        Numbering starts from 0. Returns None when the 'protocol_number' field is missing
        from the experiment description. If `task_protocol` is None, the first protocol
        number is returned when exactly one protocol was run, otherwise an AssertionError
        is raised.

        NB: :func:`ibllib.pipes.dynamic_pipeline.make_pipeline` will determine the protocol
        number from the order of the tasks in the experiment description if the task
        collection follows the pattern 'raw_task_data_XX'. If the task protocol does not
        follow this pattern, the experiment description file should explicitly define the
        number with the 'protocol_number' field.

        Parameters
        ----------
        number : int
            The protocol number. If not None, the same value is returned.
        task_protocol : str
            The task protocol name.

        Returns
        -------
        int, None
            The task protocol number, if defined.
        """
        # Do not test truthiness here: 0 is a valid protocol number
        if number is not None:
            if not isinstance(number, int):
                number = int(number)
        else:
            number = sess_params.get_task_protocol_number(self.session_params, task_protocol)
        # If inferring the number from the experiment description, assert only one returned (or something went wrong)
        assert number is None or isinstance(number, int), 'ambiguous protocol number; no task protocol defined'
        return number

    @staticmethod
    def _spacer_support(settings):
        """
        Spacer support was introduced in v7.1 for iblrig v7 and v8.0.1 in v8.

        Parameters
        ----------
        settings : dict
            The task settings dict.

        Returns
        -------
        bool
            True if task spacers are to be expected.
        """
        parse = version.parse
        rig_version = parse(settings.get('IBLRIG_VERSION') or '100.0.0')
        if rig_version < parse('7.1.0'):
            return False
        # 100.0.0 is the sentinel for a missing version; 8.0.0 predates v8 spacer support
        return rig_version not in (parse('100.0.0'), parse('8.0.0'))

    def extract_behaviour(self, save=True):
        """Extract trials data.

        Abstract method called by `_run` and `run_qc`. Subclasses should return the
        extracted trials data and a list of output files, and should also store the trials
        extractor object on the :prop:`extractor` property for use by `run_qc`.

        Parameters
        ----------
        save : bool
            Whether to save the extracted data as ALF datasets.

        Returns
        -------
        dict
            A dictionary of trials data.
        list of pathlib.Path
            A list of output file paths if save == true.
        """
        return None, None

    def run_qc(self, trials_data=None, update=True):
        """Run task QC.

        Subclass method should return the QC object. This base implementation only
        validates that the trials data is not None.

        Parameters
        ----------
        trials_data : dict
            A dictionary of extracted trials data. The output of :meth:`extract_behaviour`.
        update : bool
            If true, update Alyx with the QC outcome.

        Returns
        -------
        ibllib.qc.task_metrics.TaskQC
            A TaskQC object replete with task data and computed metrics.
        """
        self._assert_trials_data(trials_data)
        return None

    def _assert_trials_data(self, trials_data=None):
        """Check trials data available.

        Called by :meth:`run_qc`; extracts the trials data via :meth:`extract_behaviour`
        when `trials_data` is None, and raises if no data or extractor can be obtained.

        Parameters
        ----------
        trials_data : dict, None
            A dictionary of extracted trials data or None.

        Returns
        -------
        trials_data : dict
            A dictionary of extracted trials data. The output of :meth:`extract_behaviour`.
        """
        if trials_data is None or not self.extractor:
            trials_data, _ = self.extract_behaviour(save=False)
        if not trials_data or not self.extractor:
            raise ValueError('No trials data and/or extractor found')
        return trials_data
class VideoTask(DynamicTask):

    def __init__(self, session_path, cameras, **kwargs):
        super().__init__(session_path, cameras=cameras, **kwargs)
        self.cameras = cameras
        self.device_collection = self.get_device_collection(
            'cameras', kwargs.get('device_collection', 'raw_video_data'))

    def extract_camera(self, save=True):
        """Extract camera data.

        Abstract method called by `_run` and `run_qc`. Subclasses should return the
        extracted data and a list of output files, and store the extractor object on the
        :prop:`extractor` property for use by `run_qc`.

        Parameters
        ----------
        save : bool
            Whether to save the extracted data as ALF datasets.

        Returns
        -------
        dict
            A dictionary of extracted data.
        list of pathlib.Path
            A list of output file paths if save == true.
        """
        return None, None

    def run_qc(self, camera_data=None, update=True):
        """Run camera QC.

        Subclass method should return the QC object. This base implementation only
        validates that the camera data is not None.

        Parameters
        ----------
        camera_data : dict
            A dictionary of extracted camera data. The output of :meth:`extract_camera`.
        update : bool
            If true, update Alyx with the QC outcome.

        Returns
        -------
        ibllib.qc.task_metrics.TaskQC
            A TaskQC object replete with task data and computed metrics.
        """
        # NOTE(review): _assert_trials_data is defined on BehaviourTask, not DynamicTask —
        # confirm concrete subclasses also provide it (e.g. via multiple inheritance).
        self._assert_trials_data(camera_data)
        return None
class AudioTask(DynamicTask):
    """Task for raw microphone data, located in the session's audio device collection."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        default_collection = kwargs.get('device_collection', 'raw_behavior_data')
        self.device_collection = self.get_device_collection('microphone', default_collection)
class EphysTask(DynamicTask):
    """Task for Neuropixel electrophysiology data."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)

        self.pname = self.get_pname(kwargs.get('pname', None))
        self.nshanks, self.pextra = self.get_nshanks(kwargs.get('nshanks', None))
        self.device_collection = self.get_device_collection(
            'neuropixel', kwargs.get('device_collection', 'raw_ephys_data'))

    def get_pname(self, pname):
        """Return the probe name(s); may be a list or a string."""
        return self.kwargs.get('pname', pname)

    def get_nshanks(self, nshanks=None):
        """Return the number of shanks and the per-shank letter suffixes ('a', 'b', ...)."""
        nshanks = self.kwargs.get('nshanks', nshanks)
        pextra = [] if nshanks is None else [chr(ord('a') + i) for i in range(nshanks)]
        return nshanks, pextra
class WidefieldTask(DynamicTask):
    """Task for widefield imaging data, located in the widefield device collection."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        default_collection = kwargs.get('device_collection', 'raw_widefield_data')
        self.device_collection = self.get_device_collection('widefield', default_collection)
class MesoscopeTask(DynamicTask):
    """Task for mesoscope imaging data; device collection may be a glob pattern."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        self.device_collection = self.get_device_collection(
            'mesoscope', kwargs.get('device_collection', 'raw_imaging_data_[0-9]*'))

    def get_signatures(self, **kwargs):
        """
        From the template signature of the task, create the exact list of inputs and outputs to expect based on the
        available device collection folders.

        Necessary because we don't know in advance how many device collection folders ("imaging bouts") to expect.
        """
        self.session_path = Path(self.session_path)
        # Glob for all device collection (raw imaging data) folders
        bout_folders = [folder.name for folder in self.session_path.glob(self.device_collection)]
        super().get_signatures(**kwargs)  # Set inputs and outputs

        # Expand each signature within the device collection to one file per bout folder;
        # all other signatures are left unchanged
        def expand(signature):
            return update_collections(signature, bout_folders, self.device_collection, exact_match=True)

        self.input_files = list(map(expand, self.input_files))
        self.output_files = list(map(expand, self.output_files))

    def load_sync(self):
        """
        Load the sync and channel map.

        This method may be expanded to support other raw DAQ data formats.

        Returns
        -------
        one.alf.io.AlfBunch
            A dictionary with keys ('times', 'polarities', 'channels'), containing the sync pulses
            and the corresponding channel numbers.
        dict
            A map of channel names and their corresponding indices.
        """
        daq_path = self.session_path / self.sync_collection
        if self.get_sync_namespace() != 'timeline':
            # Only timeline DAQ data are currently supported
            raise NotImplementedError
        return load_timeline_sync_and_chmap(daq_path)
class RegisterRawDataTask(DynamicTask):
    """
    Base register raw task.
    To rename files
    1. input and output must have the same length
    2. output files must have full filename
    """

    priority = 100
    job_size = 'small'

    def rename_files(self, symlink_old=False):
        """Rename input files to their expected output names.

        Parameters
        ----------
        symlink_old : bool
            If true, a symlink pointing to the new location is left at each old path.

        Raises
        ------
        FileNotFoundError
            A required input file was not found on disk.
        """
        # If either no inputs or no outputs are given, we don't do any renaming
        if not all(map(len, (self.input_files, self.output_files))):
            return

        # Otherwise we need to make sure there is one to one correspondence for renaming files
        assert len(self.input_files) == len(self.output_files)

        for before, after in zip(self.input_files, self.output_files):
            ok, old_paths, missing = before.find_files(self.session_path)
            if not old_paths:
                if ok:  # if the file doesn't exist and it is not required we are okay to continue
                    continue
                else:
                    # str-coerce: `missing` entries may be Path objects, which str.join rejects
                    raise FileNotFoundError(f'file(s) {", ".join(map(str, missing))} not found')
            new_paths = list(map(self.session_path.joinpath, ensure_list(after.glob_pattern)))
            assert len(old_paths) == len(new_paths)
            for old_path, new_path in zip(old_paths, new_paths):
                if old_path == new_path:
                    continue
                new_path.parent.mkdir(parents=True, exist_ok=True)
                _logger.debug('%s -> %s', old_path.relative_to(self.session_path), new_path.relative_to(self.session_path))
                old_path.replace(new_path)
                if symlink_old:
                    old_path.symlink_to(new_path)

    @staticmethod
    def _is_animated_gif(snapshot: Path) -> bool:
        """
        Test if image is an animated GIF file.

        Parameters
        ----------
        snapshot : pathlib.Path
            An image filepath to test.

        Returns
        -------
        bool
            True if image is an animated GIF.

        Notes
        -----
        This could be achieved more succinctly with `from PIL import Image; Image.open(snapshot).is_animated`,
        however despite being an indirect dependency, the Pillow library is not in the requirements,
        whereas skimage is.
        """
        return snapshot.suffix == '.gif' and len(ImageCollection(str(snapshot))) > 1

    @staticmethod
    def _save_as_png(snapshot: Path) -> Path:
        """
        Save an image to PNG format.

        Parameters
        ----------
        snapshot : pathlib.Path
            An image filepath to convert.

        Returns
        -------
        pathlib.Path
            The new PNG image filepath.
        """
        png_file = snapshot.with_suffix('.png')  # computed once; also the return value
        img = imread(snapshot, as_gray=True)
        matplotlib.image.imsave(png_file, img, cmap='gray')
        return png_file

    def register_snapshots(self, unlink=False, collection=None):
        """
        Register any photos in the snapshots folder to the session. Typically imaging users will
        take numerous photos for reference. Supported extensions: .jpg, .jpeg, .png, .tif, .tiff

        If a .txt file with the same name exists in the same location, the contents will be added
        to the note text.

        Parameters
        ----------
        unlink : bool
            If true, files are deleted after upload.
        collection : str, list, optional
            Location of 'snapshots' folder relative to the session path. If None, uses
            'device_collection' attribute (if exists) or root session path.

        Returns
        -------
        list of dict
            The newly registered Alyx notes.

        Notes
        -----
        - Animated GIF files are not resized and therefore may take up significant space on the database.
        - TIFF files are converted to PNG format before upload. The original file is not replaced.
        - JPEG and PNG files are resized by Alyx.
        """
        assert self.one and not self.one.offline, f'{self.__class__.__name__} requires an online ONE instance'
        if not self.one.alyx.is_logged_in:
            # Register snapshot requires the user field to be set, which may happen before a REST
            # query is made. To avoid the user field being None, we authenticate here. If the
            # token is cached this will simply set the user and token properties.
            self.one.alyx.authenticate()

        collection = getattr(self, 'device_collection', None) if collection is None else collection
        collection = collection or ''  # If not defined, use no collection
        if collection and '*' in collection:
            collection = [p.name for p in self.session_path.glob(collection)]
            # Check whether folders on disk contain '*'; this is to stop an infinite recursion
            assert not any('*' in c for c in collection), 'folders containing asterisks not supported'
        # If more than one collection exists, register snapshots in each collection
        if collection and not isinstance(collection, str):
            return flatten(filter(None, [self.register_snapshots(unlink, c) for c in collection]))
        snapshots_path = self.session_path.joinpath(*filter(None, (collection, 'snapshots')))
        if not snapshots_path.exists():
            return

        eid = self.one.path2eid(self.session_path, query_type='remote')
        if not eid:
            _logger.warning('Failed to upload snapshots: session not found on Alyx')
            return
        note = dict(user=self.one.alyx.user, content_type='session', object_id=eid, text='')

        notes = []
        exts = ('.jpg', '.jpeg', '.png', '.tif', '.tiff', '.gif')
        for snapshot in filter(lambda x: x.suffix.lower() in exts, snapshots_path.glob('*.*')):
            # BUGFIX: lower-case the suffix here too, so that e.g. '.TIF' files (which pass the
            # case-insensitive extension filter above) are also converted to PNG before upload
            if snapshot.suffix.lower() in ('.tif', '.tiff') and not snapshot.with_suffix('.png').exists():
                _logger.debug('converting "%s" to png...', snapshot.relative_to(self.session_path))
                snapshot = self._save_as_png(snapshot_tif := snapshot)
                if unlink:
                    snapshot_tif.unlink()
            _logger.info('Uploading "%s"...', snapshot.relative_to(self.session_path))
            if snapshot.with_suffix('.txt').exists():
                with open(snapshot.with_suffix('.txt'), 'r') as txt_file:
                    note['text'] = txt_file.read().strip()
            else:
                note['text'] = ''
            # Animated GIFs are uploaded at original size so the animation is preserved
            note['width'] = 'orig' if self._is_animated_gif(snapshot) else None
            with open(snapshot, 'rb') as img_file:
                files = {'image': img_file}
                notes.append(self.one.alyx.rest('notes', 'create', data=note, files=files))
            if unlink:
                snapshot.unlink()
        # If nothing else in the snapshots folder, delete the folder
        if unlink and next(snapshots_path.rglob('*'), None) is None:
            snapshots_path.rmdir()
        _logger.info('%i snapshots uploaded to Alyx', len(notes))
        return notes

    def _run(self, **kwargs):
        """Rename raw files and verify the expected outputs exist.

        Returns
        -------
        list of pathlib.Path
            The output files found on disk.
        """
        self.rename_files(**kwargs)
        if not self.output_files:
            return []

        # FIXME Can be done with Task.assert_expected_outputs
        ok, out_files, missing = map(flatten, zip(*map(lambda x: x.find_files(self.session_path), self.output_files)))
        if not ok:
            # BUGFIX: message previously read 'The following expected are missing'
            _logger.error('The following expected files are missing: %s', ', '.join(map(str, missing)))
            self.status = -1

        return out_files
class ExperimentDescriptionRegisterRaw(RegisterRawDataTask):
    """Register the experiment description file and update the session sign-off checklist."""

    # dict of list: custom sign off keys corresponding to specific devices
    sign_off_categories = SIGN_OFF_CATEGORIES

    @property
    def signature(self):
        return {
            'input_files': [],
            'output_files': [('*experiment.description.yaml', '', True)]
        }

    def _run(self, **kwargs):
        # Register experiment description file
        out_files = super()._run(**kwargs)
        if not self.one.offline and self.status == 0:
            with no_cache(self.one.alyx):  # Ensure we don't load the cached JSON response
                eid = self.one.path2eid(self.session_path, query_type='remote')
                exp_dec = sess_params.read_params(out_files[0])
                data = sign_off_dict(exp_dec, sign_off_categories=self.sign_off_categories)
                self.one.alyx.json_field_update('sessions', eid, data=data)
        return out_files