Coverage for ibllib/pipes/base_tasks.py: 95%
237 statements
« prev ^ index » next coverage.py v7.7.0, created at 2025-03-17 15:25 +0000
1"""Abstract base classes for dynamic pipeline tasks."""
2import logging
3from pathlib import Path
5from packaging import version
6from one.webclient import no_cache
7from iblutil.util import flatten, ensure_list
8import matplotlib.image
9from skimage.io import ImageCollection, imread
11from ibllib.pipes.tasks import Task
12import ibllib.io.session_params as sess_params
13from ibllib.qc.base import sign_off_dict, SIGN_OFF_CATEGORIES
14from ibllib.io.raw_daq_loaders import load_timeline_sync_and_chmap
15from ibllib.oneibl.data_handlers import update_collections
# Module-level logger shared by all pipeline base tasks in this file.
_logger = logging.getLogger(__name__)
class DynamicTask(Task):
    """Task whose parameters are resolved from the session's experiment description file."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        self.session_params = self.read_params_file()

        # TODO Which should be default?
        # Each sync attribute may be passed in explicitly as a kwarg; otherwise it is
        # resolved from the experiment description parameters.
        self.sync_collection = self.get_sync_collection(kwargs.get('sync_collection', None))
        self.sync = self.get_sync(kwargs.get('sync', None))
        self.sync_ext = self.get_sync_extension(kwargs.get('sync_ext', None))
        self.sync_namespace = self.get_sync_namespace(kwargs.get('sync_namespace', None))

    def get_sync_collection(self, sync_collection=None):
        """Return `sync_collection` if truthy, else the one from the experiment description."""
        return sync_collection or sess_params.get_sync_collection(self.session_params)

    def get_sync(self, sync=None):
        """Return `sync` if truthy, else the sync label from the experiment description."""
        return sync or sess_params.get_sync_label(self.session_params)

    def get_sync_extension(self, sync_ext=None):
        """Return `sync_ext` if truthy, else the one from the experiment description."""
        return sync_ext or sess_params.get_sync_extension(self.session_params)

    def get_sync_namespace(self, sync_namespace=None):
        """Return `sync_namespace` if truthy, else the one from the experiment description."""
        return sync_namespace or sess_params.get_sync_namespace(self.session_params)

    def get_protocol(self, protocol=None, task_collection=None):
        """Return `protocol` if truthy, else the protocol for `task_collection` from the description."""
        return protocol or sess_params.get_task_protocol(self.session_params, task_collection)

    def get_task_collection(self, collection=None):
        """Return `collection` if truthy, else infer it from the experiment description."""
        if not collection:
            collection = sess_params.get_task_collection(self.session_params)
        # If inferring the collection from the experiment description, assert only one returned
        assert collection is None or isinstance(collection, str) or len(collection) == 1
        return collection

    def get_device_collection(self, device, device_collection=None):
        """Return `device_collection` if truthy, else look `device` up in the description's devices map."""
        if device_collection:
            return device_collection
        collection_map = sess_params.get_collections(self.session_params['devices'])
        return collection_map.get(device)

    def read_params_file(self):
        """Read the experiment description parameters; return an empty dict when none found."""
        params = sess_params.read_params(self.session_path)
        if params is None:
            return {}
        # TODO figure out the best way
        # if params is None and self.one:
        #     # Try to read params from alyx or try to download params file
        #     params = self.one.load_dataset(self.one.path2eid(self.session_path), 'params.yml')
        #     params = self.one.alyx.rest()
        return params
class BehaviourTask(DynamicTask):

    extractor = None
    """ibllib.io.extractors.base.BaseBpodExtractor: A trials extractor object."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)

        self.collection = self.get_task_collection(kwargs.get('collection', None))
        # Task type (protocol)
        self.protocol = self.get_protocol(kwargs.get('protocol', None), task_collection=self.collection)

        self.protocol_number = self.get_protocol_number(kwargs.get('protocol_number'), task_protocol=self.protocol)

        self.output_collection = 'alf'
        # Do not use kwargs.get('number', None) -- this will return None if number is 0
        if self.protocol_number is not None:
            self.output_collection += f'/task_{self.protocol_number:02}'

    def get_protocol(self, protocol=None, task_collection=None):
        """
        Return the task protocol name.

        If `protocol` is truthy this acts as an identity function. Otherwise the protocol is
        looked up in the experiment description for `task_collection`; if `task_collection` is
        also None, the protocol is returned only when a single protocol was run.

        Parameters
        ----------
        protocol : str
            A task protocol name. If not None, the same value is returned.
        task_collection : str
            The task collection whose protocol name to return. May be None if only one protocol run.

        Returns
        -------
        str, None
            The task protocol name, or None, if no protocol found.

        Raises
        ------
        ValueError
            For session with multiple task protocols, a task collection must be passed.
        """
        if protocol:
            return protocol
        found = sess_params.get_task_protocol(self.session_params, task_collection) or None
        if not isinstance(found, set):
            return found
        if len(found) != 1:
            raise ValueError('Multiple task protocols for session. Task collection must be explicitly defined.')
        return next(iter(found))

    def get_task_collection(self, collection=None):
        """
        Return the task collection.

        If `collection` is truthy this acts as an identity function. Otherwise the collection is
        loaded from the experiment description, valid only when a single protocol was run.

        Parameters
        ----------
        collection : str
            A task collection. If not None, the same value is returned.

        Returns
        -------
        str, None
            The task collection, or None if no task protocols were run.

        Raises
        ------
        AssertionError
            Raised if multiple protocols were run and collection is None, or if experiment
            description file is improperly formatted.
        """
        if not collection:
            collection = sess_params.get_task_collection(self.session_params)
        # If inferring the collection from the experiment description, assert only one returned
        assert collection is None or isinstance(collection, str) or len(collection) == 1
        return collection

    def get_protocol_number(self, number=None, task_protocol=None):
        """
        Return the task protocol number.

        Numbering starts from 0. If the 'protocol_number' field is missing from the experiment
        description, None is returned. If `task_protocol` is None, the first protocol number if n
        protocols == 1, otherwise raises an AssertionError.

        NB: :func:`ibllib.pipes.dynamic_pipeline.make_pipeline` will determine the protocol number
        from the order of the tasks in the experiment description if the task collection follows
        the pattern 'raw_task_data_XX'. If the task protocol does not follow this pattern, the
        experiment description file should explicitly define the number with the 'protocol_number'
        field.

        Parameters
        ----------
        number : int
            The protocol number. If not None, the same value is returned.
        task_protocol : str
            The task protocol name.

        Returns
        -------
        int, None
            The task protocol number, if defined.
        """
        # NB: 0 is a valid protocol number, so explicitly test for None rather than truthiness
        if number is None:
            number = sess_params.get_task_protocol_number(self.session_params, task_protocol)
        elif not isinstance(number, int):
            number = int(number)
        # If inferring the number from the experiment description, assert only one returned (or something went wrong)
        assert number is None or isinstance(number, int), 'ambiguous protocol number; no task protocol defined'
        return number

    @staticmethod
    def _spacer_support(settings):
        """
        Spacer support was introduced in v7.1 for iblrig v7 and v8.0.1 in v8.

        Parameters
        ----------
        settings : dict
            The task settings dict.

        Returns
        -------
        bool
            True if task spacers are to be expected.
        """
        parse = version.parse
        # A missing version is treated as '100.0.0' (i.e. assumed recent but unknown)
        rig_version = parse(settings.get('IBLRIG_VERSION') or '100.0.0')
        if rig_version in (parse('100.0.0'), parse('8.0.0')):
            return False
        return rig_version >= parse('7.1.0')

    def extract_behaviour(self, save=True):
        """Extract trials data.

        This is an abstract method called by `_run` and `run_qc` methods. Subclasses should return
        the extracted trials data and a list of output files. This method should also save the
        trials extractor object to the :prop:`extractor` property for use by `run_qc`.

        Parameters
        ----------
        save : bool
            Whether to save the extracted data as ALF datasets.

        Returns
        -------
        dict
            A dictionary of trials data.
        list of pathlib.Path
            A list of output file paths if save == true.
        """
        return None, None

    def run_qc(self, trials_data=None, update=True):
        """Run task QC.

        Subclass method should return the QC object. This just validates the trials_data is not
        None.

        Parameters
        ----------
        trials_data : dict
            A dictionary of extracted trials data. The output of :meth:`extract_behaviour`.
        update : bool
            If true, update Alyx with the QC outcome.

        Returns
        -------
        ibllib.qc.task_metrics.TaskQC
            A TaskQC object replete with task data and computed metrics.
        """
        self._assert_trials_data(trials_data)
        return None

    def _assert_trials_data(self, trials_data=None):
        """Check trials data available.

        Called by :meth:`run_qc`, this extracts the trial data if `trials_data` is None, and raises
        if :meth:`extract_behaviour` returns None.

        Parameters
        ----------
        trials_data : dict, None
            A dictionary of extracted trials data or None.

        Returns
        -------
        trials_data : dict
            A dictionary of extracted trials data. The output of :meth:`extract_behaviour`.
        """
        if trials_data is None or not self.extractor:
            trials_data, _ = self.extract_behaviour(save=False)
        if not (trials_data and self.extractor):
            raise ValueError('No trials data and/or extractor found')
        return trials_data
class VideoTask(DynamicTask):

    def __init__(self, session_path, cameras, **kwargs):
        super().__init__(session_path, cameras=cameras, **kwargs)
        self.cameras = cameras
        self.device_collection = self.get_device_collection('cameras', kwargs.get('device_collection', 'raw_video_data'))
        # self.collection = self.get_task_collection(kwargs.get('collection', None))

    def extract_camera(self, save=True):
        """Extract camera data.

        Abstract method called by `_run` and `run_qc`; subclasses should return the extracted
        camera data and a list of output files, and save the extractor object to the
        :prop:`extractor` property for use by `run_qc`.

        Parameters
        ----------
        save : bool
            Whether to save the extracted data as ALF datasets.

        Returns
        -------
        dict
            A dictionary of camera data.
        list of pathlib.Path
            A list of output file paths if save == true.
        """
        return None, None

    def run_qc(self, camera_data=None, update=True):
        """Run camera QC.

        Subclass method should return the QC object. This just validates that the camera data
        is not None.

        Parameters
        ----------
        camera_data : dict
            A dictionary of extracted camera data. The output of :meth:`extract_camera`.
        update : bool
            If true, update Alyx with the QC outcome.

        Returns
        -------
        ibllib.qc.task_metrics.TaskQC
            A TaskQC object replete with task data and computed metrics.
        """
        self._assert_trials_data(camera_data)
        return None
class AudioTask(DynamicTask):
    """Task operating on raw microphone recordings."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        # Default microphone data location unless explicitly overridden
        self.device_collection = self.get_device_collection('microphone', kwargs.get('device_collection', 'raw_behavior_data'))
class EphysTask(DynamicTask):
    """Task operating on raw Neuropixel electrophysiology data."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)

        self.pname = self.get_pname(kwargs.get('pname', None))
        self.nshanks, self.pextra = self.get_nshanks(kwargs.get('nshanks', None))
        self.device_collection = self.get_device_collection('neuropixel', kwargs.get('device_collection', 'raw_ephys_data'))

    def get_pname(self, pname):
        """Return the probe name(s); `self.kwargs` takes precedence over the argument."""
        # pname can be a list or a string
        return self.kwargs.get('pname', pname)

    def get_nshanks(self, nshanks=None):
        """Return the number of shanks and the per-shank letter suffixes ('a', 'b', ...)."""
        nshanks = self.kwargs.get('nshanks', nshanks)
        if nshanks is None:
            pextra = []
        else:
            pextra = [chr(ord('a') + int(shank)) for shank in range(nshanks)]
        return nshanks, pextra
class WidefieldTask(DynamicTask):
    """Task operating on raw widefield imaging data."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        # Default widefield data location unless explicitly overridden
        self.device_collection = self.get_device_collection('widefield', kwargs.get('device_collection', 'raw_widefield_data'))
class MesoscopeTask(DynamicTask):
    """Task operating on raw mesoscope imaging data."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        # Device collection is a glob pattern matching one folder per imaging bout
        self.device_collection = self.get_device_collection(
            'mesoscope', kwargs.get('device_collection', 'raw_imaging_data_[0-9]*'))

    def get_signatures(self, **kwargs):
        """
        Expand the template signatures to one entry per imaging bout folder.

        From the template signature of the task, create the exact list of inputs and outputs to
        expect based on the device collection folders present on disk. This is necessary because
        the number of device collection folders ('imaging bouts') is not known in advance.
        """
        self.session_path = Path(self.session_path)
        # Glob for all device collection (raw imaging data) folders
        bout_folders = [folder.name for folder in self.session_path.glob(self.device_collection)]
        super().get_signatures(**kwargs)  # Set inputs and outputs
        # Expand signatures within the device collection to one file per bout folder;
        # all other signatures remain unchanged
        self.input_files = [update_collections(sig, bout_folders, self.device_collection) for sig in self.input_files]
        self.output_files = [update_collections(sig, bout_folders, self.device_collection) for sig in self.output_files]

    def load_sync(self):
        """
        Load the sync and channel map.

        This method may be expanded to support other raw DAQ data formats.

        Returns
        -------
        one.alf.io.AlfBunch
            A dictionary with keys ('times', 'polarities', 'channels'), containing the sync pulses
            and the corresponding channel numbers.
        dict
            A map of channel names and their corresponding indices.
        """
        sync_path = self.session_path / self.sync_collection
        if self.get_sync_namespace() != 'timeline':
            # Only Timeline DAQ data is currently supported
            raise NotImplementedError
        return load_timeline_sync_and_chmap(sync_path)
class RegisterRawDataTask(DynamicTask):
    """
    Base register raw task.
    To rename files
    1. input and output must have the same length
    2. output files must have full filename
    """

    priority = 100
    job_size = 'small'

    def rename_files(self, symlink_old=False):
        """
        Rename input files on disk to their corresponding output names.

        Input and output signatures are matched one-to-one; files matching each input signature
        are moved to the paths given by the corresponding output glob pattern.

        Parameters
        ----------
        symlink_old : bool
            If true, leave a symlink at the old location pointing to the renamed file.

        Raises
        ------
        FileNotFoundError
            A required input file was not found on disk.
        AssertionError
            The number of input and output signatures (or of matched old and new paths) differ.
        """
        # If either no inputs or no outputs are given, we don't do any renaming
        if not all(map(len, (self.input_files, self.output_files))):
            return

        # Otherwise we need to make sure there is one to one correspondence for renaming files
        assert len(self.input_files) == len(self.output_files)

        for before, after in zip(self.input_files, self.output_files):
            ok, old_paths, missing = before.find_files(self.session_path)
            if not old_paths:
                if ok:  # if the file doesn't exist and it is not required we are okay to continue
                    continue
                else:
                    raise FileNotFoundError(f'file(s) {", ".join(missing)} not found')
            new_paths = list(map(self.session_path.joinpath, ensure_list(after.glob_pattern)))
            assert len(old_paths) == len(new_paths)
            for old_path, new_path in zip(old_paths, new_paths):
                if old_path == new_path:
                    continue
                new_path.parent.mkdir(parents=True, exist_ok=True)
                _logger.debug('%s -> %s', old_path.relative_to(self.session_path), new_path.relative_to(self.session_path))
                old_path.replace(new_path)
                if symlink_old:
                    old_path.symlink_to(new_path)

    @staticmethod
    def _is_animated_gif(snapshot: Path) -> bool:
        """
        Test if image is an animated GIF file.

        Parameters
        ----------
        snapshot : pathlib.Path
            An image filepath to test.

        Returns
        -------
        bool
            True if image is an animated GIF.

        Notes
        -----
        This could be achieved more succinctly with `from PIL import Image; Image.open(snapshot).is_animated`,
        however despite being an indirect dependency, the Pillow library is not in the requirements,
        whereas skimage is.
        """
        return snapshot.suffix == '.gif' and len(ImageCollection(str(snapshot))) > 1

    @staticmethod
    def _save_as_png(snapshot: Path) -> Path:
        """
        Save an image to PNG format.

        Parameters
        ----------
        snapshot : pathlib.Path
            An image filepath to convert.

        Returns
        -------
        pathlib.Path
            The new PNG image filepath.
        """
        img = imread(snapshot, as_gray=True)
        matplotlib.image.imsave(snapshot.with_suffix('.png'), img, cmap='gray')
        return snapshot.with_suffix('.png')

    def register_snapshots(self, unlink=False, collection=None):
        """
        Register any photos in the snapshots folder to the session. Typically imaging users will
        take numerous photos for reference. Supported extensions: .jpg, .jpeg, .png, .tif, .tiff

        If a .txt file with the same name exists in the same location, the contents will be added
        to the note text.

        Parameters
        ----------
        unlink : bool
            If true, files are deleted after upload.
        collection : str, list, optional
            Location of 'snapshots' folder relative to the session path. If None, uses
            'device_collection' attribute (if exists) or root session path.

        Returns
        -------
        list of dict
            The newly registered Alyx notes.

        Notes
        -----
        - Animated GIF files are not resized and therefore may take up significant space on the database.
        - TIFF files are converted to PNG format before upload. The original file is not replaced.
        - JPEG and PNG files are resized by Alyx.
        """
        collection = getattr(self, 'device_collection', None) if collection is None else collection
        collection = collection or ''  # If not defined, use no collection
        if collection and '*' in collection:
            collection = [p.name for p in self.session_path.glob(collection)]
            # Check whether folders on disk contain '*'; this is to stop an infinite recursion
            assert not any('*' in c for c in collection), 'folders containing asterisks not supported'
        # If more than one collection exists, register snapshots in each collection
        if collection and not isinstance(collection, str):
            return flatten(filter(None, [self.register_snapshots(unlink, c) for c in collection]))
        snapshots_path = self.session_path.joinpath(*filter(None, (collection, 'snapshots')))
        if not snapshots_path.exists():
            return

        eid = self.one.path2eid(self.session_path, query_type='remote')
        if not eid:
            _logger.warning('Failed to upload snapshots: session not found on Alyx')
            return
        note = dict(user=self.one.alyx.user, content_type='session', object_id=eid, text='')

        notes = []
        exts = ('.jpg', '.jpeg', '.png', '.tif', '.tiff', '.gif')
        for snapshot in filter(lambda x: x.suffix.lower() in exts, snapshots_path.glob('*.*')):
            if snapshot.suffix in ('.tif', '.tiff') and not snapshot.with_suffix('.png').exists():
                _logger.debug('converting "%s" to png...', snapshot.relative_to(self.session_path))
                snapshot = self._save_as_png(snapshot_tif := snapshot)
                if unlink:
                    snapshot_tif.unlink()
            _logger.info('Uploading "%s"...', snapshot.relative_to(self.session_path))
            if snapshot.with_suffix('.txt').exists():
                with open(snapshot.with_suffix('.txt'), 'r') as txt_file:
                    note['text'] = txt_file.read().strip()
            else:
                note['text'] = ''
            # Only animated GIFs keep their original size; all other images are resized by Alyx
            note['width'] = 'orig' if self._is_animated_gif(snapshot) else None
            with open(snapshot, 'rb') as img_file:
                files = {'image': img_file}
                notes.append(self.one.alyx.rest('notes', 'create', data=note, files=files))
            if unlink:
                snapshot.unlink()
        # If nothing else in the snapshots folder, delete the folder
        if unlink and next(snapshots_path.rglob('*'), None) is None:
            snapshots_path.rmdir()
        _logger.info('%i snapshots uploaded to Alyx', len(notes))
        return notes

    def _run(self, **kwargs):
        """
        Rename the raw files then locate the expected outputs on disk.

        Returns
        -------
        list of pathlib.Path
            The output files found on disk.
        """
        self.rename_files(**kwargs)
        if not self.output_files:
            return []

        # FIXME Can be done with Task.assert_expected_outputs
        ok, out_files, missing = map(flatten, zip(*map(lambda x: x.find_files(self.session_path), self.output_files)))
        # BUGFIX: `ok` is a (non-empty) list of booleans, so the previous `if not ok:` check
        # could never fire and missing required outputs went unreported; test every entry.
        if not all(ok):
            _logger.error('The following expected are missing: %s', ', '.join(missing))
            self.status = -1

        return out_files
class ExperimentDescriptionRegisterRaw(RegisterRawDataTask):
    """Register the experiment description file and add session sign-off keys to Alyx."""

    # dict of list: custom sign off keys corresponding to specific devices
    sign_off_categories = SIGN_OFF_CATEGORIES

    @property
    def signature(self):
        """dict: the expected input and output file signatures of this task."""
        return {
            'input_files': [],
            'output_files': [('*experiment.description.yaml', '', True)]
        }

    def _run(self, **kwargs):
        """Register the experiment description file, then update the session sign-off JSON."""
        out_files = super()._run(**kwargs)
        if not self.one.offline and self.status == 0:
            with no_cache(self.one.alyx):  # Ensure we don't load the cached JSON response
                eid = self.one.path2eid(self.session_path, query_type='remote')
                exp_dec = sess_params.read_params(out_files[0])
                data = sign_off_dict(exp_dec, sign_off_categories=self.sign_off_categories)
                self.one.alyx.json_field_update('sessions', eid, data=data)
        return out_files