Coverage for ibllib/pipes/base_tasks.py: 96%
237 statements
« prev ^ index » next coverage.py v7.5.4, created at 2024-07-08 17:16 +0100
1"""Abstract base classes for dynamic pipeline tasks."""
2import logging
3from pathlib import Path
5from packaging import version
6from one.webclient import no_cache
7from iblutil.util import flatten
8import matplotlib.image
9from skimage.io import ImageCollection, imread
11from ibllib.pipes.tasks import Task
12import ibllib.io.session_params as sess_params
13from ibllib.qc.base import sign_off_dict, SIGN_OFF_CATEGORIES
14from ibllib.io.raw_daq_loaders import load_timeline_sync_and_chmap
16_logger = logging.getLogger(__name__)
class DynamicTask(Task):
    """A task whose properties are resolved from the session's experiment description file."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        self.session_params = self.read_params_file()

        # TODO Which should be default?
        # Each sync property may be passed in via kwargs; otherwise it is read
        # from the experiment description (session_params).
        self.sync_collection = self.get_sync_collection(kwargs.get('sync_collection', None))
        self.sync = self.get_sync(kwargs.get('sync', None))
        self.sync_ext = self.get_sync_extension(kwargs.get('sync_ext', None))
        self.sync_namespace = self.get_sync_namespace(kwargs.get('sync_namespace', None))

    def get_sync_collection(self, sync_collection=None):
        """Return the sync collection, falling back on the experiment description."""
        return sync_collection or sess_params.get_sync_collection(self.session_params)

    def get_sync(self, sync=None):
        """Return the sync label, falling back on the experiment description."""
        return sync or sess_params.get_sync_label(self.session_params)

    def get_sync_extension(self, sync_ext=None):
        """Return the sync file extension, falling back on the experiment description."""
        return sync_ext or sess_params.get_sync_extension(self.session_params)

    def get_sync_namespace(self, sync_namespace=None):
        """Return the sync namespace, falling back on the experiment description."""
        return sync_namespace or sess_params.get_sync_namespace(self.session_params)

    def get_protocol(self, protocol=None, task_collection=None):
        """Return the task protocol name, falling back on the experiment description."""
        return protocol or sess_params.get_task_protocol(self.session_params, task_collection)

    def get_task_collection(self, collection=None):
        """Return the task collection, inferred from the experiment description if not given."""
        collection = collection or sess_params.get_task_collection(self.session_params)
        # If inferring the collection from the experiment description, assert only one returned
        assert collection is None or isinstance(collection, str) or len(collection) == 1
        return collection

    def get_device_collection(self, device, device_collection=None):
        """Return the raw data collection for a given device, from the experiment description if not given."""
        if device_collection:
            return device_collection
        collection_map = sess_params.get_collections(self.session_params['devices'])
        return collection_map.get(device)

    def read_params_file(self):
        """Read the experiment description file; return an empty dict when none is found."""
        params = sess_params.read_params(self.session_path)

        # TODO figure out the best way
        # if params is None and self.one:
        #     # Try to read params from alyx or try to download params file
        #     params = self.one.load_dataset(self.one.path2eid(self.session_path), 'params.yml')
        #     params = self.one.alyx.rest()

        return {} if params is None else params
class BehaviourTask(DynamicTask):
    """Base class for tasks operating on a single behaviour protocol of a session."""

    extractor = None
    """ibllib.io.extractors.base.BaseBpodExtractor: A trials extractor object."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)

        self.collection = self.get_task_collection(kwargs.get('collection', None))
        # Task type (protocol)
        self.protocol = self.get_protocol(kwargs.get('protocol', None), task_collection=self.collection)
        self.protocol_number = self.get_protocol_number(kwargs.get('protocol_number'), task_protocol=self.protocol)

        # Trials are output to 'alf', or 'alf/task_XX' when a protocol number is defined.
        # NB: kwargs.get('number', None) is not used here as that would be None when number == 0.
        self.output_collection = 'alf'
        if self.protocol_number is not None:
            self.output_collection += f'/task_{self.protocol_number:02}'

    def get_protocol(self, protocol=None, task_collection=None):
        """
        Return the task protocol name.

        If `protocol` is given it is returned unchanged. Otherwise the protocol is looked up in
        the experiment description for `task_collection`; when `task_collection` is also None,
        the lookup succeeds only if a single protocol was run.

        Parameters
        ----------
        protocol : str
            A task protocol name. If not None, the same value is returned.
        task_collection : str
            The task collection whose protocol name to return. May be None if only one protocol run.

        Returns
        -------
        str, None
            The task protocol name, or None, if no protocol found.

        Raises
        ------
        ValueError
            For session with multiple task protocols, a task collection must be passed.
        """
        if protocol:
            return protocol
        protocol = sess_params.get_task_protocol(self.session_params, task_collection) or None
        if isinstance(protocol, set):
            # A set is returned when the collection was ambiguous; only a unique value is usable
            if len(protocol) != 1:
                raise ValueError('Multiple task protocols for session. Task collection must be explicitly defined.')
            (protocol,) = protocol
        return protocol

    def get_task_collection(self, collection=None):
        """
        Return the task collection.

        If `collection` is given it is returned unchanged; otherwise it is inferred from the
        experiment description, which only succeeds when a single protocol was run.

        Parameters
        ----------
        collection : str
            A task collection. If not None, the same value is returned.

        Returns
        -------
        str, None
            The task collection, or None if no task protocols were run.

        Raises
        ------
        AssertionError
            Raised if multiple protocols were run and collection is None, or if experiment
            description file is improperly formatted.
        """
        collection = collection or sess_params.get_task_collection(self.session_params)
        # If inferring the collection from the experiment description, assert only one returned
        assert collection is None or isinstance(collection, str) or len(collection) == 1
        return collection

    def get_protocol_number(self, number=None, task_protocol=None):
        """
        Return the task protocol number.

        Numbering starts from 0. If the 'protocol_number' field is missing from the experiment
        description, None is returned. If `task_protocol` is None, the first protocol number if n
        protocols == 1, otherwise returns None.

        NB: :func:`ibllib.pipes.dynamic_pipeline.make_pipeline` will determine the protocol number
        from the order of the tasks in the experiment description if the task collection follows
        the pattern 'raw_task_data_XX'. If the task protocol does not follow this pattern, the
        experiment description file should explicitly define the number with the 'protocol_number'
        field.

        Parameters
        ----------
        number : int
            The protocol number. If not None, the same value is returned.
        task_protocol : str
            The task protocol name.

        Returns
        -------
        int, None
            The task protocol number, if defined.
        """
        # Test against None explicitly: "if not number" would wrongly trigger for number == 0
        if number is None:
            number = sess_params.get_task_protocol_number(self.session_params, task_protocol)
        # If inferring the number from the experiment description, assert only one returned (or something went wrong)
        assert number is None or isinstance(number, int)
        return number

    @staticmethod
    def _spacer_support(settings):
        """
        Spacer support was introduced in v7.1 for iblrig v7 and v8.0.1 in v8.

        Parameters
        ----------
        settings : dict
            The task settings dict.

        Returns
        -------
        bool
            True if task spacers are to be expected.
        """
        parse = version.parse
        # '100.0.0' is the fallback used when the settings carry no version
        rig_version = parse(settings.get('IBLRIG_VERSION') or '100.0.0')
        if rig_version in (parse('100.0.0'), parse('8.0.0')):
            return False
        return rig_version >= parse('7.1.0')

    def extract_behaviour(self, save=True):
        """Extract trials data.

        This is an abstract method called by `_run` and `run_qc` methods. Subclasses should return
        the extracted trials data and a list of output files. This method should also save the
        trials extractor object to the :prop:`extractor` property for use by `run_qc`.

        Parameters
        ----------
        save : bool
            Whether to save the extracted data as ALF datasets.

        Returns
        -------
        dict
            A dictionary of trials data.
        list of pathlib.Path
            A list of output file paths if save == true.
        """
        return None, None

    def run_qc(self, trials_data=None, update=True):
        """Run task QC.

        Subclass method should return the QC object. This just validates the trials_data is not
        None.

        Parameters
        ----------
        trials_data : dict
            A dictionary of extracted trials data. The output of :meth:`extract_behaviour`.
        update : bool
            If true, update Alyx with the QC outcome.

        Returns
        -------
        ibllib.qc.task_metrics.TaskQC
            A TaskQC object replete with task data and computed metrics.
        """
        self._assert_trials_data(trials_data)
        return None

    def _assert_trials_data(self, trials_data=None):
        """Check trials data available.

        Called by :meth:`run_qc`, this extracts the trial data if `trials_data` is None, and raises
        if :meth:`extract_behaviour` returns None.

        Parameters
        ----------
        trials_data : dict, None
            A dictionary of extracted trials data or None.

        Returns
        -------
        trials_data : dict
            A dictionary of extracted trials data. The output of :meth:`extract_behaviour`.
        """
        if trials_data is None or not self.extractor:
            trials_data, _ = self.extract_behaviour(save=False)
        if not trials_data or not self.extractor:
            raise ValueError('No trials data and/or extractor found')
        return trials_data
class VideoTask(DynamicTask):
    """A task operating on raw video data from one or more cameras."""

    def __init__(self, session_path, cameras, **kwargs):
        super().__init__(session_path, cameras=cameras, **kwargs)
        self.cameras = cameras
        default_collection = kwargs.get('device_collection', 'raw_video_data')
        self.device_collection = self.get_device_collection('cameras', default_collection)
        # self.collection = self.get_task_collection(kwargs.get('collection', None))
class AudioTask(DynamicTask):
    """A task operating on raw microphone data."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        default_collection = kwargs.get('device_collection', 'raw_behavior_data')
        self.device_collection = self.get_device_collection('microphone', default_collection)
class EphysTask(DynamicTask):
    """A task operating on raw Neuropixel electrophysiology data."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)

        self.pname = self.get_pname(kwargs.get('pname', None))
        self.nshanks, self.pextra = self.get_nshanks(kwargs.get('nshanks', None))
        default_collection = kwargs.get('device_collection', 'raw_ephys_data')
        self.device_collection = self.get_device_collection('neuropixel', default_collection)

    def get_pname(self, pname):
        """Return the probe name(s); may be a list or a string."""
        return self.kwargs.get('pname', pname)

    def get_nshanks(self, nshanks=None):
        """Return the number of shanks and the corresponding probe letter suffixes ('a', 'b', ...)."""
        nshanks = self.kwargs.get('nshanks', nshanks)
        # One letter per shank, starting from 'a' (chr(97))
        pextra = [] if nshanks is None else [chr(97 + int(shank)) for shank in range(nshanks)]
        return nshanks, pextra
class WidefieldTask(DynamicTask):
    """A task operating on raw widefield imaging data."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        default_collection = kwargs.get('device_collection', 'raw_widefield_data')
        self.device_collection = self.get_device_collection('widefield', default_collection)
class MesoscopeTask(DynamicTask):
    """A task operating on raw mesoscope imaging data."""

    def __init__(self, session_path, **kwargs):
        super().__init__(session_path, **kwargs)
        # The device collection is a glob pattern: one folder per imaging bout
        self.device_collection = self.get_device_collection(
            'mesoscope', kwargs.get('device_collection', 'raw_imaging_data_[0-9]*'))

    def get_signatures(self, **kwargs):
        """
        From the template signature of the task, create the exact list of inputs and outputs to expect based on the
        available device collection folders

        Necessary because we don't know in advance how many device collection folders ("imaging bouts") to expect
        """
        self.session_path = Path(self.session_path)
        # Glob for all device collection (raw imaging data) folders
        folders = [p.name for p in self.session_path.glob(self.device_collection)]

        def expand(signatures):
            # Duplicate each signature once per folder, substituting the collection glob
            # with the concrete folder name; signatures without the glob are unchanged
            return [(name, coll.replace(self.device_collection, folder), required)
                    for folder in folders
                    for name, coll, required in signatures]

        self.input_files = expand(self.signature['input_files'])
        self.output_files = expand(self.signature['output_files'])

    def load_sync(self):
        """
        Load the sync and channel map.

        This method may be expanded to support other raw DAQ data formats.

        Returns
        -------
        one.alf.io.AlfBunch
            A dictionary with keys ('times', 'polarities', 'channels'), containing the sync pulses
            and the corresponding channel numbers.
        dict
            A map of channel names and their corresponding indices.
        """
        sync_path = self.session_path / self.sync_collection
        if self.get_sync_namespace() != 'timeline':
            raise NotImplementedError
        # Load the sync and channel map from the raw DAQ data
        sync, chmap = load_timeline_sync_and_chmap(sync_path)
        return sync, chmap
class RegisterRawDataTask(DynamicTask):
    """
    Base register raw task.
    To rename files
    1. input and output must have the same length
    2. output files must have full filename
    """

    priority = 100
    job_size = 'small'

    def rename_files(self, symlink_old=False):
        """Rename each input file to its corresponding output file name.

        Parameters
        ----------
        symlink_old : bool
            If true, a symlink pointing to the new location is left at each old location.

        Raises
        ------
        FileNotFoundError
            A required input file was not found on disk.
        """
        # If either no inputs or no outputs are given, we don't do any renaming
        if not all(map(len, (self.input_files, self.output_files))):
            return

        # Otherwise we need to make sure there is one to one correspondence for renaming files
        assert len(self.input_files) == len(self.output_files)

        for before, after in zip(self.input_files, self.output_files):
            old_file, old_collection, required = before
            old_path = self.session_path.joinpath(old_collection).glob(old_file)
            old_path = next(old_path, None)
            # if the file doesn't exist and it is not required we are okay to continue
            if not old_path:
                if required:
                    raise FileNotFoundError(str(old_file))
                else:
                    continue

            new_file, new_collection, _ = after
            new_path = self.session_path.joinpath(new_collection, new_file)
            if old_path == new_path:
                continue
            new_path.parent.mkdir(parents=True, exist_ok=True)
            _logger.debug('%s -> %s', old_path.relative_to(self.session_path), new_path.relative_to(self.session_path))
            old_path.replace(new_path)
            if symlink_old:
                old_path.symlink_to(new_path)

    @staticmethod
    def _is_animated_gif(snapshot: Path) -> bool:
        """
        Test if image is an animated GIF file.

        Parameters
        ----------
        snapshot : pathlib.Path
            An image filepath to test.

        Returns
        -------
        bool
            True if image is an animated GIF.

        Notes
        -----
        This could be achieved more succinctly with `from PIL import Image; Image.open(snapshot).is_animated`,
        however despite being an indirect dependency, the Pillow library is not in the requirements,
        whereas skimage is.
        """
        # Compare the suffix case-insensitively, consistent with the extension filter
        # in register_snapshots (which accepts e.g. '.GIF')
        return snapshot.suffix.lower() == '.gif' and len(ImageCollection(str(snapshot))) > 1

    @staticmethod
    def _save_as_png(snapshot: Path) -> Path:
        """
        Save an image to PNG format.

        Parameters
        ----------
        snapshot : pathlib.Path
            An image filepath to convert.

        Returns
        -------
        pathlib.Path
            The new PNG image filepath.
        """
        img = imread(snapshot, as_gray=True)
        matplotlib.image.imsave(snapshot.with_suffix('.png'), img, cmap='gray')
        return snapshot.with_suffix('.png')

    def register_snapshots(self, unlink=False, collection=None):
        """
        Register any photos in the snapshots folder to the session. Typically imaging users will
        take numerous photos for reference. Supported extensions: .jpg, .jpeg, .png, .tif, .tiff, .gif

        If a .txt file with the same name exists in the same location, the contents will be added
        to the note text.

        Parameters
        ----------
        unlink : bool
            If true, files are deleted after upload.
        collection : str, list, optional
            Location of 'snapshots' folder relative to the session path. If None, uses
            'device_collection' attribute (if exists) or root session path.

        Returns
        -------
        list of dict
            The newly registered Alyx notes.

        Notes
        -----
        - Animated GIF files are not resized and therefore may take up significant space on the database.
        - TIFF files are converted to PNG format before upload. The original file is not replaced.
        - JPEG and PNG files are resized by Alyx.
        """
        collection = getattr(self, 'device_collection', None) if collection is None else collection
        collection = collection or ''  # If not defined, use no collection
        if collection and '*' in collection:
            collection = [p.name for p in self.session_path.glob(collection)]
            # Check whether folders on disk contain '*'; this is to stop an infinite recursion
            assert not any('*' in c for c in collection), 'folders containing asterisks not supported'
        # If more than one collection exists, register snapshots in each collection
        if collection and not isinstance(collection, str):
            return flatten(filter(None, [self.register_snapshots(unlink, c) for c in collection]))
        snapshots_path = self.session_path.joinpath(*filter(None, (collection, 'snapshots')))
        if not snapshots_path.exists():
            return

        eid = self.one.path2eid(self.session_path, query_type='remote')
        if not eid:
            _logger.warning('Failed to upload snapshots: session not found on Alyx')
            return
        note = dict(user=self.one.alyx.user, content_type='session', object_id=eid, text='')

        notes = []
        exts = ('.jpg', '.jpeg', '.png', '.tif', '.tiff', '.gif')
        for snapshot in filter(lambda x: x.suffix.lower() in exts, snapshots_path.glob('*.*')):
            # NB: suffix compared case-insensitively, matching the extension filter above
            # (previously '.TIF'/'.TIFF' files passed the filter but were uploaded unconverted)
            if snapshot.suffix.lower() in ('.tif', '.tiff') and not snapshot.with_suffix('.png').exists():
                _logger.debug('converting "%s" to png...', snapshot.relative_to(self.session_path))
                snapshot = self._save_as_png(snapshot_tif := snapshot)
                if unlink:
                    snapshot_tif.unlink()
            _logger.debug('Uploading "%s"...', snapshot.relative_to(self.session_path))
            if snapshot.with_suffix('.txt').exists():
                with open(snapshot.with_suffix('.txt'), 'r') as txt_file:
                    note['text'] = txt_file.read().strip()
            else:
                note['text'] = ''
            # Animated GIFs are uploaded at original size so the animation is preserved
            note['width'] = 'orig' if self._is_animated_gif(snapshot) else None
            with open(snapshot, 'rb') as img_file:
                files = {'image': img_file}
                notes.append(self.one.alyx.rest('notes', 'create', data=note, files=files))
            if unlink:
                snapshot.unlink()
        # If nothing else in the snapshots folder, delete the folder
        if unlink and next(snapshots_path.rglob('*'), None) is None:
            snapshots_path.rmdir()
        _logger.info('%i snapshots uploaded to Alyx', len(notes))
        return notes

    def _run(self, **kwargs):
        """Rename files (if applicable) and return the output files found on disk.

        Sets status to -1 when fewer files than required were found.
        """
        self.rename_files(**kwargs)
        out_files = []
        n_required = 0
        for file_sig in self.output_files:
            file_name, collection, required = file_sig
            n_required += required
            file_path = self.session_path.joinpath(collection).glob(file_name)
            file_path = next(file_path, None)
            if not file_path and not required:
                continue
            elif not file_path and required:
                _logger.error(f'expected {file_sig} missing')
            else:
                out_files.append(file_path)

        if len(out_files) < n_required:
            self.status = -1

        return out_files
class ExperimentDescriptionRegisterRaw(RegisterRawDataTask):
    """Register the experiment description file and update the session sign-off keys on Alyx."""

    # dict of list: custom sign off keys corresponding to specific devices
    sign_off_categories = SIGN_OFF_CATEGORIES

    @property
    def signature(self):
        return {
            'input_files': [],
            'output_files': [('*experiment.description.yaml', '', True)],
        }

    def _run(self, **kwargs):
        # Register experiment description file
        out_files = super()._run(**kwargs)
        if not self.one.offline and self.status == 0:
            # Ensure we don't load the cached JSON response
            with no_cache(self.one.alyx):
                eid = self.one.path2eid(self.session_path, query_type='remote')
                exp_dec = sess_params.read_params(out_files[0])
                data = sign_off_dict(exp_dec, sign_off_categories=self.sign_off_categories)
                self.one.alyx.json_field_update('sessions', eid, data=data)
        return out_files