Coverage for ibllib/pipes/histology.py: 34% (373 statements)
coverage.py v7.3.2, created at 2023-10-11 11:13 +0100
from pathlib import Path
import logging

import matplotlib.pyplot as plt
import numpy as np
from one.api import ONE
import one.alf.io as alfio

from neuropixel import TIP_SIZE_UM, trace_header
import iblatlas.atlas as atlas
from ibllib.ephys.spikes import probes_description as extract_probes
from ibllib.qc import base

from neurodsp.utils import fcn_cosine


_logger = logging.getLogger(__name__)


def load_track_csv(file_track, brain_atlas=None):
    """
    Loads a Lasagna track and converts it to the IBL-Allen coordinate framework.
    :param file_track: path to the Lasagna csv file
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance (defaults to the 25 um Allen atlas)
    :return: xyz coordinates (meters) in the IBL-Allen framework
    """
    brain_atlas = brain_atlas or atlas.AllenAtlas(25)
    # apmldv in the histology file is flipped along the y direction
    file_track = Path(file_track)
    if file_track.stat().st_size == 0:
        return np.array([])
    ixiyiz = np.loadtxt(file_track, delimiter=',')[:, [1, 0, 2]]
    ixiyiz[:, 1] = 527 - ixiyiz[:, 1]
    ixiyiz = ixiyiz[np.argsort(ixiyiz[:, 2]), :]
    xyz = brain_atlas.bc.i2xyz(ixiyiz)
    # xyz[:, 0] = - xyz[:, 0]
    return xyz
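
# Example usage (a sketch; the file path is hypothetical and a cached 25 um atlas is assumed):
#   ba = atlas.AllenAtlas(25)
#   xyz = load_track_csv('/data/2023-01-01_subject_001_probe00_pts_transformed.csv', brain_atlas=ba)
#   # xyz is an (npoints, 3) array in meters, IBL-Allen convention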


def get_picked_tracks(histology_path, glob_pattern="*_pts_transformed.csv", brain_atlas=None):
    """
    Reads in the Lasagna outputs and converts the picked tracks to IBL coordinates.
    :param histology_path: Path object: folder path containing tracks (a single file also works)
    :param glob_pattern: pattern used to find the track files in the folder
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance (defaults to the Allen atlas)
    :return: dictionary with keys 'files' (list of Path) and 'xyz' (list of coordinate arrays)
    """
    brain_atlas = brain_atlas or atlas.AllenAtlas()
    xyzs = []
    histology_path = Path(histology_path)
    if histology_path.is_file():
        files_track = [histology_path]
    else:
        files_track = list(histology_path.rglob(glob_pattern))
    for file_track in files_track:
        xyzs.append(load_track_csv(file_track, brain_atlas=brain_atlas))
    return {'files': files_track, 'xyz': xyzs}
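
# Example usage (a sketch; the folder path is hypothetical):
#   picked = get_picked_tracks('/data/histology/subject')
#   # picked['files'] lists the csv files found; picked['xyz'] holds one coordinate array per file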


def get_micro_manipulator_data(subject, one=None, force_extract=False):
    """
    Looks for all ephys sessions for a given subject and gets the probe micro-manipulator
    trajectories.
    If the probes ALF object is not on flat-iron, attempts to perform the extraction from
    meta-data and task settings file.
    """
    if not one:
        one = ONE(cache_rest=None)

    eids, sessions = one.search(subject=subject, task_protocol='ephys', details=True)
    probes = alfio.AlfBunch({})
    for ses in sessions:
        sess_path = Path(ses['local_path'])
        probe = None
        if not force_extract:
            probe = one.load_object(ses['url'], 'probes')
        if not probe:
            _logger.warning(f"Re-extracting probe info for {sess_path}")
            dtypes = ['_iblrig_taskSettings.raw', 'ephysData.raw.meta']
            raw_files = one.load(ses['url'], dataset_types=dtypes, download_only=True)
            if all([rf is None for rf in raw_files]):
                _logger.warning(f"No raw settings file nor ephys data found for"
                                f" {ses['local_path']}. Skipping this session.")
                continue
            extract_probes(sess_path, bin_exists=False)
            probe = alfio.load_object(sess_path.joinpath('alf'), 'probes')
        one.load(ses['url'], dataset_types='channels.localCoordinates', download_only=True)
        # get the sites local mapping for each insertion; if not found, assume a checkerboard pattern
        probe['sites_coordinates'] = []
        for prb in probe.description:
            chfile = Path(ses['local_path']).joinpath('alf', prb['label'],
                                                      'channels.localCoordinates.npy')
            if chfile.exists():
                probe['sites_coordinates'].append(np.load(chfile))
            else:
                _logger.warning(f"No channels.localCoordinates found for {ses['local_path']}."
                                f" Assuming a checkerboard pattern.")
                th = trace_header(version=1)
                probe['sites_coordinates'].append(np.c_[th['x'], th['y']])
        # put the session information in there
        probe['session'] = [ses] * len(probe.description)
        probes = probes.append(probe)
    return probes
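
# Example usage (a sketch; assumes configured ONE credentials, the nickname is hypothetical):
#   probes = get_micro_manipulator_data('ZM_0000', one=ONE())
#   # probes['description'] and probes['sites_coordinates'] hold one entry per insertion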


def plot2d_all(trajectories, tracks, brain_atlas=None):
    """
    Plot all tracks and trajectories on a single 2d slice.
    :param trajectories: dictionary output of the Alyx REST query on trajectories
    :param tracks: dictionary output of get_picked_tracks
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance (defaults to the 25 um Allen atlas)
    :return:
    """
    brain_atlas = brain_atlas or atlas.AllenAtlas(25)
    plt.figure()
    axs = brain_atlas.plot_sslice(brain_atlas.bc.i2x(190), cmap=plt.get_cmap('bone'))
    plt.figure()
    axc = brain_atlas.plot_cslice(brain_atlas.bc.i2y(350))
    for xyz in tracks['xyz']:
        axc.plot(xyz[:, 0] * 1e3, xyz[:, 2] * 1e6, 'b')
        axs.plot(xyz[:, 1] * 1e3, xyz[:, 2] * 1e6, 'b')
    for trj in trajectories:
        ins = atlas.Insertion.from_dict(trj, brain_atlas=brain_atlas)
        xyz = ins.xyz
        axc.plot(xyz[:, 0] * 1e3, xyz[:, 2] * 1e6, 'r')
        axs.plot(xyz[:, 1] * 1e3, xyz[:, 2] * 1e6, 'r')
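
# Example usage (a sketch; trajectories from an Alyx REST query, tracks from get_picked_tracks;
# the provenance filter is an assumption):
#   trajs = one.alyx.rest('trajectories', 'list', provenance='Micro-manipulator')
#   tracks = get_picked_tracks('/data/histology/subject')
#   plot2d_all(trajs, tracks)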


def plot3d_all(trajectories, tracks, brain_atlas=None):
    """
    Plot all tracks and trajectories in 3D using mayavi.
    :param trajectories: dictionary output of the Alyx REST query on trajectories
    :param tracks: dictionary output of get_picked_tracks
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance (defaults to the Allen atlas)
    :return:
    """
    from mayavi import mlab
    brain_atlas = brain_atlas or atlas.AllenAtlas()
    src = mlab.pipeline.scalar_field(brain_atlas.label)
    mlab.pipeline.iso_surface(src, contours=[0.5, ], opacity=0.3)

    pts = []
    for xyz in tracks['xyz']:
        mlapdv = brain_atlas.bc.xyz2i(xyz)
        pts.append(mlab.plot3d(mlapdv[:, 1], mlapdv[:, 0], mlapdv[:, 2], line_width=3))

    plt_trj = []
    for trj in trajectories:
        ins = atlas.Insertion.from_dict(trj, brain_atlas=brain_atlas)
        mlapdv = brain_atlas.bc.xyz2i(ins.xyz)
        p = mlab.plot3d(mlapdv[:, 1], mlapdv[:, 0], mlapdv[:, 2],
                        line_width=3, color=(1., 0., 1.))  # local renamed to avoid shadowing matplotlib's plt
        plt_trj.append(p)


def interpolate_along_track(xyz_track, depths):
    """
    Get the coordinates of points along a track according to their distances from the first
    point.
    :param xyz_track: np.array [npoints, 3]; usually the first point is the deepest
    :param depths: distance from the first point of the track; by convention the deepest
     point is 0 and values increase going up
    :return: xyz_channels
    """
    # from scipy.interpolate import interp1d
    # this is the cumulative distance from the lowest picked point (first)
    distance = np.cumsum(np.r_[0, np.sqrt(np.sum(np.diff(xyz_track, axis=0) ** 2, axis=1))])
    xyz_channels = np.zeros((depths.shape[0], 3))
    for m in np.arange(3):
        xyz_channels[:, m] = np.interp(depths, distance, xyz_track[:, m])
        # xyz_channels[:, m] = interp1d(distance, xyz[:, m], kind='cubic')(chdepths / 1e6)
    # plt.figure()
    # plt.plot(xyz_track[:, 0] * 1e6, xyz_track[:, 2] * 1e6, 'k*'), plt.axis('equal')
    # plt.plot(xyz_channels[:, 0] * 1e6, xyz_channels[:, 2] * 1e6, '.'), plt.axis('equal')
    return xyz_channels
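
# Example (a minimal numeric sketch with a straight vertical track, deepest point first):
#   xyz_track = np.array([[0., 0., -0.004], [0., 0., -0.002], [0., 0., 0.]])
#   depths = np.array([0., 0.001, 0.003])  # meters above the deepest point
#   interpolate_along_track(xyz_track, depths)[:, 2]
#   # -> array([-0.004, -0.003, -0.001])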


def get_brain_regions(xyz, channels_positions=None, brain_atlas=None):
    """
    :param xyz: numpy array of 3D coordinates corresponding to a picked track or a trajectory;
     the deepest point is assumed to be the tip.
    :param channels_positions:
    :param brain_atlas:
    :return: brain_regions (associated to each channel),
             insertion (atlas.Insertion object defining the 2 points of entry
             (tip and end of probe))
    """
    # this is the depth along the probe (from the first point, which is the deepest labeled point)
    # Due to the blockiness, depths may not be unique along the track so it has to be prepared
    brain_atlas = brain_atlas or atlas.AllenAtlas(25)
    if channels_positions is None:
        geometry = trace_header(version=1)
        channels_positions = np.c_[geometry['x'], geometry['y']]

    xyz = xyz[np.argsort(xyz[:, 2]), :]
    d = atlas.cart2sph(xyz[:, 0] - xyz[0, 0], xyz[:, 1] - xyz[0, 1], xyz[:, 2] - xyz[0, 2])[0]
    indsort = np.argsort(d)
    xyz = xyz[indsort, :]
    d = d[indsort]
    iduplicates = np.where(np.diff(d) == 0)[0]
    xyz = np.delete(xyz, iduplicates, axis=0)
    d = np.delete(d, iduplicates, axis=0)

    assert np.all(np.diff(d) > 0), "Depths should be strictly increasing"

    # Get the probe insertion from the coordinates
    insertion = atlas.Insertion.from_track(xyz, brain_atlas)

    # Interpolate channel positions along the probe depth and get brain locations
    TIP_SIZE_UM = 200  # NB: shadows the constant imported from neuropixel at module level
    xyz_channels = interpolate_along_track(xyz, (channels_positions[:, 1] + TIP_SIZE_UM) / 1e6)

    # get the brain regions
    brain_regions = brain_atlas.regions.get(brain_atlas.get_labels(xyz_channels))
    brain_regions['xyz'] = xyz_channels
    brain_regions['lateral'] = channels_positions[:, 0]
    brain_regions['axial'] = channels_positions[:, 1]
    assert np.unique([len(brain_regions[k]) for k in brain_regions]).size == 1

    return brain_regions, insertion
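
# Example usage (a sketch; xyz from load_track_csv, default Neuropixels 1.0 site layout):
#   brain_regions, insertion = get_brain_regions(xyz)
#   brain_regions['acronym']  # one region acronym per channel
#   insertion.entry, insertion.tip  # entry and tip of the fitted insertion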


def register_chronic_track(chronic_id, picks=None, one=None, overwrite=False, channels=True, brain_atlas=None):
    """
    Register the user picks to a chronic insertion in Alyx.
    Here we update the database in 4 steps:
    1) The user picks converted to IBL coordinates are stored in the json field of the
    corresponding chronic insertion model
    2) All associated probe insertions are identified and the user picks stored in their
    json fields too
    3) The trajectory associated to the chronic insertion, computed from the histology track,
    is created or patched
    4) Channel locations are set in the table
    :param chronic_id:
    :param picks:
    :param one:
    :param overwrite:
    :param channels:
    :param brain_atlas:
    :return:
    """
    assert one
    brain_locations, insertion_histology = register_track(chronic_id, picks=picks, one=one, overwrite=overwrite,
                                                          channels=channels, brain_atlas=brain_atlas,
                                                          endpoint='chronic-insertions')

    # Update all the associated probe insertions with the relevant QC and xyz_picks
    chronic = one.alyx.rest('chronic-insertions', 'list', id=chronic_id)[0]
    for probe_id in chronic['probe_insertion']:
        pid = probe_id['id']
        if picks is None or picks.size == 0:
            hist_qc = base.QC(pid, one=one, endpoint='insertions')
            hist_qc.update_extended_qc({'tracing_exists': False})
            hist_qc.update('CRITICAL', namespace='tracing')
        else:
            one.alyx.json_field_update(endpoint='insertions', uuid=pid, field_name='json',
                                       data={'xyz_picks': np.int32(picks * 1e6).tolist()})
            # Update the insertion qc to register that tracing exists
            hist_qc = base.QC(pid, one=one, endpoint='insertions')
            hist_qc.update_extended_qc({'tracing_exists': True})

    return brain_locations, insertion_histology


def register_track(probe_id, picks=None, one=None, overwrite=False, channels=True, brain_atlas=None,
                   endpoint='insertions'):
    """
    Register the user picks to a probe in Alyx.
    Here we update Alyx models on the database in 3 steps:
    1) The user picks converted to IBL coordinates are stored in the json field of the
    corresponding probe insertion model
    2) The trajectory computed from the histology track is created or patched
    3) Channel locations are set in the table
    """
    assert one
    brain_atlas = brain_atlas or atlas.AllenAtlas()
    # 0) if it's an empty track, create a null trajectory and exit
    if picks is None or picks.size == 0:
        tdict = {'x': None, 'y': None, 'z': None,
                 'phi': None, 'theta': None, 'depth': None, 'roll': None,
                 'provenance': 'Histology track',
                 'coordinate_system': 'IBL-Allen',
                 }
        if endpoint == 'chronic-insertions':
            tdict['chronic_insertion'] = probe_id
        else:
            tdict['probe_insertion'] = probe_id

        brain_locations = None
        # Update the insertion qc to CRITICAL
        hist_qc = base.QC(probe_id, one=one, endpoint=endpoint)
        hist_qc.update_extended_qc({'tracing_exists': False})
        hist_qc.update('CRITICAL', namespace='tracing')
        insertion_histology = None
        # Here we need to change the track qc to critical and also the extended qc to zero
    else:
        brain_locations, insertion_histology = get_brain_regions(picks, brain_atlas=brain_atlas)
        # 1) update the alyx models, first put the picked points in the insertion json
        one.alyx.json_field_update(endpoint=endpoint, uuid=probe_id, field_name='json',
                                   data={'xyz_picks': np.int32(picks * 1e6).tolist()})

        # Update the insertion qc to register that tracing exists
        hist_qc = base.QC(probe_id, one=one, endpoint=endpoint)
        hist_qc.update_extended_qc({'tracing_exists': True})
        # 2) patch or create the trajectory coming from histology track
        tdict = create_trajectory_dict(probe_id, insertion_histology, provenance='Histology track', endpoint=endpoint)

    alyx_end = 'chronic_insertion' if endpoint == 'chronic-insertions' else 'probe_insertion'
    hist_traj = one.alyx.get('/trajectories?'
                             f'&{alyx_end}={probe_id}'
                             '&provenance=Histology track', clobber=True)
    # if the trajectory exists, remove it; this will cascade delete existing channel locations
    if len(hist_traj):
        if overwrite:
            one.alyx.rest('trajectories', 'delete', id=hist_traj[0]['id'])
        else:
            raise FileExistsError('A histology track trajectory already exists, but overwrite is set to False. '
                                  'If you want to overwrite, set overwrite=True.')
    hist_traj = one.alyx.rest('trajectories', 'create', data=tdict)

    if brain_locations is None:
        return brain_locations, None
    # 3) create channel locations
    if channels:
        channel_dict = create_channel_dict(hist_traj, brain_locations)
        one.alyx.rest('channels', 'create', data=channel_dict)

    return brain_locations, insertion_histology
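
# Example usage (a sketch; writes to Alyx, so an authenticated ONE instance and a valid probe
# insertion UUID are required; pid is hypothetical):
#   picks = load_track_csv(file_track)
#   brain_locations, ins = register_track(pid, picks=picks, one=one, overwrite=True)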


def register_aligned_track(probe_id, xyz_channels, chn_coords=None, one=None, overwrite=False,
                           channels=True, brain_atlas=None):
    """
    Register an ephys aligned trajectory and channel locations to Alyx.
    Here we update Alyx models on the database in 2 steps:
    1) The trajectory is computed from the final electrode channel locations
    2) Channel locations are set to the trajectory
    """
    assert one
    brain_atlas = brain_atlas or atlas.AllenAtlas(25)
    if chn_coords is None:
        geometry = trace_header(version=1)
        chn_coords = np.c_[geometry['x'], geometry['y']]

    insertion = atlas.Insertion.from_track(xyz_channels, brain_atlas)
    tdict = create_trajectory_dict(probe_id, insertion, provenance='Ephys aligned histology track')

    hist_traj = one.alyx.rest('trajectories', 'list',
                              probe_insertion=probe_id,
                              provenance='Ephys aligned histology track',
                              no_cache=True)
    # if the trajectory exists, remove it; this will cascade delete existing channel locations
    if len(hist_traj):
        if overwrite:
            one.alyx.rest('trajectories', 'delete', id=hist_traj[0]['id'])
        else:
            raise FileExistsError('An ephys aligned trajectory already exists, but overwrite is set to False. '
                                  'If you want to overwrite, set overwrite=True.')
    hist_traj = one.alyx.rest('trajectories', 'create', data=tdict)

    if channels:
        brain_regions = brain_atlas.regions.get(brain_atlas.get_labels(xyz_channels))
        brain_regions['xyz'] = xyz_channels
        brain_regions['lateral'] = chn_coords[:, 0]
        brain_regions['axial'] = chn_coords[:, 1]
        assert np.unique([len(brain_regions[k]) for k in brain_regions]).size == 1
        channel_dict = create_channel_dict(hist_traj, brain_regions)
        one.alyx.rest('channels', 'create', data=channel_dict)


def create_trajectory_dict(probe_id, insertion, provenance, endpoint='insertions'):
    """
    Create a trajectory dictionary in the form expected by Alyx.
    :param probe_id: unique id of the probe insertion
    :type probe_id: string (hexadecimal UUID)
    :param insertion: Insertion object describing the entry and tip of the trajectory
    :type insertion: object atlas.Insertion
    :param provenance: 'Histology track' or 'Ephys aligned histology track'
    :type provenance: string
    :param endpoint: Alyx endpoint, either 'insertions' or 'chronic-insertions'
    :type endpoint: string
    :return tdict:
    :type tdict: dict
    """
    tdict = {'x': insertion.x * 1e6,
             'y': insertion.y * 1e6,
             'z': insertion.z * 1e6,
             'phi': insertion.phi,
             'theta': insertion.theta,
             'depth': insertion.depth * 1e6,
             'roll': insertion.beta,
             'provenance': provenance,
             'coordinate_system': 'IBL-Allen',
             }
    if endpoint == 'chronic-insertions':
        tdict['chronic_insertion'] = probe_id
    else:
        tdict['probe_insertion'] = probe_id

    return tdict
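
# Example usage (a sketch; ins from atlas.Insertion.from_track, pid a hypothetical UUID string):
#   tdict = create_trajectory_dict(pid, ins, provenance='Histology track')
#   # tdict carries x/y/z/depth in um plus the angles, ready for the trajectories endpoint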


def create_channel_dict(traj, brain_locations):
    """
    Create a list of channel dictionaries in the form expected by Alyx.
    :param traj: alyx trajectory object to attach channel information to
    :type traj: dict
    :param brain_locations: information about the location of the electrode channels in the brain atlas
    :type brain_locations: Bunch
    :return channel_dict:
    :type channel_dict: list of dict
    """
    channel_dict = []
    for i in np.arange(brain_locations.id.size):
        channel_dict.append({
            'x': np.float64(brain_locations.xyz[i, 0] * 1e6),
            'y': np.float64(brain_locations.xyz[i, 1] * 1e6),
            'z': np.float64(brain_locations.xyz[i, 2] * 1e6),
            'axial': np.float64(brain_locations.axial[i]),
            'lateral': np.float64(brain_locations.lateral[i]),
            'brain_region': int(brain_locations.id[i]),
            'trajectory_estimate': traj['id']
        })

    return channel_dict
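
# Example usage (a sketch; hist_traj as returned by the trajectories endpoint, brain_locations
# from get_brain_regions):
#   channel_dict = create_channel_dict(hist_traj, brain_locations)
#   one.alyx.rest('channels', 'create', data=channel_dict)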


def _parse_filename(track_file):
    # Expected nomenclature: '{yyyy-mm-dd}_{nickname}_{session_number}_{probe_label}_pts.csv'
    tmp = track_file.name.split('_')
    inumber = [i for i, s in enumerate(tmp) if s.isdigit() and len(s) == 3][-1]
    search_filter = {'date': tmp[0], 'experiment_number': int(tmp[inumber]),
                     'name': '_'.join(tmp[inumber + 1:-1]),
                     'subject': '_'.join(tmp[1:inumber])}
    return search_filter
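
# Example (a sketch; note the underscore inside the hypothetical subject nickname):
#   _parse_filename(Path('2023-01-01_my_subject_001_probe00_pts.csv'))
#   # -> {'date': '2023-01-01', 'experiment_number': 1, 'name': 'probe00', 'subject': 'my_subject'}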


def register_chronic_track_files(path_tracks, one=None, overwrite=False, brain_atlas=None):
    """
    Registers track files for chronic insertions.
    :param path_tracks: path to a directory containing tracks; also works with a single file name
    :param one:
    :param overwrite:
    :param brain_atlas:
    :return:
    """
    brain_atlas = brain_atlas or atlas.AllenAtlas()
    glob_pattern = "*_probe*_pts*.csv"
    path_tracks = Path(path_tracks)

    if not path_tracks.is_dir():
        track_files = [path_tracks]
    else:
        track_files = list(path_tracks.rglob(glob_pattern))
        track_files.sort()

    assert path_tracks.exists()
    assert one

    ntracks = len(track_files)
    for ind, track_file in enumerate(track_files):
        # Nomenclature expected:
        # '{yyyy-mm-dd}_{nickname}_{session_number}_{probe_label}_pts.csv'
        # beware: there may be underscores in the subject nickname

        search_filter = _parse_filename(track_file)
        probe = one.alyx.rest('chronic-insertions', 'list', no_cache=True, **search_filter)
        if len(probe) == 0:
            raise ValueError(f"Could not find an associated chronic insertion for {search_filter['subject']},"
                             f" {search_filter['name']}")
        elif len(probe) == 1:
            probe = probe[0]
        else:
            raise ValueError("Multiple chronic insertions found.")
        chronic_id = probe['id']
        try:
            xyz_picks = load_track_csv(track_file, brain_atlas=brain_atlas)
            register_chronic_track(chronic_id, xyz_picks, one=one, overwrite=overwrite, brain_atlas=brain_atlas)
        except Exception:
            _logger.error(str(track_file))
            raise
        _logger.info(f"{ind + 1}/{ntracks}, {str(track_file)}")


def register_track_files(path_tracks, one=None, overwrite=False, brain_atlas=None):
    """
    :param path_tracks: path to a directory containing tracks; also works with a single file name
    :param one:
    :param overwrite:
    :param brain_atlas:
    :return:
    """
    brain_atlas = brain_atlas or atlas.AllenAtlas()
    glob_pattern = "*_probe*_pts*.csv"
    path_tracks = Path(path_tracks)

    if not path_tracks.is_dir():
        track_files = [path_tracks]
    else:
        track_files = list(path_tracks.rglob(glob_pattern))
        track_files.sort()

    assert path_tracks.exists()
    assert one

    ntracks = len(track_files)
    for ind, track_file in enumerate(track_files):
        # Nomenclature expected:
        # '{yyyy-mm-dd}_{nickname}_{session_number}_{probe_label}_pts.csv'
        # beware: there may be underscores in the subject nickname

        search_filter = _parse_filename(track_file)
        probe = one.alyx.rest('insertions', 'list', no_cache=True, **search_filter)
        if len(probe) == 0:
            eid = one.search(subject=search_filter['subject'], date_range=search_filter['date'],
                             number=search_filter['experiment_number'])
            if len(eid) == 0:
                raise Exception(f"No session found {track_file.name}")
            insertion = {'session': eid[0],
                         'name': search_filter['name']}
            probe = one.alyx.rest('insertions', 'create', data=insertion)
        elif len(probe) == 1:
            probe = probe[0]
        else:
            raise ValueError("Multiple probes found.")
        probe_id = probe['id']
        try:
            xyz_picks = load_track_csv(track_file, brain_atlas=brain_atlas)
            register_track(probe_id, xyz_picks, one=one, overwrite=overwrite, brain_atlas=brain_atlas)
        except Exception:
            _logger.error(str(track_file))
            raise
        _logger.info(f"{ind + 1}/{ntracks}, {str(track_file)}")
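
# Example usage (a sketch; writes to Alyx, the folder path is hypothetical):
#   register_track_files('/data/histology/subject', one=one, overwrite=True)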


def detect_missing_histology_tracks(path_tracks=None, one=None, subject=None, brain_atlas=None):
    """
    Compares the number of probe insertions to the number of registered histology tracks to see if
    there is a discrepancy, so that missing tracks can be properly logged in the database.
    :param path_tracks: path to track files to be registered
    :param subject: subject nickname for which to detect missing tracks
    """
    brain_atlas = brain_atlas or atlas.AllenAtlas()
    if path_tracks:
        glob_pattern = "*_probe*_pts*.csv"

        path_tracks = Path(path_tracks)

        if not path_tracks.is_dir():
            track_files = [path_tracks]
        else:
            track_files = list(path_tracks.rglob(glob_pattern))
            track_files.sort()

        subjects = []
        for track_file in track_files:
            search_filter = _parse_filename(track_file)
            subjects.append(search_filter['subject'])

        unique_subjects = np.unique(subjects)
    elif not path_tracks and subject:
        unique_subjects = [subject]
    else:
        _logger.warning('Must specify either the path_tracks or the subject argument')
        return

    for subj in unique_subjects:
        insertions = one.alyx.rest('insertions', 'list', subject=subj, no_cache=True)
        trajectories = one.alyx.rest('trajectories', 'list', subject=subj,
                                     provenance='Histology track', no_cache=True)
        if len(insertions) != len(trajectories):
            ins_sess = np.array([ins['session'] + ins['name'] for ins in insertions])
            traj_sess = np.array([traj['session']['id'] + traj['probe_name']
                                  for traj in trajectories])
            miss_idx = np.where(np.isin(ins_sess, traj_sess, invert=True))[0]

            for idx in miss_idx:

                info = one.eid2path(ins_sess[idx][:36], query_type='remote').parts
                print(ins_sess[idx][:36])
                msg = f"Histology tracing missing for {info[-3]}, {info[-2]}, {info[-1]}," \
                      f" {ins_sess[idx][36:]}.\nEnter [y]es to register an empty track for " \
                      f"this insertion \nEnter [n]o if tracing for this probe insertion will be " \
                      f"conducted at a later date \n>"
                resp = input(msg)
                resp = resp.lower()
                if resp == 'y' or resp == 'yes':
                    _logger.info('Histology track for this probe insertion registered as empty')
                    probe_id = insertions[idx]['id']
                    print(insertions[idx]['session'])
                    print(probe_id)
                    register_track(probe_id, one=one, brain_atlas=brain_atlas)
                else:
                    _logger.info('Histology track for this probe insertion will not be registered')
                    continue


def coverage(trajs, ba=None, dist_fcn=[100, 150]):
    """
    Computes a coverage volume from a list of trajectories.
    :param trajs: dictionary of trajectories from the Alyx rest endpoint (one.alyx.rest...)
    :param ba: iblatlas.atlas.BrainAtlas instance
    :param dist_fcn: distances to the probe in um: coverage is 1 below the first value, 0 beyond
     the second, with a cosine taper in between
    :return: 3D np.array the same size as the volume provided in the brain atlas
    """
    # in um. Coverage = 1 below the first value, 0 after the second, cosine taper in between
    ACTIVE_LENGTH_UM = 3.5 * 1e3
    MAX_DIST_UM = dist_fcn[1]  # max distance around the probe to be searched for
    if ba is None:
        ba = atlas.AllenAtlas()

    def crawl_up_from_tip(ins, d):
        return (ins.entry - ins.tip) * (d[:, np.newaxis] /
                                        np.linalg.norm(ins.entry - ins.tip)) + ins.tip

    full_coverage = np.zeros(ba.image.shape, dtype=np.float32).flatten()

    for p in np.arange(len(trajs)):
        if len(trajs) > 20:
            if p % 20 == 0:
                print(p / len(trajs))
        traj = trajs[p]

        ins = atlas.Insertion.from_dict(traj)
        # those are the top and bottom coordinates of the active part of the shank extended
        # to maxdist
        d = (np.array([ACTIVE_LENGTH_UM + MAX_DIST_UM * np.sqrt(2),
                       -MAX_DIST_UM * np.sqrt(2)]) + TIP_SIZE_UM)
        top_bottom = crawl_up_from_tip(ins, d / 1e6)
        # this is the axis that has the biggest deviation. Almost always z
        axis = np.argmax(np.abs(np.diff(top_bottom, axis=0)))
        if axis != 2:
            _logger.warning(f"This works only for 45 degree or vertical tracks so far, skipping"
                            f" {ins}")
            continue
        # sample the active track path along this axis
        tbi = ba.bc.xyz2i(top_bottom)
        nz = tbi[1, axis] - tbi[0, axis] + 1
        ishank = np.round(np.array(
            [np.linspace(tbi[0, i], tbi[1, i], nz) for i in np.arange(3)]).T).astype(np.int32)

        # creates a flattened "column" of candidate volume indices around the track
        # around each sample get a horizontal square slice of nx * 2 + 1 and ny * 2 + 1 samples
        # flatten the corresponding xyz indices and compute the min distance to the track only
        # for those
        nx = int(np.floor(MAX_DIST_UM / 1e6 / np.abs(ba.bc.dxyz[0]) * np.sqrt(2) / 2)) * 2 + 1
        ny = int(np.floor(MAX_DIST_UM / 1e6 / np.abs(ba.bc.dxyz[1]) * np.sqrt(2) / 2)) * 2 + 1
        ixyz = np.stack([v.flatten() for v in np.meshgrid(
            np.arange(-nx, nx + 1), np.arange(-ny, ny + 1), np.arange(nz))]).T
        ixyz[:, 0] = ishank[ixyz[:, 2], 0] + ixyz[:, 0]
        ixyz[:, 1] = ishank[ixyz[:, 2], 1] + ixyz[:, 1]
        ixyz[:, 2] = ishank[ixyz[:, 2], 2]
        # if any, remove indices that lie outside of the volume bounds
        iok = np.logical_and(0 <= ixyz[:, 0], ixyz[:, 0] < ba.bc.nx)
        iok &= np.logical_and(0 <= ixyz[:, 1], ixyz[:, 1] < ba.bc.ny)
        iok &= np.logical_and(0 <= ixyz[:, 2], ixyz[:, 2] < ba.bc.nz)
        ixyz = ixyz[iok, :]
        # get the minimum distance to the trajectory, to which the cosine taper is applied
        xyz = np.c_[ba.bc.xscale[ixyz[:, 0]], ba.bc.yscale[ixyz[:, 1]], ba.bc.zscale[ixyz[:, 2]]]
        sites_bounds = crawl_up_from_tip(
            ins, (np.array([ACTIVE_LENGTH_UM, 0]) + TIP_SIZE_UM) / 1e6)
        mdist = ins.trajectory.mindist(xyz, bounds=sites_bounds)
        coverage = 1 - fcn_cosine(np.array(dist_fcn) / 1e6)(mdist)
        # remap to the coverage volume
        flat_ind = ba._lookup_inds(ixyz)
        full_coverage[flat_ind] += coverage

    full_coverage = full_coverage.reshape(ba.image.shape)
    full_coverage[ba.label == 0] = np.nan
    # NB: xyz and flat_ind below refer to the last trajectory processed in the loop
    return full_coverage, np.mean(xyz, 0), flat_ind
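
# Example usage (a sketch; trajectories from an Alyx REST query, atlas assumed cached):
#   trajs = one.alyx.rest('trajectories', 'list', provenance='Histology track')
#   vol, xyz_last, inds_last = coverage(trajs, ba=atlas.AllenAtlas())
#   # vol sums the per-insertion coverage; voxels outside the brain are NaN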


def coverage_grid(xyz_channels, spacing=500, ba=None):
    """
    Bins channel positions into a regular 3D grid and returns the count per voxel, together
    with a BrainCoordinates object mapping voxel indices back to IBL-Allen coordinates.
    :param xyz_channels: np.array [nchannels, 3] of channel positions in meters
    :param spacing: voxel size in um
    :param ba: iblatlas.atlas.BrainAtlas instance
    :return: r (volume of counts), bc (atlas.BrainCoordinates)
    """
    if ba is None:
        ba = atlas.AllenAtlas()

    def _get_scale_and_indices(v, bin, lim):
        _lim = [np.min(lim), np.max(lim)]
        # if bin is a nonzero scalar, this is a bin size: create scale and indices
        if np.isscalar(bin) and bin != 0:
            scale = np.arange(_lim[0], _lim[1] + bin / 2, bin)
            ind = (np.floor((v - _lim[0]) / bin)).astype(np.int64)
            if lim[0] > lim[1]:
                scale = scale[::-1]
                ind = (scale.size - 1) - ind
        # if bin == 0, aggregate over unique values
        else:
            scale, ind = np.unique(v, return_inverse=True)
        return scale, ind

    xlim = ba.bc.xlim
    ylim = ba.bc.ylim
    zlim = ba.bc.zlim

    xscale, xind = _get_scale_and_indices(xyz_channels[:, 0], spacing / 1e6, xlim)
    yscale, yind = _get_scale_and_indices(xyz_channels[:, 1], spacing / 1e6, ylim)
    zscale, zind = _get_scale_and_indices(xyz_channels[:, 2], spacing / 1e6, zlim)

    # aggregate by using bincount on absolute indices for a 2d array
    nx, ny, nz = [xscale.size, yscale.size, zscale.size]
    ind2d = np.ravel_multi_index(np.c_[yind, xind, zind].transpose(), dims=(ny, nx, nz))
    r = np.bincount(ind2d, minlength=nx * ny * nz).reshape(ny, nx, nz)

    # Make things analogous to AllenAtlas
    dxyz = spacing / 1e6 * np.array([1, -1, -1])
    dims2xyz = np.array([1, 0, 2])
    nxyz = np.array(r.shape)[dims2xyz]
    iorigin = (atlas.ALLEN_CCF_LANDMARKS_MLAPDV_UM['bregma'] / spacing)

    bc = atlas.BrainCoordinates(nxyz=nxyz, xyz0=(0, 0, 0), dxyz=dxyz)
    bc = atlas.BrainCoordinates(nxyz=nxyz, xyz0=- bc.i2xyz(iorigin), dxyz=dxyz)

    return r, bc
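
# Example (a minimal sketch; random channel positions in meters, 500 um bins, atlas assumed cached):
#   xyz_channels = np.random.uniform(-0.002, 0.002, size=(1000, 3))
#   r, bc = coverage_grid(xyz_channels, spacing=500)
#   # r counts channels per voxel; bc maps voxel indices back to IBL-Allen xyz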