Coverage for ibllib/pipes/histology.py: 34% of 382 statements

from pathlib import Path
import logging

import matplotlib.pyplot as plt
import numpy as np
from one.api import ONE
import one.alf.io as alfio

from neuropixel import TIP_SIZE_UM, trace_header
import iblatlas.atlas as atlas
from ibllib.ephys.spikes import probes_description as extract_probes
from ibllib.qc import base

from ibldsp.utils import fcn_cosine

_logger = logging.getLogger(__name__)


def load_track_csv(file_track, brain_atlas=None):
    """
    Loads a Lasagna track file and converts it to the IBL-Allen coordinate framework.
    :param file_track: path to the Lasagna csv file of picked points
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance (defaults to the 25 um Allen atlas)
    :return: xyz coordinates in meters
    """
    brain_atlas = brain_atlas or atlas.AllenAtlas(25)
    # apmldv in the histology file is flipped along the y direction
    file_track = Path(file_track)
    if file_track.stat().st_size == 0:
        return np.array([])
    ixiyiz = np.loadtxt(file_track, delimiter=',')[:, [1, 0, 2]]
    ixiyiz[:, 1] = 527 - ixiyiz[:, 1]
    ixiyiz = ixiyiz[np.argsort(ixiyiz[:, 2]), :]
    xyz = brain_atlas.bc.i2xyz(ixiyiz)
    # xyz[:, 0] = - xyz[:, 0]
    return xyz
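

# The sketch below (added for illustration, not part of the original module) round-trips two
# synthetic picked points through load_track_csv; the csv path is hypothetical and the call
# downloads/loads the 25 um Allen atlas on first use.
def _example_load_track_csv(tmp_csv='/tmp/track_pts.csv'):
    # two picked points as Lasagna voxel indices (columns get reordered to [1, 0, 2])
    np.savetxt(tmp_csv, np.array([[100., 200., 300.], [110., 210., 310.]]), delimiter=',')
    xyz = load_track_csv(tmp_csv)
    assert xyz.shape == (2, 3)  # meters in the IBL-Allen frame, sorted by dorso-ventral index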


def get_picked_tracks(histology_path, glob_pattern="*_pts_transformed.csv", brain_atlas=None):
    """
    Reads in the Lasagna output and converts the picked tracks to IBL coordinates.
    :param histology_path: Path object: folder path containing tracks (a single file also works)
    :param glob_pattern: pattern used to find track files in the folder
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance
    :return: dict with keys 'files' (list of paths) and 'xyz' (list of coordinate arrays in meters)
    """
    brain_atlas = brain_atlas or atlas.AllenAtlas()
    xyzs = []
    histology_path = Path(histology_path)
    if histology_path.is_file():
        files_track = [histology_path]
    else:
        files_track = list(histology_path.rglob(glob_pattern))
    for file_track in files_track:
        xyzs.append(load_track_csv(file_track, brain_atlas=brain_atlas))
    return {'files': files_track, 'xyz': xyzs}
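

# Illustrative usage sketch (the folder path is hypothetical, not part of the original module):
# collect every Lasagna export under a histology folder and check that files and coordinates
# stay paired.
def _example_get_picked_tracks():
    out = get_picked_tracks('/data/histology/subject')
    assert len(out['files']) == len(out['xyz'])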


def get_micro_manipulator_data(subject, one=None, force_extract=False):
    """
    Looks for all ephys sessions for a given subject and gets the probe micro-manipulator
    trajectories.
    If the probes ALF object is not on Flatiron, attempts to perform the extraction from
    meta-data and the task settings file.
    """
    if not one:
        one = ONE(cache_rest=None)

    eids, sessions = one.search(subject=subject, task_protocol='ephys', details=True)
    probes = alfio.AlfBunch({})
    for ses in sessions:
        sess_path = Path(ses['local_path'])
        probe = None
        if not force_extract:
            probe = one.load_object(ses['url'], 'probes')
        if not probe:
            _logger.warning(f"Re-extracting probe info for {sess_path}")
            dtypes = ['_iblrig_taskSettings.raw', 'ephysData.raw.meta']
            raw_files = one.load(ses['url'], dataset_types=dtypes, download_only=True)
            if all([rf is None for rf in raw_files]):
                _logger.warning(f"No raw settings files nor ephys data found for"
                                f" {ses['local_path']}. Skipping this session.")
                continue
            extract_probes(sess_path, bin_exists=False)
            probe = alfio.load_object(sess_path.joinpath('alf'), 'probes')
        one.load(ses['url'], dataset_types='channels.localCoordinates', download_only=True)
        # get the local sites mapping for each insertion; if not found, assume a checkerboard pattern
        probe['sites_coordinates'] = []
        for prb in probe.description:
            chfile = Path(ses['local_path']).joinpath('alf', prb['label'],
                                                      'channels.localCoordinates.npy')
            if chfile.exists():
                probe['sites_coordinates'].append(np.load(chfile))
            else:
                _logger.warning(f"No channels.localCoordinates found for {ses['local_path']}."
                                f" Assuming checkerboard pattern")
                th = trace_header(version=1)
                probe['sites_coordinates'].append(np.c_[th['x'], th['y']])
        # put the session information in there
        probe['session'] = [ses] * len(probe.description)
        probes = probes.append(probe)
    return probes


def plot2d_all(trajectories, tracks, brain_atlas=None):
    """
    Plot all tracks and trajectories on sagittal and coronal 2d slices.
    :param trajectories: list of trajectory dictionaries from the Alyx REST query on trajectories
    :param tracks: dictionary of picked tracks, as output by get_picked_tracks
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance
    :return:
    """
    brain_atlas = brain_atlas or atlas.AllenAtlas(25)
    plt.figure()
    axs = brain_atlas.plot_sslice(brain_atlas.bc.i2x(190), cmap=plt.get_cmap('bone'))
    plt.figure()
    axc = brain_atlas.plot_cslice(brain_atlas.bc.i2y(350))
    for xyz in tracks['xyz']:
        axc.plot(xyz[:, 0] * 1e3, xyz[:, 2] * 1e6, 'b')
        axs.plot(xyz[:, 1] * 1e3, xyz[:, 2] * 1e6, 'b')
    for trj in trajectories:
        ins = atlas.Insertion.from_dict(trj, brain_atlas=brain_atlas)
        xyz = ins.xyz
        axc.plot(xyz[:, 0] * 1e3, xyz[:, 2] * 1e6, 'r')
        axs.plot(xyz[:, 1] * 1e3, xyz[:, 2] * 1e6, 'r')


def plot3d_all(trajectories, tracks, brain_atlas=None):
    """
    Plot all tracks and trajectories in a single 3d mayavi scene.
    :param trajectories: list of trajectory dictionaries from the Alyx REST query on trajectories
    :param tracks: dictionary of picked tracks, as output by get_picked_tracks
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance
    :return:
    """
    from mayavi import mlab
    brain_atlas = brain_atlas or atlas.AllenAtlas()
    src = mlab.pipeline.scalar_field(brain_atlas.label)
    mlab.pipeline.iso_surface(src, contours=[0.5, ], opacity=0.3)

    pts = []
    for xyz in tracks['xyz']:
        mlapdv = brain_atlas.bc.xyz2i(xyz)
        pts.append(mlab.plot3d(mlapdv[:, 1], mlapdv[:, 0], mlapdv[:, 2], line_width=3))

    plt_trj = []
    for trj in trajectories:
        ins = atlas.Insertion.from_dict(trj, brain_atlas=brain_atlas)
        mlapdv = brain_atlas.bc.xyz2i(ins.xyz)
        # keep the plot handle in a dedicated variable to avoid shadowing matplotlib.pyplot
        plt_trj.append(mlab.plot3d(mlapdv[:, 1], mlapdv[:, 0], mlapdv[:, 2],
                                   line_width=3, color=(1., 0., 1.)))


def interpolate_along_track(xyz_track, depths):
    """
    Get the coordinates of points along a track according to their distances from the first
    point.
    :param xyz_track: np.array [npoints, 3]. Usually the first point is the deepest
    :param depths: distance from the first point of the track; the convention is that the
     deepest point is 0 and values increase going up
    :return: xyz_channels
    """
    # from scipy.interpolate import interp1d
    # this is the cumulative distance from the lowest picked point (first)
    distance = np.cumsum(np.r_[0, np.sqrt(np.sum(np.diff(xyz_track, axis=0) ** 2, axis=1))])
    xyz_channels = np.zeros((depths.shape[0], 3))
    for m in np.arange(3):
        xyz_channels[:, m] = np.interp(depths, distance, xyz_track[:, m])
        # xyz_channels[:, m] = interp1d(distance, xyz[:, m], kind='cubic')(chdepths / 1e6)
    # plt.figure()
    # plt.plot(xyz_track[:, 0] * 1e6, xyz_track[:, 2] * 1e6, 'k*'), plt.axis('equal')
    # plt.plot(xyz_channels[:, 0] * 1e6, xyz_channels[:, 2] * 1e6, '.'), plt.axis('equal')
    return xyz_channels
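

# Worked example (added for illustration, not part of the original module): on a straight
# vertical 4 mm track with the deepest point first, a channel 500 um above the tip lands at
# tip_z + 0.0005 m.
def _example_interpolate_along_track():
    xyz_track = np.array([[0., 0., -0.004], [0., 0., 0.]])   # tip first, entry last, meters
    depths = np.array([0.0005, 0.001])                       # distances up from the tip, meters
    xyz_channels = interpolate_along_track(xyz_track, depths)
    assert np.allclose(xyz_channels[:, 2], [-0.0035, -0.003])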


def get_brain_regions(xyz, channels_positions=None, brain_atlas=None):
    """
    :param xyz: numpy array of 3D coordinates corresponding to a picked track or a trajectory;
     the deepest point is assumed to be the tip.
    :param channels_positions: 2D numpy array of the channel positions on the probe (um);
     defaults to the Neuropixel version 1 geometry
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance
    :return: brain_regions (associated to each channel),
             insertion (atlas.Insertion object, defining the 2 points of entry
             (tip and end of probe))
    """
    """
    this is the depth along the probe (from the first point which is the deepest labeled point)
    Due to the blockiness, depths may not be unique along the track so it has to be prepared
    """

    brain_atlas = brain_atlas or atlas.AllenAtlas(25)
    if channels_positions is None:
        geometry = trace_header(version=1)
        channels_positions = np.c_[geometry['x'], geometry['y']]

    xyz = xyz[np.argsort(xyz[:, 2]), :]
    d = atlas.cart2sph(xyz[:, 0] - xyz[0, 0], xyz[:, 1] - xyz[0, 1], xyz[:, 2] - xyz[0, 2])[0]
    indsort = np.argsort(d)
    xyz = xyz[indsort, :]
    d = d[indsort]
    iduplicates = np.where(np.diff(d) == 0)[0]
    xyz = np.delete(xyz, iduplicates, axis=0)
    d = np.delete(d, iduplicates, axis=0)

    assert np.all(np.diff(d) > 0), "Depths should be strictly increasing"

    # Get the probe insertion from the coordinates
    insertion = atlas.Insertion.from_track(xyz, brain_atlas)

    # Interpolate channel positions along the probe depth and get brain locations
    # (TIP_SIZE_UM, imported from neuropixel, accounts for the un-recorded probe tip)
    xyz_channels = interpolate_along_track(xyz, (channels_positions[:, 1] + TIP_SIZE_UM) / 1e6)

    # get the brain regions
    brain_regions = brain_atlas.regions.get(brain_atlas.get_labels(xyz_channels))
    brain_regions['xyz'] = xyz_channels
    brain_regions['lateral'] = channels_positions[:, 0]
    brain_regions['axial'] = channels_positions[:, 1]
    assert np.unique([len(brain_regions[k]) for k in brain_regions]).size == 1

    return brain_regions, insertion
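

# Illustrative sketch (not part of the original module): a synthetic, vertical track through
# the left hemisphere, deepest point first. Requires the 25 um Allen atlas and uses the default
# Neuropixel version 1 channel geometry; the coordinates are made up for the example.
def _example_get_brain_regions():
    xyz_track = np.array([[-0.002, 0.0005, -0.004], [-0.002, 0.0005, 0.0005]])
    brain_regions, insertion = get_brain_regions(xyz_track)
    # one region entry per channel position
    assert brain_regions.acronym.size == brain_regions.xyz.shape[0]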


def register_chronic_track(chronic_id, picks=None, one=None, overwrite=False, channels=True, brain_atlas=None):
    """
    Register the user picks to a chronic insertion in Alyx.
    Here we update the database in 4 steps:
    1) The user picks converted to IBL coordinates are stored in the json field of the
       corresponding chronic insertion model
    2) All associated probe insertions are identified and the user picks are stored in their
       json fields too
    3) The trajectory associated to the chronic insertion, computed from the histology track,
       is created or patched
    4) Channel locations are set in the table
    :param chronic_id: uuid of the chronic insertion
    :param picks: np.array of the picked points (m)
    :param one: ONE instance
    :param overwrite: bool, whether to overwrite an existing histology trajectory
    :param channels: bool, whether to register the channel locations
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance
    :return:
    """
    assert one
    chronic = one.alyx.rest('chronic-insertions', 'list', id=chronic_id)[0]
    for probe_id in chronic['probe_insertion']:
        pid = probe_id['id']
        brain_locations, insertion_histology = register_track(pid, picks=picks, one=one, overwrite=overwrite,
                                                              channels=channels, brain_atlas=brain_atlas)

    if picks is None or picks.size == 0:
        hist_qc = base.QC(chronic_id, one=one, endpoint='chronic-insertions')
        hist_qc.update_extended_qc({'tracing_exists': False})
        hist_qc.update('CRITICAL', namespace='tracing')
    else:
        one.alyx.json_field_update(endpoint='chronic-insertions', uuid=chronic_id, field_name='json',
                                   data={'xyz_picks': np.int32(picks * 1e6).tolist()})
        # Update the insertion qc to register that tracing exists
        hist_qc = base.QC(chronic_id, one=one, endpoint='chronic-insertions')
        hist_qc.update_extended_qc({'tracing_exists': True})

    return brain_locations, insertion_histology


def register_track(probe_id, picks=None, one=None, overwrite=False, channels=True, brain_atlas=None,
                   endpoint='insertions'):
    """
    Register the user picks to a probe in Alyx.
    Here we update Alyx models on the database in 3 steps:
    1) The user picks converted to IBL coordinates are stored in the json field of the
       corresponding probe insertion model
    2) The trajectory computed from the histology track is created or patched
    3) Channel locations are set in the table
    """
    assert one
    brain_atlas = brain_atlas or atlas.AllenAtlas()
    # 0) if it's an empty track, create a null trajectory and exit
    if picks is None or picks.size == 0:
        tdict = {'x': None, 'y': None, 'z': None,
                 'phi': None, 'theta': None, 'depth': None, 'roll': None,
                 'provenance': 'Histology track',
                 'coordinate_system': 'IBL-Allen',
                 }
        if endpoint == 'chronic-insertions':
            tdict['chronic_insertion'] = probe_id
            tdict['probe_insertion'] = None
        else:
            tdict['probe_insertion'] = probe_id
            tdict['chronic_insertion'] = None

        brain_locations = None
        # Update the insertion qc to CRITICAL
        hist_qc = base.QC(probe_id, one=one, endpoint=endpoint)
        hist_qc.update_extended_qc({'tracing_exists': False})
        hist_qc.update('CRITICAL', namespace='tracing')
        insertion_histology = None
        # Here we need to change the track qc to critical and also set the extended qc to zero
    else:
        try:
            eid, pname = one.pid2eid(probe_id)
            chan_pos = one.load_dataset(eid, 'channels.localCoordinates.npy', collection=f'alf/{pname}/pykilosort')
        except Exception:
            chan_pos = None

        brain_locations, insertion_histology = get_brain_regions(picks, channels_positions=chan_pos,
                                                                 brain_atlas=brain_atlas)
        # 1) update the alyx models, first put the picked points in the insertion json
        one.alyx.json_field_update(endpoint=endpoint, uuid=probe_id, field_name='json',
                                   data={'xyz_picks': np.int32(picks * 1e6).tolist()})

        # Update the insertion qc to register that tracing exists
        hist_qc = base.QC(probe_id, one=one, endpoint=endpoint)
        hist_qc.update_extended_qc({'tracing_exists': True})
        # 2) patch or create the trajectory coming from the histology track
        tdict = create_trajectory_dict(probe_id, insertion_histology, provenance='Histology track', endpoint=endpoint)

    alyx_end = 'chronic_insertion' if endpoint == 'chronic-insertions' else 'probe_insertion'
    hist_traj = one.alyx.get('/trajectories?'
                             f'&{alyx_end}={probe_id}'
                             '&provenance=Histology track', clobber=True)
    # if the trajectory exists, remove it; this will cascade-delete existing channel locations
    if len(hist_traj):
        if overwrite:
            one.alyx.rest('trajectories', 'delete', id=hist_traj[0]['id'])
        else:
            raise FileExistsError('The session already exists, however overwrite is set to False. '
                                  'If you want to overwrite, set overwrite=True.')
    hist_traj = one.alyx.rest('trajectories', 'create', data=tdict)

    if brain_locations is None:
        return brain_locations, None
    # 3) create the channel locations
    if channels:
        channel_dict = create_channel_dict(hist_traj, brain_locations)
        one.alyx.rest('channels', 'create', data=channel_dict)

    return brain_locations, insertion_histology


def register_aligned_track(probe_id, xyz_channels, chn_coords=None, one=None, overwrite=False,
                           channels=True, brain_atlas=None):
    """
    Register an ephys-aligned trajectory and the channel locations to Alyx.
    Here we update Alyx models on the database in 2 steps:
    1) The trajectory is computed from the final electrode channel locations
    2) Channel locations are set on the trajectory
    """
    assert one
    brain_atlas = brain_atlas or atlas.AllenAtlas(25)
    if chn_coords is None:
        geometry = trace_header(version=1)
        chn_coords = np.c_[geometry['x'], geometry['y']]

    insertion = atlas.Insertion.from_track(xyz_channels, brain_atlas)
    tdict = create_trajectory_dict(probe_id, insertion, provenance='Ephys aligned histology track')

    hist_traj = one.alyx.rest('trajectories', 'list',
                              probe_insertion=probe_id,
                              provenance='Ephys aligned histology track',
                              no_cache=True)
    # if the trajectory exists, remove it; this will cascade-delete existing channel locations
    if len(hist_traj):
        if overwrite:
            one.alyx.rest('trajectories', 'delete', id=hist_traj[0]['id'])
        else:
            raise FileExistsError('The session already exists, however overwrite is set to False. '
                                  'If you want to overwrite, set overwrite=True.')
    hist_traj = one.alyx.rest('trajectories', 'create', data=tdict)

    if channels:
        brain_regions = brain_atlas.regions.get(brain_atlas.get_labels(xyz_channels))
        brain_regions['xyz'] = xyz_channels
        brain_regions['lateral'] = chn_coords[:, 0]
        brain_regions['axial'] = chn_coords[:, 1]
        assert np.unique([len(brain_regions[k]) for k in brain_regions]).size == 1
        channel_dict = create_channel_dict(hist_traj, brain_regions)
        one.alyx.rest('channels', 'create', data=channel_dict)


def create_trajectory_dict(probe_id, insertion, provenance, endpoint='insertions'):
    """
    Create a trajectory dictionary in the form expected by alyx
    :param probe_id: unique id of the probe insertion
    :type probe_id: string (hexadecimal UUID)
    :param insertion: Insertion object describing the entry and tip of the trajectory
    :type insertion: object atlas.Insertion
    :param provenance: 'Histology track' or 'Ephys aligned histology track'
    :type provenance: string
    :param endpoint: Alyx endpoint, either 'insertions' or 'chronic-insertions'
    :type endpoint: string
    :return tdict:
    :type tdict: dict
    """
    tdict = {'x': insertion.x * 1e6,
             'y': insertion.y * 1e6,
             'z': insertion.z * 1e6,
             'phi': insertion.phi,
             'theta': insertion.theta,
             'depth': insertion.depth * 1e6,
             'roll': insertion.beta,
             'provenance': provenance,
             'coordinate_system': 'IBL-Allen',
             }
    if endpoint == 'chronic-insertions':
        tdict['chronic_insertion'] = probe_id
        tdict['probe_insertion'] = None
    else:
        tdict['probe_insertion'] = probe_id
        tdict['chronic_insertion'] = None

    return tdict
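

# Illustrative sketch (not part of the original module): insertion coordinates in meters are
# converted to the micrometer-based Alyx payload. The UUID is hypothetical and the keyword
# constructor of atlas.Insertion is assumed to follow its dataclass fields.
def _example_create_trajectory_dict():
    ins = atlas.Insertion(x=-0.002, y=0.001, z=-0.0005, phi=180., theta=10., depth=0.004)
    tdict = create_trajectory_dict('00000000-0000-0000-0000-000000000000', ins,
                                   provenance='Histology track')
    assert np.isclose(tdict['x'], -2000.) and np.isclose(tdict['depth'], 4000.)
    assert tdict['chronic_insertion'] is None  # default endpoint is 'insertions'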


def create_channel_dict(traj, brain_locations):
    """
    Create a channel dictionary in the form expected by alyx
    :param traj: alyx trajectory object to attach the channel information to
    :type traj: dict
    :param brain_locations: information about the location of the electrode channels in the brain atlas
    :type brain_locations: Bunch
    :return channel_dict:
    :type channel_dict: list of dict
    """
    channel_dict = []
    for i in np.arange(brain_locations.id.size):
        channel_dict.append({
            'x': np.float64(brain_locations.xyz[i, 0] * 1e6),
            'y': np.float64(brain_locations.xyz[i, 1] * 1e6),
            'z': np.float64(brain_locations.xyz[i, 2] * 1e6),
            'axial': np.float64(brain_locations.axial[i]),
            'lateral': np.float64(brain_locations.lateral[i]),
            'brain_region': int(brain_locations.id[i]),
            'trajectory_estimate': traj['id']
        })

    return channel_dict
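

# Illustrative sketch (not part of the original module): two synthetic channels attached to a
# hypothetical trajectory id, showing the meter-to-micrometer conversion of the payload.
def _example_create_channel_dict():
    locs = alfio.AlfBunch(id=np.array([512, 512]),
                          xyz=np.array([[-0.002, 0.001, -0.003], [-0.002, 0.001, -0.00298]]),
                          axial=np.array([20., 40.]), lateral=np.array([16., 48.]))
    cdict = create_channel_dict({'id': '00000000-0000-0000-0000-000000000000'}, locs)
    assert len(cdict) == 2 and np.isclose(cdict[0]['z'], -3000.)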


def _parse_filename(track_file):
    tmp = track_file.name.split('_')
    # the session number is the last 3-digit token; the subject nickname may contain underscores
    inumber = [i for i, s in enumerate(tmp) if s.isdigit() and len(s) == 3][-1]
    search_filter = {'date': tmp[0], 'experiment_number': int(tmp[inumber]),
                     'name': '_'.join(tmp[inumber + 1:-1]),
                     'subject': '_'.join(tmp[1:inumber])}
    return search_filter
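

# Worked example (added for illustration, not part of the original module; the filename is
# hypothetical): the expected nomenclature is
# '{yyyy-mm-dd}_{nickname}_{session_number}_{probe_label}_pts.csv', where the nickname may
# itself contain underscores and digit groups.
def _example_parse_filename():
    f = Path('2019-12-10_KS_023_001_probe01_pts.csv')
    assert _parse_filename(f) == {'date': '2019-12-10', 'experiment_number': 1,
                                  'name': 'probe01', 'subject': 'KS_023'}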


def register_chronic_track_files(path_tracks, one=None, overwrite=False, brain_atlas=None):
    """
    Registers track files for chronic insertions
    :param path_tracks: path to a directory containing track files; also works with a single file name
    :param one: ONE instance
    :param overwrite: bool, whether to overwrite existing histology trajectories
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance
    :return:
    """

    brain_atlas = brain_atlas or atlas.AllenAtlas()
    glob_pattern = "*_probe*_pts*.csv"
    path_tracks = Path(path_tracks)

    if not path_tracks.is_dir():
        track_files = [path_tracks]
    else:
        track_files = list(path_tracks.rglob(glob_pattern))
        track_files.sort()

    assert path_tracks.exists()
    assert one

    ntracks = len(track_files)
    for ind, track_file in enumerate(track_files):
        # Nomenclature expected:
        # '{yyyy-mm-dd}_{nickname}_{session_number}_{probe_label}_pts.csv'
        # beware: there may be underscores in the subject nickname

        search_filter = _parse_filename(track_file)
        probe = one.alyx.rest('chronic-insertions', 'list', no_cache=True, **search_filter)
        if len(probe) == 0:
            raise ValueError(f"Could not find associated chronic insertion for {search_filter['subject']},"
                             f" {search_filter['name']}")
        elif len(probe) == 1:
            probe = probe[0]
        else:
            raise ValueError("Multiple chronic insertions found.")
        chronic_id = probe['id']
        try:
            xyz_picks = load_track_csv(track_file, brain_atlas=brain_atlas)
            register_chronic_track(chronic_id, xyz_picks, one=one, overwrite=overwrite, brain_atlas=brain_atlas)
        except Exception as e:
            _logger.error(str(track_file))
            raise e
        _logger.info(f"{ind + 1}/{ntracks}, {str(track_file)}")


def register_track_files(path_tracks, one=None, overwrite=False, brain_atlas=None):
    """
    :param path_tracks: path to a directory containing tracks; also works with a single file name
    :param one: ONE instance
    :param overwrite: bool, whether to overwrite existing histology trajectories
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance
    :return:
    """
    brain_atlas = brain_atlas or atlas.AllenAtlas()
    glob_pattern = "*_probe*_pts*.csv"
    path_tracks = Path(path_tracks)

    if not path_tracks.is_dir():
        track_files = [path_tracks]
    else:
        track_files = list(path_tracks.rglob(glob_pattern))
        track_files.sort()

    assert path_tracks.exists()
    assert one

    ntracks = len(track_files)
    for ind, track_file in enumerate(track_files):
        # Nomenclature expected:
        # '{yyyy-mm-dd}_{nickname}_{session_number}_{probe_label}_pts.csv'
        # beware: there may be underscores in the subject nickname

        search_filter = _parse_filename(track_file)
        probe = one.alyx.rest('insertions', 'list', no_cache=True, **search_filter)
        if len(probe) == 0:
            eid = one.search(subject=search_filter['subject'], date_range=search_filter['date'],
                             number=search_filter['experiment_number'])
            if len(eid) == 0:
                raise Exception(f"No session found for {track_file.name}")
            insertion = {'session': eid[0],
                         'name': search_filter['name']}
            probe = one.alyx.rest('insertions', 'create', data=insertion)
        elif len(probe) == 1:
            probe = probe[0]
        else:
            raise ValueError("Multiple probes found.")
        probe_id = probe['id']
        try:
            xyz_picks = load_track_csv(track_file, brain_atlas=brain_atlas)
            register_track(probe_id, xyz_picks, one=one, overwrite=overwrite, brain_atlas=brain_atlas)
        except Exception as e:
            _logger.error(str(track_file))
            raise e
        _logger.info(f"{ind + 1}/{ntracks}, {str(track_file)}")


def detect_missing_histology_tracks(path_tracks=None, one=None, subject=None, brain_atlas=None):
    """
    Compares the number of probe insertions to the number of registered histology tracks to see if
    there is a discrepancy, so that missing tracks can be properly logged in the database
    :param path_tracks: path to the track files to be registered
    :param one: ONE instance
    :param subject: subject nickname for which to detect missing tracks
    :param brain_atlas: iblatlas.atlas.BrainAtlas instance
    """

    brain_atlas = brain_atlas or atlas.AllenAtlas()
    if path_tracks:
        glob_pattern = "*_probe*_pts*.csv"

        path_tracks = Path(path_tracks)

        if not path_tracks.is_dir():
            track_files = [path_tracks]
        else:
            track_files = list(path_tracks.rglob(glob_pattern))
            track_files.sort()

        subjects = []
        for track_file in track_files:
            search_filter = _parse_filename(track_file)
            subjects.append(search_filter['subject'])

        unique_subjects = np.unique(subjects)
    elif not path_tracks and subject:
        unique_subjects = [subject]
    else:
        _logger.warning('Must specify either the path_tracks or the subject argument')
        return

    for subj in unique_subjects:
        insertions = one.alyx.rest('insertions', 'list', subject=subj, no_cache=True)
        trajectories = one.alyx.rest('trajectories', 'list', subject=subj,
                                     provenance='Histology track', no_cache=True)
        if len(insertions) != len(trajectories):
            ins_sess = np.array([ins['session'] + ins['name'] for ins in insertions])
            traj_sess = np.array([traj['session']['id'] + traj['probe_name']
                                  for traj in trajectories])
            miss_idx = np.where(np.isin(ins_sess, traj_sess, invert=True))[0]

            for idx in miss_idx:

                info = one.eid2path(ins_sess[idx][:36], query_type='remote').parts
                print(ins_sess[idx][:36])
                msg = f"Histology tracing missing for {info[-3]}, {info[-2]}, {info[-1]}," \
                      f" {ins_sess[idx][36:]}.\nEnter [y]es to register an empty track for " \
                      f"this insertion \nEnter [n]o, if tracing for this probe insertion will be " \
                      f"conducted at a later date \n>"
                resp = input(msg)
                resp = resp.lower()
                if resp == 'y' or resp == 'yes':
                    _logger.info('Histology track for this probe insertion registered as empty')
                    probe_id = insertions[idx]['id']
                    print(insertions[idx]['session'])
                    print(probe_id)
                    register_track(probe_id, one=one, brain_atlas=brain_atlas)
                else:
                    _logger.info('Histology track for this probe insertion will not be registered')
                    continue


def coverage(trajs, ba=None, dist_fcn=[100, 150]):
    """
    Computes a coverage volume from a set of trajectories.
    :param trajs: list of trajectory dictionaries from the Alyx rest endpoint (one.alyx.rest...)
    :param ba: iblatlas.atlas.BrainAtlas instance
    :param dist_fcn: distances in um; coverage is 1 below the first value, 0 beyond the second,
     with a cosine taper in between
    :return: 3D np.array the same size as the volume provided in the brain atlas, along with the
     mean xyz coordinates and flat voxel indices of the last processed trajectory
    """
    # in um. Coverage = 1 below the first value, 0 after the second, cosine taper in between
    ACTIVE_LENGTH_UM = 3.5 * 1e3
    MAX_DIST_UM = dist_fcn[1]  # max distance around the probe to be searched for
    if ba is None:
        ba = atlas.AllenAtlas()

    def crawl_up_from_tip(ins, d):
        return (ins.entry - ins.tip) * (d[:, np.newaxis] /
                                        np.linalg.norm(ins.entry - ins.tip)) + ins.tip

    full_coverage = np.zeros(ba.image.shape, dtype=np.float32).flatten()

    for p in np.arange(len(trajs)):
        if len(trajs) > 20:
            if p % 20 == 0:
                print(p / len(trajs))
        traj = trajs[p]

        ins = atlas.Insertion.from_dict(traj)
        # those are the top and bottom coordinates of the active part of the shank extended
        # to maxdist
        d = (np.array([ACTIVE_LENGTH_UM + MAX_DIST_UM * np.sqrt(2),
                       -MAX_DIST_UM * np.sqrt(2)]) + TIP_SIZE_UM)
        top_bottom = crawl_up_from_tip(ins, d / 1e6)
        # this is the axis that has the biggest deviation. Almost always z
        axis = np.argmax(np.abs(np.diff(top_bottom, axis=0)))
        if axis != 2:
            _logger.warning(f"This works only for 45 degree or vertical tracks so far, skipping"
                            f" {ins}")
            continue
        # sample the active track path along this axis
        tbi = ba.bc.xyz2i(top_bottom)
        nz = tbi[1, axis] - tbi[0, axis] + 1
        ishank = np.round(np.array(
            [np.linspace(tbi[0, i], tbi[1, i], nz) for i in np.arange(3)]).T).astype(np.int32)

        # creates a flattened "column" of candidate volume indices around the track:
        # around each sample, take a horizontal square slice of nx * 2 + 1 by ny * 2 + 1 samples,
        # flatten the corresponding xyz indices and compute the min distance to the track only
        # for those
        nx = int(np.floor(MAX_DIST_UM / 1e6 / np.abs(ba.bc.dxyz[0]) * np.sqrt(2) / 2)) * 2 + 1
        ny = int(np.floor(MAX_DIST_UM / 1e6 / np.abs(ba.bc.dxyz[1]) * np.sqrt(2) / 2)) * 2 + 1
        ixyz = np.stack([v.flatten() for v in np.meshgrid(
            np.arange(-nx, nx + 1), np.arange(-ny, ny + 1), np.arange(nz))]).T
        ixyz[:, 0] = ishank[ixyz[:, 2], 0] + ixyz[:, 0]
        ixyz[:, 1] = ishank[ixyz[:, 2], 1] + ixyz[:, 1]
        ixyz[:, 2] = ishank[ixyz[:, 2], 2]
        # if any, remove indices that lie outside of the volume bounds
        iok = np.logical_and(0 <= ixyz[:, 0], ixyz[:, 0] < ba.bc.nx)
        iok &= np.logical_and(0 <= ixyz[:, 1], ixyz[:, 1] < ba.bc.ny)
        iok &= np.logical_and(0 <= ixyz[:, 2], ixyz[:, 2] < ba.bc.nz)
        ixyz = ixyz[iok, :]
        # get the minimum distance to the trajectory, to which the cosine taper is applied
        xyz = np.c_[ba.bc.xscale[ixyz[:, 0]], ba.bc.yscale[ixyz[:, 1]], ba.bc.zscale[ixyz[:, 2]]]
        sites_bounds = crawl_up_from_tip(
            ins, (np.array([ACTIVE_LENGTH_UM, 0]) + TIP_SIZE_UM) / 1e6)
        mdist = ins.trajectory.mindist(xyz, bounds=sites_bounds)
        coverage = 1 - fcn_cosine(np.array(dist_fcn) / 1e6)(mdist)
        # remap to the coverage volume
        flat_ind = ba._lookup_inds(ixyz)
        full_coverage[flat_ind] += coverage

    full_coverage = full_coverage.reshape(ba.image.shape)
    full_coverage[ba.label == 0] = np.nan
    return full_coverage, np.mean(xyz, 0), flat_ind
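

# Illustrative sketch (not part of the original module) of the distance taper used above:
# coverage is 1 within dist_fcn[0] um of the track, 0 beyond dist_fcn[1] um, and a cosine
# ramp in between.
def _example_coverage_taper():
    mdist = np.array([50., 100., 125., 150., 200.]) / 1e6  # minimum distances to the track, m
    cov = 1 - fcn_cosine(np.array([100, 150]) / 1e6)(mdist)
    assert np.isclose(cov[0], 1) and np.isclose(cov[-1], 0) and np.isclose(cov[2], 0.5)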


def coverage_grid(xyz_channels, spacing=500, ba=None):
    """
    Bins channel positions into a regular 3D grid and returns the count volume together with
    the matching BrainCoordinates object.
    :param xyz_channels: np.array [nchannels, 3] of channel positions in meters
    :param spacing: grid spacing in um
    :param ba: iblatlas.atlas.BrainAtlas instance
    :return: volume of counts, atlas.BrainCoordinates object
    """
    if ba is None:
        ba = atlas.AllenAtlas()

    def _get_scale_and_indices(v, bin, lim):
        _lim = [np.min(lim), np.max(lim)]
        # if bin is a nonzero scalar, this is a bin size: create scale and indices
        if np.isscalar(bin) and bin != 0:
            scale = np.arange(_lim[0], _lim[1] + bin / 2, bin)
            ind = (np.floor((v - _lim[0]) / bin)).astype(np.int64)
            if lim[0] > lim[1]:
                scale = scale[::-1]
                ind = (scale.size - 1) - ind
        # if bin == 0, aggregate over unique values
        else:
            scale, ind = np.unique(v, return_inverse=True)
        return scale, ind

    xlim = ba.bc.xlim
    ylim = ba.bc.ylim
    zlim = ba.bc.zlim

    xscale, xind = _get_scale_and_indices(xyz_channels[:, 0], spacing / 1e6, xlim)
    yscale, yind = _get_scale_and_indices(xyz_channels[:, 1], spacing / 1e6, ylim)
    zscale, zind = _get_scale_and_indices(xyz_channels[:, 2], spacing / 1e6, zlim)

    # aggregate by using bincount on absolute indices for a 2d array
    nx, ny, nz = [xscale.size, yscale.size, zscale.size]
    ind2d = np.ravel_multi_index(np.c_[yind, xind, zind].transpose(), dims=(ny, nx, nz))
    r = np.bincount(ind2d, minlength=nx * ny * nz).reshape(ny, nx, nz)

    # Make things analogous to AllenAtlas
    dxyz = spacing / 1e6 * np.array([1, -1, -1])
    dims2xyz = np.array([1, 0, 2])
    nxyz = np.array(r.shape)[dims2xyz]
    iorigin = (atlas.ALLEN_CCF_LANDMARKS_MLAPDV_UM['bregma'] / spacing)

    bc = atlas.BrainCoordinates(nxyz=nxyz, xyz0=(0, 0, 0), dxyz=dxyz)
    bc = atlas.BrainCoordinates(nxyz=nxyz, xyz0=- bc.i2xyz(iorigin), dxyz=dxyz)

    return r, bc
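

# Illustrative sketch (not part of the original module): bin three synthetic channel positions
# into a 500 um grid; downloads/loads the default Allen atlas on first use.
def _example_coverage_grid():
    xyz_channels = np.array([[0., 0., -0.002], [0.0001, 0., -0.002], [0.002, 0.001, -0.003]])
    r, bc = coverage_grid(xyz_channels, spacing=500)
    assert r.sum() == 3  # every channel falls into exactly one voxel of the count volume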