Coverage for ibllib/qc/alignment_qc.py: 67%

230 statements  

coverage.py v7.5.4, created at 2024-07-08 17:16 +0100

import logging

import numpy as np
from pathlib import Path
from one.alf.spec import QC
from datetime import date

from neuropixel import trace_header
import spikeglx

from iblatlas.atlas import AllenAtlas
from ibllib.pipes import histology
from ibllib.pipes.ephys_alignment import EphysAlignment
from ibllib.qc import base
from ibllib.oneibl.patcher import FTPPatcher

_log = logging.getLogger(__name__)
CRITERIA = {"PASS": 0.8}


class AlignmentQC(base.QC):
    """
    Class used to update the extended_qc field of a probe insertion with the results of the
    ephys alignment procedure.
    """
    def __init__(self, probe_id, one=None, brain_atlas=None, channels=True, collection=None):
        super().__init__(probe_id, one=one, log=_log, endpoint='insertions')

        # Data
        self.alignments = None
        self.xyz_picks = None
        self.depths = None
        self.cluster_chns = None
        self.chn_coords = None
        self.align_keys_sorted = None

        # Metrics and passed trials
        self.sim_matrix = None
        self.criteria = CRITERIA

        # Get the brain atlas
        self.brain_atlas = brain_atlas or AllenAtlas(25)
        # Flag for uploading channels to alyx. For testing purposes
        self.channels_flag = channels

        self.insertion = self.one.alyx.get(f'/insertions/{self.eid}', clobber=True)
        self.resolved = (self.insertion.get('json', {'temp': 0}).get('extended_qc').
                         get('alignment_resolved', False))

        self.probe_collection = collection

    def load_data(self, prev_alignments=None, xyz_picks=None, depths=None, cluster_chns=None,
                  chn_coords=None):
        """
        Load the data required to assess the alignment qc and compute the similarity matrix. If
        no arguments are given, load_data will fetch all the relevant data required.
        """
        if not np.any(prev_alignments):
            aligned_traj = self.one.alyx.get(f'/trajectories?&probe_insertion={self.eid}'
                                             '&provenance=Ephys aligned histology track',
                                             clobber=True)
            if len(aligned_traj) > 0:
                self.alignments = aligned_traj[0].get('json', {})
            else:
                self.alignments = {}
                return
        else:
            self.alignments = prev_alignments

        align_keys = [*self.alignments.keys()]
        self.align_keys_sorted = sorted(align_keys, reverse=True)

        if not np.any(xyz_picks):
            self.xyz_picks = np.array(self.insertion['json']['xyz_picks']) / 1e6
        else:
            self.xyz_picks = xyz_picks

        if len(self.alignments) < 2:
            return

        if not self.probe_collection:
            all_collections = self.one.list_collections(self.insertion['session'])
            if f'alf/{self.insertion["name"]}/pykilosort' in all_collections:
                self.probe_collection = f'alf/{self.insertion["name"]}/pykilosort'
            else:
                self.probe_collection = f'alf/{self.insertion["name"]}'

        if not np.any(chn_coords):
            self.chn_coords = self.one.load_dataset(self.insertion['session'],
                                                    'channels.localCoordinates.npy',
                                                    collection=self.probe_collection)
        else:
            self.chn_coords = chn_coords

        if not np.any(depths):
            self.depths = self.chn_coords[:, 1]
        else:
            self.depths = depths

        if not np.any(cluster_chns):
            self.cluster_chns = self.one.load_dataset(self.insertion['session'],
                                                      'clusters.channels.npy',
                                                      collection=self.probe_collection)
        else:
            self.cluster_chns = cluster_chns
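
For orientation, the alignments JSON handled by load_data maps an alignment key (typically an ISO timestamp plus the aligner's name) to a [feature, track] pair of reference-line depths, optionally followed by an experimenter evaluation string (used by update_experimenter_evaluation further down). A minimal sketch of pre-loading data without querying Alyx; the key names, depths and placeholder UUID below are made up for illustration:

# Hypothetical previous-alignment dictionary: '<ISO timestamp>_<user>' -> [feature, track]
# (an optional third element, e.g. 'PASS: ...', holds the experimenter's evaluation).
prev_alignments = {
    '2024-01-03T09:30:00_user_b': [[-2e-4, 0.0, 2e-3], [-1.9e-4, 0.0, 2.05e-3], 'PASS: good'],
    '2024-01-02T10:00:00_user_a': [[-2e-4, 0.0, 2e-3], [-1.8e-4, 0.0, 2.1e-3]],
}
# With a configured ONE instance and a probe insertion UUID (both placeholders), the remaining
# arrays could then be supplied directly, e.g.:
#   qc = AlignmentQC('<probe_insertion_uuid>', one=one)
#   qc.load_data(prev_alignments=prev_alignments, xyz_picks=xyz_picks, depths=depths,
#                cluster_chns=cluster_chns, chn_coords=chn_coords)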

    def compute(self):
        """
        Compute the similarity matrix if there are two or more alignments. If no data is loaded,
        this wraps around load_data to get all the relevant data needed.
        """

        if self.alignments is None:
            self.load_data()

        if len(self.alignments) < 2:
            self.log.info(f"Insertion {self.eid}: One or fewer alignments found...")
            self.sim_matrix = np.array([len(self.alignments)])
        else:
            self.log.info(f"Insertion {self.eid}: Running QC on alignment data...")
            self.sim_matrix = self.compute_similarity_matrix()

        return self.sim_matrix

    def run(self, update=True, upload_alyx=True, upload_flatiron=True):
        """
        Compute the alignment qc for a specified probe insertion and update the extended_qc field
        in Alyx. If the alignment is resolved and the upload flags are set to True, the channels
        from the resolved alignment are updated on Alyx and the datasets are sent to
        ibl-ftp-patcher to be uploaded to FlatIron.
        """
        if self.sim_matrix is None:
            self.compute()

        # Case where the alignment has already been resolved
        if self.resolved:
            self.log.info(f"Alignment for insertion {self.eid} already resolved, channels won't be"
                          f" updated. To force an update of the channels use the "
                          f"resolve_manual method with force=True")
            results = {'alignment_count': len(self.alignments)}
            if update:
                self.update_extended_qc(results)
            results.update({'alignment_resolved': True})

        # Case where no alignments have been made
        elif np.all(self.sim_matrix == 0) and self.sim_matrix.shape[0] == 1:
            # We don't update the database
            results = {'alignment_resolved': False}

        # Case where there is only one alignment
        elif np.all(self.sim_matrix == 1) and self.sim_matrix.shape[0] == 1:
            results = {'alignment_count': len(self.alignments),
                       'alignment_stored': self.align_keys_sorted[0],
                       'alignment_resolved': False}
            if update:
                self.update_extended_qc(results)

        # Case where there are 2 or more alignments and the alignment hasn't been resolved
        else:
            results = self.compute_alignment_status()

            if update:
                self.update_extended_qc(results)

            if results['alignment_resolved'] and np.bitwise_or(upload_alyx, upload_flatiron):
                self.upload_channels(results['alignment_stored'], upload_alyx, upload_flatiron)

        return results

    def resolve_manual(self, align_key, update=True, upload_alyx=True, upload_flatiron=False,
                       force=False):
        """
        Manually resolve the alignment of a probe insertion with a given alignment, regardless of
        the number of alignments or the alignment qc value. Channels from the specified alignment
        are uploaded to Alyx and the datasets are sent to ibl-ftp-patcher to be uploaded to
        FlatIron. If the alignment has already been resolved, the channels are only overwritten
        when the force flag is set to True.
        """

        if self.sim_matrix is None:
            self.compute()
        assert align_key in self.align_keys_sorted, 'align key not recognised'

        if self.resolved == 1 and not force:
            self.log.info(f"Alignment for insertion {self.eid} already resolved, channels won't be"
                          f" updated. To overwrite stored channels with alignment {align_key} "
                          f"set 'force=True'")
            file_paths = []
        else:
            if self.sim_matrix.shape[0] > 1:
                results = self.compute_alignment_status()
            else:
                results = {'alignment_count': self.sim_matrix.shape[0]}

            results['alignment_resolved'] = True
            results['alignment_stored'] = align_key
            results['alignment_resolved_by'] = 'experimenter'
            results['alignment_resolved_date'] = date.today().isoformat()

            if update:
                self.update_extended_qc(results)
            file_paths = []

            if upload_alyx or upload_flatiron:
                file_paths = self.upload_channels(align_key, upload_alyx, upload_flatiron)

        return file_paths

    def compute_similarity_matrix(self):
        """
        Computes the similarity matrix between each alignment stored in the ephys aligned
        trajectory. Similarity matrix based on number of clusters that share brain region and
        parent brain region
        """

        r = self.brain_atlas.regions

        clusters = dict()
        for iK, key in enumerate(self.align_keys_sorted):
            # Location of reference lines used for alignment
            feature = np.array(self.alignments[key][0])
            track = np.array(self.alignments[key][1])

            # Instantiate EphysAlignment object
            ephysalign = EphysAlignment(self.xyz_picks, self.depths, track_prev=track,
                                        feature_prev=feature,
                                        brain_atlas=self.brain_atlas)

            # Find xyz location of all channels
            xyz_channels = ephysalign.get_channel_locations(feature, track)
            brain_regions = ephysalign.get_brain_locations(xyz_channels)

            # Find the location of clusters along the alignment
            cluster_info = dict()
            cluster_info['brain_id'] = brain_regions['id'][self.cluster_chns]
            cluster_info['parent_id'] = r.get(ids=cluster_info['brain_id']).parent.astype(int)
            clusters.update({key: cluster_info})

        sim_matrix = np.zeros((len(self.align_keys_sorted), len(self.align_keys_sorted)))

        for ik, key in enumerate(self.align_keys_sorted):
            for ikk, key2 in enumerate(self.align_keys_sorted):
                same_id = np.where(clusters[key]['brain_id'] == clusters[key2]['brain_id'])[0]
                not_same_id = \
                    np.where(clusters[key]['brain_id'] != clusters[key2]['brain_id'])[0]
                same_parent = np.where(clusters[key]['parent_id'][not_same_id] ==
                                       clusters[key2]['parent_id'][not_same_id])[0]
                sim_matrix[ik, ikk] = len(same_id) + (len(same_parent) * 0.5)
        # Normalise
        sim_matrix_norm = sim_matrix / np.max(sim_matrix)

        return sim_matrix_norm
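
A minimal, self-contained sketch of the pairwise score computed above, with made-up region assignments for two alignments: each cluster that lands on the same region ID contributes 1, each cluster that only shares the parent region contributes 0.5, and the matrix is then normalised by its maximum, so the diagonal becomes 1:

import numpy as np

# Made-up cluster region and parent-region IDs for two hypothetical alignments
brain_id = {'a': np.array([10, 10, 20, 30]), 'b': np.array([10, 11, 20, 40])}
parent_id = {'a': np.array([1, 1, 2, 3]), 'b': np.array([1, 1, 2, 4])}

keys = list(brain_id)
sim = np.zeros((len(keys), len(keys)))
for i, k1 in enumerate(keys):
    for j, k2 in enumerate(keys):
        same = brain_id[k1] == brain_id[k2]                      # clusters in the same region
        same_parent = ~same & (parent_id[k1] == parent_id[k2])   # same parent region only
        sim[i, j] = same.sum() + 0.5 * same_parent.sum()

print(sim / sim.max())   # diagonal = 1, off-diagonal = (2 + 0.5) / 4 = 0.625 here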

    def compute_alignment_status(self):
        """
        Determine whether the alignments agree based on the values in the similarity matrix. If
        any pair of alignments has a similarity above 0.8 (CRITERIA['PASS']), the alignment is set
        to resolved.
        """
        # Set diagonals to zero so we don't use those to find max
        np.fill_diagonal(self.sim_matrix, 0)

        max_sim = np.max(self.sim_matrix)

        results = {'alignment_qc': max_sim,
                   'alignment_count': self.sim_matrix.shape[0]}

        if max_sim > CRITERIA['PASS']:
            location = np.where(self.sim_matrix == max_sim)
            results.update({'alignment_stored': self.align_keys_sorted[np.min(location)]})
            results.update({'alignment_resolved': True})
            results.update({'alignment_resolved_by': 'qc'})

        else:
            results.update({'alignment_stored': self.align_keys_sorted[0]})
            results.update({'alignment_resolved': False})

        return results
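
To make the decision above concrete, here is a small sketch on a made-up, already-normalised 3x3 similarity matrix: the diagonal is zeroed, the insertion only resolves when the best off-diagonal agreement strictly exceeds CRITERIA['PASS'], and np.min over the matching locations picks the lowest index, i.e. the most recent alignment, since the keys are sorted in reverse. The keys and values are placeholders:

import numpy as np

keys_sorted = ['2024-01-03T09:30:00_user_b', '2024-01-02T10:00:00_user_a',
               '2023-12-20T15:00:00_user_c']
sim = np.array([[1.00, 0.85, 0.40],
                [0.85, 1.00, 0.50],
                [0.40, 0.50, 1.00]])     # made-up normalised similarities
np.fill_diagonal(sim, 0)
max_sim = np.max(sim)                    # 0.85
if max_sim > 0.8:                        # CRITERIA['PASS']
    stored = keys_sorted[np.min(np.where(sim == max_sim))]
    print(max_sim, stored)               # 0.85 and the most recent key of the agreeing pair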

    def upload_channels(self, alignment_key, upload_alyx, upload_flatiron):
        """
        Upload the channels to Alyx and FlatIron based on the alignment specified by the
        alignment key.
        """

        feature = np.array(self.alignments[alignment_key][0])
        track = np.array(self.alignments[alignment_key][1])

        try:
            meta_dset = self.one.list_datasets(self.insertion['session'], '*ap.meta',
                                               collection=f'raw_ephys_data/{self.insertion["name"]}')

            meta_file = self.one.load_dataset(self.insertion['session'], meta_dset[0].split('/')[-1],
                                              collection=f'raw_ephys_data/{self.insertion["name"]}',
                                              download_only=True)
            geometry = spikeglx.read_geometry(meta_file)
            chns = np.c_[geometry['x'], geometry['y']]
        except Exception as err:
            self.log.warning(f"Could not compute channel locations from meta file, errored with message: {err}. "
                             f"Will use default Neuropixel 1 channels")
            geometry = trace_header(version=1)
            chns = np.c_[geometry['x'], geometry['y']]

        ephysalign = EphysAlignment(self.xyz_picks, chns[:, 1],
                                    track_prev=track,
                                    feature_prev=feature,
                                    brain_atlas=self.brain_atlas)
        channels_mlapdv = np.int32(ephysalign.get_channel_locations(feature, track) * 1e6)
        channels_atlas_id = ephysalign.get_brain_locations(channels_mlapdv / 1e6)['id']

        # The channels stored on Alyx also need updating, as the stored key is not the same as
        # the latest key
        if upload_alyx:
            if alignment_key != self.align_keys_sorted[0]:
                histology.register_aligned_track(self.eid, channels_mlapdv / 1e6,
                                                 chn_coords=chns, one=self.one,
                                                 overwrite=True, channels=self.channels_flag,
                                                 brain_atlas=self.brain_atlas)

                ephys_traj = self.one.alyx.get(f'/trajectories?&probe_insertion={self.eid}'
                                               '&provenance=Ephys aligned histology track',
                                               clobber=True)
                patch_dict = {'probe_insertion': self.eid, 'json': self.alignments}
                self.one.alyx.rest('trajectories', 'partial_update', id=ephys_traj[0]['id'],
                                   data=patch_dict)

        files_to_register = []
        if upload_flatiron:
            ftp_patcher = FTPPatcher(one=self.one)

            alf_path = self.one.eid2path(self.insertion['session']).joinpath('alf', self.insertion["name"])
            alf_path.mkdir(exist_ok=True, parents=True)

            f_name = alf_path.joinpath('electrodeSites.mlapdv.npy')
            np.save(f_name, channels_mlapdv)
            files_to_register.append(f_name)

            f_name = alf_path.joinpath('electrodeSites.brainLocationIds_ccf_2017.npy')
            np.save(f_name, channels_atlas_id)
            files_to_register.append(f_name)

            f_name = alf_path.joinpath('electrodeSites.localCoordinates.npy')
            np.save(f_name, chns)
            files_to_register.append(f_name)

            probe_collections = self.one.list_collections(self.insertion['session'], filename='channels*',
                                                          collection=f'alf/{self.insertion["name"]}*')

            for collection in probe_collections:
                chns = self.one.load_dataset(self.insertion['session'], 'channels.localCoordinates',
                                             collection=collection)
                ephysalign = EphysAlignment(self.xyz_picks, chns[:, 1],
                                            track_prev=track,
                                            feature_prev=feature,
                                            brain_atlas=self.brain_atlas)
                channels_mlapdv = np.int32(ephysalign.get_channel_locations(feature, track) * 1e6)
                channels_atlas_id = ephysalign.get_brain_locations(channels_mlapdv / 1e6)['id']

                alf_path = self.one.eid2path(self.insertion['session']).joinpath(collection)
                alf_path.mkdir(exist_ok=True, parents=True)

                f_name = alf_path.joinpath('channels.mlapdv.npy')
                np.save(f_name, channels_mlapdv)
                files_to_register.append(f_name)

                f_name = alf_path.joinpath('channels.brainLocationIds_ccf_2017.npy')
                np.save(f_name, channels_atlas_id)
                files_to_register.append(f_name)

            self.log.info("Writing datasets to FlatIron")
            ftp_patcher.create_dataset(path=files_to_register,
                                       created_by=self.one.alyx.user)

        return files_to_register

    def update_experimenter_evaluation(self, prev_alignments=None, override=False):
        """
        Update the qc field of the probe insertion based on the experimenter evaluation strings
        stored alongside the alignments.
        """
        if not np.any(prev_alignments) and not np.any(self.alignments):
            aligned_traj = self.one.alyx.get(f'/trajectories?&probe_insertion={self.eid}'
                                             '&provenance=Ephys aligned histology track',
                                             clobber=True)
            if len(aligned_traj) > 0:
                self.alignments = aligned_traj[0].get('json', {})
            else:
                self.alignments = {}
                return
        else:
            self.alignments = prev_alignments

        outcomes = [align[2].split(':')[0] for key, align in self.alignments.items()
                    if len(align) == 3]
        if len(outcomes) > 0:
            vals = list(map(QC.validate, outcomes))
            max_qc = np.argmax(vals)
            outcome = outcomes[max_qc]
            self.update(outcome, namespace='experimenter', override=override)
        else:
            self.log.warning(f'No experimenter qc found, qc field of probe insertion {self.eid} '
                             f'will not be updated')
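
A hedged end-to-end sketch of how this class is typically driven; the ONE setup and the probe insertion UUID below are placeholders, and every upload flag is switched off so that nothing is written to Alyx or FlatIron:

from one.api import ONE
# from ibllib.qc.alignment_qc import AlignmentQC   # when used outside this module

one = ONE()                                          # assumes ONE/Alyx credentials are configured
probe_id = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'    # placeholder probe insertion UUID
align_qc = AlignmentQC(probe_id, one=one)
align_qc.load_data()                                 # fetch alignments, picks and channels from Alyx
results = align_qc.run(update=False, upload_alyx=False, upload_flatiron=False)
print(results)   # e.g. alignment_count, alignment_qc, alignment_stored, alignment_resolved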

def get_aligned_channels(ins, chn_coords, one, ba=None, save_dir=None):
    """
    Compute the mlapdv coordinates and Allen CCF location ids of the channels for the resolved
    alignment of an insertion and optionally save them to disk.
    """
    ba = ba or AllenAtlas(25)
    depths = chn_coords[:, 1]
    xyz = np.array(ins['json']['xyz_picks']) / 1e6
    traj = one.alyx.rest('trajectories', 'list', probe_insertion=ins['id'],
                         provenance='Ephys aligned histology track')[0]
    align_key = ins['json']['extended_qc']['alignment_stored']
    feature = traj['json'][align_key][0]
    track = traj['json'][align_key][1]
    ephysalign = EphysAlignment(xyz, depths, track_prev=track,
                                feature_prev=feature,
                                brain_atlas=ba, speedy=True)
    channels_mlapdv = np.int32(ephysalign.get_channel_locations(feature, track) * 1e6)
    channels_atlas_ids = ephysalign.get_brain_locations(channels_mlapdv / 1e6)['id']

    out_files = []
    if save_dir is not None:
        f_name = Path(save_dir).joinpath('channels.mlapdv.npy')
        np.save(f_name, channels_mlapdv)
        out_files.append(f_name)

        f_name = Path(save_dir).joinpath('channels.brainLocationIds_ccf_2017.npy')
        np.save(f_name, channels_atlas_ids)
        out_files.append(f_name)

    return out_files
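
Finally, a hedged sketch of the module-level helper above: given an insertion record whose alignment has been resolved, it recomputes the channel locations and optionally writes them to disk. The insertion id and output directory are placeholders:

from pathlib import Path
from one.api import ONE
# from ibllib.qc.alignment_qc import get_aligned_channels   # when used outside this module

one = ONE()
ins = one.alyx.rest('insertions', 'read', id='xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx')  # placeholder
chn_coords = one.load_dataset(ins['session'], 'channels.localCoordinates.npy',
                              collection=f'alf/{ins["name"]}')
save_dir = Path('/tmp/aligned_channels')   # placeholder output directory
save_dir.mkdir(parents=True, exist_ok=True)
out_files = get_aligned_channels(ins, chn_coords, one, save_dir=save_dir)
print(out_files)   # paths to channels.mlapdv.npy and channels.brainLocationIds_ccf_2017.npy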