Coverage for ibllib/oneibl/registration.py: 88%

253 statements  

coverage.py v7.5.4, created at 2024-07-08 17:16 +0100

1from pathlib import Path 

2import json 

3import datetime 

4import logging 

5import itertools 

6 

7from packaging import version 

8from requests import HTTPError 

9 

10from one.alf.files import get_session_path, folder_parts, get_alf_path 

11from one.registration import RegistrationClient, get_dataset_type 

12from one.remote.globus import get_local_endpoint_id, get_lab_from_endpoint_id 

13from one.webclient import AlyxClient, no_cache 

14from one.converters import ConversionMixin 

15import one.alf.exceptions as alferr 

16from one.util import datasets2records, ensure_list 

17from one.api import ONE 

18 

19import ibllib 

20import ibllib.io.extractors.base 

21from ibllib.time import isostr2date 

22import ibllib.io.raw_data_loaders as raw 

23from ibllib.io import session_params 

24 

25_logger = logging.getLogger(__name__) 

26EXCLUDED_EXTENSIONS = ['.flag', '.error', '.avi'] 

27REGISTRATION_GLOB_PATTERNS = ['alf/**/*.*', 

28 'raw_behavior_data/**/_iblrig_*.*', 

29 'raw_task_data_*/**/_iblrig_*.*', 

30 'raw_passive_data/**/_iblrig_*.*', 

31 'raw_behavior_data/**/_iblmic_*.*', 

32 'raw_video_data/**/_iblrig_*.*', 

33 'raw_video_data/**/_ibl_*.*', 

34 'raw_ephys_data/**/_iblrig_*.*', 

35 'raw_ephys_data/**/_spikeglx_*.*', 

36 'raw_ephys_data/**/_iblqc_*.*', 

37 'spikesorters/**/_kilosort_*.*',

39 'raw_widefield_data/**/_ibl_*.*', 

40 'raw_photometry_data/**/_neurophotometrics_*.*', 

41 ] 
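
These patterns are expanded relative to the session folder when searching for registrable
files (see IBLRegistrationClient.find_files below). A minimal sketch of that expansion,
using a hypothetical local session path:

>>> from pathlib import Path
>>> import itertools
>>> session_path = Path('/data/Subjects/KS023/2019-12-10/001')  # hypothetical
>>> candidates = itertools.chain.from_iterable(
...     session_path.glob(pattern) for pattern in REGISTRATION_GLOB_PATTERNS)
>>> files = [f for f in candidates if f.suffix not in EXCLUDED_EXTENSIONS]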

42 

43 

44def register_dataset(file_list, one=None, exists=False, versions=None, **kwargs): 

45 """ 

46 Registers a set of files belonging to a session only on the server. 

47 

48 Parameters 

49 ---------- 

50 file_list : list, str, pathlib.Path 

51 A filepath (or list thereof) of ALF datasets to register to Alyx. 

52 one : one.api.OneAlyx 

53 An instance of ONE. 

54 exists : bool 

55 Whether files exist in the repository. May be set to False when registering files 

56 before copying to the repository. 

57 versions : str, list of str 

58 Optional version tags, defaults to the current ibllib version. 

59 kwargs 

60 Optional keyword arguments for one.registration.RegistrationClient.register_files. 

61 

62 Returns 

63 ------- 

64 list of dicts, dict 

65 A list of newly created Alyx dataset records or the registration data if dry. 

66 

67 Notes 

68 ----- 

69 - If a repository is passed, server_only will be set to True. 

70 

71 See Also 

72 -------- 

73 one.registration.RegistrationClient.register_files 

74 """ 

75 if not file_list: 1lkiba

76 return 1iba

77 elif isinstance(file_list, (str, Path)): 1lkiba

78 file_list = [file_list] 1k

79 

80 assert len(set(get_session_path(f) for f in file_list)) == 1 1lkiba

81 assert all(Path(f).exists() for f in file_list) 1lkiba

82 

83 client = IBLRegistrationClient(one) 1lkiba

84 

85 # Check for protected datasets 

86 def _get_protected(pr_status): 1lkiba

87 if isinstance(pr_status, list): 1lkiba

88 pr = any(d['status_code'] == 403 for d in pr_status) 1lkiba

89 else: 

90 pr = pr_status['status_code'] == 403 

91 

92 return pr 1lkiba

93 

94 # Account for cases where we are connected to cortex lab database 

95 if one.alyx.base_url == 'https://alyx.cortexlab.net': 1lkiba

96 try: 

97 protected_status = IBLRegistrationClient( 

98 ONE(base_url='https://alyx.internationalbrainlab.org', mode='remote')).check_protected_files(file_list) 

99 protected = _get_protected(protected_status) 

100 except HTTPError as err: 

101 if "[Errno 500] /check-protected: 'A base session for" in str(err): 

102 # If we get an error due to the session not existing, we take this to mean no datasets are protected 

103 protected = False 

104 else: 

105 raise err 

106 else: 

107 protected_status = client.check_protected_files(file_list) 1lkiba

108 protected = _get_protected(protected_status) 1lkiba

109 

110 # If we find a protected dataset, and we don't have a force=True flag, raise an error 

111 if protected and not kwargs.pop('force', False): 1lkiba

112 raise FileExistsError('Protected datasets were found in the file list. To force the registration of datasets ' 1k

113 'add the force=True argument.') 

114 

115 # If the repository is specified then for the registration client we want server_only=True to 

116 # make sure we don't make any other repositories for the lab 

117 if kwargs.get('repository') and not kwargs.get('server_only', False): 1lkiba

118 kwargs['server_only'] = True 

119 

120 return client.register_files(file_list, versions=versions or ibllib.__version__, exists=exists, **kwargs) 1lkiba
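
For illustration, a minimal call registering two ALF files for a single session. This is a
sketch only: it assumes a configured ONE instance and that the (hypothetical) files below
exist locally and belong to the same session.

>>> from one.api import ONE
>>> one = ONE(base_url='https://alyx.internationalbrainlab.org')
>>> files = ['/data/Subjects/KS023/2019-12-10/001/alf/_ibl_trials.table.pqt',
...          '/data/Subjects/KS023/2019-12-10/001/alf/_ibl_wheel.position.npy']
>>> records = register_dataset(files, one=one)  # add force=True to register protected datasets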

121 

122 

123def register_session_raw_data(session_path, one=None, overwrite=False, **kwargs): 

124 """ 

125 Registers all raw data files for a session to Alyx. Only files that match the Alyx 

126 registration patterns are selected. 

127 

128 Parameters 

129 ---------- 

130 session_path : str, pathlib.Path 

131 The local session path. 

132 one : one.api.OneAlyx 

133 An instance of ONE. 

134 overwrite : bool 

135 If set to True, the datasets are patched on Alyx, which may take a long time. If set to 

136 False (default), datasets that are already registered are skipped. 

137 **kwargs 

138 Optional keyword arguments for one.registration.RegistrationClient.register_files. 

139 

140 Returns 

141 ------- 

142 list of pathlib.Path 

143 A list of raw dataset paths. 

144 list of dicts, dict 

145 A list of newly created Alyx dataset records or the registration data if dry. 

146 """ 

147 # Clear rest cache to make sure we have the latest entries 

148 one.alyx.clear_rest_cache() 1h

149 client = IBLRegistrationClient(one) 1h

150 session_path = Path(session_path) 1h

151 eid = one.path2eid(session_path, query_type='remote') # needs to make sure we're up to date 1h

152 if not eid: 1h

153 raise alferr.ALFError(f'Session does not exist on Alyx: {get_alf_path(session_path)}') 1h

154 # find all files that are in a raw data collection 

155 file_list = [f for f in client.find_files(session_path) 1h

156 if f.relative_to(session_path).as_posix().startswith('raw')] 

157 # unless overwrite is True, filter out the datasets that already exist 

158 if not overwrite: 1h

159 # query the database for existing datasets on the session and allowed dataset types 

160 dsets = datasets2records(one.alyx.rest('datasets', 'list', session=eid, no_cache=True)) 1h

161 already_registered = list(map(session_path.joinpath, dsets['rel_path'])) 1h

162 file_list = list(filter(lambda f: f not in already_registered, file_list)) 1h

163 

164 kwargs['repository'] = get_local_data_repository(one.alyx) 1h

165 kwargs['server_only'] = True 1h

166 

167 response = client.register_files(file_list, versions=ibllib.__version__, exists=False, **kwargs) 1h

168 return file_list, response 1h
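
A usage sketch, assuming a configured ONE instance, a session that already exists on Alyx
and a hypothetical local session path:

>>> from one.api import ONE
>>> one = ONE()
>>> files, records = register_session_raw_data(
...     '/data/Subjects/KS023/2019-12-10/001', one=one, overwrite=False)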

169 

170 

171class IBLRegistrationClient(RegistrationClient): 

172 """ 

173 Object that keeps the ONE instance and provides methods to create sessions and register data. 

174 """ 

175 

176 def register_session(self, ses_path, file_list=True, projects=None, procedures=None, register_reward=True): 

177 """ 

178 Register an IBL Bpod session in Alyx. 

179 

180 Parameters 

181 ---------- 

182 ses_path : str, pathlib.Path 

183 The local session path. 

184 file_list : bool, list 

185 An optional list of file paths to register. If True, all valid files within the 

186 session folder are registered. If False, no files are registered. 

187 projects : str, list 

188 The project(s) to which the experiment belongs (optional). 

189 procedures : str, list 

190 An optional list of procedures, e.g. 'Behavior training/tasks'. 

191 register_reward : bool 

192 If true, register all water administrations in the settings files, provided none are 

193 already present for this session. 

194 

195 Returns 

196 ------- 

197 dict 

198 An Alyx session record. 

199 list of dict, None 

200 Alyx file records (or None if file_list is False). 

201 

202 Notes 

203 ----- 

204 For a list of available projects: 

205 >>> sorted(proj['name'] for proj in one.alyx.rest('projects', 'list')) 

206 For a list of available procedures: 

207 >>> sorted(proc['name'] for proc in one.alyx.rest('procedures', 'list')) 

208 """ 

209 if isinstance(ses_path, str): 1fecbda

210 ses_path = Path(ses_path) 1c

211 

212 # Read in the experiment description file if it exists and get projects and procedures from here 

213 experiment_description_file = session_params.read_params(ses_path) 1fecbda

214 _, subject, date, number, *_ = folder_parts(ses_path) 1fecbda

215 if experiment_description_file is None: 1fecbda

216 collections = ['raw_behavior_data'] 1fca

217 else: 

218 # Combine input projects/procedures with those in experiment description 

219 projects = list({*experiment_description_file.get('projects', []), *(projects or [])}) 1ebd

220 procedures = list({*experiment_description_file.get('procedures', []), *(procedures or [])}) 1ebd

221 collections = session_params.get_task_collection(experiment_description_file) 1ebd

222 

223 # query Alyx endpoints for subject, error if not found 

224 subject = self.assert_exists(subject, 'subjects') 1fecbda

225 

226 # look for a session from the same subject, same number on the same day 

227 with no_cache(self.one.alyx): 1fecbda

228 session_id, session = self.one.search(subject=subject['nickname'], 1fecbda

229 date_range=date, 

230 number=number, 

231 details=True, query_type='remote') 

232 if collections is None: # No task data 1fecbda

233 assert len(session) != 0, 'no session on Alyx and no tasks in experiment description' 

234 # Fetch the full session JSON and assert that some basic information is present. 

235 # Basically refuse to extract the data if key information is missing 

236 session_details = self.one.alyx.rest('sessions', 'read', id=session_id[0], no_cache=True) 

237 required = ('location', 'start_time', 'lab', 'users') 

238 missing = [k for k in required if not session_details[k]] 

239 assert not any(missing), 'missing session information: ' + ', '.join(missing) 

240 task_protocols = task_data = settings = [] 

241 json_field = None 

242 users = session_details['users'] 

243 else: # Get session info from task data 

244 collections = ensure_list(collections) 1fecbda

245 # read meta data from the rig for the session from the task settings file 

246 task_data = (raw.load_bpod(ses_path, collection) for collection in sorted(collections)) 1fecbda

247 # Filter collections where settings file was not found 

248 if not (task_data := list(zip(*filter(lambda x: x[0] is not None, task_data)))): 1fecbda

249 raise ValueError(f'_iblrig_taskSettings.raw.json not found in {ses_path}. Abort.') 1f

250 settings, task_data = task_data 1fecbda

251 if len(settings) != len(collections): 1fecbda

252 raise ValueError(f'_iblrig_taskSettings.raw.json missing for one or more task collections in {ses_path}. Abort.') 

253 

254 # Do some validation 

255 assert len({x['SUBJECT_NAME'] for x in settings}) == 1 and settings[0]['SUBJECT_NAME'] == subject['nickname'] 1fecbda

256 assert len({x['SESSION_DATE'] for x in settings}) == 1 and settings[0]['SESSION_DATE'] == date 1fecbda

257 assert len({x['SESSION_NUMBER'] for x in settings}) == 1 and settings[0]['SESSION_NUMBER'] == number 1fecbda

258 assert len({x['IS_MOCK'] for x in settings}) == 1 1fecbda

259 assert len({md['PYBPOD_BOARD'] for md in settings}) == 1 1fecbda

260 assert len({md.get('IBLRIG_VERSION') for md in settings}) == 1 1fecbda

261 # assert len({md['IBLRIG_VERSION_TAG'] for md in settings}) == 1 

262 

263 users = [] 1fecbda

264 for user in filter(lambda x: x and x[1], map(lambda x: x.get('PYBPOD_CREATOR'), settings)): 1fecbda

265 user = self.assert_exists(user[0], 'users') # user is list of [username, uuid] 1fecbda

266 users.append(user['username']) 1fecbda

267 

268 # extract information about session duration and performance 

269 start_time, end_time = _get_session_times(str(ses_path), settings, task_data) 1fecbda

270 n_trials, n_correct_trials = _get_session_performance(settings, task_data) 1fecbda

271 

272 # TODO Add task_protocols to Alyx sessions endpoint 

273 task_protocols = [md['PYBPOD_PROTOCOL'] + md['IBLRIG_VERSION'] for md in settings] 1fecbda

274 # unless specified label the session projects with subject projects 

275 projects = subject['projects'] if projects is None else projects 1fecbda

276 # makes sure projects is a list 

277 projects = [projects] if isinstance(projects, str) else projects 1fecbda

278 

279 # unless specified label the session procedures with task protocol lookup 

280 procedures = procedures or list(set(filter(None, map(self._alyx_procedure_from_task, task_protocols)))) 1fecbda

281 procedures = [procedures] if isinstance(procedures, str) else procedures 1fecbda

282 json_fields_names = ['IS_MOCK', 'IBLRIG_VERSION'] 1fecbda

283 json_field = {k: settings[0].get(k) for k in json_fields_names} 1fecbda

284 # The poo count field is only updated if the field is defined in at least one of the settings 

285 poo_counts = [md.get('POOP_COUNT') for md in settings if md.get('POOP_COUNT') is not None] 1fecbda

286 if poo_counts: 1fecbda

287 json_field['POOP_COUNT'] = int(sum(poo_counts)) 1ebda

288 

289 if not len(session): # Create session and weighings 1fecbda

290 ses_ = {'subject': subject['nickname'], 1fecda

291 'users': users or [subject['responsible_user']], 

292 'location': settings[0]['PYBPOD_BOARD'], 

293 'procedures': procedures, 

294 'lab': subject['lab'], 

295 'projects': projects, 

296 'type': 'Experiment', 

297 'task_protocol': '/'.join(task_protocols), 

298 'number': number, 

299 'start_time': self.ensure_ISO8601(start_time), 

300 'end_time': self.ensure_ISO8601(end_time) if end_time else None, 

301 'n_correct_trials': n_correct_trials, 

302 'n_trials': n_trials, 

303 'json': json_field 

304 } 

305 session = self.one.alyx.rest('sessions', 'create', data=ses_) 1fecda

306 # Submit weights 

307 for md in filter(lambda md: md.get('SUBJECT_WEIGHT') is not None, settings): 1fecda

308 user = md.get('PYBPOD_CREATOR') 1fecda

309 if isinstance(user, list): 1fecda

310 user = user[0] 1fecda

311 if user not in users: 1fecda

312 user = self.one.alyx.user 

313 self.register_weight(subject['nickname'], md['SUBJECT_WEIGHT'], 1fecda

314 date_time=md['SESSION_DATETIME'], user=user) 

315 else: # if session exists update a few key fields 

316 data = {'procedures': procedures, 'projects': projects, 1cb

317 'n_correct_trials': n_correct_trials, 'n_trials': n_trials} 

318 if task_protocols: 1cb

319 data['task_protocol'] = '/'.join(task_protocols) 1cb

320 if end_time: 1cb

321 data['end_time'] = self.ensure_ISO8601(end_time) 1cb

322 

323 session = self.one.alyx.rest('sessions', 'partial_update', id=session_id[0], data=data) 1cb

324 if json_field: 1cb

325 session['json'] = self.one.alyx.json_field_update('sessions', session['id'], data=json_field) 1cb

326 

327 _logger.info(session['url'] + ' ') 1fecbda

328 # create associated water administration if not found 

329 if register_reward and not session['wateradmin_session_related'] and any(task_data): 1fecbda

330 for md, d in filter(all, zip(settings, task_data)): 1bda

331 _, _end_time = _get_session_times(ses_path, md, d) 1bda

332 user = md.get('PYBPOD_CREATOR') 1bda

333 user = user[0] if user[0] in users else self.one.alyx.user 1bda

334 volume = d[-1].get('water_delivered', sum(x['reward_amount'] for x in d)) / 1000 1bda

335 if volume > 0: 1bda

336 self.register_water_administration( 1bda

337 subject['nickname'], volume, date_time=_end_time or end_time, user=user, 

338 session=session['id'], water_type=md.get('REWARD_TYPE') or 'Water') 

339 # at this point the session has been created. If create only, exit 

340 if not file_list: 1fecbda

341 return session, None 1fbda

342 

343 # register all files that match the Alyx patterns and file_list 

344 if any(settings): 1ec

345 rename_files_compatibility(ses_path, settings[0]['IBLRIG_VERSION']) 1ec

346 F = filter(lambda x: self._register_bool(x.name, file_list), self.find_files(ses_path)) 1ec

347 recs = self.register_files(F, created_by=users[0] if users else None, versions=ibllib.__version__) 1ec

348 return session, recs 1ec
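
A usage sketch of this method, assuming a configured ONE instance; the session path,
project and procedure names below are examples only:

>>> from one.api import ONE
>>> client = IBLRegistrationClient(ONE())
>>> session, records = client.register_session(
...     '/data/Subjects/KS023/2019-12-10/001',
...     projects='ibl_neuropixel_brainwide_01',
...     procedures='Behavior training/tasks')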

349 

350 @staticmethod 

351 def _register_bool(fn, file_list): 

352 if isinstance(file_list, bool): 1ec

353 return file_list 1ec

354 if isinstance(file_list, str): 

355 file_list = [file_list] 

356 return any(str(fil) in fn for fil in file_list) 
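
For illustration, a boolean file_list passes or blocks everything, while a string or list
is matched against substrings of the file name:

>>> IBLRegistrationClient._register_bool('_iblrig_taskData.raw.jsonable', True)
True
>>> IBLRegistrationClient._register_bool('_iblrig_taskData.raw.jsonable', ['taskData'])
True
>>> IBLRegistrationClient._register_bool('_iblrig_taskData.raw.jsonable', ['wheel'])
False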

357 

358 @staticmethod 

359 def _alyx_procedure_from_task(task_protocol): 

360 task_type = ibllib.io.extractors.base.get_task_extractor_type(task_protocol) 1fcoa

361 procedure = _alyx_procedure_from_task_type(task_type) 1fcoa

362 return procedure or [] 1fcoa

363 

364 def find_files(self, session_path): 

365 """Similar to base class method but further filters by name and extension. 

366 

367 In addition to finding files that match the Alyx dataset type patterns, this method excludes 

368 files whose extension is in EXCLUDED_EXTENSIONS, as well as files that don't match the 

369 patterns in REGISTRATION_GLOB_PATTERNS. 

370 

371 Parameters 

372 ---------- 

373 session_path : str, pathlib.Path 

374 The session path to search. 

375 

376 Yields 

377 ------ 

378 pathlib.Path 

379 File paths that match the dataset type patterns in Alyx and registration glob patterns. 

380 """ 

381 files = itertools.chain.from_iterable(session_path.glob(x) for x in REGISTRATION_GLOB_PATTERNS) 1ech

382 for file in filter(lambda x: x.suffix not in EXCLUDED_EXTENSIONS, files): 1ech

383 try: 1ech

384 get_dataset_type(file, self.dtypes) 1ech

385 yield file 1ech

386 except ValueError as ex: 

387 _logger.error(ex) 
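
A usage sketch, assuming a configured ONE instance and a hypothetical local session path:

>>> from pathlib import Path
>>> from one.api import ONE
>>> client = IBLRegistrationClient(ONE())
>>> files = list(client.find_files(Path('/data/Subjects/KS023/2019-12-10/001')))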

388 

389 

390def _alyx_procedure_from_task_type(task_type): 

391 lookup = {'biased': 'Behavior training/tasks', 1fcoa

392 'biased_opto': 'Behavior training/tasks', 

393 'habituation': 'Behavior training/tasks', 

394 'training': 'Behavior training/tasks', 

395 'ephys': 'Ephys recording with acute probe(s)', 

396 'ephys_biased_opto': 'Ephys recording with acute probe(s)', 

397 'ephys_passive_opto': 'Ephys recording with acute probe(s)', 

398 'ephys_replay': 'Ephys recording with acute probe(s)', 

399 'ephys_training': 'Ephys recording with acute probe(s)', 

400 'mock_ephys': 'Ephys recording with acute probe(s)', 

401 'sync_ephys': 'Ephys recording with acute probe(s)'} 

402 try: 1fcoa

403 # look if there are tasks in the personal projects repo with procedures 

404 import projects.base 1fcoa

405 custom_tasks = Path(projects.base.__file__).parent.joinpath('task_type_procedures.json') 1fcoa

406 with open(custom_tasks) as fp: 1fcoa

407 lookup.update(json.load(fp)) 1fcoa

408 except (ModuleNotFoundError, FileNotFoundError): 

409 pass 

410 if task_type in lookup: 1fcoa

411 return lookup[task_type] 1fcoa
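
With no personal-projects override installed, the built-in lookup resolves the standard
task types; unknown types fall through and the function returns None:

>>> _alyx_procedure_from_task_type('training')
'Behavior training/tasks'
>>> _alyx_procedure_from_task_type('ephys')
'Ephys recording with acute probe(s)'
>>> _alyx_procedure_from_task_type('unknown_task') is None
True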

412 

413 

414def rename_files_compatibility(ses_path, version_tag): 

415 if not version_tag: 1ec

416 return 

417 if version.parse(version_tag) <= version.parse('3.2.3'): 1ec

418 task_code = ses_path.glob('**/_ibl_trials.iti_duration.npy') 

419 for fn in task_code: 

420 fn.replace(fn.parent.joinpath('_ibl_trials.itiDuration.npy')) 

421 task_code = ses_path.glob('**/_iblrig_taskCodeFiles.raw.zip') 1ec

422 for fn in task_code: 1ec

423 fn.replace(fn.parent.joinpath('_iblrig_codeFiles.raw.zip')) 

424 

425 

426def _get_session_times(fn, md, ses_data): 

427 """ 

428 Get session start and end time from the Bpod data. 

429 

430 Parameters 

431 ---------- 

432 fn : str, pathlib.Path 

433 Session/task identifier. Only used in warning logs. 

434 md : dict, list of dict 

435 A session parameters dictionary or list thereof. 

436 ses_data : dict, list of dict 

437 A session data dictionary or list thereof. 

438 

439 Returns 

440 ------- 

441 datetime.datetime 

442 The datetime of the start of the session. 

443 datetime.datetime 

444 The datetime of the end of the session, or None if ses_data is None. 

445 """ 

446 if isinstance(md, dict): 1fecbdjan

447 start_time = _start_time = isostr2date(md['SESSION_DATETIME']) 1bdjan

448 end_time = isostr2date(md['SESSION_END_TIME']) if md.get('SESSION_END_TIME') else None 1bdjan

449 else: 

450 start_time = isostr2date(md[0]['SESSION_DATETIME']) 1fecbdja

451 _start_time = isostr2date(md[-1]['SESSION_DATETIME']) 1fecbdja

452 end_time = isostr2date(md[-1]['SESSION_END_TIME']) if md[-1].get('SESSION_END_TIME') else None 1fecbdja

453 assert isinstance(ses_data, (list, tuple)) and len(ses_data) == len(md) 1fecbdja

454 assert len(md) == 1 or start_time < _start_time 1fecbdja

455 ses_data = ses_data[-1] 1fecbdja

456 if not ses_data or end_time is not None: 1fecbdjan

457 return start_time, end_time 1fec

458 c = ses_duration_secs = 0 1bdjan

459 for sd in reversed(ses_data): 1bdjan

460 ses_duration_secs = (sd['behavior_data']['Trial end timestamp'] - 1bdjan

461 sd['behavior_data']['Bpod start timestamp']) 

462 if ses_duration_secs < (6 * 3600): 1bdjan

463 break 1bdjan

464 c += 1 1a

465 if c: 1bdjan

466 _logger.warning(('Trial end timestamps of last %i trials above 6 hours ' 1a

467 '(most likely corrupt): %s'), c, str(fn)) 

468 end_time = _start_time + datetime.timedelta(seconds=ses_duration_secs) 1bdjan

469 return start_time, end_time 1bdjan
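
A minimal sketch with a single settings dictionary and one synthetic trial, assuming
SESSION_DATETIME uses the ISO format written by iblrig. With no SESSION_END_TIME in the
settings, the end time is derived from the Bpod timestamps of the last trial:

>>> md = {'SESSION_DATETIME': '2024-07-08T12:00:00.000000'}
>>> data = [{'behavior_data': {'Bpod start timestamp': 0.0, 'Trial end timestamp': 1800.0}}]
>>> _get_session_times('example_session', md, data)
(datetime.datetime(2024, 7, 8, 12, 0), datetime.datetime(2024, 7, 8, 12, 30))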

470 

471 

472def _get_session_performance(md, ses_data): 

473 """ 

474 Get session performance information from the Bpod data. 

475 Note: This does not support custom protocols. 

476 

477 Parameters 

478 ---------- 

479 md : dict, list of dict 

480 A session parameters dictionary or list thereof. 

481 ses_data : dict, list of dict 

482 A session data dictionary or list thereof. 

483 

484 Returns 

485 ------- 

486 int 

487 The total number of trials across protocols. 

488 int 

489 The total number of correct trials across protocols. 

490 """ 

491 

492 if not any(filter(None, ses_data or None)): 1fecbdma

493 return None, None 1fec

494 

495 if isinstance(md, dict): 1bdma

496 ses_data = [ses_data] 1m

497 md = [md] 1m

498 else: 

499 assert isinstance(ses_data, (list, tuple)) and len(ses_data) == len(md) 1bdma

500 

501 n_trials = [] 1bdma

502 n_correct = [] 1bdma

503 for data, settings in filter(all, zip(ses_data, md)): 1bdma

504 # In some protocols trials start from 0, in others, from 1 

505 n = data[-1]['trial_num'] + int(data[0]['trial_num'] == 0) # +1 if starts from 0 1bdma

506 n_trials.append(n) 1bdma

507 # checks that the number of actual trials and labeled number of trials check out 

508 assert len(data) == n, f'{len(data)} trials in data, however last trial number was {n}' 1bdma

509 # task specific logic 

510 if 'habituationChoiceWorld' in settings.get('PYBPOD_PROTOCOL', ''): 1bdma

511 n_correct.append(0) 1m

512 else: 

513 n_correct.append(data[-1].get('ntrials_correct', sum(x['trial_correct'] for x in data))) 1bdma

514 

515 return sum(n_trials), sum(n_correct) 1bdma
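
A minimal sketch with a single protocol and synthetic trial data; the field names follow
what this function reads from the Bpod trial dictionaries:

>>> settings = {'PYBPOD_PROTOCOL': '_iblrig_tasks_trainingChoiceWorld'}
>>> trials = [{'trial_num': 1, 'trial_correct': True},
...           {'trial_num': 2, 'trial_correct': False},
...           {'trial_num': 3, 'trial_correct': True}]
>>> _get_session_performance(settings, trials)
(3, 2)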

516 

517 

518def get_local_data_repository(ac): 

519 """ 

520 Get the name of the local data repository from the local Globus endpoint ID. 

521 

522 Parameters 

523 ---------- 

524 ac : one.webclient.AlyxClient 

525 An AlyxClient instance for querying data repositories. 

526 

527 Returns 

528 ------- 

529 str 

530 The (first) data repository associated with the local Globus endpoint ID. 

531 """ 

532 try: 1gqribha

533 assert ac 1gqribha

534 globus_id = get_local_endpoint_id() 1gqibha

535 except AssertionError: 1r

536 return 1r

537 

538 data_repo = ac.rest('data-repository', 'list', globus_endpoint_id=globus_id) 1gqibha

539 return next((da['name'] for da in data_repo), None) 1gqibha
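
A usage sketch; the returned name depends on which Alyx data repository has this machine's
Globus endpoint ID, so the value below is hypothetical:

>>> from one.webclient import AlyxClient
>>> get_local_data_repository(AlyxClient())  # e.g. 'examplelab_SR'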

540 

541 

542def get_lab(session_path, alyx=None): 

543 """ 

544 Get lab from a session path using the subject name. 

545 

546 On local lab servers, the lab name is not in the ALF path and the globus endpoint ID may be 

547 associated with multiple labs, so lab name is fetched from the subjects endpoint. 

548 

549 Parameters 

550 ---------- 

551 session_path : str, pathlib.Path 

552 The session path from which to determine the lab name. 

553 alyx : one.webclient.AlyxClient 

554 An AlyxClient instance for querying data repositories. 

555 

556 Returns 

557 ------- 

558 str 

559 The lab name associated with the session path subject. 

560 

561 See Also 

562 -------- 

563 one.remote.globus.get_lab_from_endpoint_id 

564 """ 

565 alyx = alyx or AlyxClient() 1pba

566 if not (ref := ConversionMixin.path2ref(session_path)): 1pba

567 raise ValueError(f'Failed to parse session path: {session_path}') 1p

568 

569 labs = [x['lab'] for x in alyx.rest('subjects', 'list', nickname=ref['subject'])] 1pba

570 if len(labs) == 0: 1pba

571 raise alferr.AlyxSubjectNotFound(ref['subject']) 1p

572 elif len(labs) > 1: # More than one subject with this nickname 1pba

573 # use local endpoint ID to find the correct lab 

574 endpoint_labs = get_lab_from_endpoint_id(alyx=alyx) 1p

575 lab = next(x for x in labs if x in endpoint_labs) 1p

576 else: 

577 lab, = labs 1pba

578 

579 return lab 1pba
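
A usage sketch with a hypothetical session path; the returned lab depends on the subject
record in Alyx:

>>> get_lab('/data/Subjects/KS023/2019-12-10/001')  # e.g. 'cortexlab'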