ONE-api 3.0b3__py3-none-any.whl → 3.0b5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ONE_api-3.0b3.dist-info → ONE_api-3.0b5.dist-info}/LICENSE +21 -21
- {ONE_api-3.0b3.dist-info → ONE_api-3.0b5.dist-info}/METADATA +115 -115
- ONE_api-3.0b5.dist-info/RECORD +37 -0
- one/__init__.py +2 -2
- one/alf/__init__.py +1 -1
- one/alf/cache.py +640 -653
- one/alf/exceptions.py +105 -105
- one/alf/io.py +876 -876
- one/alf/path.py +1450 -1450
- one/alf/spec.py +519 -519
- one/api.py +2979 -2973
- one/converters.py +850 -850
- one/params.py +414 -414
- one/registration.py +845 -845
- one/remote/__init__.py +1 -1
- one/remote/aws.py +313 -313
- one/remote/base.py +142 -142
- one/remote/globus.py +1254 -1254
- one/tests/fixtures/params/.caches +6 -6
- one/tests/fixtures/params/.test.alyx.internationalbrainlab.org +8 -8
- one/tests/fixtures/rest_responses/1f187d80fd59677b395fcdb18e68e4401bfa1cc9 +1 -1
- one/tests/fixtures/rest_responses/47893cf67c985e6361cdee009334963f49fb0746 +1 -1
- one/tests/fixtures/rest_responses/535d0e9a1e2c1efbdeba0d673b131e00361a2edb +1 -1
- one/tests/fixtures/rest_responses/6dc96f7e9bcc6ac2e7581489b9580a6cd3f28293 +1 -1
- one/tests/fixtures/rest_responses/db1731fb8df0208944ae85f76718430813a8bf50 +1 -1
- one/tests/fixtures/rest_responses/dcce48259bb929661f60a02a48563f70aa6185b3 +1 -1
- one/tests/fixtures/rest_responses/f530d6022f61cdc9e38cc66beb3cb71f3003c9a1 +1 -1
- one/tests/fixtures/test_dbs.json +14 -14
- one/util.py +524 -524
- one/webclient.py +1368 -1354
- ONE_api-3.0b3.dist-info/RECORD +0 -37
- {ONE_api-3.0b3.dist-info → ONE_api-3.0b5.dist-info}/WHEEL +0 -0
- {ONE_api-3.0b3.dist-info → ONE_api-3.0b5.dist-info}/top_level.txt +0 -0
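Before reading the per-file changes, it can help to confirm which of the two builds is installed in your environment. This is a minimal sketch, not part of the package diff, assuming Python 3.8+ and that the distribution is installed under the name 'ONE-api':

    # Hypothetical check of the installed ONE-api build (e.g. '3.0b3' or '3.0b5')
    from importlib.metadata import version, PackageNotFoundError

    try:
        print(version('ONE-api'))
    except PackageNotFoundError:
        print('ONE-api is not installed in this environment')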
one/webclient.py
CHANGED
@@ -1,1354 +1,1368 @@
(previous module contents, lines 1-1354, removed; the new version follows)
1
|
+
"""API for interacting with a remote Alyx instance through REST.
|
|
2
|
+
|
|
3
|
+
The AlyxClient class contains methods for making remote Alyx REST queries and downloading remote
|
|
4
|
+
files through Alyx.
|
|
5
|
+
|
|
6
|
+
Examples
|
|
7
|
+
--------
|
|
8
|
+
>>> alyx = AlyxClient(
|
|
9
|
+
... username='test_user', password='TapetesBloc18',
|
|
10
|
+
... base_url='https://test.alyx.internationalbrainlab.org')
|
|
11
|
+
|
|
12
|
+
List subjects
|
|
13
|
+
|
|
14
|
+
>>> subjects = alyx.rest('subjects', 'list')
|
|
15
|
+
|
|
16
|
+
Create a subject
|
|
17
|
+
|
|
18
|
+
>>> record = {
|
|
19
|
+
... 'nickname': nickname,
|
|
20
|
+
... 'responsible_user': 'olivier',
|
|
21
|
+
... 'birth_date': '2019-06-15',
|
|
22
|
+
... 'death_date': None,
|
|
23
|
+
... 'lab': 'cortexlab',
|
|
24
|
+
... }
|
|
25
|
+
>>> new_subj = alyx.rest('subjects', 'create', data=record)
|
|
26
|
+
|
|
27
|
+
Download a remote file, given a local path
|
|
28
|
+
|
|
29
|
+
>>> url = 'zadorlab/Subjects/flowers/2018-07-13/1/channels.probe.npy'
|
|
30
|
+
>>> local_path = alyx.download_file(url, target_dir='zadorlab/Subjects/flowers/2018-07-13/1/')
|
|
31
|
+
|
|
32
|
+
"""
|
|
33
|
+
from uuid import UUID
|
|
34
|
+
import json
|
|
35
|
+
import logging
|
|
36
|
+
import math
|
|
37
|
+
import os
|
|
38
|
+
import re
|
|
39
|
+
import functools
|
|
40
|
+
import urllib.request
|
|
41
|
+
from urllib.error import HTTPError
|
|
42
|
+
import urllib.parse
|
|
43
|
+
from collections.abc import Mapping
|
|
44
|
+
from typing import Optional
|
|
45
|
+
from datetime import datetime, timedelta
|
|
46
|
+
from pathlib import Path
|
|
47
|
+
from weakref import ReferenceType
|
|
48
|
+
import warnings
|
|
49
|
+
import hashlib
|
|
50
|
+
import zipfile
|
|
51
|
+
import tempfile
|
|
52
|
+
from getpass import getpass
|
|
53
|
+
from contextlib import contextmanager
|
|
54
|
+
|
|
55
|
+
import requests
|
|
56
|
+
from tqdm import tqdm
|
|
57
|
+
|
|
58
|
+
from pprint import pprint
|
|
59
|
+
import one.params
|
|
60
|
+
from iblutil.io import hashfile
|
|
61
|
+
from iblutil.io.params import set_hidden
|
|
62
|
+
from iblutil.util import ensure_list
|
|
63
|
+
import concurrent.futures
|
|
64
|
+
_logger = logging.getLogger(__name__)
|
|
65
|
+
N_THREADS = int(os.environ.get('ONE_HTTP_DL_THREADS', 4))
|
|
66
|
+
"""int: The number of download threads."""
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class _JSONEncoder(json.JSONEncoder):
|
|
70
|
+
"""A JSON encoder that handles UUID objects."""
|
|
71
|
+
|
|
72
|
+
def default(self, o):
|
|
73
|
+
"""Cast UUID objects to str before serializing."""
|
|
74
|
+
if isinstance(o, UUID):
|
|
75
|
+
return str(o)
|
|
76
|
+
return super().default(o)
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def _cache_response(method):
|
|
80
|
+
"""Decorator for the generic request method for caching REST reponses.
|
|
81
|
+
|
|
82
|
+
Caches the result of the query and on subsequent calls, returns cache instead of hitting the
|
|
83
|
+
database.
|
|
84
|
+
|
|
85
|
+
Parameters
|
|
86
|
+
----------
|
|
87
|
+
method : function
|
|
88
|
+
Function to wrap (i.e. AlyxClient._generic_request).
|
|
89
|
+
|
|
90
|
+
Returns
|
|
91
|
+
-------
|
|
92
|
+
function
|
|
93
|
+
Handle to wrapped method.
|
|
94
|
+
|
|
95
|
+
"""
|
|
96
|
+
|
|
97
|
+
@functools.wraps(method)
|
|
98
|
+
def wrapper_decorator(alyx_client, *args, expires=None, clobber=False, **kwargs):
|
|
99
|
+
"""REST caching wrapper.
|
|
100
|
+
|
|
101
|
+
Parameters
|
|
102
|
+
----------
|
|
103
|
+
alyx_client : AlyxClient
|
|
104
|
+
An instance of the AlyxClient class.
|
|
105
|
+
args : any
|
|
106
|
+
Positional arguments for applying to wrapped function.
|
|
107
|
+
expires : bool
|
|
108
|
+
An optional timedelta for how long cached response is valid. If True, the cached
|
|
109
|
+
response will not be used on subsequent calls. If None, the default expiry is applied.
|
|
110
|
+
clobber : bool
|
|
111
|
+
If True any existing cached response is overwritten.
|
|
112
|
+
**kwargs
|
|
113
|
+
Keyword arguments for applying to wrapped function.
|
|
114
|
+
|
|
115
|
+
Returns
|
|
116
|
+
-------
|
|
117
|
+
dict
|
|
118
|
+
The REST response JSON either from cached file or directly from remote.
|
|
119
|
+
|
|
120
|
+
"""
|
|
121
|
+
expires = expires or alyx_client.default_expiry
|
|
122
|
+
mode = (alyx_client.cache_mode or '').casefold()
|
|
123
|
+
if args[0].__name__ != mode and mode != '*':
|
|
124
|
+
return method(alyx_client, *args, **kwargs)
|
|
125
|
+
# Check cache
|
|
126
|
+
rest_cache = alyx_client.cache_dir.joinpath('.rest')
|
|
127
|
+
sha1 = hashlib.sha1()
|
|
128
|
+
sha1.update(bytes(args[1], 'utf-8'))
|
|
129
|
+
name = sha1.hexdigest()
|
|
130
|
+
# Reversible but length may exceed 255 chars
|
|
131
|
+
# name = base64.urlsafe_b64encode(args[2].encode('UTF-8')).decode('UTF-8')
|
|
132
|
+
files = list(rest_cache.glob(name))
|
|
133
|
+
cached = None
|
|
134
|
+
if len(files) == 1 and not clobber:
|
|
135
|
+
_logger.debug('loading REST response from cache')
|
|
136
|
+
with open(files[0], 'r') as f:
|
|
137
|
+
cached, when = json.load(f)
|
|
138
|
+
if datetime.fromisoformat(when) > datetime.now():
|
|
139
|
+
return cached
|
|
140
|
+
try:
|
|
141
|
+
response = method(alyx_client, *args, **kwargs)
|
|
142
|
+
except requests.exceptions.ConnectionError as ex:
|
|
143
|
+
if cached and not clobber:
|
|
144
|
+
warnings.warn('Failed to connect, returning cached response', RuntimeWarning)
|
|
145
|
+
return cached
|
|
146
|
+
raise ex # No cache and can't connect to database; re-raise
|
|
147
|
+
|
|
148
|
+
# Save response into cache
|
|
149
|
+
if not rest_cache.exists():
|
|
150
|
+
rest_cache.mkdir(parents=True)
|
|
151
|
+
rest_cache = set_hidden(rest_cache, True)
|
|
152
|
+
|
|
153
|
+
_logger.debug('caching REST response')
|
|
154
|
+
expiry_datetime = datetime.now() + (timedelta() if expires is True else expires)
|
|
155
|
+
with open(rest_cache / name, 'w') as f:
|
|
156
|
+
json.dump((response, expiry_datetime.isoformat()), f, cls=_JSONEncoder)
|
|
157
|
+
return response
|
|
158
|
+
|
|
159
|
+
return wrapper_decorator
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
@contextmanager
|
|
163
|
+
def no_cache(ac=None):
|
|
164
|
+
"""Temporarily turn off the REST cache for a given Alyx instance.
|
|
165
|
+
|
|
166
|
+
This function is particularly useful when calling ONE methods in remote mode.
|
|
167
|
+
|
|
168
|
+
Parameters
|
|
169
|
+
----------
|
|
170
|
+
ac : AlyxClient
|
|
171
|
+
An instance of the AlyxClient to modify. If None, the a new object is instantiated
|
|
172
|
+
|
|
173
|
+
Returns
|
|
174
|
+
-------
|
|
175
|
+
AlyxClient
|
|
176
|
+
The instance of Alyx with cache disabled
|
|
177
|
+
|
|
178
|
+
Examples
|
|
179
|
+
--------
|
|
180
|
+
>>> from one.api import ONE
|
|
181
|
+
>>> with no_cache(ONE().alyx):
|
|
182
|
+
... eids = ONE().search(subject='foobar', query_type='remote')
|
|
183
|
+
|
|
184
|
+
"""
|
|
185
|
+
ac = ac or AlyxClient()
|
|
186
|
+
cache_mode = ac.cache_mode
|
|
187
|
+
ac.cache_mode = None
|
|
188
|
+
try:
|
|
189
|
+
yield ac
|
|
190
|
+
finally:
|
|
191
|
+
ac.cache_mode = cache_mode
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
class _PaginatedResponse(Mapping):
|
|
195
|
+
"""Emulate a list from a paginated response.
|
|
196
|
+
|
|
197
|
+
Provides cache functionality.
|
|
198
|
+
|
|
199
|
+
Examples
|
|
200
|
+
--------
|
|
201
|
+
>>> r = _PaginatedResponse(client, response)
|
|
202
|
+
|
|
203
|
+
"""
|
|
204
|
+
|
|
205
|
+
def __init__(self, alyx, rep, cache_args=None):
|
|
206
|
+
"""Emulate a list from a paginated response.
|
|
207
|
+
|
|
208
|
+
Parameters
|
|
209
|
+
----------
|
|
210
|
+
alyx : AlyxClient
|
|
211
|
+
An instance of an AlyxClient associated with the REST response
|
|
212
|
+
rep : dict
|
|
213
|
+
A paginated REST response JSON dictionary
|
|
214
|
+
cache_args : dict
|
|
215
|
+
A dict of kwargs to pass to _cache_response decorator upon subsequent requests
|
|
216
|
+
|
|
217
|
+
"""
|
|
218
|
+
self.alyx = alyx
|
|
219
|
+
self.count = rep['count']
|
|
220
|
+
self.limit = len(rep['results'])
|
|
221
|
+
self._cache_args = cache_args or {}
|
|
222
|
+
# store URL without pagination query params
|
|
223
|
+
self.query = rep['next']
|
|
224
|
+
# init the cache, list with None with count size
|
|
225
|
+
self._cache = [None] * self.count
|
|
226
|
+
# fill the cache with results of the query
|
|
227
|
+
for i in range(self.limit):
|
|
228
|
+
self._cache[i] = rep['results'][i]
|
|
229
|
+
self._callbacks = set()
|
|
230
|
+
|
|
231
|
+
def add_callback(self, cb):
|
|
232
|
+
"""Add a callback function to use each time a new page is fetched.
|
|
233
|
+
|
|
234
|
+
The callback function will be called with the page results each time :meth:`populate`
|
|
235
|
+
is called.
|
|
236
|
+
|
|
237
|
+
Parameters
|
|
238
|
+
----------
|
|
239
|
+
cb : callable
|
|
240
|
+
A callable that takes the results of each paginated resonse.
|
|
241
|
+
|
|
242
|
+
"""
|
|
243
|
+
if not callable(cb):
|
|
244
|
+
raise TypeError(f'Expected type "callable", got "{type(cb)}" instead')
|
|
245
|
+
else:
|
|
246
|
+
self._callbacks.add(cb)
|
|
247
|
+
|
|
248
|
+
def __len__(self):
|
|
249
|
+
return self.count
|
|
250
|
+
|
|
251
|
+
def __getitem__(self, item):
|
|
252
|
+
if isinstance(item, slice):
|
|
253
|
+
while None in self._cache[item]:
|
|
254
|
+
# If slice start index is -ve, convert to +ve index
|
|
255
|
+
i = self.count + item.start if item.start < 0 else item.start
|
|
256
|
+
self.populate(i + self._cache[item].index(None))
|
|
257
|
+
elif self._cache[item] is None:
|
|
258
|
+
# If index is -ve, convert to +ve
|
|
259
|
+
self.populate(self.count + item if item < 0 else item)
|
|
260
|
+
return self._cache[item]
|
|
261
|
+
|
|
262
|
+
def populate(self, idx):
|
|
263
|
+
"""Populate response cache with new page of results.
|
|
264
|
+
|
|
265
|
+
Fetches the specific page of results containing the index passed and populates
|
|
266
|
+
stores the results in the :prop:`_cache` property.
|
|
267
|
+
|
|
268
|
+
Parameters
|
|
269
|
+
----------
|
|
270
|
+
idx : int
|
|
271
|
+
The index of a given record to fetch.
|
|
272
|
+
|
|
273
|
+
"""
|
|
274
|
+
offset = self.limit * math.floor(idx / self.limit)
|
|
275
|
+
query = update_url_params(self.query, {'limit': self.limit, 'offset': offset})
|
|
276
|
+
res = self.alyx._generic_request(requests.get, query, **self._cache_args)
|
|
277
|
+
if self.count != res['count']:
|
|
278
|
+
warnings.warn(
|
|
279
|
+
f'remote results for {urllib.parse.urlsplit(query).path} endpoint changed; '
|
|
280
|
+
f'results may be inconsistent', RuntimeWarning)
|
|
281
|
+
for i, r in enumerate(res['results'][:self.count - offset]):
|
|
282
|
+
self._cache[i + offset] = res['results'][i]
|
|
283
|
+
# Notify callbacks
|
|
284
|
+
pending_removal = []
|
|
285
|
+
for callback in self._callbacks:
|
|
286
|
+
# Handle weak reference callbacks first
|
|
287
|
+
if isinstance(callback, ReferenceType):
|
|
288
|
+
wf = callback
|
|
289
|
+
if (callback := wf()) is None:
|
|
290
|
+
pending_removal.append(wf)
|
|
291
|
+
continue
|
|
292
|
+
callback(res['results'])
|
|
293
|
+
for wf in pending_removal:
|
|
294
|
+
self._callbacks.discard(wf)
|
|
295
|
+
# When cache is complete, clear our callbacks
|
|
296
|
+
if all(reversed(self._cache)):
|
|
297
|
+
self._callbacks.clear()
|
|
298
|
+
|
|
299
|
+
def __iter__(self):
|
|
300
|
+
for i in range(self.count):
|
|
301
|
+
yield self.__getitem__(i)
|
|
302
|
+
|
|
303
|
+
|
|
304
|
+
def update_url_params(url: str, params: dict) -> str:
|
|
305
|
+
"""Add/update the query parameters of a URL and make url safe.
|
|
306
|
+
|
|
307
|
+
Parameters
|
|
308
|
+
----------
|
|
309
|
+
url : str
|
|
310
|
+
A URL string with which to update the query parameters
|
|
311
|
+
params : dict
|
|
312
|
+
A dict of new parameters. For multiple values for the same query, use a list (see example)
|
|
313
|
+
|
|
314
|
+
Returns
|
|
315
|
+
-------
|
|
316
|
+
str
|
|
317
|
+
A new URL with said parameters updated
|
|
318
|
+
|
|
319
|
+
Examples
|
|
320
|
+
--------
|
|
321
|
+
>>> update_url_params('website.com/?q=', {'pg': 5})
|
|
322
|
+
'website.com/?pg=5'
|
|
323
|
+
|
|
324
|
+
>>> update_url_params('website.com?q=xxx', {'pg': 5, 'foo': ['bar', 'baz']})
|
|
325
|
+
'website.com?q=xxx&pg=5&foo=bar&foo=baz'
|
|
326
|
+
|
|
327
|
+
"""
|
|
328
|
+
# Remove percent-encoding
|
|
329
|
+
url = urllib.parse.unquote(url)
|
|
330
|
+
parsed_url = urllib.parse.urlsplit(url)
|
|
331
|
+
# Extract URL query arguments and convert to dict
|
|
332
|
+
parsed_get_args = urllib.parse.parse_qs(parsed_url.query, keep_blank_values=False)
|
|
333
|
+
# Merge URL arguments dict with new params
|
|
334
|
+
parsed_get_args.update(params)
|
|
335
|
+
# Convert back to query string
|
|
336
|
+
encoded_get_args = urllib.parse.urlencode(parsed_get_args, doseq=True)
|
|
337
|
+
# Update parser and convert to full URL str
|
|
338
|
+
return parsed_url._replace(query=encoded_get_args).geturl()
|
|
339
|
+
|
|
340
|
+
|
|
341
|
+
def http_download_file_list(links_to_file_list, **kwargs):
|
|
342
|
+
"""Download a list of files from a remote HTTP server from a list of links.
|
|
343
|
+
|
|
344
|
+
Generates up to 4 separate threads to handle downloads.
|
|
345
|
+
Same options behaviour as http_download_file.
|
|
346
|
+
|
|
347
|
+
Parameters
|
|
348
|
+
----------
|
|
349
|
+
links_to_file_list : list
|
|
350
|
+
List of http links to files.
|
|
351
|
+
**kwargs
|
|
352
|
+
Optional arguments to pass to http_download_file.
|
|
353
|
+
|
|
354
|
+
Returns
|
|
355
|
+
-------
|
|
356
|
+
list of pathlib.Path
|
|
357
|
+
A list of the local full path of the downloaded files.
|
|
358
|
+
|
|
359
|
+
"""
|
|
360
|
+
links_to_file_list = list(links_to_file_list) # In case generator was passed
|
|
361
|
+
outputs = []
|
|
362
|
+
target_dir = kwargs.pop('target_dir', None)
|
|
363
|
+
# Ensure target dir the length of url list
|
|
364
|
+
if target_dir is None or isinstance(target_dir, (str, Path)):
|
|
365
|
+
target_dir = [target_dir] * len(links_to_file_list)
|
|
366
|
+
assert len(target_dir) == len(links_to_file_list)
|
|
367
|
+
# using with statement to ensure threads are cleaned up promptly
|
|
368
|
+
zipped = zip(links_to_file_list, target_dir)
|
|
369
|
+
with concurrent.futures.ThreadPoolExecutor(max_workers=N_THREADS) as executor:
|
|
370
|
+
# Multithreading load operations
|
|
371
|
+
futures = [executor.submit(
|
|
372
|
+
http_download_file, link, target_dir=target, **kwargs) for link, target in zipped]
|
|
373
|
+
zip(links_to_file_list, ensure_list(kwargs.pop('target_dir', None)))
|
|
374
|
+
# TODO Reintroduce variable timeout value based on file size and download speed of 5 Mb/s?
|
|
375
|
+
# timeout = reduce(lambda x, y: x + (y.get('file_size', 0) or 0), dsets, 0) / 625000 ?
|
|
376
|
+
concurrent.futures.wait(futures, timeout=None)
|
|
377
|
+
# build return list
|
|
378
|
+
for future in futures:
|
|
379
|
+
outputs.append(future.result())
|
|
380
|
+
# if returning md5, separate list of tuples into two lists: (files, md5)
|
|
381
|
+
return list(zip(*outputs)) if kwargs.get('return_md5', False) else outputs
|
|
382
|
+
|
|
383
|
+
|
|
384
|
+
def http_download_file(full_link_to_file, chunks=None, *, clobber=False, silent=False,
|
|
385
|
+
username='', password='', target_dir='', return_md5=False, headers=None):
|
|
386
|
+
"""Download a file from a remote HTTP server.
|
|
387
|
+
|
|
388
|
+
Parameters
|
|
389
|
+
----------
|
|
390
|
+
full_link_to_file : str
|
|
391
|
+
HTTP link to the file
|
|
392
|
+
chunks : tuple of ints
|
|
393
|
+
Chunks to download
|
|
394
|
+
clobber : bool
|
|
395
|
+
If True, force overwrite the existing file
|
|
396
|
+
silent : bool
|
|
397
|
+
If True, suppress download progress bar
|
|
398
|
+
username : str
|
|
399
|
+
User authentication for password protected file server
|
|
400
|
+
password : str
|
|
401
|
+
Password authentication for password protected file server
|
|
402
|
+
target_dir : str, pathlib.Path
|
|
403
|
+
Directory in which files are downloaded; defaults to user's Download directory
|
|
404
|
+
return_md5 : bool
|
|
405
|
+
If True an MD5 hash of the file is additionally returned
|
|
406
|
+
headers : list of dicts
|
|
407
|
+
Additional headers to add to the request (auth tokens etc.)
|
|
408
|
+
|
|
409
|
+
Returns
|
|
410
|
+
-------
|
|
411
|
+
pathlib.Path
|
|
412
|
+
The full file path of the downloaded file
|
|
413
|
+
|
|
414
|
+
"""
|
|
415
|
+
if not full_link_to_file:
|
|
416
|
+
return (None, None) if return_md5 else None
|
|
417
|
+
|
|
418
|
+
# makes sure special characters get encoded ('#' in file names for example)
|
|
419
|
+
surl = urllib.parse.urlsplit(full_link_to_file, allow_fragments=False)
|
|
420
|
+
full_link_to_file = surl._replace(path=urllib.parse.quote(surl.path)).geturl()
|
|
421
|
+
|
|
422
|
+
# default cache directory is the home dir
|
|
423
|
+
if not target_dir:
|
|
424
|
+
target_dir = Path.home().joinpath('Downloads')
|
|
425
|
+
|
|
426
|
+
# This should be the base url you wanted to access.
|
|
427
|
+
base_url, name = full_link_to_file.rsplit('/', 1)
|
|
428
|
+
file_name = Path(target_dir, name)
|
|
429
|
+
|
|
430
|
+
# do not overwrite an existing file unless specified
|
|
431
|
+
if not clobber and file_name.exists():
|
|
432
|
+
return (file_name, hashfile.md5(file_name)) if return_md5 else file_name
|
|
433
|
+
|
|
434
|
+
# Create a password manager
|
|
435
|
+
manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
|
|
436
|
+
if username and password:
|
|
437
|
+
manager.add_password(None, base_url, username, password)
|
|
438
|
+
|
|
439
|
+
# Create an authentication handler using the password manager
|
|
440
|
+
auth = urllib.request.HTTPBasicAuthHandler(manager)
|
|
441
|
+
|
|
442
|
+
# Create an opener that will replace the default urlopen method on further calls
|
|
443
|
+
opener = urllib.request.build_opener(auth)
|
|
444
|
+
urllib.request.install_opener(opener)
|
|
445
|
+
|
|
446
|
+
# Support for partial download.
|
|
447
|
+
req = urllib.request.Request(full_link_to_file)
|
|
448
|
+
if chunks is not None:
|
|
449
|
+
first_byte, n_bytes = chunks
|
|
450
|
+
req.add_header('Range', 'bytes=%d-%d' % (first_byte, first_byte + n_bytes - 1))
|
|
451
|
+
|
|
452
|
+
# add additional headers
|
|
453
|
+
if headers is not None:
|
|
454
|
+
for k in headers:
|
|
455
|
+
req.add_header(k, headers[k])
|
|
456
|
+
|
|
457
|
+
# Open the url and get the length
|
|
458
|
+
try:
|
|
459
|
+
u = urllib.request.urlopen(req)
|
|
460
|
+
except HTTPError as e:
|
|
461
|
+
_logger.error(f'{str(e)} {full_link_to_file}')
|
|
462
|
+
raise e
|
|
463
|
+
|
|
464
|
+
file_size = int(u.getheader('Content-length'))
|
|
465
|
+
if not silent:
|
|
466
|
+
print(f'Downloading: {file_name} Bytes: {file_size}')
|
|
467
|
+
block_sz = 8192 * 64 * 8
|
|
468
|
+
|
|
469
|
+
md5 = hashlib.md5()
|
|
470
|
+
f = open(file_name, 'wb')
|
|
471
|
+
with tqdm(total=file_size / 1024 / 1024, disable=silent) as pbar:
|
|
472
|
+
while True:
|
|
473
|
+
buffer = u.read(block_sz)
|
|
474
|
+
if not buffer:
|
|
475
|
+
break
|
|
476
|
+
f.write(buffer)
|
|
477
|
+
if return_md5:
|
|
478
|
+
md5.update(buffer)
|
|
479
|
+
pbar.update(len(buffer) / 1024 / 1024)
|
|
480
|
+
f.close()
|
|
481
|
+
|
|
482
|
+
return (file_name, md5.hexdigest()) if return_md5 else file_name
|
|
483
|
+
|
|
484
|
+
|
|
485
|
+
def file_record_to_url(file_records) -> list:
|
|
486
|
+
"""Translate a Json dictionary to an usable http url for downloading files.
|
|
487
|
+
|
|
488
|
+
Parameters
|
|
489
|
+
----------
|
|
490
|
+
file_records : dict
|
|
491
|
+
JSON containing a 'data_url' field
|
|
492
|
+
|
|
493
|
+
Returns
|
|
494
|
+
-------
|
|
495
|
+
list of str
|
|
496
|
+
A list of full data urls
|
|
497
|
+
|
|
498
|
+
"""
|
|
499
|
+
urls = []
|
|
500
|
+
for fr in file_records:
|
|
501
|
+
if fr['data_url'] is not None:
|
|
502
|
+
urls.append(fr['data_url'])
|
|
503
|
+
return urls
|
|
504
|
+
|
|
505
|
+
|
|
506
|
+
def dataset_record_to_url(dataset_record) -> list:
|
|
507
|
+
"""Extract a list of files urls from a list of dataset queries.
|
|
508
|
+
|
|
509
|
+
Parameters
|
|
510
|
+
----------
|
|
511
|
+
dataset_record : list, dict
|
|
512
|
+
Dataset JSON from a REST request
|
|
513
|
+
|
|
514
|
+
Returns
|
|
515
|
+
-------
|
|
516
|
+
list of str
|
|
517
|
+
A list of file urls corresponding to the datasets records
|
|
518
|
+
|
|
519
|
+
"""
|
|
520
|
+
urls = []
|
|
521
|
+
if isinstance(dataset_record, dict):
|
|
522
|
+
dataset_record = [dataset_record]
|
|
523
|
+
for ds in dataset_record:
|
|
524
|
+
urls += file_record_to_url(ds['file_records'])
|
|
525
|
+
return urls
|
|
526
|
+
|
|
527
|
+
|
|
528
|
+
class AlyxClient:
|
|
529
|
+
"""Class that implements simple GET/POST wrappers for the Alyx REST API.
|
|
530
|
+
|
|
531
|
+
See https://openalyx.internationalbrainlab.org/docs
|
|
532
|
+
"""
|
|
533
|
+
|
|
534
|
+
_token = None
|
|
535
|
+
_headers = {} # Headers for REST requests only
|
|
536
|
+
user = None
|
|
537
|
+
"""str: The Alyx username."""
|
|
538
|
+
base_url = None
|
|
539
|
+
"""str: The Alyx database URL."""
|
|
540
|
+
|
|
541
|
+
def __init__(self, base_url=None, username=None, password=None,
|
|
542
|
+
cache_dir=None, silent=False, cache_rest='GET'):
|
|
543
|
+
"""Create a client instance that allows to GET and POST to the Alyx server.
|
|
544
|
+
|
|
545
|
+
For One, constructor attempts to authenticate with credentials in params.py.
|
|
546
|
+
For standalone cases, AlyxClient(username='', password='', base_url='').
|
|
547
|
+
|
|
548
|
+
Parameters
|
|
549
|
+
----------
|
|
550
|
+
base_url : str
|
|
551
|
+
Alyx server address, including port and protocol.
|
|
552
|
+
username : str
|
|
553
|
+
Alyx database user.
|
|
554
|
+
password : str
|
|
555
|
+
Alyx database password.
|
|
556
|
+
cache_dir : str, pathlib.Path
|
|
557
|
+
The default root download location.
|
|
558
|
+
silent : bool
|
|
559
|
+
If true, user prompts and progress bars are suppressed.
|
|
560
|
+
cache_rest : str, None
|
|
561
|
+
Which type of http method to apply cache to; if '*', all requests are cached.
|
|
562
|
+
stay_logged_in : bool
|
|
563
|
+
If true, auth token is cached.
|
|
564
|
+
|
|
565
|
+
"""
|
|
566
|
+
self.silent = silent
|
|
567
|
+
self._par = one.params.get(client=base_url, silent=self.silent, username=username)
|
|
568
|
+
self.base_url = base_url or self._par.ALYX_URL
|
|
569
|
+
self._par = self._par.set('CACHE_DIR', cache_dir or self._par.CACHE_DIR)
|
|
570
|
+
if username or password:
|
|
571
|
+
self.authenticate(username, password)
|
|
572
|
+
self._rest_schemes = None
|
|
573
|
+
# the mixed accept application may cause errors sometimes, only necessary for the docs
|
|
574
|
+
self._headers = {**self._headers, 'Accept': 'application/json'}
|
|
575
|
+
# REST cache parameters
|
|
576
|
+
# The default length of time that cache file is valid for,
|
|
577
|
+
# The default expiry is overridden by the `expires` kwarg. If False, the caching is
|
|
578
|
+
# turned off.
|
|
579
|
+
self.default_expiry = timedelta(minutes=5)
|
|
580
|
+
self.cache_mode = cache_rest
|
|
581
|
+
self._obj_id = id(self)
|
|
582
|
+
|
|
583
|
+
@property
|
|
584
|
+
def rest_schemes(self):
|
|
585
|
+
"""dict: The REST endpoints and their parameters."""
|
|
586
|
+
# Delayed fetch of rest schemes speeds up instantiation
|
|
587
|
+
if not self._rest_schemes:
|
|
588
|
+
self._rest_schemes = self.get('/docs', expires=timedelta(weeks=1))
|
|
589
|
+
return self._rest_schemes
|
|
590
|
+
|
|
591
|
+
@property
|
|
592
|
+
def cache_dir(self):
|
|
593
|
+
"""pathlib.Path: The location of the downloaded file cache."""
|
|
594
|
+
return Path(self._par.CACHE_DIR)
|
|
595
|
+
|
|
596
|
+
@cache_dir.setter
|
|
597
|
+
def cache_dir(self, cache_dir):
|
|
598
|
+
cache_dir = Path(cache_dir)
|
|
599
|
+
cache_dir.mkdir(parents=True, exist_ok=True)
|
|
600
|
+
self._par = self._par.set('CACHE_DIR', cache_dir)
|
|
601
|
+
|
|
602
|
+
@property
|
|
603
|
+
def is_logged_in(self):
|
|
604
|
+
"""bool: Check if user logged into Alyx database; True if user is authenticated."""
|
|
605
|
+
return bool(self.user and self._token and 'Authorization' in self._headers)
|
|
606
|
+
|
|
607
|
+
def list_endpoints(self):
|
|
608
|
+
"""Return a list of available REST endpoints.
|
|
609
|
+
|
|
610
|
+
Returns
|
|
611
|
+
-------
|
|
612
|
+
List of REST endpoint strings.
|
|
613
|
+
|
|
614
|
+
"""
|
|
615
|
+
EXCLUDE = ('_type', '_meta', '', 'auth-token')
|
|
616
|
+
return sorted(x for x in self.rest_schemes.keys() if x not in EXCLUDE)
|
|
617
|
+
|
|
618
|
+
def print_endpoint_info(self, endpoint, action=None):
|
|
619
|
+
"""Print the available actions and query parameters for a given REST endpoint.
|
|
620
|
+
|
|
621
|
+
Parameters
|
|
622
|
+
----------
|
|
623
|
+
endpoint : str
|
|
624
|
+
An Alyx REST endpoint to query.
|
|
625
|
+
action : str
|
|
626
|
+
An optional action (e.g. 'list') to print. If None, all actions are printed.
|
|
627
|
+
|
|
628
|
+
Returns
|
|
629
|
+
-------
|
|
630
|
+
dict, list
|
|
631
|
+
A dictionary of endpoint query parameter details or a list of parameter details if
|
|
632
|
+
action is not None.
|
|
633
|
+
|
|
634
|
+
"""
|
|
635
|
+
rs = self.rest_schemes
|
|
636
|
+
if endpoint not in rs:
|
|
637
|
+
return print(f'Endpoint "{endpoint}" does not exist')
|
|
638
|
+
|
|
639
|
+
for _action in (rs[endpoint] if action is None else [action]):
|
|
640
|
+
doc = []
|
|
641
|
+
pprint(_action)
|
|
642
|
+
for f in rs[endpoint][_action]['fields']:
|
|
643
|
+
required = ' (required): ' if f.get('required', False) else ': '
|
|
644
|
+
doc.append(f'\t"{f["name"]}"{required}{f["schema"]["_type"]}'
|
|
645
|
+
f', {f["schema"]["description"]}')
|
|
646
|
+
doc.sort()
|
|
647
|
+
[print(d) for d in doc if '(required)' in d]
|
|
648
|
+
[print(d) for d in doc if '(required)' not in d]
|
|
649
|
+
return (rs[endpoint] if action is None else rs[endpoint][action]).copy()
|
|
650
|
+
|
|
651
|
+
@_cache_response
|
|
652
|
+
def _generic_request(self, reqfunction, rest_query, data=None, files=None):
|
|
653
|
+
if not self.is_logged_in:
|
|
654
|
+
self.authenticate(username=self.user)
|
|
655
|
+
# makes sure the base url is the one from the instance
|
|
656
|
+
rest_query = rest_query.replace(self.base_url, '')
|
|
657
|
+
if not rest_query.startswith('/'):
|
|
658
|
+
rest_query = '/' + rest_query
|
|
659
|
+
_logger.debug(f'{self.base_url + rest_query}, headers: {self._headers}')
|
|
660
|
+
headers = self._headers.copy()
|
|
661
|
+
if files is None:
|
|
662
|
+
to_json = functools.partial(json.dumps, cls=_JSONEncoder)
|
|
663
|
+
data = to_json(data) if isinstance(data, dict) or isinstance(data, list) else data
|
|
664
|
+
headers['Content-Type'] = 'application/json'
|
|
665
|
+
if rest_query.startswith('/docs'):
|
|
666
|
+
# the mixed accept application may cause errors sometimes, only necessary for the docs
|
|
667
|
+
headers['Accept'] = 'application/coreapi+json'
|
|
668
|
+
r = reqfunction(self.base_url + rest_query,
|
|
669
|
+
stream=True, headers=headers, data=data, files=files)
|
|
670
|
+
if r and r.status_code in (200, 201):
|
|
671
|
+
return json.loads(r.text)
|
|
672
|
+
elif r and r.status_code == 204:
|
|
673
|
+
return
|
|
674
|
+
if r.status_code == 403 and '"Invalid token."' in r.text:
|
|
675
|
+
_logger.debug('Token invalid; Attempting to re-authenticate...')
|
|
676
|
+
# Log out in order to flush stale token. At this point we no longer have the password
|
|
677
|
+
# but if the user re-instantiates with a password arg it will request a new token.
|
|
678
|
+
username = self.user
|
|
679
|
+
if self.silent: # no need to log out otherwise; user will be prompted for password
|
|
680
|
+
self.logout()
|
|
681
|
+
self.authenticate(username=username, force=True)
|
|
682
|
+
return self._generic_request(reqfunction, rest_query, data=data, files=files)
|
|
683
|
+
else:
|
|
684
|
+
_logger.debug('Response text raw: ' + r.text)
|
|
685
|
+
try:
|
|
686
|
+
message = json.loads(r.text)
|
|
687
|
+
message.pop('status_code', None) # Get status code from response object instead
|
|
688
|
+
message = message.get('detail') or message # Get details if available
|
|
689
|
+
_logger.debug(message)
|
|
690
|
+
except json.decoder.JSONDecodeError:
|
|
691
|
+
message = r.text
|
|
692
|
+
raise requests.HTTPError(r.status_code, rest_query, message, response=r)
|
|
693
|
+
|
|
694
|
+
def authenticate(self, username=None, password=None, cache_token=True, force=False):
|
|
695
|
+
"""Fetch token from the Alyx REST API for authenticating request headers.
|
|
696
|
+
|
|
697
|
+
Credentials are loaded via one.params.
|
|
698
|
+
|
|
699
|
+
Parameters
|
|
700
|
+
----------
|
|
701
|
+
username : str
|
|
702
|
+
Alyx username. If None, token not cached and not silent, user is prompted.
|
|
703
|
+
password : str
|
|
704
|
+
Alyx password. If None, token not cached and not silent, user is prompted.
|
|
705
|
+
cache_token : bool
|
|
706
|
+
If true, the token is cached for subsequent auto-logins.
|
|
707
|
+
force : bool
|
|
708
|
+
If true, any cached token is ignored.
|
|
709
|
+
|
|
710
|
+
"""
|
|
711
|
+
# Get username
|
|
712
|
+
if username is None:
|
|
713
|
+
username = getattr(self._par, 'ALYX_LOGIN', self.user)
|
|
714
|
+
if username is None and not self.silent:
|
|
715
|
+
username = input('Enter Alyx username:')
|
|
716
|
+
|
|
717
|
+
# If user passes in a password, force re-authentication even if token cached
|
|
718
|
+
if password is not None:
|
|
719
|
+
if not force:
|
|
720
|
+
_logger.debug('Forcing token request with provided password')
|
|
721
|
+
force = True
|
|
722
|
+
# Check if token cached
|
|
723
|
+
if not force and getattr(self._par, 'TOKEN', False) and username in self._par.TOKEN:
|
|
724
|
+
self._token = self._par.TOKEN[username]
|
|
725
|
+
self._headers = {
|
|
726
|
+
'Authorization': f'Token {list(self._token.values())[0]}',
|
|
727
|
+
'Accept': 'application/json'}
|
|
728
|
+
self.user = username
|
|
729
|
+
return
|
|
730
|
+
|
|
731
|
+
# Get password
|
|
732
|
+
if password is None:
|
|
733
|
+
password = getattr(self._par, 'ALYX_PWD', None)
|
|
734
|
+
if password is None:
|
|
735
|
+
if self.silent:
|
|
736
|
+
warnings.warn(
|
|
737
|
+
'No password or cached token in silent mode. '
|
|
738
|
+
'Please run the following to re-authenticate:\n\t'
|
|
739
|
+
'AlyxClient(silent=False).authenticate'
|
|
740
|
+
'(username=<username>, force=True)', UserWarning)
|
|
741
|
+
else:
|
|
742
|
+
password = getpass(f'Enter Alyx password for "{username}":')
|
|
743
|
+
# Remove previous token
|
|
744
|
+
self._clear_token(username)
|
|
745
|
+
try:
|
|
746
|
+
credentials = {'username': username, 'password': password}
|
|
747
|
+
rep = requests.post(self.base_url + '/auth-token', data=credentials)
|
|
748
|
+
except requests.exceptions.ConnectionError:
|
|
749
|
+
raise ConnectionError(
|
|
750
|
+
f'Can\'t connect to {self.base_url}.\n' +
|
|
751
|
+
'Check your internet connections and Alyx database firewall'
|
|
752
|
+
)
|
|
753
|
+
# Assign token or raise exception on auth error
|
|
754
|
+
if rep.ok:
|
|
755
|
+
self._token = rep.json()
|
|
756
|
+
assert list(self._token.keys()) == ['token']
|
|
757
|
+
else:
|
|
758
|
+
if rep.status_code == 400: # Auth error; re-raise with details
|
|
759
|
+
redacted = '*' * len(credentials['password']) if credentials['password'] else None
|
|
760
|
+
message = ('Alyx authentication failed with credentials: '
|
|
761
|
+
f'user = {credentials["username"]}, password = {redacted}')
|
|
762
|
+
raise requests.HTTPError(rep.status_code, rep.url, message, response=rep)
|
|
763
|
+
else:
|
|
764
|
+
rep.raise_for_status()
|
|
765
|
+
|
|
766
|
+
self._headers = {
|
|
767
|
+
'Authorization': 'Token {}'.format(list(self._token.values())[0]),
|
|
768
|
+
'Accept': 'application/json'}
|
|
769
|
+
if cache_token:
|
|
770
|
+
# Update saved pars
|
|
771
|
+
par = one.params.get(client=self.base_url, silent=True)
|
|
772
|
+
tokens = getattr(par, 'TOKEN', {})
|
|
773
|
+
tokens[username] = self._token
|
|
774
|
+
one.params.save(par.set('TOKEN', tokens), self.base_url)
|
|
775
|
+
# Update current pars
|
|
776
|
+
self._par = self._par.set('TOKEN', tokens)
|
|
777
|
+
self.user = username
|
|
778
|
+
if not self.silent:
|
|
779
|
+
print(f'Connected to {self.base_url} as user "{self.user}"')
|
|
780
|
+
|
|
781
|
+
def _clear_token(self, username):
|
|
782
|
+
"""Remove auth token from client params.
|
|
783
|
+
|
|
784
|
+
Deletes the cached authentication token for a given user.
|
|
785
|
+
"""
|
|
786
|
+
par = one.params.get(client=self.base_url, silent=True)
|
|
787
|
+
# Remove token from cache
|
|
788
|
+
if getattr(par, 'TOKEN', False) and username in par.TOKEN:
|
|
789
|
+
del par.TOKEN[username]
|
|
790
|
+
one.params.save(par, self.base_url)
|
|
791
|
+
# Remove token from local pars
|
|
792
|
+
if getattr(self._par, 'TOKEN', False) and username in self._par.TOKEN:
|
|
793
|
+
del self._par.TOKEN[username]
|
|
794
|
+
# Remove token from object
|
|
795
|
+
self._token = None
|
|
796
|
+
if self._headers and 'Authorization' in self._headers:
|
|
797
|
+
del self._headers['Authorization']
|
|
798
|
+
|
|
799
|
+
def logout(self):
|
|
800
|
+
"""Log out from Alyx.
|
|
801
|
+
|
|
802
|
+
Deletes the cached authentication token for the currently logged-in user
|
|
803
|
+
and clears the REST cache.
|
|
804
|
+
"""
|
|
805
|
+
if not self.is_logged_in:
|
|
806
|
+
return
|
|
807
|
+
self._clear_token(username := self.user)
|
|
808
|
+
self.user = None
|
|
809
|
+
self.clear_rest_cache()
|
|
810
|
+
if not self.silent:
|
|
811
|
+
print(f'{username} logged out from {self.base_url}')
|
|
812
|
+
|
|
813
|
+
def delete(self, rest_query):
|
|
814
|
+
"""Send a DELETE request to the Alyx server.
|
|
815
|
+
|
|
816
|
+
Will raise an exception on any HTTP status code other than 200, 201.
|
|
817
|
+
|
|
818
|
+
Parameters
|
|
819
|
+
----------
|
|
820
|
+
rest_query : str
|
|
821
|
+
A REST query string either as a relative URL path complete URL.
|
|
822
|
+
|
|
823
|
+
Returns
|
|
824
|
+
-------
|
|
825
|
+
JSON interpreted dictionary from response.
|
|
826
|
+
|
|
827
|
+
Examples
|
|
828
|
+
--------
|
|
829
|
+
>>> AlyxClient.delete('/weighings/c617562d-c107-432e-a8ee-682c17f9e698')
|
|
830
|
+
>>> AlyxClient.delete(
|
|
831
|
+
... 'https://alyx.example.com/endpoint/c617562d-c107-432e-a8ee-682c17f9e698')
|
|
832
|
+
|
|
833
|
+
"""
|
|
834
|
+
return self._generic_request(requests.delete, rest_query)
|
|
835
|
+
|
|
836
|
+
def download_file(self, url, **kwargs):
|
|
837
|
+
"""Download file(s) from data server from a REST file record URL.
|
|
838
|
+
|
|
839
|
+
Parameters
|
|
840
|
+
----------
|
|
841
|
+
url : str, list
|
|
842
|
+
Full url(s) of the file(s).
|
|
843
|
+
**kwargs
|
|
844
|
+
WebClient.http_download_file parameters.
|
|
845
|
+
|
|
846
|
+
Returns
|
|
847
|
+
-------
|
|
848
|
+
pathlib.Path, list of pathlib.Path
|
|
849
|
+
Local path(s) of downloaded file(s).
|
|
850
|
+
|
|
851
|
+
"""
|
|
852
|
+
if isinstance(url, str):
|
|
853
|
+
url = self._validate_file_url(url)
|
|
854
|
+
download_fcn = http_download_file
|
|
855
|
+
else:
|
|
856
|
+
url = (self._validate_file_url(x) for x in url)
|
|
857
|
+
download_fcn = http_download_file_list
|
|
858
|
+
pars = dict(
|
|
859
|
+
silent=kwargs.pop('silent', self.silent),
|
|
860
|
+
target_dir=kwargs.pop('target_dir', self._par.CACHE_DIR),
|
|
861
|
+
username=self._par.HTTP_DATA_SERVER_LOGIN,
|
|
862
|
+
password=self._par.HTTP_DATA_SERVER_PWD,
|
|
863
|
+
**kwargs
|
|
864
|
+
)
|
|
865
|
+
try:
|
|
866
|
+
files = download_fcn(url, **pars)
|
|
867
|
+
except HTTPError as ex:
|
|
868
|
+
if ex.code == 401:
|
|
869
|
+
ex.msg += (' - please check your HTTP_DATA_SERVER_LOGIN and '
|
|
870
|
+
'HTTP_DATA_SERVER_PWD ONE params, or username/password kwargs')
|
|
871
|
+
raise ex
|
|
872
|
+
return files
|
|
873
|
+
|
|
874
|
+
    def download_cache_tables(self, source=None, destination=None):
        """Download Alyx cache tables to the local data cache directory.

        Parameters
        ----------
        source : str, pathlib.Path
            The remote HTTP directory of the cache table (excluding the filename).
            Default: AlyxClient.base_url.
        destination : str, pathlib.Path
            The target directory into which the tables will be downloaded.

        Returns
        -------
        List of parquet table file paths.

        """
        source = str(source or f'{self.base_url}/cache.zip')
        destination = destination or self.cache_dir
        Path(destination).mkdir(exist_ok=True, parents=True)

        headers = None
        if source.startswith(self.base_url):
            if not self.is_logged_in:
                self.authenticate()
            headers = self._headers

        with tempfile.TemporaryDirectory(dir=destination) as tmp:
            file = http_download_file(source,
                                      headers=headers,
                                      silent=self.silent,
                                      target_dir=tmp,
                                      clobber=True)
            with zipfile.ZipFile(file, 'r') as zipped:
                files = zipped.namelist()
                zipped.extractall(destination)
        return [Path(destination, table) for table in files]

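    # Editor's note: illustrative usage sketch, not part of webclient.py. With no arguments the
    # method fetches <base_url>/cache.zip and unpacks the parquet tables into alyx.cache_dir.
    # >>> tables = alyx.download_cache_tables()
    # >>> tables = alyx.download_cache_tables(destination='./alyx_cache')  # alternative target
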
    def _validate_file_url(self, url):
        """Assert that URL matches HTTP_DATA_SERVER parameter.

        Currently only one remote HTTP server is supported for a given AlyxClient instance. If
        the URL contains only the relative path part, the full URL is returned.

        Parameters
        ----------
        url : str
            The full or partial URL to validate.

        Returns
        -------
        The complete URL.

        Examples
        --------
        >>> url = self._validate_file_url('https://webserver.net/path/to/file')
        'https://webserver.net/path/to/file'
        >>> url = self._validate_file_url('path/to/file')
        'https://webserver.net/path/to/file'

        """
        if url.startswith('http'):  # A full URL
            assert url.startswith(self._par.HTTP_DATA_SERVER), \
                ('remote protocol and/or hostname does not match HTTP_DATA_SERVER parameter:\n' +
                 f'"{url[:40]}..." should start with "{self._par.HTTP_DATA_SERVER}"')
        elif not url.startswith(self._par.HTTP_DATA_SERVER):
            url = self.rel_path2url(url)
        return url

    def rel_path2url(self, path):
        """Given a relative file path, return the remote HTTP server URL.

        It is expected that the remote HTTP server has the same file tree as the local system.

        Parameters
        ----------
        path : str, pathlib.Path
            A relative ALF path (subject/date/number/etc.).

        Returns
        -------
        A URL string.

        """
        path = str(path).strip('/')
        assert not path.startswith('http')
        return f'{self._par.HTTP_DATA_SERVER}/{path}'

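    # Editor's note: illustrative sketch, not part of webclient.py. The relative path and the
    # HTTP_DATA_SERVER value ('https://example.data.server') are placeholders; the method simply
    # joins the two with a '/'.
    # >>> alyx.rel_path2url('mylab/Subjects/SW001/2024-01-01/001/alf/spikes.times.npy')
    # 'https://example.data.server/mylab/Subjects/SW001/2024-01-01/001/alf/spikes.times.npy'
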
    def get(self, rest_query, **kwargs):
        """Send a GET request to the Alyx server.

        Will raise an exception on any HTTP status code other than 200, 201.

        For the dictionary contents and list of endpoints, refer to:
        https://openalyx.internationalbrainlab.org/docs

        Parameters
        ----------
        rest_query : str
            A REST URL path, e.g. '/sessions?user=Hamish'.
        **kwargs
            Optional arguments to pass to _generic_request and _cache_response decorator.

        Returns
        -------
        JSON interpreted dictionary from response.

        """
        rep = self._generic_request(requests.get, rest_query, **kwargs)
        if isinstance(rep, dict) and list(rep.keys()) == ['count', 'next', 'previous', 'results']:
            if len(rep['results']) < rep['count']:
                cache_args = {k: v for k, v in kwargs.items() if k in ('clobber', 'expires')}
                rep = _PaginatedResponse(self, rep, cache_args)
            else:
                rep = rep['results']
        return rep

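    # Editor's note: illustrative sketch, not part of webclient.py. When Alyx returns a paginated
    # body ('count', 'next', 'previous', 'results') that spans several pages, `get` wraps it in a
    # lazy _PaginatedResponse rather than a plain list; the query below is a placeholder.
    # >>> sessions = alyx.get('/sessions?lab=mylab', expires=True)
    # >>> first = sessions[0]  # later pages are only fetched when accessed
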
    def patch(self, rest_query, data=None, files=None):
        """Send a PATCH request to the Alyx server.

        For the dictionary contents, refer to:
        https://openalyx.internationalbrainlab.org/docs

        Parameters
        ----------
        rest_query : str
            The endpoint as full or relative URL.
        data : dict, str
            JSON encoded string or dictionary (c.f. requests).
        files : dict, tuple
            Files to attach (c.f. requests).

        Returns
        -------
        Response object.

        """
        return self._generic_request(requests.patch, rest_query, data=data, files=files)

    def post(self, rest_query, data=None, files=None):
        """Send a POST request to the Alyx server.

        For the dictionary contents, refer to:
        https://openalyx.internationalbrainlab.org/docs

        Parameters
        ----------
        rest_query : str
            The endpoint as full or relative URL.
        data : dict, str
            JSON encoded string or dictionary (c.f. requests).
        files : dict, tuple
            Files to attach (c.f. requests).

        Returns
        -------
        Response object.

        """
        return self._generic_request(requests.post, rest_query, data=data, files=files)

    def put(self, rest_query, data=None, files=None):
        """Send a PUT request to the Alyx server.

        For the dictionary contents, refer to:
        https://openalyx.internationalbrainlab.org/docs

        Parameters
        ----------
        rest_query : str
            The endpoint as full or relative URL.
        data : dict, str
            JSON encoded string or dictionary (c.f. requests).
        files : dict, tuple
            Files to attach (c.f. requests).

        Returns
        -------
        requests.Response
            Response object.

        """
        return self._generic_request(requests.put, rest_query, data=data, files=files)

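    # Editor's note: illustrative sketch, not part of webclient.py. post/patch/put simply defer
    # to _generic_request with the matching HTTP verb; endpoint names and record contents below
    # are placeholders (the higher-level `rest` method is usually more convenient).
    # >>> created = alyx.post('/subjects', data={'nickname': 'SW001', 'lab': 'mylab'})
    # >>> updated = alyx.patch('/subjects/SW001', data={'description': 'test subject'})
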
    def rest(self, url=None, action=None, id=None, data=None, files=None,
             no_cache=False, **kwargs):
        """Alyx REST API wrapper.

        If no arguments are passed, lists available endpoints.

        Parameters
        ----------
        url : str
            Endpoint name.
        action : str
            One of 'list', 'create', 'read', 'update', 'partial_update', 'delete'.
        id : str, uuid.UUID
            Lookup string for actions 'read', 'update', 'partial_update', and 'delete'.
        data : dict
            Data dictionary for actions 'update', 'partial_update' and 'create'.
        files : dict, tuple
            Optional file(s) to upload.
        no_cache : bool
            If True, the `list` and `read` actions are performed without using the REST
            response cache.
        kwargs
            Filters as per the Alyx REST documentation
            c.f. https://openalyx.internationalbrainlab.org/docs/

        Returns
        -------
        list, dict
            List of queried dicts ('list') or dict (other actions).

        Examples
        --------
        List available endpoints

        >>> client = AlyxClient()
        ... client.rest()

        List available actions for the 'subjects' endpoint

        >>> client.rest('subjects')

        Example REST endpoint with all actions

        >>> client.rest('subjects', 'list')
        >>> client.rest('subjects', 'list', field_filter1='filterval')
        >>> client.rest('subjects', 'create', data=sub_dict)
        >>> client.rest('subjects', 'read', id='nickname')
        >>> client.rest('subjects', 'update', id='nickname', data=sub_dict)
        >>> client.rest('subjects', 'partial_update', id='nickname', data=sub_dict)
        >>> client.rest('subjects', 'delete', id='nickname')
        >>> client.rest('notes', 'create', data=nd, files={'image': open(image_file, 'rb')})

        """
        # if endpoint is None, list available endpoints
        if not url:
            pprint(self.list_endpoints())
            return
        # remove beginning slash if any
        if url.startswith('/'):
            url = url[1:]
        # and split to the next slash or question mark
        endpoint = re.findall("^/*[^?/]*", url)[0].replace('/', '')
        # make sure the queried endpoint exists, if not throw an informative error
        if endpoint not in self.rest_schemes.keys():
            av = [k for k in self.rest_schemes.keys() if not k.startswith('_') and k]
            raise ValueError('REST endpoint "' + endpoint + '" does not exist. Available ' +
                             'endpoints are \n ' + '\n '.join(av))
        endpoint_scheme = self.rest_schemes[endpoint]
        # on a filter request, override the default action parameter
        if '?' in url:
            action = 'list'
        # if action is None, list available actions for the required endpoint
        if not action:
            pprint(list(endpoint_scheme.keys()))
            self.print_endpoint_info(endpoint)
            return
        # make sure the desired action exists, if not throw an informative error
        if action not in endpoint_scheme:
            raise ValueError('Action "' + action + '" for REST endpoint "' + endpoint + '" does ' +
                             'not exist. Available actions are: ' +
                             '\n ' + '\n '.join(endpoint_scheme.keys()))
        # the actions below require an id in the URL, warn and help the user
        if action in ['read', 'update', 'partial_update', 'delete'] and not id:
            _logger.warning('REST action "' + action + '" requires an ID in the URL: ' +
                            endpoint_scheme[action]['url'])
            return
        # the actions below require a data dictionary, warn and help the user with fields list
        data_required = 'fields' in endpoint_scheme[action]
        if action in ['create', 'update', 'partial_update'] and data_required and not data:
            pprint(endpoint_scheme[action]['fields'])
            for act in endpoint_scheme[action]['fields']:
                print("'" + act['name'] + "': ...,")
            _logger.warning('REST action "' + action + '" requires a data dict with above keys')
            return

        # clobber=True means remote request always made, expires=True means response is not cached
        cache_args = {'clobber': no_cache, 'expires': kwargs.pop('expires', False) or no_cache}
        if action == 'list':
            # list doesn't require id nor
            assert endpoint_scheme[action]['action'] == 'get'
            # add to url data if it is a string
            if id:
                # this is a special case of the list where we query a uuid. Usually read is better
                if 'django' in kwargs.keys():
                    kwargs['django'] = kwargs['django'] + ','
                else:
                    kwargs['django'] = ''
                kwargs['django'] = f"{kwargs['django']}pk,{id}"
            # otherwise, look for a dictionary of filter terms
            if kwargs:
                # Convert all lists in query params to comma separated list
                query_params = {k: ','.join(map(str, ensure_list(v))) for k, v in kwargs.items()}
                url = update_url_params(url, query_params)
            return self.get('/' + url, **cache_args)
        if not isinstance(id, str) and id is not None:
            id = str(id)  # e.g. may be uuid.UUID
        if action == 'read':
            assert endpoint_scheme[action]['action'] == 'get'
            return self.get('/' + endpoint + '/' + id.split('/')[-1], **cache_args)
        elif action == 'create':
            assert endpoint_scheme[action]['action'] == 'post'
            return self.post('/' + endpoint, data=data, files=files)
        elif action == 'delete':
            assert endpoint_scheme[action]['action'] == 'delete'
            return self.delete('/' + endpoint + '/' + id.split('/')[-1])
        elif action == 'partial_update':
            assert endpoint_scheme[action]['action'] == 'patch'
            return self.patch('/' + endpoint + '/' + id.split('/')[-1], data=data, files=files)
        elif action == 'update':
            assert endpoint_scheme[action]['action'] == 'put'
            return self.put('/' + endpoint + '/' + id.split('/')[-1], data=data, files=files)

    # JSON field interface convenience methods
    def _check_inputs(self, endpoint: str) -> None:
        # make sure the queried endpoint exists, if not throw an informative error
        if endpoint not in self.rest_schemes.keys():
            av = (k for k in self.rest_schemes.keys() if not k.startswith('_') and k)
            raise ValueError('REST endpoint "' + endpoint + '" does not exist. Available ' +
                             'endpoints are \n ' + '\n '.join(av))
        return

    def json_field_write(
            self,
            endpoint: str = None,
            uuid: str = None,
            field_name: str = None,
            data: dict = None
    ) -> dict:
        """Write data to JSON field.

        NOTE: Destructive write! WILL NOT CHECK IF DATA EXISTS

        Parameters
        ----------
        endpoint : str, None
            Valid alyx endpoint, defaults to None.
        uuid : str, uuid.UUID, None
            UUID or lookup name for endpoint.
        field_name : str, None
            Valid json field name, defaults to None.
        data : dict, None
            Data to write to json field, defaults to None.

        Returns
        -------
        dict
            Written data dict.

        """
        self._check_inputs(endpoint)
        # Prepare data to patch
        patch_dict = {field_name: data}
        # Upload new extended_qc to session
        ret = self.rest(endpoint, 'partial_update', id=uuid, data=patch_dict)
        return ret[field_name]

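    # Editor's note: illustrative sketch, not part of webclient.py. This is a destructive write:
    # the whole JSON field is replaced with `data` (no merge). Endpoint, uuid and payload below
    # are placeholders.
    # >>> alyx.json_field_write('sessions', uuid='<session-uuid>', field_name='extended_qc',
    # ...                       data={'task': 'PASS'})
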
    def json_field_update(
            self,
            endpoint: str = None,
            uuid: str = None,
            field_name: str = 'json',
            data: dict = None
    ) -> dict:
        """Non-destructive update of JSON field of endpoint for object.

        Will update the field_name of the object with pk = uuid of the given endpoint.
        If data has keys with the same names as existing keys it will squash the old
        values (uses the dict.update() method).

        Parameters
        ----------
        endpoint : str
            Alyx REST endpoint to hit.
        uuid : str, uuid.UUID
            UUID or lookup name of object.
        field_name : str
            Name of the json field.
        data : dict
            A dictionary with fields to be updated.

        Returns
        -------
        dict
            New patched json field contents as dict.

        Examples
        --------
        >>> client = AlyxClient()
        >>> client.json_field_update('sessions', 'eid_str', 'extended_qc', {'key': 'value'})

        """
        self._check_inputs(endpoint)
        # Load current json field contents
        current = self.rest(endpoint, 'read', id=uuid)[field_name]
        if current is None:
            current = {}

        if not isinstance(current, dict):
            _logger.warning(
                f'Current json field "{field_name}" does not contain a dict, aborting update'
            )
            return current

        # Patch current dict with new data
        current.update(data)
        # Prepare data to patch
        patch_dict = {field_name: current}
        # Upload new extended_qc to session
        ret = self.rest(endpoint, 'partial_update', id=uuid, data=patch_dict)
        return ret[field_name]

    def json_field_remove_key(
            self,
            endpoint: str = None,
            uuid: str = None,
            field_name: str = 'json',
            key: str = None
    ) -> Optional[dict]:
        """Remove a given key from a JSON field dict and re-upload it to Alyx.

        Needs endpoint, UUID and json field name.

        Parameters
        ----------
        endpoint : str
            Endpoint to hit, defaults to None.
        uuid : str, uuid.UUID
            UUID or lookup name for endpoint.
        field_name : str
            JSON field name of object, defaults to 'json'.
        key : str
            Key name of dictionary inside object, defaults to None.

        Returns
        -------
        dict
            New content of json field.

        """
        self._check_inputs(endpoint)
        current = self.rest(endpoint, 'read', id=uuid)[field_name]
        # If no contents, cannot remove key, return
        if current is None:
            return current
        # If contents are not a dict, cannot remove key, return None
        if isinstance(current, str):
            _logger.warning(f'Cannot remove key {key}; content of json field is of type str')
            return None
        # If key not present in contents of json field, cannot remove key, return contents
        if current.get(key, None) is None:
            _logger.warning(
                f'{key}: Key not found in endpoint {endpoint} field {field_name}'
            )
            return current
        _logger.info(f'Removing key from dict: "{key}"')
        current.pop(key)
        # Re-write contents without removed key
        written = self.json_field_write(
            endpoint=endpoint, uuid=uuid, field_name=field_name, data=current
        )
        return written

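    # Editor's note: illustrative sketch, not part of webclient.py. The method reads the field,
    # pops the key locally, then writes the remainder back via json_field_write; names below are
    # placeholders.
    # >>> remaining = alyx.json_field_remove_key('sessions', uuid='<session-uuid>',
    # ...                                        field_name='extended_qc', key='task')
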
    def json_field_delete(
            self, endpoint: str = None, uuid: str = None, field_name: str = None
    ) -> None:
        """Set an entire field to null.

        Note that this deletes all data from a given field. To delete only a single key from a
        given JSON field, use `json_field_remove_key`.

        Parameters
        ----------
        endpoint : str
            Endpoint to hit, defaults to None.
        uuid : str, uuid.UUID
            UUID or lookup name for endpoint.
        field_name : str
            The field name of object (e.g. 'json', 'name', 'extended_qc'), defaults to None.

        Returns
        -------
        None
            New content of json field.

        """
        self._check_inputs(endpoint)
        _ = self.rest(endpoint, 'partial_update', id=uuid, data={field_name: None})
        return _[field_name]

    def clear_rest_cache(self):
        """Clear all REST response cache files for the base url."""
        for file in self.cache_dir.joinpath('.rest').glob('*'):
            file.unlink()