ONE-api 3.0b1__py3-none-any.whl → 3.0b4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33) hide show
  1. {ONE_api-3.0b1.dist-info → ONE_api-3.0b4.dist-info}/LICENSE +21 -21
  2. {ONE_api-3.0b1.dist-info → ONE_api-3.0b4.dist-info}/METADATA +115 -115
  3. ONE_api-3.0b4.dist-info/RECORD +37 -0
  4. one/__init__.py +2 -2
  5. one/alf/__init__.py +1 -1
  6. one/alf/cache.py +640 -653
  7. one/alf/exceptions.py +105 -105
  8. one/alf/io.py +876 -876
  9. one/alf/path.py +1450 -1450
  10. one/alf/spec.py +519 -504
  11. one/api.py +2949 -2973
  12. one/converters.py +850 -850
  13. one/params.py +414 -414
  14. one/registration.py +845 -845
  15. one/remote/__init__.py +1 -1
  16. one/remote/aws.py +313 -313
  17. one/remote/base.py +142 -142
  18. one/remote/globus.py +1254 -1254
  19. one/tests/fixtures/params/.caches +6 -6
  20. one/tests/fixtures/params/.test.alyx.internationalbrainlab.org +8 -8
  21. one/tests/fixtures/rest_responses/1f187d80fd59677b395fcdb18e68e4401bfa1cc9 +1 -1
  22. one/tests/fixtures/rest_responses/47893cf67c985e6361cdee009334963f49fb0746 +1 -1
  23. one/tests/fixtures/rest_responses/535d0e9a1e2c1efbdeba0d673b131e00361a2edb +1 -1
  24. one/tests/fixtures/rest_responses/6dc96f7e9bcc6ac2e7581489b9580a6cd3f28293 +1 -1
  25. one/tests/fixtures/rest_responses/db1731fb8df0208944ae85f76718430813a8bf50 +1 -1
  26. one/tests/fixtures/rest_responses/dcce48259bb929661f60a02a48563f70aa6185b3 +1 -1
  27. one/tests/fixtures/rest_responses/f530d6022f61cdc9e38cc66beb3cb71f3003c9a1 +1 -1
  28. one/tests/fixtures/test_dbs.json +14 -14
  29. one/util.py +524 -524
  30. one/webclient.py +1366 -1354
  31. ONE_api-3.0b1.dist-info/RECORD +0 -37
  32. {ONE_api-3.0b1.dist-info → ONE_api-3.0b4.dist-info}/WHEEL +0 -0
  33. {ONE_api-3.0b1.dist-info → ONE_api-3.0b4.dist-info}/top_level.txt +0 -0
one/webclient.py CHANGED
@@ -1,1354 +1,1366 @@
1
- """API for interacting with a remote Alyx instance through REST.
2
-
3
- The AlyxClient class contains methods for making remote Alyx REST queries and downloading remote
4
- files through Alyx.
5
-
6
- Examples
7
- --------
8
- >>> alyx = AlyxClient(
9
- ... username='test_user', password='TapetesBloc18',
10
- ... base_url='https://test.alyx.internationalbrainlab.org')
11
-
12
- List subjects
13
-
14
- >>> subjects = alyx.rest('subjects', 'list')
15
-
16
- Create a subject
17
-
18
- >>> record = {
19
- ... 'nickname': nickname,
20
- ... 'responsible_user': 'olivier',
21
- ... 'birth_date': '2019-06-15',
22
- ... 'death_date': None,
23
- ... 'lab': 'cortexlab',
24
- ... }
25
- >>> new_subj = alyx.rest('subjects', 'create', data=record)
26
-
27
- Download a remote file, given a local path
28
-
29
- >>> url = 'zadorlab/Subjects/flowers/2018-07-13/1/channels.probe.npy'
30
- >>> local_path = alyx.download_file(url, target_dir='zadorlab/Subjects/flowers/2018-07-13/1/')
31
-
32
- """
33
- import json
34
- import logging
35
- import math
36
- import re
37
- import functools
38
- import urllib.request
39
- from urllib.error import HTTPError
40
- import urllib.parse
41
- from collections.abc import Mapping
42
- from typing import Optional
43
- from datetime import datetime, timedelta
44
- from pathlib import Path
45
- from weakref import ReferenceType
46
- import warnings
47
- import hashlib
48
- import zipfile
49
- import tempfile
50
- from getpass import getpass
51
- from contextlib import contextmanager
52
-
53
- import requests
54
- from tqdm import tqdm
55
-
56
- from pprint import pprint
57
- import one.params
58
- from iblutil.io import hashfile
59
- from iblutil.io.params import set_hidden
60
- from iblutil.util import ensure_list
61
- import concurrent.futures
62
- _logger = logging.getLogger(__name__)
63
-
64
-
65
def _cache_response(method):
    """Decorator for the generic request method for caching REST responses.

    Caches the result of the query and on subsequent calls, returns cache instead of hitting the
    database.

    Parameters
    ----------
    method : function
        Function to wrap (i.e. AlyxClient._generic_request).

    Returns
    -------
    function
        Handle to wrapped method.

    """

    @functools.wraps(method)
    def wrapper_decorator(alyx_client, *args, expires=None, clobber=False, **kwargs):
        """REST caching wrapper.

        Parameters
        ----------
        alyx_client : AlyxClient
            An instance of the AlyxClient class.
        args : any
            Positional arguments for applying to wrapped function.
        expires : bool, datetime.timedelta
            An optional timedelta for how long cached response is valid. If True, the cached
            response will not be used on subsequent calls. If None, the default expiry is applied.
        clobber : bool
            If True any existing cached response is overwritten.
        **kwargs
            Keyword arguments for applying to wrapped function.

        Returns
        -------
        dict
            The REST response JSON either from cached file or directly from remote.

        """
        # NB: expires=False falls through to the client default here because `or`
        # treats False like None
        expires = expires or alyx_client.default_expiry
        mode = (alyx_client.cache_mode or '').casefold()
        # Only cache requests whose HTTP verb matches the client cache mode
        # (args[0] is the requests function, e.g. requests.get); '*' caches all verbs
        if args[0].__name__ != mode and mode != '*':
            return method(alyx_client, *args, **kwargs)
        # Check cache; the cache filename is the SHA-1 digest of the query URL (args[1])
        rest_cache = alyx_client.cache_dir.joinpath('.rest')
        sha1 = hashlib.sha1()
        sha1.update(bytes(args[1], 'utf-8'))
        name = sha1.hexdigest()
        # Reversible but length may exceed 255 chars
        # name = base64.urlsafe_b64encode(args[2].encode('UTF-8')).decode('UTF-8')
        files = list(rest_cache.glob(name))
        cached = None
        if len(files) == 1 and not clobber:
            _logger.debug('loading REST response from cache')
            with open(files[0], 'r') as f:
                # Cache file holds a (response, expiry ISO datetime) pair
                cached, when = json.load(f)
            if datetime.fromisoformat(when) > datetime.now():
                return cached
        try:
            response = method(alyx_client, *args, **kwargs)
        except requests.exceptions.ConnectionError as ex:
            # Offline fallback: serve the stale cached response if one was loaded
            if cached and not clobber:
                warnings.warn('Failed to connect, returning cached response', RuntimeWarning)
                return cached
            raise ex  # No cache and can't connect to database; re-raise

        # Save response into cache
        if not rest_cache.exists():
            rest_cache.mkdir(parents=True)
            rest_cache = set_hidden(rest_cache, True)

        _logger.debug('caching REST response')
        # expires=True stores the response with an already-elapsed (zero) expiry,
        # i.e. it is written to disk but never served from cache
        expiry_datetime = datetime.now() + (timedelta() if expires is True else expires)
        with open(rest_cache / name, 'w') as f:
            json.dump((response, expiry_datetime.isoformat()), f)
        return response

    return wrapper_decorator
146
-
147
-
148
@contextmanager
def no_cache(ac=None):
    """Temporarily turn off the REST cache for a given Alyx instance.

    This function is particularly useful when calling ONE methods in remote mode.

    Parameters
    ----------
    ac : AlyxClient
        An instance of the AlyxClient to modify. If None, a new object is instantiated.

    Yields
    ------
    AlyxClient
        The instance of Alyx with cache disabled.

    Examples
    --------
    >>> from one.api import ONE
    >>> with no_cache(ONE().alyx):
    ...     eids = ONE().search(subject='foobar', query_type='remote')

    """
    client = ac or AlyxClient()
    previous_mode = client.cache_mode
    client.cache_mode = None  # a None cache mode disables the REST response cache
    try:
        yield client
    finally:
        # Restore the original mode even if the body raised
        client.cache_mode = previous_mode
178
-
179
-
180
class _PaginatedResponse(Mapping):
    """Emulate a list from a paginated response.

    Provides cache functionality.

    Examples
    --------
    >>> r = _PaginatedResponse(client, response)

    """

    def __init__(self, alyx, rep, cache_args=None):
        """Emulate a list from a paginated response.

        Parameters
        ----------
        alyx : AlyxClient
            An instance of an AlyxClient associated with the REST response
        rep : dict
            A paginated REST response JSON dictionary
        cache_args : dict
            A dict of kwargs to pass to _cache_response decorator upon subsequent requests

        """
        self.alyx = alyx
        # Total number of records across all pages
        self.count = rep['count']
        # Page size, inferred from the length of the first page of results
        self.limit = len(rep['results'])
        self._cache_args = cache_args or {}
        # URL of the next page; re-used (with fresh limit/offset params) for later pages
        self.query = rep['next']
        # init the cache, list with None with count size
        self._cache = [None] * self.count
        # fill the cache with results of the query
        for i in range(self.limit):
            self._cache[i] = rep['results'][i]
        self._callbacks = set()

    def add_callback(self, cb):
        """Add a callback function to use each time a new page is fetched.

        The callback function will be called with the page results each time :meth:`populate`
        is called.

        Parameters
        ----------
        cb : callable
            A callable that takes the results of each paginated response.

        Raises
        ------
        TypeError
            `cb` is not callable.

        """
        if not callable(cb):
            raise TypeError(f'Expected type "callable", got "{type(cb)}" instead')
        else:
            self._callbacks.add(cb)

    def __len__(self):
        # The emulated list length is the total record count, not the cached page size
        return self.count

    def __getitem__(self, item):
        if isinstance(item, slice):
            # Fetch pages until every record within the slice is populated
            # NOTE(review): a slice with start=None would raise TypeError on the
            # comparison below — callers appear to pass explicit bounds; confirm
            while None in self._cache[item]:
                # If slice start index is -ve, convert to +ve index
                i = self.count + item.start if item.start < 0 else item.start
                self.populate(i + self._cache[item].index(None))
        elif self._cache[item] is None:
            # If index is -ve, convert to +ve
            self.populate(self.count + item if item < 0 else item)
        return self._cache[item]

    def populate(self, idx):
        """Populate response cache with new page of results.

        Fetches the specific page of results containing the index passed and populates
        stores the results in the :prop:`_cache` property.

        Parameters
        ----------
        idx : int
            The index of a given record to fetch.

        """
        # Round the index down to the first record of its page
        offset = self.limit * math.floor(idx / self.limit)
        query = update_url_params(self.query, {'limit': self.limit, 'offset': offset})
        res = self.alyx._generic_request(requests.get, query, **self._cache_args)
        if self.count != res['count']:
            # Records were added/removed on the server since the first page was fetched
            warnings.warn(
                f'remote results for {urllib.parse.urlsplit(query).path} endpoint changed; '
                f'results may be inconsistent', RuntimeWarning)
        # Clip in case the remote now returns more records than originally counted
        for i, r in enumerate(res['results'][:self.count - offset]):
            self._cache[i + offset] = res['results'][i]
        # Notify callbacks
        pending_removal = []
        for callback in self._callbacks:
            # Handle weak reference callbacks first
            if isinstance(callback, ReferenceType):
                wf = callback
                if (callback := wf()) is None:
                    # Referent was garbage-collected; schedule the dead weakref for removal
                    pending_removal.append(wf)
                    continue
            callback(res['results'])
        for wf in pending_removal:
            self._callbacks.discard(wf)
        # When cache is complete, clear our callbacks
        # NOTE(review): this treats any falsy record (e.g. an empty dict) as missing,
        # not just None — confirm records are always truthy
        if all(reversed(self._cache)):
            self._callbacks.clear()

    def __iter__(self):
        for i in range(self.count):
            yield self.__getitem__(i)
288
-
289
-
290
def update_url_params(url: str, params: dict) -> str:
    """Add/update the query parameters of a URL and make url safe.

    Parameters
    ----------
    url : str
        A URL string with which to update the query parameters
    params : dict
        A dict of new parameters. For multiple values for the same query, use a list (see example)

    Returns
    -------
    str
        A new URL with said parameters updated

    Examples
    --------
    >>> update_url_params('website.com/?q=', {'pg': 5})
    'website.com/?pg=5'

    >>> update_url_params('website.com?q=xxx', {'pg': 5, 'foo': ['bar', 'baz']})
    'website.com?q=xxx&pg=5&foo=bar&foo=baz'

    """
    # Strip any percent-encoding before parsing
    decoded = urllib.parse.unquote(url)
    parts = urllib.parse.urlsplit(decoded)
    # Existing query string -> dict (blank values such as 'q=' are dropped)
    query = urllib.parse.parse_qs(parts.query, keep_blank_values=False)
    # Overlay the new parameters and re-encode (doseq expands list values)
    query.update(params)
    encoded = urllib.parse.urlencode(query, doseq=True)
    return parts._replace(query=encoded).geturl()
325
-
326
-
327
def http_download_file_list(links_to_file_list, **kwargs):
    """Download a list of files from a remote HTTP server from a list of links.

    Generates up to 4 separate threads to handle downloads.
    Same options behaviour as http_download_file.

    Parameters
    ----------
    links_to_file_list : list
        List of http links to files.
    **kwargs
        Optional arguments to pass to http_download_file.

    Returns
    -------
    list of pathlib.Path
        A list of the local full path of the downloaded files.

    """
    links_to_file_list = list(links_to_file_list)  # In case generator was passed
    n_threads = 4  # Max number of threads
    outputs = []
    target_dir = kwargs.pop('target_dir', None)
    # Ensure target dir is the same length as the url list; a scalar target is
    # broadcast to every link
    if target_dir is None or isinstance(target_dir, (str, Path)):
        target_dir = [target_dir] * len(links_to_file_list)
    assert len(target_dir) == len(links_to_file_list)
    zipped = zip(links_to_file_list, target_dir)
    # using with statement to ensure threads are cleaned up promptly
    with concurrent.futures.ThreadPoolExecutor(max_workers=n_threads) as executor:
        # Multithreading load operations
        futures = [executor.submit(
            http_download_file, link, target_dir=target, **kwargs) for link, target in zipped]
        # NB: a leftover no-op statement re-popping target_dir was removed here
        # TODO Reintroduce variable timeout value based on file size and download speed of 5 Mb/s?
        # timeout = reduce(lambda x, y: x + (y.get('file_size', 0) or 0), dsets, 0) / 625000 ?
        concurrent.futures.wait(futures, timeout=None)
        # build return list; result() re-raises any exception from the worker thread
        for future in futures:
            outputs.append(future.result())
    # if returning md5, separate list of tuples into two lists: (files, md5)
    return list(zip(*outputs)) if kwargs.get('return_md5', False) else outputs
369
-
370
-
371
def http_download_file(full_link_to_file, chunks=None, *, clobber=False, silent=False,
                       username='', password='', target_dir='', return_md5=False, headers=None):
    """Download a file from a remote HTTP server.

    Parameters
    ----------
    full_link_to_file : str
        HTTP link to the file
    chunks : tuple of ints
        Chunks to download as (first_byte, n_bytes); sent as an HTTP Range header
    clobber : bool
        If True, force overwrite the existing file
    silent : bool
        If True, suppress download progress bar
    username : str
        User authentication for password protected file server
    password : str
        Password authentication for password protected file server
    target_dir : str, pathlib.Path
        Directory in which files are downloaded; defaults to user's Download directory
    return_md5 : bool
        If True an MD5 hash of the file is additionally returned
    headers : dict
        Additional headers to add to the request (auth tokens etc.)

    Returns
    -------
    pathlib.Path
        The full file path of the downloaded file
    str
        The file MD5 hex digest (only when return_md5 is True)

    """
    if not full_link_to_file:
        return (None, None) if return_md5 else None

    # makes sure special characters get encoded ('#' in file names for example)
    surl = urllib.parse.urlsplit(full_link_to_file, allow_fragments=False)
    full_link_to_file = surl._replace(path=urllib.parse.quote(surl.path)).geturl()

    # default cache directory is the home dir
    if not target_dir:
        target_dir = Path.home().joinpath('Downloads')

    # This should be the base url you wanted to access.
    base_url, name = full_link_to_file.rsplit('/', 1)
    file_name = Path(target_dir, name)

    # do not overwrite an existing file unless specified
    if not clobber and file_name.exists():
        return (file_name, hashfile.md5(file_name)) if return_md5 else file_name

    # Create a password manager
    manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    if username and password:
        manager.add_password(None, base_url, username, password)

    # Create an authentication handler using the password manager
    auth = urllib.request.HTTPBasicAuthHandler(manager)

    # Create an opener that will replace the default urlopen method on further calls
    opener = urllib.request.build_opener(auth)
    urllib.request.install_opener(opener)

    # Support for partial download.
    req = urllib.request.Request(full_link_to_file)
    if chunks is not None:
        first_byte, n_bytes = chunks
        req.add_header('Range', 'bytes=%d-%d' % (first_byte, first_byte + n_bytes - 1))

    # add additional headers
    if headers is not None:
        for k in headers:
            req.add_header(k, headers[k])

    # Open the url and get the length
    try:
        u = urllib.request.urlopen(req)
    except HTTPError as e:
        _logger.error(f'{str(e)} {full_link_to_file}')
        raise e

    file_size = int(u.getheader('Content-length'))
    if not silent:
        print(f'Downloading: {file_name} Bytes: {file_size}')
    block_sz = 8192 * 64 * 8

    md5 = hashlib.md5()
    # Context managers guarantee the HTTP response and output file are closed even
    # if the transfer raises part-way (previously the file handle leaked on error)
    with u, open(file_name, 'wb') as f, \
            tqdm(total=file_size / 1024 / 1024, disable=silent) as pbar:
        while True:
            buffer = u.read(block_sz)
            if not buffer:
                break
            f.write(buffer)
            if return_md5:
                md5.update(buffer)
            pbar.update(len(buffer) / 1024 / 1024)

    return (file_name, md5.hexdigest()) if return_md5 else file_name
470
-
471
-
472
def file_record_to_url(file_records) -> list:
    """Translate a Json dictionary to an usable http url for downloading files.

    Parameters
    ----------
    file_records : dict
        JSON containing a 'data_url' field

    Returns
    -------
    list of str
        A list of full data urls

    """
    # Keep only the records that actually carry a download URL
    return [record['data_url'] for record in file_records if record['data_url'] is not None]
491
-
492
-
493
def dataset_record_to_url(dataset_record) -> list:
    """Extract a list of files urls from a list of dataset queries.

    Parameters
    ----------
    dataset_record : list, dict
        Dataset JSON from a REST request

    Returns
    -------
    list of str
        A list of file urls corresponding to the datasets records

    """
    # Accept a single record dict as well as a list of records
    records = [dataset_record] if isinstance(dataset_record, dict) else dataset_record
    urls = []
    for record in records:
        urls.extend(file_record_to_url(record['file_records']))
    return urls
513
-
514
-
515
- class AlyxClient:
516
- """Class that implements simple GET/POST wrappers for the Alyx REST API.
517
-
518
- See https://openalyx.internationalbrainlab.org/docs
519
- """
520
-
521
- _token = None
522
- _headers = {} # Headers for REST requests only
523
- user = None
524
- """str: The Alyx username."""
525
- base_url = None
526
- """str: The Alyx database URL."""
527
-
528
    def __init__(self, base_url=None, username=None, password=None,
                 cache_dir=None, silent=False, cache_rest='GET'):
        """Create a client instance that allows to GET and POST to the Alyx server.

        For One, constructor attempts to authenticate with credentials in params.py.
        For standalone cases, AlyxClient(username='', password='', base_url='').

        Parameters
        ----------
        base_url : str
            Alyx server address, including port and protocol.
        username : str
            Alyx database user.
        password : str
            Alyx database password.
        cache_dir : str, pathlib.Path
            The default root download location.
        silent : bool
            If true, user prompts and progress bars are suppressed.
        cache_rest : str, None
            Which type of http method to apply cache to; if '*', all requests are cached.

        """
        self.silent = silent
        # Load (possibly cached) parameters for this database; may prompt if not silent
        self._par = one.params.get(client=base_url, silent=self.silent, username=username)
        self.base_url = base_url or self._par.ALYX_URL
        self._par = self._par.set('CACHE_DIR', cache_dir or self._par.CACHE_DIR)
        # Authenticate now only if explicit credentials were given; otherwise
        # authentication is deferred until the first request
        if username or password:
            self.authenticate(username, password)
        self._rest_schemes = None
        # the mixed accept application may cause errors sometimes, only necessary for the docs
        self._headers = {**self._headers, 'Accept': 'application/json'}
        # REST cache parameters
        # The default length of time that cache file is valid for,
        # The default expiry is overridden by the `expires` kwarg. If False, the caching is
        # turned off.
        self.default_expiry = timedelta(minutes=5)
        self.cache_mode = cache_rest
        # Identity used to distinguish this instance (e.g. in caches keyed by client)
        self._obj_id = id(self)
569
-
570
- @property
571
- def rest_schemes(self):
572
- """dict: The REST endpoints and their parameters."""
573
- # Delayed fetch of rest schemes speeds up instantiation
574
- if not self._rest_schemes:
575
- self._rest_schemes = self.get('/docs', expires=timedelta(weeks=1))
576
- return self._rest_schemes
577
-
578
- @property
579
- def cache_dir(self):
580
- """pathlib.Path: The location of the downloaded file cache."""
581
- return Path(self._par.CACHE_DIR)
582
-
583
- @cache_dir.setter
584
- def cache_dir(self, cache_dir):
585
- cache_dir = Path(cache_dir)
586
- cache_dir.mkdir(parents=True, exist_ok=True)
587
- self._par = self._par.set('CACHE_DIR', cache_dir)
588
-
589
- @property
590
- def is_logged_in(self):
591
- """bool: Check if user logged into Alyx database; True if user is authenticated."""
592
- return bool(self.user and self._token and 'Authorization' in self._headers)
593
-
594
- def list_endpoints(self):
595
- """Return a list of available REST endpoints.
596
-
597
- Returns
598
- -------
599
- List of REST endpoint strings.
600
-
601
- """
602
- EXCLUDE = ('_type', '_meta', '', 'auth-token')
603
- return sorted(x for x in self.rest_schemes.keys() if x not in EXCLUDE)
604
-
605
    def print_endpoint_info(self, endpoint, action=None):
        """Print the available actions and query parameters for a given REST endpoint.

        Parameters
        ----------
        endpoint : str
            An Alyx REST endpoint to query.
        action : str
            An optional action (e.g. 'list') to print. If None, all actions are printed.

        Returns
        -------
        dict, list, None
            A dictionary of endpoint query parameter details, a list of parameter details if
            action is not None, or None if the endpoint does not exist.

        """
        rs = self.rest_schemes
        if endpoint not in rs:
            # print returns None, so unknown endpoints yield None
            return print(f'Endpoint "{endpoint}" does not exist')

        for _action in (rs[endpoint] if action is None else [action]):
            doc = []
            pprint(_action)
            for f in rs[endpoint][_action]['fields']:
                required = ' (required): ' if f.get('required', False) else ': '
                doc.append(f'\t"{f["name"]}"{required}{f["schema"]["_type"]}'
                           f', {f["schema"]["description"]}')
            doc.sort()
            # List required parameters before optional ones
            [print(d) for d in doc if '(required)' in d]
            [print(d) for d in doc if '(required)' not in d]
        # Return a shallow copy so callers cannot mutate the cached schemes
        return (rs[endpoint] if action is None else rs[endpoint][action]).copy()
637
-
638
    @_cache_response
    def _generic_request(self, reqfunction, rest_query, data=None, files=None):
        """Run an authenticated REST request and return the JSON-decoded response.

        Parameters
        ----------
        reqfunction : function
            A requests function to execute, e.g. requests.get, requests.post.
        rest_query : str
            The REST query, either an absolute URL or a path relative to base_url.
        data : dict, list, str
            The request body; dicts and lists are JSON-encoded before sending.
        files : dict, tuple
            Files to upload, passed through to the requests function as multipart data.

        Returns
        -------
        dict, list, None
            The JSON-decoded response; None for 204 (no content) responses.

        Raises
        ------
        requests.HTTPError
            Response status was not 200, 201 or 204 and re-authentication did not fix it.

        """
        if not self.is_logged_in:
            self.authenticate(username=self.user)
        # makes sure the base url is the one from the instance
        rest_query = rest_query.replace(self.base_url, '')
        if not rest_query.startswith('/'):
            rest_query = '/' + rest_query
        _logger.debug(f'{self.base_url + rest_query}, headers: {self._headers}')
        headers = self._headers.copy()
        if files is None:
            data = json.dumps(data) if isinstance(data, dict) or isinstance(data, list) else data
            headers['Content-Type'] = 'application/json'
        if rest_query.startswith('/docs'):
            # the mixed accept application may cause errors sometimes, only necessary for the docs
            headers['Accept'] = 'application/coreapi+json'
        r = reqfunction(self.base_url + rest_query,
                        stream=True, headers=headers, data=data, files=files)
        if r and r.status_code in (200, 201):
            return json.loads(r.text)
        elif r and r.status_code == 204:
            return
        if r.status_code == 403 and '"Invalid token."' in r.text:
            _logger.debug('Token invalid; Attempting to re-authenticate...')
            # Log out in order to flush stale token. At this point we no longer have the password
            # but if the user re-instantiates with a password arg it will request a new token.
            username = self.user
            if self.silent:  # no need to log out otherwise; user will be prompted for password
                self.logout()
            self.authenticate(username=username, force=True)
            # Retry the original request with the fresh token
            return self._generic_request(reqfunction, rest_query, data=data, files=files)
        else:
            _logger.debug('Response text raw: ' + r.text)
            try:
                message = json.loads(r.text)
                message.pop('status_code', None)  # Get status code from response object instead
                message = message.get('detail') or message  # Get details if available
                _logger.debug(message)
            except json.decoder.JSONDecodeError:
                message = r.text
            raise requests.HTTPError(r.status_code, rest_query, message, response=r)
679
-
680
    def authenticate(self, username=None, password=None, cache_token=True, force=False):
        """Fetch token from the Alyx REST API for authenticating request headers.

        Credentials are loaded via one.params.

        Parameters
        ----------
        username : str
            Alyx username. If None, token not cached and not silent, user is prompted.
        password : str
            Alyx password. If None, token not cached and not silent, user is prompted.
        cache_token : bool
            If true, the token is cached for subsequent auto-logins.
        force : bool
            If true, any cached token is ignored.

        """
        # Get username, falling back on the params file, then the current user
        if username is None:
            username = getattr(self._par, 'ALYX_LOGIN', self.user)
        if username is None and not self.silent:
            username = input('Enter Alyx username:')

        # If user passes in a password, force re-authentication even if token cached
        if password is not None:
            if not force:
                _logger.debug('Forcing token request with provided password')
                force = True
        # Check if token cached; if so use it without contacting the server
        if not force and getattr(self._par, 'TOKEN', False) and username in self._par.TOKEN:
            self._token = self._par.TOKEN[username]
            self._headers = {
                'Authorization': f'Token {list(self._token.values())[0]}',
                'Accept': 'application/json'}
            self.user = username
            return

        # Get password, falling back on the params file, then a prompt
        if password is None:
            password = getattr(self._par, 'ALYX_PWD', None)
        if password is None:
            if self.silent:
                # Cannot prompt in silent mode; warn and continue (the token
                # request below will fail with an auth error)
                warnings.warn(
                    'No password or cached token in silent mode. '
                    'Please run the following to re-authenticate:\n\t'
                    'AlyxClient(silent=False).authenticate'
                    '(username=<username>, force=True)', UserWarning)
            else:
                password = getpass(f'Enter Alyx password for "{username}":')
        # Remove previous token
        self._clear_token(username)
        try:
            credentials = {'username': username, 'password': password}
            rep = requests.post(self.base_url + '/auth-token', data=credentials)
        except requests.exceptions.ConnectionError:
            raise ConnectionError(
                f'Can\'t connect to {self.base_url}.\n' +
                'Check your internet connections and Alyx database firewall'
            )
        # Assign token or raise exception on auth error
        if rep.ok:
            self._token = rep.json()
            assert list(self._token.keys()) == ['token']
        else:
            if rep.status_code == 400:  # Auth error; re-raise with details
                # Redact the password in the error message; only its length is leaked
                redacted = '*' * len(credentials['password']) if credentials['password'] else None
                message = ('Alyx authentication failed with credentials: '
                           f'user = {credentials["username"]}, password = {redacted}')
                raise requests.HTTPError(rep.status_code, rep.url, message, response=rep)
            else:
                rep.raise_for_status()

        self._headers = {
            'Authorization': 'Token {}'.format(list(self._token.values())[0]),
            'Accept': 'application/json'}
        if cache_token:
            # Update saved pars
            par = one.params.get(client=self.base_url, silent=True)
            tokens = getattr(par, 'TOKEN', {})
            tokens[username] = self._token
            one.params.save(par.set('TOKEN', tokens), self.base_url)
            # Update current pars
            self._par = self._par.set('TOKEN', tokens)
        self.user = username
        if not self.silent:
            print(f'Connected to {self.base_url} as user "{self.user}"')
766
-
767
    def _clear_token(self, username):
        """Remove auth token from client params.

        Deletes the cached authentication token for a given user.

        Parameters
        ----------
        username : str
            The user whose token should be removed.

        """
        par = one.params.get(client=self.base_url, silent=True)
        # Remove token from cache (the params saved on disk)
        if getattr(par, 'TOKEN', False) and username in par.TOKEN:
            del par.TOKEN[username]
            one.params.save(par, self.base_url)
        # Remove token from local pars
        if getattr(self._par, 'TOKEN', False) and username in self._par.TOKEN:
            del self._par.TOKEN[username]
        # Remove token from object
        self._token = None
        if self._headers and 'Authorization' in self._headers:
            del self._headers['Authorization']
784
-
785
- def logout(self):
786
- """Log out from Alyx.
787
-
788
- Deletes the cached authentication token for the currently logged-in user
789
- and clears the REST cache.
790
- """
791
- if not self.is_logged_in:
792
- return
793
- self._clear_token(username := self.user)
794
- self.user = None
795
- self.clear_rest_cache()
796
- if not self.silent:
797
- print(f'{username} logged out from {self.base_url}')
798
-
799
    def delete(self, rest_query):
        """Send a DELETE request to the Alyx server.

        Will raise an exception on any HTTP status code other than 200, 201 or 204.

        Parameters
        ----------
        rest_query : str
            A REST query string, either a relative URL path or a complete URL.

        Returns
        -------
        dict, list, None
            JSON interpreted dictionary from response; None for 204 (no content) responses.

        Examples
        --------
        >>> AlyxClient.delete('/weighings/c617562d-c107-432e-a8ee-682c17f9e698')
        >>> AlyxClient.delete(
        ...     'https://alyx.example.com/endpoint/c617562d-c107-432e-a8ee-682c17f9e698')

        """
        return self._generic_request(requests.delete, rest_query)
821
-
822
    def download_file(self, url, **kwargs):
        """Download file(s) from data server from a REST file record URL.

        Parameters
        ----------
        url : str, list
            Full url(s) of the file(s).
        **kwargs
            WebClient.http_download_file parameters.

        Returns
        -------
        pathlib.Path, list of pathlib.Path
            Local path(s) of downloaded file(s).

        """
        if isinstance(url, str):
            url = self._validate_file_url(url)
            download_fcn = http_download_file
        else:
            # Validate lazily; http_download_file_list materializes the generator
            url = (self._validate_file_url(x) for x in url)
            download_fcn = http_download_file_list
        # Explicit kwargs take precedence over the client defaults
        pars = dict(
            silent=kwargs.pop('silent', self.silent),
            target_dir=kwargs.pop('target_dir', self._par.CACHE_DIR),
            username=self._par.HTTP_DATA_SERVER_LOGIN,
            password=self._par.HTTP_DATA_SERVER_PWD,
            **kwargs
        )
        try:
            files = download_fcn(url, **pars)
        except HTTPError as ex:
            # 401 usually means bad data-server credentials; append a hint before re-raising
            if ex.code == 401:
                ex.msg += (' - please check your HTTP_DATA_SERVER_LOGIN and '
                           'HTTP_DATA_SERVER_PWD ONE params, or username/password kwargs')
            raise ex
        return files
859
-
860
- def download_cache_tables(self, source=None, destination=None):
861
- """Download Alyx cache tables to the local data cache directory.
862
-
863
- Parameters
864
- ----------
865
- source : str, pathlib.Path
866
- The remote HTTP directory of the cache table (excluding the filename).
867
- Default: AlyxClient.base_url.
868
- destination : str, pathlib.Path
869
- The target directory into to which the tables will be downloaded.
870
-
871
- Returns
872
- -------
873
- List of parquet table file paths.
874
-
875
- """
876
- source = str(source or f'{self.base_url}/cache.zip')
877
- destination = destination or self.cache_dir
878
- Path(destination).mkdir(exist_ok=True, parents=True)
879
-
880
- headers = None
881
- if source.startswith(self.base_url):
882
- if not self.is_logged_in:
883
- self.authenticate()
884
- headers = self._headers
885
-
886
- with tempfile.TemporaryDirectory(dir=destination) as tmp:
887
- file = http_download_file(source,
888
- headers=headers,
889
- silent=self.silent,
890
- target_dir=tmp,
891
- clobber=True)
892
- with zipfile.ZipFile(file, 'r') as zipped:
893
- files = zipped.namelist()
894
- zipped.extractall(destination)
895
- return [Path(destination, table) for table in files]
896
-
897
- def _validate_file_url(self, url):
898
- """Assert that URL matches HTTP_DATA_SERVER parameter.
899
-
900
- Currently only one remote HTTP server is supported for a given AlyxClient instance. If
901
- the URL contains only the relative path part, the full URL is returned.
902
-
903
- Parameters
904
- ----------
905
- url : str
906
- The full or partial URL to validate.
907
-
908
- Returns
909
- -------
910
- The complete URL.
911
-
912
- Examples
913
- --------
914
- >>> url = self._validate_file_url('https://webserver.net/path/to/file')
915
- 'https://webserver.net/path/to/file'
916
- >>> url = self._validate_file_url('path/to/file')
917
- 'https://webserver.net/path/to/file'
918
-
919
- """
920
- if url.startswith('http'): # A full URL
921
- assert url.startswith(self._par.HTTP_DATA_SERVER), \
922
- ('remote protocol and/or hostname does not match HTTP_DATA_SERVER parameter:\n' +
923
- f'"{url[:40]}..." should start with "{self._par.HTTP_DATA_SERVER}"')
924
- elif not url.startswith(self._par.HTTP_DATA_SERVER):
925
- url = self.rel_path2url(url)
926
- return url
927
-
928
- def rel_path2url(self, path):
929
- """Given a relative file path, return the remote HTTP server URL.
930
-
931
- It is expected that the remote HTTP server has the same file tree as the local system.
932
-
933
- Parameters
934
- ----------
935
- path : str, pathlib.Path
936
- A relative ALF path (subject/date/number/etc.).
937
-
938
- Returns
939
- -------
940
- A URL string.
941
-
942
- """
943
- path = str(path).strip('/')
944
- assert not path.startswith('http')
945
- return f'{self._par.HTTP_DATA_SERVER}/{path}'
946
-
947
- def get(self, rest_query, **kwargs):
948
- """Send a GET request to the Alyx server.
949
-
950
- Will raise an exception on any HTTP status code other than 200, 201.
951
-
952
- For the dictionary contents and list of endpoints, refer to:
953
- https://openalyx.internationalbrainlab.org/docs
954
-
955
- Parameters
956
- ----------
957
- rest_query : str
958
- A REST URL path, e.g. '/sessions?user=Hamish'.
959
- **kwargs
960
- Optional arguments to pass to _generic_request and _cache_response decorator.
961
-
962
- Returns
963
- -------
964
- JSON interpreted dictionary from response.
965
-
966
- """
967
- rep = self._generic_request(requests.get, rest_query, **kwargs)
968
- if isinstance(rep, dict) and list(rep.keys()) == ['count', 'next', 'previous', 'results']:
969
- if len(rep['results']) < rep['count']:
970
- cache_args = {k: v for k, v in kwargs.items() if k in ('clobber', 'expires')}
971
- rep = _PaginatedResponse(self, rep, cache_args)
972
- else:
973
- rep = rep['results']
974
- return rep
975
-
976
- def patch(self, rest_query, data=None, files=None):
977
- """Send a PATCH request to the Alyx server.
978
-
979
- For the dictionary contents, refer to:
980
- https://openalyx.internationalbrainlab.org/docs
981
-
982
- Parameters
983
- ----------
984
- rest_query : str
985
- The endpoint as full or relative URL.
986
- data : dict, str
987
- JSON encoded string or dictionary (c.f. requests).
988
- files : dict, tuple
989
- Files to attach (c.f. requests).
990
-
991
- Returns
992
- -------
993
- Response object.
994
-
995
- """
996
- return self._generic_request(requests.patch, rest_query, data=data, files=files)
997
-
998
- def post(self, rest_query, data=None, files=None):
999
- """Send a POST request to the Alyx server.
1000
-
1001
- For the dictionary contents, refer to:
1002
- https://openalyx.internationalbrainlab.org/docs
1003
-
1004
- Parameters
1005
- ----------
1006
- rest_query : str
1007
- The endpoint as full or relative URL.
1008
- data : dict, str
1009
- JSON encoded string or dictionary (c.f. requests).
1010
- files : dict, tuple
1011
- Files to attach (c.f. requests).
1012
-
1013
- Returns
1014
- -------
1015
- Response object.
1016
-
1017
- """
1018
- return self._generic_request(requests.post, rest_query, data=data, files=files)
1019
-
1020
- def put(self, rest_query, data=None, files=None):
1021
- """Send a PUT request to the Alyx server.
1022
-
1023
- For the dictionary contents, refer to:
1024
- https://openalyx.internationalbrainlab.org/docs
1025
-
1026
- Parameters
1027
- ----------
1028
- rest_query : str
1029
- The endpoint as full or relative URL.
1030
- data : dict, str
1031
- JSON encoded string or dictionary (c.f. requests).
1032
- files : dict, tuple
1033
- Files to attach (c.f. requests).
1034
-
1035
- Returns
1036
- -------
1037
- requests.Response
1038
- Response object.
1039
-
1040
- """
1041
- return self._generic_request(requests.put, rest_query, data=data, files=files)
1042
-
1043
- def rest(self, url=None, action=None, id=None, data=None, files=None,
1044
- no_cache=False, **kwargs):
1045
- """Alyx REST API wrapper.
1046
-
1047
- If no arguments are passed, lists available endpoints.
1048
-
1049
- Parameters
1050
- ----------
1051
- url : str
1052
- Endpoint name.
1053
- action : str
1054
- One of 'list', 'create', 'read', 'update', 'partial_update', 'delete'.
1055
- id : str, uuid.UUID
1056
- Lookup string for actions 'read', 'update', 'partial_update', and 'delete'.
1057
- data : dict
1058
- Data dictionary for actions 'update', 'partial_update' and 'create'.
1059
- files : dict, tuple
1060
- Option file(s) to upload.
1061
- no_cache : bool
1062
- If true the `list` and `read` actions are performed without returning the cache.
1063
- kwargs
1064
- Filters as per the Alyx REST documentation
1065
- c.f. https://openalyx.internationalbrainlab.org/docs/
1066
-
1067
- Returns
1068
- -------
1069
- list, dict
1070
- List of queried dicts ('list') or dict (other actions).
1071
-
1072
- Examples
1073
- --------
1074
- List available endpoint
1075
-
1076
- >>> client = AlyxClient()
1077
- ... client.rest()
1078
-
1079
- List available actions for the 'subjects' endpoint
1080
-
1081
- >>> client.rest('subjects')
1082
-
1083
- Example REST endpoint with all actions
1084
-
1085
- >>> client.rest('subjects', 'list')
1086
- >>> client.rest('subjects', 'list', field_filter1='filterval')
1087
- >>> client.rest('subjects', 'create', data=sub_dict)
1088
- >>> client.rest('subjects', 'read', id='nickname')
1089
- >>> client.rest('subjects', 'update', id='nickname', data=sub_dict)
1090
- >>> client.rest('subjects', 'partial_update', id='nickname', data=sub_dict)
1091
- >>> client.rest('subjects', 'delete', id='nickname')
1092
- >>> client.rest('notes', 'create', data=nd, files={'image': open(image_file, 'rb')})
1093
-
1094
- """
1095
- # if endpoint is None, list available endpoints
1096
- if not url:
1097
- pprint(self.list_endpoints())
1098
- return
1099
- # remove beginning slash if any
1100
- if url.startswith('/'):
1101
- url = url[1:]
1102
- # and split to the next slash or question mark
1103
- endpoint = re.findall("^/*[^?/]*", url)[0].replace('/', '')
1104
- # make sure the queried endpoint exists, if not throw an informative error
1105
- if endpoint not in self.rest_schemes.keys():
1106
- av = [k for k in self.rest_schemes.keys() if not k.startswith('_') and k]
1107
- raise ValueError('REST endpoint "' + endpoint + '" does not exist. Available ' +
1108
- 'endpoints are \n ' + '\n '.join(av))
1109
- endpoint_scheme = self.rest_schemes[endpoint]
1110
- # on a filter request, override the default action parameter
1111
- if '?' in url:
1112
- action = 'list'
1113
- # if action is None, list available actions for the required endpoint
1114
- if not action:
1115
- pprint(list(endpoint_scheme.keys()))
1116
- self.print_endpoint_info(endpoint)
1117
- return
1118
- # make sure the desired action exists, if not throw an informative error
1119
- if action not in endpoint_scheme:
1120
- raise ValueError('Action "' + action + '" for REST endpoint "' + endpoint + '" does ' +
1121
- 'not exist. Available actions are: ' +
1122
- '\n ' + '\n '.join(endpoint_scheme.keys()))
1123
- # the actions below require an id in the URL, warn and help the user
1124
- if action in ['read', 'update', 'partial_update', 'delete'] and not id:
1125
- _logger.warning('REST action "' + action + '" requires an ID in the URL: ' +
1126
- endpoint_scheme[action]['url'])
1127
- return
1128
- # the actions below require a data dictionary, warn and help the user with fields list
1129
- data_required = 'fields' in endpoint_scheme[action]
1130
- if action in ['create', 'update', 'partial_update'] and data_required and not data:
1131
- pprint(endpoint_scheme[action]['fields'])
1132
- for act in endpoint_scheme[action]['fields']:
1133
- print("'" + act['name'] + "': ...,")
1134
- _logger.warning('REST action "' + action + '" requires a data dict with above keys')
1135
- return
1136
-
1137
- # clobber=True means remote request always made, expires=True means response is not cached
1138
- cache_args = {'clobber': no_cache, 'expires': kwargs.pop('expires', False) or no_cache}
1139
- if action == 'list':
1140
- # list doesn't require id nor
1141
- assert endpoint_scheme[action]['action'] == 'get'
1142
- # add to url data if it is a string
1143
- if id:
1144
- # this is a special case of the list where we query a uuid. Usually read is better
1145
- if 'django' in kwargs.keys():
1146
- kwargs['django'] = kwargs['django'] + ','
1147
- else:
1148
- kwargs['django'] = ''
1149
- kwargs['django'] = f"{kwargs['django']}pk,{id}"
1150
- # otherwise, look for a dictionary of filter terms
1151
- if kwargs:
1152
- # Convert all lists in query params to comma separated list
1153
- query_params = {k: ','.join(map(str, ensure_list(v))) for k, v in kwargs.items()}
1154
- url = update_url_params(url, query_params)
1155
- return self.get('/' + url, **cache_args)
1156
- if not isinstance(id, str) and id is not None:
1157
- id = str(id) # e.g. may be uuid.UUID
1158
- if action == 'read':
1159
- assert endpoint_scheme[action]['action'] == 'get'
1160
- return self.get('/' + endpoint + '/' + id.split('/')[-1], **cache_args)
1161
- elif action == 'create':
1162
- assert endpoint_scheme[action]['action'] == 'post'
1163
- return self.post('/' + endpoint, data=data, files=files)
1164
- elif action == 'delete':
1165
- assert endpoint_scheme[action]['action'] == 'delete'
1166
- return self.delete('/' + endpoint + '/' + id.split('/')[-1])
1167
- elif action == 'partial_update':
1168
- assert endpoint_scheme[action]['action'] == 'patch'
1169
- return self.patch('/' + endpoint + '/' + id.split('/')[-1], data=data, files=files)
1170
- elif action == 'update':
1171
- assert endpoint_scheme[action]['action'] == 'put'
1172
- return self.put('/' + endpoint + '/' + id.split('/')[-1], data=data, files=files)
1173
-
1174
- # JSON field interface convenience methods
1175
- def _check_inputs(self, endpoint: str) -> None:
1176
- # make sure the queried endpoint exists, if not throw an informative error
1177
- if endpoint not in self.rest_schemes.keys():
1178
- av = (k for k in self.rest_schemes.keys() if not k.startswith('_') and k)
1179
- raise ValueError('REST endpoint "' + endpoint + '" does not exist. Available ' +
1180
- 'endpoints are \n ' + '\n '.join(av))
1181
- return
1182
-
1183
- def json_field_write(
1184
- self,
1185
- endpoint: str = None,
1186
- uuid: str = None,
1187
- field_name: str = None,
1188
- data: dict = None
1189
- ) -> dict:
1190
- """Write data to JSON field.
1191
-
1192
- NOTE: Destructive write! WILL NOT CHECK IF DATA EXISTS
1193
-
1194
- Parameters
1195
- ----------
1196
- endpoint : str, None
1197
- Valid alyx endpoint, defaults to None.
1198
- uuid : str, uuid.UUID, None
1199
- UUID or lookup name for endpoint.
1200
- field_name : str, None
1201
- Valid json field name, defaults to None.
1202
- data : dict, None
1203
- Data to write to json field, defaults to None.
1204
-
1205
- Returns
1206
- -------
1207
- dict
1208
- Written data dict.
1209
-
1210
- """
1211
- self._check_inputs(endpoint)
1212
- # Prepare data to patch
1213
- patch_dict = {field_name: data}
1214
- # Upload new extended_qc to session
1215
- ret = self.rest(endpoint, 'partial_update', id=uuid, data=patch_dict)
1216
- return ret[field_name]
1217
-
1218
- def json_field_update(
1219
- self,
1220
- endpoint: str = None,
1221
- uuid: str = None,
1222
- field_name: str = 'json',
1223
- data: dict = None
1224
- ) -> dict:
1225
- """Non-destructive update of JSON field of endpoint for object.
1226
-
1227
- Will update the field_name of the object with pk = uuid of given endpoint
1228
- If data has keys with the same name of existing keys it will squash the old
1229
- values (uses the dict.update() method).
1230
-
1231
- Parameters
1232
- ----------
1233
- endpoint : str
1234
- Alyx REST endpoint to hit.
1235
- uuid : str, uuid.UUID
1236
- UUID or lookup name of object.
1237
- field_name : str
1238
- Name of the json field.
1239
- data : dict
1240
- A dictionary with fields to be updated.
1241
-
1242
- Returns
1243
- -------
1244
- dict
1245
- New patched json field contents as dict.
1246
-
1247
- Examples
1248
- --------
1249
- >>> client = AlyxClient()
1250
- >>> client.json_field_update('sessions', 'eid_str', 'extended_qc', {'key': 'value'})
1251
-
1252
- """
1253
- self._check_inputs(endpoint)
1254
- # Load current json field contents
1255
- current = self.rest(endpoint, 'read', id=uuid)[field_name]
1256
- if current is None:
1257
- current = {}
1258
-
1259
- if not isinstance(current, dict):
1260
- _logger.warning(
1261
- f'Current json field "{field_name}" does not contains a dict, aborting update'
1262
- )
1263
- return current
1264
-
1265
- # Patch current dict with new data
1266
- current.update(data)
1267
- # Prepare data to patch
1268
- patch_dict = {field_name: current}
1269
- # Upload new extended_qc to session
1270
- ret = self.rest(endpoint, 'partial_update', id=uuid, data=patch_dict)
1271
- return ret[field_name]
1272
-
1273
- def json_field_remove_key(
1274
- self,
1275
- endpoint: str = None,
1276
- uuid: str = None,
1277
- field_name: str = 'json',
1278
- key: str = None
1279
- ) -> Optional[dict]:
1280
- """Remove inputted key from JSON field dict and re-upload it to Alyx.
1281
-
1282
- Needs endpoint, UUID and json field name.
1283
-
1284
- Parameters
1285
- ----------
1286
- endpoint : str
1287
- Endpoint to hit, defaults to None.
1288
- uuid : str, uuid.UUID
1289
- UUID or lookup name for endpoint.
1290
- field_name : str
1291
- JSON field name of object, defaults to None.
1292
- key : str
1293
- Key name of dictionary inside object, defaults to None.
1294
-
1295
- Returns
1296
- -------
1297
- dict
1298
- New content of json field.
1299
-
1300
- """
1301
- self._check_inputs(endpoint)
1302
- current = self.rest(endpoint, 'read', id=uuid)[field_name]
1303
- # If no contents, cannot remove key, return
1304
- if current is None:
1305
- return current
1306
- # if contents are not dict, cannot remove key, return contents
1307
- if isinstance(current, str):
1308
- _logger.warning(f'Cannot remove key {key} content of json field is of type str')
1309
- return None
1310
- # If key not present in contents of json field cannot remove key, return contents
1311
- if current.get(key, None) is None:
1312
- _logger.warning(
1313
- f'{key}: Key not found in endpoint {endpoint} field {field_name}'
1314
- )
1315
- return current
1316
- _logger.info(f'Removing key from dict: "{key}"')
1317
- current.pop(key)
1318
- # Re-write contents without removed key
1319
- written = self.json_field_write(
1320
- endpoint=endpoint, uuid=uuid, field_name=field_name, data=current
1321
- )
1322
- return written
1323
-
1324
- def json_field_delete(
1325
- self, endpoint: str = None, uuid: str = None, field_name: str = None
1326
- ) -> None:
1327
- """Set an entire field to null.
1328
-
1329
- Note that this deletes all data from a given field. To delete only a single key from a
1330
- given JSON field, use `json_field_remove_key`.
1331
-
1332
- Parameters
1333
- ----------
1334
- endpoint : str
1335
- Endpoint to hit, defaults to None.
1336
- uuid : str, uuid.UUID
1337
- UUID or lookup name for endpoint.
1338
- field_name : str
1339
- The field name of object (e.g. 'json', 'name', 'extended_qc'), defaults to None.
1340
-
1341
- Returns
1342
- -------
1343
- None
1344
- New content of json field.
1345
-
1346
- """
1347
- self._check_inputs(endpoint)
1348
- _ = self.rest(endpoint, 'partial_update', id=uuid, data={field_name: None})
1349
- return _[field_name]
1350
-
1351
- def clear_rest_cache(self):
1352
- """Clear all REST response cache files for the base url."""
1353
- for file in self.cache_dir.joinpath('.rest').glob('*'):
1354
- file.unlink()
1
+ """API for interacting with a remote Alyx instance through REST.
2
+
3
+ The AlyxClient class contains methods for making remote Alyx REST queries and downloading remote
4
+ files through Alyx.
5
+
6
+ Examples
7
+ --------
8
+ >>> alyx = AlyxClient(
9
+ ... username='test_user', password='TapetesBloc18',
10
+ ... base_url='https://test.alyx.internationalbrainlab.org')
11
+
12
+ List subjects
13
+
14
+ >>> subjects = alyx.rest('subjects', 'list')
15
+
16
+ Create a subject
17
+
18
+ >>> record = {
19
+ ... 'nickname': nickname,
20
+ ... 'responsible_user': 'olivier',
21
+ ... 'birth_date': '2019-06-15',
22
+ ... 'death_date': None,
23
+ ... 'lab': 'cortexlab',
24
+ ... }
25
+ >>> new_subj = alyx.rest('subjects', 'create', data=record)
26
+
27
+ Download a remote file, given a local path
28
+
29
+ >>> url = 'zadorlab/Subjects/flowers/2018-07-13/1/channels.probe.npy'
30
+ >>> local_path = alyx.download_file(url, target_dir='zadorlab/Subjects/flowers/2018-07-13/1/')
31
+
32
+ """
33
+ from uuid import UUID
34
+ import json
35
+ import logging
36
+ import math
37
+ import re
38
+ import functools
39
+ import urllib.request
40
+ from urllib.error import HTTPError
41
+ import urllib.parse
42
+ from collections.abc import Mapping
43
+ from typing import Optional
44
+ from datetime import datetime, timedelta
45
+ from pathlib import Path
46
+ from weakref import ReferenceType
47
+ import warnings
48
+ import hashlib
49
+ import zipfile
50
+ import tempfile
51
+ from getpass import getpass
52
+ from contextlib import contextmanager
53
+
54
+ import requests
55
+ from tqdm import tqdm
56
+
57
+ from pprint import pprint
58
+ import one.params
59
+ from iblutil.io import hashfile
60
+ from iblutil.io.params import set_hidden
61
+ from iblutil.util import ensure_list
62
+ import concurrent.futures
63
+ _logger = logging.getLogger(__name__)
64
+
65
+
66
+ class _JSONEncoder(json.JSONEncoder):
67
+ """A JSON encoder that handles UUID objects."""
68
+
69
+ def default(self, o):
70
+ """Cast UUID objects to str before serializing."""
71
+ if isinstance(o, UUID):
72
+ return str(o)
73
+ return super().default(o)
74
+
75
+
76
+ def _cache_response(method):
77
+ """Decorator for the generic request method for caching REST reponses.
78
+
79
+ Caches the result of the query and on subsequent calls, returns cache instead of hitting the
80
+ database.
81
+
82
+ Parameters
83
+ ----------
84
+ method : function
85
+ Function to wrap (i.e. AlyxClient._generic_request).
86
+
87
+ Returns
88
+ -------
89
+ function
90
+ Handle to wrapped method.
91
+
92
+ """
93
+
94
+ @functools.wraps(method)
95
+ def wrapper_decorator(alyx_client, *args, expires=None, clobber=False, **kwargs):
96
+ """REST caching wrapper.
97
+
98
+ Parameters
99
+ ----------
100
+ alyx_client : AlyxClient
101
+ An instance of the AlyxClient class.
102
+ args : any
103
+ Positional arguments for applying to wrapped function.
104
+ expires : bool
105
+ An optional timedelta for how long cached response is valid. If True, the cached
106
+ response will not be used on subsequent calls. If None, the default expiry is applied.
107
+ clobber : bool
108
+ If True any existing cached response is overwritten.
109
+ **kwargs
110
+ Keyword arguments for applying to wrapped function.
111
+
112
+ Returns
113
+ -------
114
+ dict
115
+ The REST response JSON either from cached file or directly from remote.
116
+
117
+ """
118
+ expires = expires or alyx_client.default_expiry
119
+ mode = (alyx_client.cache_mode or '').casefold()
120
+ if args[0].__name__ != mode and mode != '*':
121
+ return method(alyx_client, *args, **kwargs)
122
+ # Check cache
123
+ rest_cache = alyx_client.cache_dir.joinpath('.rest')
124
+ sha1 = hashlib.sha1()
125
+ sha1.update(bytes(args[1], 'utf-8'))
126
+ name = sha1.hexdigest()
127
+ # Reversible but length may exceed 255 chars
128
+ # name = base64.urlsafe_b64encode(args[2].encode('UTF-8')).decode('UTF-8')
129
+ files = list(rest_cache.glob(name))
130
+ cached = None
131
+ if len(files) == 1 and not clobber:
132
+ _logger.debug('loading REST response from cache')
133
+ with open(files[0], 'r') as f:
134
+ cached, when = json.load(f)
135
+ if datetime.fromisoformat(when) > datetime.now():
136
+ return cached
137
+ try:
138
+ response = method(alyx_client, *args, **kwargs)
139
+ except requests.exceptions.ConnectionError as ex:
140
+ if cached and not clobber:
141
+ warnings.warn('Failed to connect, returning cached response', RuntimeWarning)
142
+ return cached
143
+ raise ex # No cache and can't connect to database; re-raise
144
+
145
+ # Save response into cache
146
+ if not rest_cache.exists():
147
+ rest_cache.mkdir(parents=True)
148
+ rest_cache = set_hidden(rest_cache, True)
149
+
150
+ _logger.debug('caching REST response')
151
+ expiry_datetime = datetime.now() + (timedelta() if expires is True else expires)
152
+ with open(rest_cache / name, 'w') as f:
153
+ json.dump((response, expiry_datetime.isoformat()), f, cls=_JSONEncoder)
154
+ return response
155
+
156
+ return wrapper_decorator
157
+
158
+
159
+ @contextmanager
160
+ def no_cache(ac=None):
161
+ """Temporarily turn off the REST cache for a given Alyx instance.
162
+
163
+ This function is particularly useful when calling ONE methods in remote mode.
164
+
165
+ Parameters
166
+ ----------
167
+ ac : AlyxClient
168
+ An instance of the AlyxClient to modify. If None, the a new object is instantiated
169
+
170
+ Returns
171
+ -------
172
+ AlyxClient
173
+ The instance of Alyx with cache disabled
174
+
175
+ Examples
176
+ --------
177
+ >>> from one.api import ONE
178
+ >>> with no_cache(ONE().alyx):
179
+ ... eids = ONE().search(subject='foobar', query_type='remote')
180
+
181
+ """
182
+ ac = ac or AlyxClient()
183
+ cache_mode = ac.cache_mode
184
+ ac.cache_mode = None
185
+ try:
186
+ yield ac
187
+ finally:
188
+ ac.cache_mode = cache_mode
189
+
190
+
191
+ class _PaginatedResponse(Mapping):
192
+ """Emulate a list from a paginated response.
193
+
194
+ Provides cache functionality.
195
+
196
+ Examples
197
+ --------
198
+ >>> r = _PaginatedResponse(client, response)
199
+
200
+ """
201
+
202
+ def __init__(self, alyx, rep, cache_args=None):
203
+ """Emulate a list from a paginated response.
204
+
205
+ Parameters
206
+ ----------
207
+ alyx : AlyxClient
208
+ An instance of an AlyxClient associated with the REST response
209
+ rep : dict
210
+ A paginated REST response JSON dictionary
211
+ cache_args : dict
212
+ A dict of kwargs to pass to _cache_response decorator upon subsequent requests
213
+
214
+ """
215
+ self.alyx = alyx
216
+ self.count = rep['count']
217
+ self.limit = len(rep['results'])
218
+ self._cache_args = cache_args or {}
219
+ # store URL without pagination query params
220
+ self.query = rep['next']
221
+ # init the cache, list with None with count size
222
+ self._cache = [None] * self.count
223
+ # fill the cache with results of the query
224
+ for i in range(self.limit):
225
+ self._cache[i] = rep['results'][i]
226
+ self._callbacks = set()
227
+
228
+ def add_callback(self, cb):
229
+ """Add a callback function to use each time a new page is fetched.
230
+
231
+ The callback function will be called with the page results each time :meth:`populate`
232
+ is called.
233
+
234
+ Parameters
235
+ ----------
236
+ cb : callable
237
+ A callable that takes the results of each paginated resonse.
238
+
239
+ """
240
+ if not callable(cb):
241
+ raise TypeError(f'Expected type "callable", got "{type(cb)}" instead')
242
+ else:
243
+ self._callbacks.add(cb)
244
+
245
+ def __len__(self):
246
+ return self.count
247
+
248
+ def __getitem__(self, item):
249
+ if isinstance(item, slice):
250
+ while None in self._cache[item]:
251
+ # If slice start index is -ve, convert to +ve index
252
+ i = self.count + item.start if item.start < 0 else item.start
253
+ self.populate(i + self._cache[item].index(None))
254
+ elif self._cache[item] is None:
255
+ # If index is -ve, convert to +ve
256
+ self.populate(self.count + item if item < 0 else item)
257
+ return self._cache[item]
258
+
259
+ def populate(self, idx):
260
+ """Populate response cache with new page of results.
261
+
262
+ Fetches the specific page of results containing the index passed and populates
263
+ stores the results in the :prop:`_cache` property.
264
+
265
+ Parameters
266
+ ----------
267
+ idx : int
268
+ The index of a given record to fetch.
269
+
270
+ """
271
+ offset = self.limit * math.floor(idx / self.limit)
272
+ query = update_url_params(self.query, {'limit': self.limit, 'offset': offset})
273
+ res = self.alyx._generic_request(requests.get, query, **self._cache_args)
274
+ if self.count != res['count']:
275
+ warnings.warn(
276
+ f'remote results for {urllib.parse.urlsplit(query).path} endpoint changed; '
277
+ f'results may be inconsistent', RuntimeWarning)
278
+ for i, r in enumerate(res['results'][:self.count - offset]):
279
+ self._cache[i + offset] = res['results'][i]
280
+ # Notify callbacks
281
+ pending_removal = []
282
+ for callback in self._callbacks:
283
+ # Handle weak reference callbacks first
284
+ if isinstance(callback, ReferenceType):
285
+ wf = callback
286
+ if (callback := wf()) is None:
287
+ pending_removal.append(wf)
288
+ continue
289
+ callback(res['results'])
290
+ for wf in pending_removal:
291
+ self._callbacks.discard(wf)
292
+ # When cache is complete, clear our callbacks
293
+ if all(reversed(self._cache)):
294
+ self._callbacks.clear()
295
+
296
+ def __iter__(self):
297
+ for i in range(self.count):
298
+ yield self.__getitem__(i)
299
+
300
+
301
+ def update_url_params(url: str, params: dict) -> str:
302
+ """Add/update the query parameters of a URL and make url safe.
303
+
304
+ Parameters
305
+ ----------
306
+ url : str
307
+ A URL string with which to update the query parameters
308
+ params : dict
309
+ A dict of new parameters. For multiple values for the same query, use a list (see example)
310
+
311
+ Returns
312
+ -------
313
+ str
314
+ A new URL with said parameters updated
315
+
316
+ Examples
317
+ --------
318
+ >>> update_url_params('website.com/?q=', {'pg': 5})
319
+ 'website.com/?pg=5'
320
+
321
+ >>> update_url_params('website.com?q=xxx', {'pg': 5, 'foo': ['bar', 'baz']})
322
+ 'website.com?q=xxx&pg=5&foo=bar&foo=baz'
323
+
324
+ """
325
+ # Remove percent-encoding
326
+ url = urllib.parse.unquote(url)
327
+ parsed_url = urllib.parse.urlsplit(url)
328
+ # Extract URL query arguments and convert to dict
329
+ parsed_get_args = urllib.parse.parse_qs(parsed_url.query, keep_blank_values=False)
330
+ # Merge URL arguments dict with new params
331
+ parsed_get_args.update(params)
332
+ # Convert back to query string
333
+ encoded_get_args = urllib.parse.urlencode(parsed_get_args, doseq=True)
334
+ # Update parser and convert to full URL str
335
+ return parsed_url._replace(query=encoded_get_args).geturl()
336
+
337
+
338
+ def http_download_file_list(links_to_file_list, **kwargs):
339
+ """Download a list of files from a remote HTTP server from a list of links.
340
+
341
+ Generates up to 4 separate threads to handle downloads.
342
+ Same options behaviour as http_download_file.
343
+
344
+ Parameters
345
+ ----------
346
+ links_to_file_list : list
347
+ List of http links to files.
348
+ **kwargs
349
+ Optional arguments to pass to http_download_file.
350
+
351
+ Returns
352
+ -------
353
+ list of pathlib.Path
354
+ A list of the local full path of the downloaded files.
355
+
356
+ """
357
+ links_to_file_list = list(links_to_file_list) # In case generator was passed
358
+ n_threads = 4 # Max number of threads
359
+ outputs = []
360
+ target_dir = kwargs.pop('target_dir', None)
361
+ # Ensure target dir the length of url list
362
+ if target_dir is None or isinstance(target_dir, (str, Path)):
363
+ target_dir = [target_dir] * len(links_to_file_list)
364
+ assert len(target_dir) == len(links_to_file_list)
365
+ # using with statement to ensure threads are cleaned up promptly
366
+ zipped = zip(links_to_file_list, target_dir)
367
+ with concurrent.futures.ThreadPoolExecutor(max_workers=n_threads) as executor:
368
+ # Multithreading load operations
369
+ futures = [executor.submit(
370
+ http_download_file, link, target_dir=target, **kwargs) for link, target in zipped]
371
+ zip(links_to_file_list, ensure_list(kwargs.pop('target_dir', None)))
372
+ # TODO Reintroduce variable timeout value based on file size and download speed of 5 Mb/s?
373
+ # timeout = reduce(lambda x, y: x + (y.get('file_size', 0) or 0), dsets, 0) / 625000 ?
374
+ concurrent.futures.wait(futures, timeout=None)
375
+ # build return list
376
+ for future in futures:
377
+ outputs.append(future.result())
378
+ # if returning md5, separate list of tuples into two lists: (files, md5)
379
+ return list(zip(*outputs)) if kwargs.get('return_md5', False) else outputs
380
+
381
+
382
def http_download_file(full_link_to_file, chunks=None, *, clobber=False, silent=False,
                       username='', password='', target_dir='', return_md5=False, headers=None):
    """Download a file from a remote HTTP server.

    Parameters
    ----------
    full_link_to_file : str
        HTTP link to the file.
    chunks : tuple of ints
        A (first_byte, n_bytes) pair for a partial (HTTP Range) download.
    clobber : bool
        If True, force overwrite the existing file.
    silent : bool
        If True, suppress download progress bar.
    username : str
        User authentication for password protected file server.
    password : str
        Password authentication for password protected file server.
    target_dir : str, pathlib.Path
        Directory in which files are downloaded; defaults to user's Download directory.
    return_md5 : bool
        If True an MD5 hash of the file is additionally returned.
    headers : dict
        Additional headers to add to the request (auth tokens etc.).

    Returns
    -------
    pathlib.Path
        The full file path of the downloaded file.
    str
        The file's MD5 hexdigest (only when `return_md5` is True).

    """
    if not full_link_to_file:
        return (None, None) if return_md5 else None

    # makes sure special characters get encoded ('#' in file names for example)
    surl = urllib.parse.urlsplit(full_link_to_file, allow_fragments=False)
    full_link_to_file = surl._replace(path=urllib.parse.quote(surl.path)).geturl()

    # default download directory is the user's Downloads folder
    if not target_dir:
        target_dir = Path.home().joinpath('Downloads')

    # This should be the base url you wanted to access.
    base_url, name = full_link_to_file.rsplit('/', 1)
    file_name = Path(target_dir, name)

    # do not overwrite an existing file unless specified
    if not clobber and file_name.exists():
        return (file_name, hashfile.md5(file_name)) if return_md5 else file_name

    # Create a password manager
    manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    if username and password:
        manager.add_password(None, base_url, username, password)

    # Create an authentication handler using the password manager
    auth = urllib.request.HTTPBasicAuthHandler(manager)

    # Create an opener that will replace the default urlopen method on further calls
    opener = urllib.request.build_opener(auth)
    urllib.request.install_opener(opener)

    # Support for partial download.
    req = urllib.request.Request(full_link_to_file)
    if chunks is not None:
        first_byte, n_bytes = chunks
        req.add_header('Range', 'bytes=%d-%d' % (first_byte, first_byte + n_bytes - 1))

    # add additional headers (auth tokens etc.)
    if headers is not None:
        for k in headers:
            req.add_header(k, headers[k])

    # Open the url and get the length
    try:
        u = urllib.request.urlopen(req)
    except HTTPError as e:
        _logger.error(f'{str(e)} {full_link_to_file}')
        raise e

    md5 = hashlib.md5()
    # Context managers ensure both the HTTP response and the local file are closed
    # even if the transfer raises part-way through (previously the file handle
    # leaked on error because close() was unconditional but not in a finally).
    with u:
        # Content-Length may be absent (e.g. chunked transfer encoding); fall back
        # to an indeterminate progress bar rather than crashing on int(None)
        length = u.getheader('Content-length')
        file_size = int(length) if length is not None else None
        if not silent:
            print(f'Downloading: {file_name} Bytes: {file_size}')
        block_sz = 8192 * 64 * 8
        total_mb = None if file_size is None else file_size / 1024 / 1024
        with open(file_name, 'wb') as f, tqdm(total=total_mb, disable=silent) as pbar:
            while True:
                buffer = u.read(block_sz)
                if not buffer:
                    break
                f.write(buffer)
                if return_md5:
                    md5.update(buffer)
                pbar.update(len(buffer) / 1024 / 1024)

    return (file_name, md5.hexdigest()) if return_md5 else file_name
481
+
482
+
483
def file_record_to_url(file_records) -> list:
    """Translate JSON file records into usable HTTP download URLs.

    Parameters
    ----------
    file_records : list of dict
        JSON file records, each containing a 'data_url' field.

    Returns
    -------
    list of str
        The full data urls, skipping records whose 'data_url' is None.

    """
    return [record['data_url'] for record in file_records if record['data_url'] is not None]
502
+
503
+
504
def dataset_record_to_url(dataset_record) -> list:
    """Extract a list of file urls from one or more dataset records.

    Parameters
    ----------
    dataset_record : list, dict
        Dataset JSON from a REST request; a single record or a list of them.

    Returns
    -------
    list of str
        A list of file urls corresponding to the datasets records.

    """
    # Normalize a single record to a one-element list
    records = [dataset_record] if isinstance(dataset_record, dict) else dataset_record
    urls = []
    for record in records:
        urls.extend(file_record_to_url(record['file_records']))
    return urls
524
+
525
+
526
+ class AlyxClient:
527
+ """Class that implements simple GET/POST wrappers for the Alyx REST API.
528
+
529
+ See https://openalyx.internationalbrainlab.org/docs
530
+ """
531
+
532
+ _token = None
533
+ _headers = {} # Headers for REST requests only
534
+ user = None
535
+ """str: The Alyx username."""
536
+ base_url = None
537
+ """str: The Alyx database URL."""
538
+
539
    def __init__(self, base_url=None, username=None, password=None,
                 cache_dir=None, silent=False, cache_rest='GET'):
        """Create a client instance that allows to GET and POST to the Alyx server.

        For One, constructor attempts to authenticate with credentials in params.py.
        For standalone cases, AlyxClient(username='', password='', base_url='').

        Parameters
        ----------
        base_url : str
            Alyx server address, including port and protocol.
        username : str
            Alyx database user.
        password : str
            Alyx database password.
        cache_dir : str, pathlib.Path
            The default root download location.
        silent : bool
            If true, user prompts and progress bars are suppressed.
        cache_rest : str, None
            Which type of http method to apply cache to; if '*', all requests are cached.

        """
        self.silent = silent
        # Load stored parameters for this database (may prompt the user when not silent)
        self._par = one.params.get(client=base_url, silent=self.silent, username=username)
        self.base_url = base_url or self._par.ALYX_URL
        # Override the stored cache directory if one was explicitly passed in
        self._par = self._par.set('CACHE_DIR', cache_dir or self._par.CACHE_DIR)
        # Authenticate now only when explicit credentials were provided; otherwise
        # authentication is deferred to the first request (see _generic_request)
        if username or password:
            self.authenticate(username, password)
        self._rest_schemes = None  # lazily fetched by the rest_schemes property
        # the mixed accept application may cause errors sometimes, only necessary for the docs
        self._headers = {**self._headers, 'Accept': 'application/json'}
        # REST cache parameters
        # The default length of time that cache file is valid for,
        # The default expiry is overridden by the `expires` kwarg. If False, the caching is
        # turned off.
        self.default_expiry = timedelta(minutes=5)
        self.cache_mode = cache_rest
        self._obj_id = id(self)
580
+
581
+ @property
582
+ def rest_schemes(self):
583
+ """dict: The REST endpoints and their parameters."""
584
+ # Delayed fetch of rest schemes speeds up instantiation
585
+ if not self._rest_schemes:
586
+ self._rest_schemes = self.get('/docs', expires=timedelta(weeks=1))
587
+ return self._rest_schemes
588
+
589
+ @property
590
+ def cache_dir(self):
591
+ """pathlib.Path: The location of the downloaded file cache."""
592
+ return Path(self._par.CACHE_DIR)
593
+
594
+ @cache_dir.setter
595
+ def cache_dir(self, cache_dir):
596
+ cache_dir = Path(cache_dir)
597
+ cache_dir.mkdir(parents=True, exist_ok=True)
598
+ self._par = self._par.set('CACHE_DIR', cache_dir)
599
+
600
+ @property
601
+ def is_logged_in(self):
602
+ """bool: Check if user logged into Alyx database; True if user is authenticated."""
603
+ return bool(self.user and self._token and 'Authorization' in self._headers)
604
+
605
+ def list_endpoints(self):
606
+ """Return a list of available REST endpoints.
607
+
608
+ Returns
609
+ -------
610
+ List of REST endpoint strings.
611
+
612
+ """
613
+ EXCLUDE = ('_type', '_meta', '', 'auth-token')
614
+ return sorted(x for x in self.rest_schemes.keys() if x not in EXCLUDE)
615
+
616
+ def print_endpoint_info(self, endpoint, action=None):
617
+ """Print the available actions and query parameters for a given REST endpoint.
618
+
619
+ Parameters
620
+ ----------
621
+ endpoint : str
622
+ An Alyx REST endpoint to query.
623
+ action : str
624
+ An optional action (e.g. 'list') to print. If None, all actions are printed.
625
+
626
+ Returns
627
+ -------
628
+ dict, list
629
+ A dictionary of endpoint query parameter details or a list of parameter details if
630
+ action is not None.
631
+
632
+ """
633
+ rs = self.rest_schemes
634
+ if endpoint not in rs:
635
+ return print(f'Endpoint "{endpoint}" does not exist')
636
+
637
+ for _action in (rs[endpoint] if action is None else [action]):
638
+ doc = []
639
+ pprint(_action)
640
+ for f in rs[endpoint][_action]['fields']:
641
+ required = ' (required): ' if f.get('required', False) else ': '
642
+ doc.append(f'\t"{f["name"]}"{required}{f["schema"]["_type"]}'
643
+ f', {f["schema"]["description"]}')
644
+ doc.sort()
645
+ [print(d) for d in doc if '(required)' in d]
646
+ [print(d) for d in doc if '(required)' not in d]
647
+ return (rs[endpoint] if action is None else rs[endpoint][action]).copy()
648
+
649
    @_cache_response
    def _generic_request(self, reqfunction, rest_query, data=None, files=None):
        """Send an authenticated REST request and return the decoded JSON response.

        Responses may be served from / stored in the REST cache by the
        `_cache_response` decorator, depending on the client's `cache_mode`.
        """
        # Lazily authenticate with the stored/cached credentials on first request
        if not self.is_logged_in:
            self.authenticate(username=self.user)
        # makes sure the base url is the one from the instance
        rest_query = rest_query.replace(self.base_url, '')
        if not rest_query.startswith('/'):
            rest_query = '/' + rest_query
        _logger.debug(f'{self.base_url + rest_query}, headers: {self._headers}')
        headers = self._headers.copy()
        if files is None:
            # Serialize dict/list payloads to JSON (_JSONEncoder handles UUIDs etc.)
            to_json = functools.partial(json.dumps, cls=_JSONEncoder)
            data = to_json(data) if isinstance(data, dict) or isinstance(data, list) else data
            headers['Content-Type'] = 'application/json'
        if rest_query.startswith('/docs'):
            # the mixed accept application may cause errors sometimes, only necessary for the docs
            headers['Accept'] = 'application/coreapi+json'
        r = reqfunction(self.base_url + rest_query,
                        stream=True, headers=headers, data=data, files=files)
        if r and r.status_code in (200, 201):
            return json.loads(r.text)
        elif r and r.status_code == 204:
            # No content (e.g. successful DELETE); nothing to decode
            return
        if r.status_code == 403 and '"Invalid token."' in r.text:
            _logger.debug('Token invalid; Attempting to re-authenticate...')
            # Log out in order to flush stale token. At this point we no longer have the password
            # but if the user re-instantiates with a password arg it will request a new token.
            username = self.user
            if self.silent:  # no need to log out otherwise; user will be prompted for password
                self.logout()
            self.authenticate(username=username, force=True)
            # Retry the original request once with the fresh token
            return self._generic_request(reqfunction, rest_query, data=data, files=files)
        else:
            _logger.debug('Response text raw: ' + r.text)
            try:
                message = json.loads(r.text)
                message.pop('status_code', None)  # Get status code from response object instead
                message = message.get('detail') or message  # Get details if available
                _logger.debug(message)
            except json.decoder.JSONDecodeError:
                message = r.text
            raise requests.HTTPError(r.status_code, rest_query, message, response=r)
691
+
692
    def authenticate(self, username=None, password=None, cache_token=True, force=False):
        """Fetch token from the Alyx REST API for authenticating request headers.

        Credentials are loaded via one.params.

        Parameters
        ----------
        username : str
            Alyx username. If None, token not cached and not silent, user is prompted.
        password : str
            Alyx password. If None, token not cached and not silent, user is prompted.
        cache_token : bool
            If true, the token is cached for subsequent auto-logins.
        force : bool
            If true, any cached token is ignored.

        """
        # Get username: explicit arg > stored ALYX_LOGIN param > current user > prompt
        if username is None:
            username = getattr(self._par, 'ALYX_LOGIN', self.user)
        if username is None and not self.silent:
            username = input('Enter Alyx username:')

        # If user passes in a password, force re-authentication even if token cached
        if password is not None:
            if not force:
                _logger.debug('Forcing token request with provided password')
                force = True
        # Check if token cached; if so, reuse it without any network request
        if not force and getattr(self._par, 'TOKEN', False) and username in self._par.TOKEN:
            self._token = self._par.TOKEN[username]
            self._headers = {
                'Authorization': f'Token {list(self._token.values())[0]}',
                'Accept': 'application/json'}
            self.user = username
            return

        # Get password: explicit arg > stored ALYX_PWD param > prompt (or warn when silent)
        if password is None:
            password = getattr(self._par, 'ALYX_PWD', None)
        if password is None:
            if self.silent:
                warnings.warn(
                    'No password or cached token in silent mode. '
                    'Please run the following to re-authenticate:\n\t'
                    'AlyxClient(silent=False).authenticate'
                    '(username=<username>, force=True)', UserWarning)
            else:
                password = getpass(f'Enter Alyx password for "{username}":')
        # Remove previous token
        self._clear_token(username)
        try:
            credentials = {'username': username, 'password': password}
            rep = requests.post(self.base_url + '/auth-token', data=credentials)
        except requests.exceptions.ConnectionError:
            raise ConnectionError(
                f'Can\'t connect to {self.base_url}.\n' +
                'Check your internet connections and Alyx database firewall'
            )
        # Assign token or raise exception on auth error
        if rep.ok:
            self._token = rep.json()
            assert list(self._token.keys()) == ['token']
        else:
            if rep.status_code == 400:  # Auth error; re-raise with details
                # Show the password length without revealing its contents
                redacted = '*' * len(credentials['password']) if credentials['password'] else None
                message = ('Alyx authentication failed with credentials: '
                           f'user = {credentials["username"]}, password = {redacted}')
                raise requests.HTTPError(rep.status_code, rep.url, message, response=rep)
            else:
                rep.raise_for_status()

        self._headers = {
            'Authorization': 'Token {}'.format(list(self._token.values())[0]),
            'Accept': 'application/json'}
        if cache_token:
            # Update saved pars
            par = one.params.get(client=self.base_url, silent=True)
            tokens = getattr(par, 'TOKEN', {})
            tokens[username] = self._token
            one.params.save(par.set('TOKEN', tokens), self.base_url)
            # Update current pars
            self._par = self._par.set('TOKEN', tokens)
        self.user = username
        if not self.silent:
            print(f'Connected to {self.base_url} as user "{self.user}"')
778
+
779
    def _clear_token(self, username):
        """Remove auth token from client params.

        Deletes the cached authentication token for a given user.
        """
        par = one.params.get(client=self.base_url, silent=True)
        # Remove token from cache (the saved on-disk parameters)
        if getattr(par, 'TOKEN', False) and username in par.TOKEN:
            del par.TOKEN[username]
            one.params.save(par, self.base_url)
        # Remove token from local pars (the in-memory parameters)
        if getattr(self._par, 'TOKEN', False) and username in self._par.TOKEN:
            del self._par.TOKEN[username]
        # Remove token from object, along with the auth header
        self._token = None
        if self._headers and 'Authorization' in self._headers:
            del self._headers['Authorization']
796
+
797
+ def logout(self):
798
+ """Log out from Alyx.
799
+
800
+ Deletes the cached authentication token for the currently logged-in user
801
+ and clears the REST cache.
802
+ """
803
+ if not self.is_logged_in:
804
+ return
805
+ self._clear_token(username := self.user)
806
+ self.user = None
807
+ self.clear_rest_cache()
808
+ if not self.silent:
809
+ print(f'{username} logged out from {self.base_url}')
810
+
811
+ def delete(self, rest_query):
812
+ """Send a DELETE request to the Alyx server.
813
+
814
+ Will raise an exception on any HTTP status code other than 200, 201.
815
+
816
+ Parameters
817
+ ----------
818
+ rest_query : str
819
+ A REST query string either as a relative URL path complete URL.
820
+
821
+ Returns
822
+ -------
823
+ JSON interpreted dictionary from response.
824
+
825
+ Examples
826
+ --------
827
+ >>> AlyxClient.delete('/weighings/c617562d-c107-432e-a8ee-682c17f9e698')
828
+ >>> AlyxClient.delete(
829
+ ... 'https://alyx.example.com/endpoint/c617562d-c107-432e-a8ee-682c17f9e698')
830
+
831
+ """
832
+ return self._generic_request(requests.delete, rest_query)
833
+
834
+ def download_file(self, url, **kwargs):
835
+ """Download file(s) from data server from a REST file record URL.
836
+
837
+ Parameters
838
+ ----------
839
+ url : str, list
840
+ Full url(s) of the file(s).
841
+ **kwargs
842
+ WebClient.http_download_file parameters.
843
+
844
+ Returns
845
+ -------
846
+ pathlib.Path, list of pathlib.Path
847
+ Local path(s) of downloaded file(s).
848
+
849
+ """
850
+ if isinstance(url, str):
851
+ url = self._validate_file_url(url)
852
+ download_fcn = http_download_file
853
+ else:
854
+ url = (self._validate_file_url(x) for x in url)
855
+ download_fcn = http_download_file_list
856
+ pars = dict(
857
+ silent=kwargs.pop('silent', self.silent),
858
+ target_dir=kwargs.pop('target_dir', self._par.CACHE_DIR),
859
+ username=self._par.HTTP_DATA_SERVER_LOGIN,
860
+ password=self._par.HTTP_DATA_SERVER_PWD,
861
+ **kwargs
862
+ )
863
+ try:
864
+ files = download_fcn(url, **pars)
865
+ except HTTPError as ex:
866
+ if ex.code == 401:
867
+ ex.msg += (' - please check your HTTP_DATA_SERVER_LOGIN and '
868
+ 'HTTP_DATA_SERVER_PWD ONE params, or username/password kwargs')
869
+ raise ex
870
+ return files
871
+
872
+ def download_cache_tables(self, source=None, destination=None):
873
+ """Download Alyx cache tables to the local data cache directory.
874
+
875
+ Parameters
876
+ ----------
877
+ source : str, pathlib.Path
878
+ The remote HTTP directory of the cache table (excluding the filename).
879
+ Default: AlyxClient.base_url.
880
+ destination : str, pathlib.Path
881
+ The target directory into to which the tables will be downloaded.
882
+
883
+ Returns
884
+ -------
885
+ List of parquet table file paths.
886
+
887
+ """
888
+ source = str(source or f'{self.base_url}/cache.zip')
889
+ destination = destination or self.cache_dir
890
+ Path(destination).mkdir(exist_ok=True, parents=True)
891
+
892
+ headers = None
893
+ if source.startswith(self.base_url):
894
+ if not self.is_logged_in:
895
+ self.authenticate()
896
+ headers = self._headers
897
+
898
+ with tempfile.TemporaryDirectory(dir=destination) as tmp:
899
+ file = http_download_file(source,
900
+ headers=headers,
901
+ silent=self.silent,
902
+ target_dir=tmp,
903
+ clobber=True)
904
+ with zipfile.ZipFile(file, 'r') as zipped:
905
+ files = zipped.namelist()
906
+ zipped.extractall(destination)
907
+ return [Path(destination, table) for table in files]
908
+
909
+ def _validate_file_url(self, url):
910
+ """Assert that URL matches HTTP_DATA_SERVER parameter.
911
+
912
+ Currently only one remote HTTP server is supported for a given AlyxClient instance. If
913
+ the URL contains only the relative path part, the full URL is returned.
914
+
915
+ Parameters
916
+ ----------
917
+ url : str
918
+ The full or partial URL to validate.
919
+
920
+ Returns
921
+ -------
922
+ The complete URL.
923
+
924
+ Examples
925
+ --------
926
+ >>> url = self._validate_file_url('https://webserver.net/path/to/file')
927
+ 'https://webserver.net/path/to/file'
928
+ >>> url = self._validate_file_url('path/to/file')
929
+ 'https://webserver.net/path/to/file'
930
+
931
+ """
932
+ if url.startswith('http'): # A full URL
933
+ assert url.startswith(self._par.HTTP_DATA_SERVER), \
934
+ ('remote protocol and/or hostname does not match HTTP_DATA_SERVER parameter:\n' +
935
+ f'"{url[:40]}..." should start with "{self._par.HTTP_DATA_SERVER}"')
936
+ elif not url.startswith(self._par.HTTP_DATA_SERVER):
937
+ url = self.rel_path2url(url)
938
+ return url
939
+
940
+ def rel_path2url(self, path):
941
+ """Given a relative file path, return the remote HTTP server URL.
942
+
943
+ It is expected that the remote HTTP server has the same file tree as the local system.
944
+
945
+ Parameters
946
+ ----------
947
+ path : str, pathlib.Path
948
+ A relative ALF path (subject/date/number/etc.).
949
+
950
+ Returns
951
+ -------
952
+ A URL string.
953
+
954
+ """
955
+ path = str(path).strip('/')
956
+ assert not path.startswith('http')
957
+ return f'{self._par.HTTP_DATA_SERVER}/{path}'
958
+
959
+ def get(self, rest_query, **kwargs):
960
+ """Send a GET request to the Alyx server.
961
+
962
+ Will raise an exception on any HTTP status code other than 200, 201.
963
+
964
+ For the dictionary contents and list of endpoints, refer to:
965
+ https://openalyx.internationalbrainlab.org/docs
966
+
967
+ Parameters
968
+ ----------
969
+ rest_query : str
970
+ A REST URL path, e.g. '/sessions?user=Hamish'.
971
+ **kwargs
972
+ Optional arguments to pass to _generic_request and _cache_response decorator.
973
+
974
+ Returns
975
+ -------
976
+ JSON interpreted dictionary from response.
977
+
978
+ """
979
+ rep = self._generic_request(requests.get, rest_query, **kwargs)
980
+ if isinstance(rep, dict) and list(rep.keys()) == ['count', 'next', 'previous', 'results']:
981
+ if len(rep['results']) < rep['count']:
982
+ cache_args = {k: v for k, v in kwargs.items() if k in ('clobber', 'expires')}
983
+ rep = _PaginatedResponse(self, rep, cache_args)
984
+ else:
985
+ rep = rep['results']
986
+ return rep
987
+
988
+ def patch(self, rest_query, data=None, files=None):
989
+ """Send a PATCH request to the Alyx server.
990
+
991
+ For the dictionary contents, refer to:
992
+ https://openalyx.internationalbrainlab.org/docs
993
+
994
+ Parameters
995
+ ----------
996
+ rest_query : str
997
+ The endpoint as full or relative URL.
998
+ data : dict, str
999
+ JSON encoded string or dictionary (c.f. requests).
1000
+ files : dict, tuple
1001
+ Files to attach (c.f. requests).
1002
+
1003
+ Returns
1004
+ -------
1005
+ Response object.
1006
+
1007
+ """
1008
+ return self._generic_request(requests.patch, rest_query, data=data, files=files)
1009
+
1010
+ def post(self, rest_query, data=None, files=None):
1011
+ """Send a POST request to the Alyx server.
1012
+
1013
+ For the dictionary contents, refer to:
1014
+ https://openalyx.internationalbrainlab.org/docs
1015
+
1016
+ Parameters
1017
+ ----------
1018
+ rest_query : str
1019
+ The endpoint as full or relative URL.
1020
+ data : dict, str
1021
+ JSON encoded string or dictionary (c.f. requests).
1022
+ files : dict, tuple
1023
+ Files to attach (c.f. requests).
1024
+
1025
+ Returns
1026
+ -------
1027
+ Response object.
1028
+
1029
+ """
1030
+ return self._generic_request(requests.post, rest_query, data=data, files=files)
1031
+
1032
+ def put(self, rest_query, data=None, files=None):
1033
+ """Send a PUT request to the Alyx server.
1034
+
1035
+ For the dictionary contents, refer to:
1036
+ https://openalyx.internationalbrainlab.org/docs
1037
+
1038
+ Parameters
1039
+ ----------
1040
+ rest_query : str
1041
+ The endpoint as full or relative URL.
1042
+ data : dict, str
1043
+ JSON encoded string or dictionary (c.f. requests).
1044
+ files : dict, tuple
1045
+ Files to attach (c.f. requests).
1046
+
1047
+ Returns
1048
+ -------
1049
+ requests.Response
1050
+ Response object.
1051
+
1052
+ """
1053
+ return self._generic_request(requests.put, rest_query, data=data, files=files)
1054
+
1055
    def rest(self, url=None, action=None, id=None, data=None, files=None,
             no_cache=False, **kwargs):
        """Alyx REST API wrapper.

        If no arguments are passed, lists available endpoints.

        Parameters
        ----------
        url : str
            Endpoint name.
        action : str
            One of 'list', 'create', 'read', 'update', 'partial_update', 'delete'.
        id : str, uuid.UUID
            Lookup string for actions 'read', 'update', 'partial_update', and 'delete'.
        data : dict
            Data dictionary for actions 'update', 'partial_update' and 'create'.
        files : dict, tuple
            Option file(s) to upload.
        no_cache : bool
            If true the `list` and `read` actions are performed without returning the cache.
        kwargs
            Filters as per the Alyx REST documentation
            c.f. https://openalyx.internationalbrainlab.org/docs/

        Returns
        -------
        list, dict
            List of queried dicts ('list') or dict (other actions).

        Examples
        --------
        List available endpoint

        >>> client = AlyxClient()
        ... client.rest()

        List available actions for the 'subjects' endpoint

        >>> client.rest('subjects')

        Example REST endpoint with all actions

        >>> client.rest('subjects', 'list')
        >>> client.rest('subjects', 'list', field_filter1='filterval')
        >>> client.rest('subjects', 'create', data=sub_dict)
        >>> client.rest('subjects', 'read', id='nickname')
        >>> client.rest('subjects', 'update', id='nickname', data=sub_dict)
        >>> client.rest('subjects', 'partial_update', id='nickname', data=sub_dict)
        >>> client.rest('subjects', 'delete', id='nickname')
        >>> client.rest('notes', 'create', data=nd, files={'image': open(image_file, 'rb')})

        """
        # if endpoint is None, list available endpoints
        if not url:
            pprint(self.list_endpoints())
            return
        # remove beginning slash if any
        if url.startswith('/'):
            url = url[1:]
        # and split to the next slash or question mark
        endpoint = re.findall("^/*[^?/]*", url)[0].replace('/', '')
        # make sure the queried endpoint exists, if not throw an informative error
        if endpoint not in self.rest_schemes.keys():
            av = [k for k in self.rest_schemes.keys() if not k.startswith('_') and k]
            raise ValueError('REST endpoint "' + endpoint + '" does not exist. Available ' +
                             'endpoints are \n       ' + '\n       '.join(av))
        endpoint_scheme = self.rest_schemes[endpoint]
        # on a filter request, override the default action parameter
        if '?' in url:
            action = 'list'
        # if action is None, list available actions for the required endpoint
        if not action:
            pprint(list(endpoint_scheme.keys()))
            self.print_endpoint_info(endpoint)
            return
        # make sure the desired action exists, if not throw an informative error
        if action not in endpoint_scheme:
            raise ValueError('Action "' + action + '" for REST endpoint "' + endpoint + '" does ' +
                             'not exist. Available actions are: ' +
                             '\n       ' + '\n       '.join(endpoint_scheme.keys()))
        # the actions below require an id in the URL, warn and help the user
        if action in ['read', 'update', 'partial_update', 'delete'] and not id:
            _logger.warning('REST action "' + action + '" requires an ID in the URL: ' +
                            endpoint_scheme[action]['url'])
            return
        # the actions below require a data dictionary, warn and help the user with fields list
        data_required = 'fields' in endpoint_scheme[action]
        if action in ['create', 'update', 'partial_update'] and data_required and not data:
            pprint(endpoint_scheme[action]['fields'])
            for act in endpoint_scheme[action]['fields']:
                print("'" + act['name'] + "': ...,")
            _logger.warning('REST action "' + action + '" requires a data dict with above keys')
            return

        # clobber=True means remote request always made, expires=True means response is not cached
        cache_args = {'clobber': no_cache, 'expires': kwargs.pop('expires', False) or no_cache}
        if action == 'list':
            # the list action requires neither an id nor a data dict
            assert endpoint_scheme[action]['action'] == 'get'
            # add to url data if it is a string
            if id:
                # this is a special case of the list where we query a uuid. Usually read is better
                if 'django' in kwargs.keys():
                    kwargs['django'] = kwargs['django'] + ','
                else:
                    kwargs['django'] = ''
                kwargs['django'] = f"{kwargs['django']}pk,{id}"
            # otherwise, look for a dictionary of filter terms
            if kwargs:
                # Convert all lists in query params to comma separated list
                query_params = {k: ','.join(map(str, ensure_list(v))) for k, v in kwargs.items()}
                url = update_url_params(url, query_params)
            return self.get('/' + url, **cache_args)
        if not isinstance(id, str) and id is not None:
            id = str(id)  # e.g. may be uuid.UUID
        if action == 'read':
            assert endpoint_scheme[action]['action'] == 'get'
            return self.get('/' + endpoint + '/' + id.split('/')[-1], **cache_args)
        elif action == 'create':
            assert endpoint_scheme[action]['action'] == 'post'
            return self.post('/' + endpoint, data=data, files=files)
        elif action == 'delete':
            assert endpoint_scheme[action]['action'] == 'delete'
            return self.delete('/' + endpoint + '/' + id.split('/')[-1])
        elif action == 'partial_update':
            assert endpoint_scheme[action]['action'] == 'patch'
            return self.patch('/' + endpoint + '/' + id.split('/')[-1], data=data, files=files)
        elif action == 'update':
            assert endpoint_scheme[action]['action'] == 'put'
            return self.put('/' + endpoint + '/' + id.split('/')[-1], data=data, files=files)
1185
+
1186
+ # JSON field interface convenience methods
1187
+ def _check_inputs(self, endpoint: str) -> None:
1188
+ # make sure the queried endpoint exists, if not throw an informative error
1189
+ if endpoint not in self.rest_schemes.keys():
1190
+ av = (k for k in self.rest_schemes.keys() if not k.startswith('_') and k)
1191
+ raise ValueError('REST endpoint "' + endpoint + '" does not exist. Available ' +
1192
+ 'endpoints are \n ' + '\n '.join(av))
1193
+ return
1194
+
1195
+ def json_field_write(
1196
+ self,
1197
+ endpoint: str = None,
1198
+ uuid: str = None,
1199
+ field_name: str = None,
1200
+ data: dict = None
1201
+ ) -> dict:
1202
+ """Write data to JSON field.
1203
+
1204
+ NOTE: Destructive write! WILL NOT CHECK IF DATA EXISTS
1205
+
1206
+ Parameters
1207
+ ----------
1208
+ endpoint : str, None
1209
+ Valid alyx endpoint, defaults to None.
1210
+ uuid : str, uuid.UUID, None
1211
+ UUID or lookup name for endpoint.
1212
+ field_name : str, None
1213
+ Valid json field name, defaults to None.
1214
+ data : dict, None
1215
+ Data to write to json field, defaults to None.
1216
+
1217
+ Returns
1218
+ -------
1219
+ dict
1220
+ Written data dict.
1221
+
1222
+ """
1223
+ self._check_inputs(endpoint)
1224
+ # Prepare data to patch
1225
+ patch_dict = {field_name: data}
1226
+ # Upload new extended_qc to session
1227
+ ret = self.rest(endpoint, 'partial_update', id=uuid, data=patch_dict)
1228
+ return ret[field_name]
1229
+
1230
+ def json_field_update(
1231
+ self,
1232
+ endpoint: str = None,
1233
+ uuid: str = None,
1234
+ field_name: str = 'json',
1235
+ data: dict = None
1236
+ ) -> dict:
1237
+ """Non-destructive update of JSON field of endpoint for object.
1238
+
1239
+ Will update the field_name of the object with pk = uuid of given endpoint
1240
+ If data has keys with the same name of existing keys it will squash the old
1241
+ values (uses the dict.update() method).
1242
+
1243
+ Parameters
1244
+ ----------
1245
+ endpoint : str
1246
+ Alyx REST endpoint to hit.
1247
+ uuid : str, uuid.UUID
1248
+ UUID or lookup name of object.
1249
+ field_name : str
1250
+ Name of the json field.
1251
+ data : dict
1252
+ A dictionary with fields to be updated.
1253
+
1254
+ Returns
1255
+ -------
1256
+ dict
1257
+ New patched json field contents as dict.
1258
+
1259
+ Examples
1260
+ --------
1261
+ >>> client = AlyxClient()
1262
+ >>> client.json_field_update('sessions', 'eid_str', 'extended_qc', {'key': 'value'})
1263
+
1264
+ """
1265
+ self._check_inputs(endpoint)
1266
+ # Load current json field contents
1267
+ current = self.rest(endpoint, 'read', id=uuid)[field_name]
1268
+ if current is None:
1269
+ current = {}
1270
+
1271
+ if not isinstance(current, dict):
1272
+ _logger.warning(
1273
+ f'Current json field "{field_name}" does not contains a dict, aborting update'
1274
+ )
1275
+ return current
1276
+
1277
+ # Patch current dict with new data
1278
+ current.update(data)
1279
+ # Prepare data to patch
1280
+ patch_dict = {field_name: current}
1281
+ # Upload new extended_qc to session
1282
+ ret = self.rest(endpoint, 'partial_update', id=uuid, data=patch_dict)
1283
+ return ret[field_name]
1284
+
1285
+ def json_field_remove_key(
1286
+ self,
1287
+ endpoint: str = None,
1288
+ uuid: str = None,
1289
+ field_name: str = 'json',
1290
+ key: str = None
1291
+ ) -> Optional[dict]:
1292
+ """Remove inputted key from JSON field dict and re-upload it to Alyx.
1293
+
1294
+ Needs endpoint, UUID and json field name.
1295
+
1296
+ Parameters
1297
+ ----------
1298
+ endpoint : str
1299
+ Endpoint to hit, defaults to None.
1300
+ uuid : str, uuid.UUID
1301
+ UUID or lookup name for endpoint.
1302
+ field_name : str
1303
+ JSON field name of object, defaults to None.
1304
+ key : str
1305
+ Key name of dictionary inside object, defaults to None.
1306
+
1307
+ Returns
1308
+ -------
1309
+ dict
1310
+ New content of json field.
1311
+
1312
+ """
1313
+ self._check_inputs(endpoint)
1314
+ current = self.rest(endpoint, 'read', id=uuid)[field_name]
1315
+ # If no contents, cannot remove key, return
1316
+ if current is None:
1317
+ return current
1318
+ # if contents are not dict, cannot remove key, return contents
1319
+ if isinstance(current, str):
1320
+ _logger.warning(f'Cannot remove key {key} content of json field is of type str')
1321
+ return None
1322
+ # If key not present in contents of json field cannot remove key, return contents
1323
+ if current.get(key, None) is None:
1324
+ _logger.warning(
1325
+ f'{key}: Key not found in endpoint {endpoint} field {field_name}'
1326
+ )
1327
+ return current
1328
+ _logger.info(f'Removing key from dict: "{key}"')
1329
+ current.pop(key)
1330
+ # Re-write contents without removed key
1331
+ written = self.json_field_write(
1332
+ endpoint=endpoint, uuid=uuid, field_name=field_name, data=current
1333
+ )
1334
+ return written
1335
+
1336
+ def json_field_delete(
1337
+ self, endpoint: str = None, uuid: str = None, field_name: str = None
1338
+ ) -> None:
1339
+ """Set an entire field to null.
1340
+
1341
+ Note that this deletes all data from a given field. To delete only a single key from a
1342
+ given JSON field, use `json_field_remove_key`.
1343
+
1344
+ Parameters
1345
+ ----------
1346
+ endpoint : str
1347
+ Endpoint to hit, defaults to None.
1348
+ uuid : str, uuid.UUID
1349
+ UUID or lookup name for endpoint.
1350
+ field_name : str
1351
+ The field name of object (e.g. 'json', 'name', 'extended_qc'), defaults to None.
1352
+
1353
+ Returns
1354
+ -------
1355
+ None
1356
+ New content of json field.
1357
+
1358
+ """
1359
+ self._check_inputs(endpoint)
1360
+ _ = self.rest(endpoint, 'partial_update', id=uuid, data={field_name: None})
1361
+ return _[field_name]
1362
+
1363
+ def clear_rest_cache(self):
1364
+ """Clear all REST response cache files for the base url."""
1365
+ for file in self.cache_dir.joinpath('.rest').glob('*'):
1366
+ file.unlink()