ONE-api 3.0b3__py3-none-any.whl → 3.0b5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33) hide show
  1. {ONE_api-3.0b3.dist-info → ONE_api-3.0b5.dist-info}/LICENSE +21 -21
  2. {ONE_api-3.0b3.dist-info → ONE_api-3.0b5.dist-info}/METADATA +115 -115
  3. ONE_api-3.0b5.dist-info/RECORD +37 -0
  4. one/__init__.py +2 -2
  5. one/alf/__init__.py +1 -1
  6. one/alf/cache.py +640 -653
  7. one/alf/exceptions.py +105 -105
  8. one/alf/io.py +876 -876
  9. one/alf/path.py +1450 -1450
  10. one/alf/spec.py +519 -519
  11. one/api.py +2979 -2973
  12. one/converters.py +850 -850
  13. one/params.py +414 -414
  14. one/registration.py +845 -845
  15. one/remote/__init__.py +1 -1
  16. one/remote/aws.py +313 -313
  17. one/remote/base.py +142 -142
  18. one/remote/globus.py +1254 -1254
  19. one/tests/fixtures/params/.caches +6 -6
  20. one/tests/fixtures/params/.test.alyx.internationalbrainlab.org +8 -8
  21. one/tests/fixtures/rest_responses/1f187d80fd59677b395fcdb18e68e4401bfa1cc9 +1 -1
  22. one/tests/fixtures/rest_responses/47893cf67c985e6361cdee009334963f49fb0746 +1 -1
  23. one/tests/fixtures/rest_responses/535d0e9a1e2c1efbdeba0d673b131e00361a2edb +1 -1
  24. one/tests/fixtures/rest_responses/6dc96f7e9bcc6ac2e7581489b9580a6cd3f28293 +1 -1
  25. one/tests/fixtures/rest_responses/db1731fb8df0208944ae85f76718430813a8bf50 +1 -1
  26. one/tests/fixtures/rest_responses/dcce48259bb929661f60a02a48563f70aa6185b3 +1 -1
  27. one/tests/fixtures/rest_responses/f530d6022f61cdc9e38cc66beb3cb71f3003c9a1 +1 -1
  28. one/tests/fixtures/test_dbs.json +14 -14
  29. one/util.py +524 -524
  30. one/webclient.py +1368 -1354
  31. ONE_api-3.0b3.dist-info/RECORD +0 -37
  32. {ONE_api-3.0b3.dist-info → ONE_api-3.0b5.dist-info}/WHEEL +0 -0
  33. {ONE_api-3.0b3.dist-info → ONE_api-3.0b5.dist-info}/top_level.txt +0 -0
one/webclient.py CHANGED
@@ -1,1354 +1,1368 @@
1
- """API for interacting with a remote Alyx instance through REST.
2
-
3
- The AlyxClient class contains methods for making remote Alyx REST queries and downloading remote
4
- files through Alyx.
5
-
6
- Examples
7
- --------
8
- >>> alyx = AlyxClient(
9
- ... username='test_user', password='TapetesBloc18',
10
- ... base_url='https://test.alyx.internationalbrainlab.org')
11
-
12
- List subjects
13
-
14
- >>> subjects = alyx.rest('subjects', 'list')
15
-
16
- Create a subject
17
-
18
- >>> record = {
19
- ... 'nickname': nickname,
20
- ... 'responsible_user': 'olivier',
21
- ... 'birth_date': '2019-06-15',
22
- ... 'death_date': None,
23
- ... 'lab': 'cortexlab',
24
- ... }
25
- >>> new_subj = alyx.rest('subjects', 'create', data=record)
26
-
27
- Download a remote file, given a local path
28
-
29
- >>> url = 'zadorlab/Subjects/flowers/2018-07-13/1/channels.probe.npy'
30
- >>> local_path = alyx.download_file(url, target_dir='zadorlab/Subjects/flowers/2018-07-13/1/')
31
-
32
- """
33
- import json
34
- import logging
35
- import math
36
- import re
37
- import functools
38
- import urllib.request
39
- from urllib.error import HTTPError
40
- import urllib.parse
41
- from collections.abc import Mapping
42
- from typing import Optional
43
- from datetime import datetime, timedelta
44
- from pathlib import Path
45
- from weakref import ReferenceType
46
- import warnings
47
- import hashlib
48
- import zipfile
49
- import tempfile
50
- from getpass import getpass
51
- from contextlib import contextmanager
52
-
53
- import requests
54
- from tqdm import tqdm
55
-
56
- from pprint import pprint
57
- import one.params
58
- from iblutil.io import hashfile
59
- from iblutil.io.params import set_hidden
60
- from iblutil.util import ensure_list
61
- import concurrent.futures
62
- _logger = logging.getLogger(__name__)
63
-
64
-
65
def _cache_response(method):
    """Decorator for the generic request method for caching REST responses.

    Caches the result of the query and on subsequent calls, returns cache instead of hitting the
    database.

    Parameters
    ----------
    method : function
        Function to wrap (i.e. AlyxClient._generic_request).

    Returns
    -------
    function
        Handle to wrapped method.

    """

    @functools.wraps(method)
    def wrapper_decorator(alyx_client, *args, expires=None, clobber=False, **kwargs):
        """REST caching wrapper.

        Parameters
        ----------
        alyx_client : AlyxClient
            An instance of the AlyxClient class.
        args : any
            Positional arguments for applying to wrapped function.
        expires : bool, datetime.timedelta
            An optional timedelta for how long cached response is valid. If True, the cached
            response will not be used on subsequent calls. If None, the default expiry is applied.
        clobber : bool
            If True any existing cached response is overwritten.
        **kwargs
            Keyword arguments for applying to wrapped function.

        Returns
        -------
        dict
            The REST response JSON either from cached file or directly from remote.

        """
        # NOTE(review): a falsy `expires` (e.g. False or timedelta(0)) is replaced by the
        # client's default expiry here — confirm this is the intended way to disable caching
        expires = expires or alyx_client.default_expiry
        mode = (alyx_client.cache_mode or '').casefold()
        # args[0] is the requests function (e.g. requests.get); only cache configured methods
        if args[0].__name__ != mode and mode != '*':
            return method(alyx_client, *args, **kwargs)
        # Check cache; the cache file name is the SHA-1 hash of the query URL (args[1])
        rest_cache = alyx_client.cache_dir.joinpath('.rest')
        sha1 = hashlib.sha1()
        sha1.update(bytes(args[1], 'utf-8'))
        name = sha1.hexdigest()
        # Reversible but length may exceed 255 chars
        # name = base64.urlsafe_b64encode(args[2].encode('UTF-8')).decode('UTF-8')
        files = list(rest_cache.glob(name))
        cached = None
        if len(files) == 1 and not clobber:
            _logger.debug('loading REST response from cache')
            with open(files[0], 'r') as f:
                cached, when = json.load(f)
            # Only return the cached response while it is still valid
            if datetime.fromisoformat(when) > datetime.now():
                return cached
        try:
            response = method(alyx_client, *args, **kwargs)
        except requests.exceptions.ConnectionError as ex:
            # Fall back on a stale cached response when the database is unreachable
            if cached and not clobber:
                warnings.warn('Failed to connect, returning cached response', RuntimeWarning)
                return cached
            raise ex  # No cache and can't connect to database; re-raise

        # Save response into cache
        if not rest_cache.exists():
            rest_cache.mkdir(parents=True)
            rest_cache = set_hidden(rest_cache, True)

        _logger.debug('caching REST response')
        # expires=True writes an already-expired timestamp: cached but never read back
        expiry_datetime = datetime.now() + (timedelta() if expires is True else expires)
        with open(rest_cache / name, 'w') as f:
            json.dump((response, expiry_datetime.isoformat()), f)
        return response

    return wrapper_decorator
146
-
147
-
148
@contextmanager
def no_cache(ac=None):
    """Temporarily turn off the REST cache for a given Alyx instance.

    Particularly useful when calling ONE methods in remote mode.

    Parameters
    ----------
    ac : AlyxClient
        The AlyxClient instance to modify. If None, a new client is instantiated.

    Yields
    ------
    AlyxClient
        The instance of Alyx with its cache disabled.

    Examples
    --------
    >>> from one.api import ONE
    >>> with no_cache(ONE().alyx):
    ...     eids = ONE().search(subject='foobar', query_type='remote')

    """
    ac = AlyxClient() if not ac else ac
    # Stash the current mode and disable caching for the duration of the context
    previous_mode, ac.cache_mode = ac.cache_mode, None
    try:
        yield ac
    finally:
        # Always restore the original cache mode, even on error
        ac.cache_mode = previous_mode
178
-
179
-
180
- class _PaginatedResponse(Mapping):
181
- """Emulate a list from a paginated response.
182
-
183
- Provides cache functionality.
184
-
185
- Examples
186
- --------
187
- >>> r = _PaginatedResponse(client, response)
188
-
189
- """
190
-
191
- def __init__(self, alyx, rep, cache_args=None):
192
- """Emulate a list from a paginated response.
193
-
194
- Parameters
195
- ----------
196
- alyx : AlyxClient
197
- An instance of an AlyxClient associated with the REST response
198
- rep : dict
199
- A paginated REST response JSON dictionary
200
- cache_args : dict
201
- A dict of kwargs to pass to _cache_response decorator upon subsequent requests
202
-
203
- """
204
- self.alyx = alyx
205
- self.count = rep['count']
206
- self.limit = len(rep['results'])
207
- self._cache_args = cache_args or {}
208
- # store URL without pagination query params
209
- self.query = rep['next']
210
- # init the cache, list with None with count size
211
- self._cache = [None] * self.count
212
- # fill the cache with results of the query
213
- for i in range(self.limit):
214
- self._cache[i] = rep['results'][i]
215
- self._callbacks = set()
216
-
217
- def add_callback(self, cb):
218
- """Add a callback function to use each time a new page is fetched.
219
-
220
- The callback function will be called with the page results each time :meth:`populate`
221
- is called.
222
-
223
- Parameters
224
- ----------
225
- cb : callable
226
- A callable that takes the results of each paginated resonse.
227
-
228
- """
229
- if not callable(cb):
230
- raise TypeError(f'Expected type "callable", got "{type(cb)}" instead')
231
- else:
232
- self._callbacks.add(cb)
233
-
234
- def __len__(self):
235
- return self.count
236
-
237
- def __getitem__(self, item):
238
- if isinstance(item, slice):
239
- while None in self._cache[item]:
240
- # If slice start index is -ve, convert to +ve index
241
- i = self.count + item.start if item.start < 0 else item.start
242
- self.populate(i + self._cache[item].index(None))
243
- elif self._cache[item] is None:
244
- # If index is -ve, convert to +ve
245
- self.populate(self.count + item if item < 0 else item)
246
- return self._cache[item]
247
-
248
- def populate(self, idx):
249
- """Populate response cache with new page of results.
250
-
251
- Fetches the specific page of results containing the index passed and populates
252
- stores the results in the :prop:`_cache` property.
253
-
254
- Parameters
255
- ----------
256
- idx : int
257
- The index of a given record to fetch.
258
-
259
- """
260
- offset = self.limit * math.floor(idx / self.limit)
261
- query = update_url_params(self.query, {'limit': self.limit, 'offset': offset})
262
- res = self.alyx._generic_request(requests.get, query, **self._cache_args)
263
- if self.count != res['count']:
264
- warnings.warn(
265
- f'remote results for {urllib.parse.urlsplit(query).path} endpoint changed; '
266
- f'results may be inconsistent', RuntimeWarning)
267
- for i, r in enumerate(res['results'][:self.count - offset]):
268
- self._cache[i + offset] = res['results'][i]
269
- # Notify callbacks
270
- pending_removal = []
271
- for callback in self._callbacks:
272
- # Handle weak reference callbacks first
273
- if isinstance(callback, ReferenceType):
274
- wf = callback
275
- if (callback := wf()) is None:
276
- pending_removal.append(wf)
277
- continue
278
- callback(res['results'])
279
- for wf in pending_removal:
280
- self._callbacks.discard(wf)
281
- # When cache is complete, clear our callbacks
282
- if all(reversed(self._cache)):
283
- self._callbacks.clear()
284
-
285
- def __iter__(self):
286
- for i in range(self.count):
287
- yield self.__getitem__(i)
288
-
289
-
290
def update_url_params(url: str, params: dict) -> str:
    """Add/update the query parameters of a URL and make url safe.

    Parameters
    ----------
    url : str
        A URL string with which to update the query parameters
    params : dict
        A dict of new parameters. For multiple values for the same query, use a list (see example)

    Returns
    -------
    str
        A new URL with said parameters updated

    Examples
    --------
    >>> update_url_params('website.com/?q=', {'pg': 5})
    'website.com/?pg=5'

    >>> update_url_params('website.com?q=xxx', {'pg': 5, 'foo': ['bar', 'baz']})
    'website.com?q=xxx&pg=5&foo=bar&foo=baz'

    """
    # Strip percent-encoding before parsing into its components
    parsed = urllib.parse.urlsplit(urllib.parse.unquote(url))
    # Existing query arguments as a dict; blank values are dropped
    query_args = urllib.parse.parse_qs(parsed.query, keep_blank_values=False)
    # Overlay the new parameters on top of the existing ones
    query_args.update(params)
    # Re-encode (doseq expands list values into repeated keys) and rebuild the URL
    encoded = urllib.parse.urlencode(query_args, doseq=True)
    return parsed._replace(query=encoded).geturl()
325
-
326
-
327
def http_download_file_list(links_to_file_list, **kwargs):
    """Download a list of files from a remote HTTP server from a list of links.

    Generates up to 4 separate threads to handle downloads.
    Same options behaviour as http_download_file.

    Parameters
    ----------
    links_to_file_list : list
        List of http links to files.
    **kwargs
        Optional arguments to pass to http_download_file.

    Returns
    -------
    list of pathlib.Path
        A list of the local full path of the downloaded files.

    """
    links_to_file_list = list(links_to_file_list)  # In case generator was passed
    n_threads = 4  # Max number of threads
    outputs = []
    target_dir = kwargs.pop('target_dir', None)
    # Broadcast a scalar target dir to the length of the url list
    if target_dir is None or isinstance(target_dir, (str, Path)):
        target_dir = [target_dir] * len(links_to_file_list)
    assert len(target_dir) == len(links_to_file_list)
    zipped = zip(links_to_file_list, target_dir)
    # using with statement to ensure threads are cleaned up promptly
    with concurrent.futures.ThreadPoolExecutor(max_workers=n_threads) as executor:
        # Multithreading load operations
        futures = [executor.submit(
            http_download_file, link, target_dir=target, **kwargs) for link, target in zipped]
        # NOTE: a dead no-op statement re-popping 'target_dir' was removed here
        # TODO Reintroduce variable timeout value based on file size and download speed of 5 Mb/s?
        # timeout = reduce(lambda x, y: x + (y.get('file_size', 0) or 0), dsets, 0) / 625000 ?
        concurrent.futures.wait(futures, timeout=None)
    # build return list; result() re-raises any exception from the worker thread
    for future in futures:
        outputs.append(future.result())
    # if returning md5, separate list of tuples into two lists: (files, md5)
    return list(zip(*outputs)) if kwargs.get('return_md5', False) else outputs
369
-
370
-
371
def http_download_file(full_link_to_file, chunks=None, *, clobber=False, silent=False,
                       username='', password='', target_dir='', return_md5=False, headers=None):
    """Download a file from a remote HTTP server.

    Parameters
    ----------
    full_link_to_file : str
        HTTP link to the file
    chunks : tuple of ints
        Chunks to download as (first byte, number of bytes)
    clobber : bool
        If True, force overwrite the existing file
    silent : bool
        If True, suppress download progress bar
    username : str
        User authentication for password protected file server
    password : str
        Password authentication for password protected file server
    target_dir : str, pathlib.Path
        Directory in which files are downloaded; defaults to user's Download directory
    return_md5 : bool
        If True an MD5 hash of the file is additionally returned
    headers : dict
        Additional headers to add to the request (auth tokens etc.)

    Returns
    -------
    pathlib.Path
        The full file path of the downloaded file
    str
        The file's MD5 hex digest (only if `return_md5` is True)

    """
    if not full_link_to_file:
        return (None, None) if return_md5 else None

    # makes sure special characters get encoded ('#' in file names for example)
    surl = urllib.parse.urlsplit(full_link_to_file, allow_fragments=False)
    full_link_to_file = surl._replace(path=urllib.parse.quote(surl.path)).geturl()

    # default download directory is the user's Downloads folder
    if not target_dir:
        target_dir = Path.home().joinpath('Downloads')

    # This should be the base url you wanted to access.
    base_url, name = full_link_to_file.rsplit('/', 1)
    file_name = Path(target_dir, name)

    # do not overwrite an existing file unless specified
    if not clobber and file_name.exists():
        return (file_name, hashfile.md5(file_name)) if return_md5 else file_name

    # Create a password manager
    manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    if username and password:
        manager.add_password(None, base_url, username, password)

    # Create an authentication handler using the password manager
    auth = urllib.request.HTTPBasicAuthHandler(manager)

    # Create an opener that will replace the default urlopen method on further calls
    opener = urllib.request.build_opener(auth)
    urllib.request.install_opener(opener)

    # Support for partial download.
    req = urllib.request.Request(full_link_to_file)
    if chunks is not None:
        first_byte, n_bytes = chunks
        req.add_header('Range', 'bytes=%d-%d' % (first_byte, first_byte + n_bytes - 1))

    # add additional headers
    if headers is not None:
        for k in headers:
            req.add_header(k, headers[k])

    # Open the url and get the length
    try:
        u = urllib.request.urlopen(req)
    except HTTPError as e:
        _logger.error(f'{str(e)} {full_link_to_file}')
        raise e

    file_size = int(u.getheader('Content-length'))
    if not silent:
        print(f'Downloading: {file_name} Bytes: {file_size}')
    block_sz = 8192 * 64 * 8

    md5 = hashlib.md5()
    # Context managers guarantee the file handle and progress bar are closed
    # even if a read/write raises part-way through (previously the handle leaked)
    with open(file_name, 'wb') as f, \
            tqdm(total=file_size / 1024 / 1024, disable=silent) as pbar:
        while True:
            buffer = u.read(block_sz)
            if not buffer:
                break
            f.write(buffer)
            if return_md5:
                md5.update(buffer)
            pbar.update(len(buffer) / 1024 / 1024)

    return (file_name, md5.hexdigest()) if return_md5 else file_name
470
-
471
-
472
def file_record_to_url(file_records) -> list:
    """Translate a JSON dictionary to a usable http url for downloading files.

    Parameters
    ----------
    file_records : dict
        JSON containing a 'data_url' field

    Returns
    -------
    list of str
        A list of full data urls

    """
    # Keep only records with a resolvable remote location
    return [record['data_url'] for record in file_records if record['data_url'] is not None]
491
-
492
-
493
def dataset_record_to_url(dataset_record) -> list:
    """Extract a list of files urls from a list of dataset queries.

    Parameters
    ----------
    dataset_record : list, dict
        Dataset JSON from a REST request

    Returns
    -------
    list of str
        A list of file urls corresponding to the datasets records

    """
    # Accept a single record or a list of records
    records = [dataset_record] if isinstance(dataset_record, dict) else dataset_record
    urls = []
    for record in records:
        urls.extend(file_record_to_url(record['file_records']))
    return urls
513
-
514
-
515
- class AlyxClient:
516
- """Class that implements simple GET/POST wrappers for the Alyx REST API.
517
-
518
- See https://openalyx.internationalbrainlab.org/docs
519
- """
520
-
521
- _token = None
522
- _headers = {} # Headers for REST requests only
523
- user = None
524
- """str: The Alyx username."""
525
- base_url = None
526
- """str: The Alyx database URL."""
527
-
528
    def __init__(self, base_url=None, username=None, password=None,
                 cache_dir=None, silent=False, cache_rest='GET'):
        """Create a client instance that allows to GET and POST to the Alyx server.

        For One, constructor attempts to authenticate with credentials in params.py.
        For standalone cases, AlyxClient(username='', password='', base_url='').

        Parameters
        ----------
        base_url : str
            Alyx server address, including port and protocol.
        username : str
            Alyx database user.
        password : str
            Alyx database password.
        cache_dir : str, pathlib.Path
            The default root download location.
        silent : bool
            If true, user prompts and progress bars are suppressed.
        cache_rest : str, None
            Which type of http method to apply cache to; if '*', all requests are cached.

        """
        self.silent = silent
        # Load saved parameters for this database; username may override the saved login
        self._par = one.params.get(client=base_url, silent=self.silent, username=username)
        self.base_url = base_url or self._par.ALYX_URL
        self._par = self._par.set('CACHE_DIR', cache_dir or self._par.CACHE_DIR)
        # Authenticate eagerly only when explicit credentials were given;
        # otherwise authentication is deferred until the first request
        if username or password:
            self.authenticate(username, password)
        self._rest_schemes = None
        # the mixed accept application may cause errors sometimes, only necessary for the docs
        self._headers = {**self._headers, 'Accept': 'application/json'}
        # REST cache parameters
        # The default length of time that cache file is valid for,
        # The default expiry is overridden by the `expires` kwarg. If False, the caching is
        # turned off.
        self.default_expiry = timedelta(minutes=5)
        self.cache_mode = cache_rest
        self._obj_id = id(self)
569
-
570
    @property
    def rest_schemes(self):
        """dict: The REST endpoints and their parameters."""
        # Delayed fetch of rest schemes speeds up instantiation;
        # the /docs response is cached for a week as the schema rarely changes
        if not self._rest_schemes:
            self._rest_schemes = self.get('/docs', expires=timedelta(weeks=1))
        return self._rest_schemes
577
-
578
    @property
    def cache_dir(self):
        """pathlib.Path: The location of the downloaded file cache."""
        return Path(self._par.CACHE_DIR)

    @cache_dir.setter
    def cache_dir(self, cache_dir):
        # Ensure the directory exists before updating the parameter record
        cache_dir = Path(cache_dir)
        cache_dir.mkdir(parents=True, exist_ok=True)
        self._par = self._par.set('CACHE_DIR', cache_dir)
588
-
589
    @property
    def is_logged_in(self):
        """bool: Check if user logged into Alyx database; True if user is authenticated."""
        # All three must hold: a user name, a token, and the auth header installed
        return bool(self.user and self._token and 'Authorization' in self._headers)
593
-
594
    def list_endpoints(self):
        """Return a list of available REST endpoints.

        Returns
        -------
        list of str
            Alphabetically sorted REST endpoint names.

        """
        # Exclude schema metadata keys and the authentication endpoint
        EXCLUDE = ('_type', '_meta', '', 'auth-token')
        return sorted(x for x in self.rest_schemes.keys() if x not in EXCLUDE)
604
-
605
    def print_endpoint_info(self, endpoint, action=None):
        """Print the available actions and query parameters for a given REST endpoint.

        Parameters
        ----------
        endpoint : str
            An Alyx REST endpoint to query.
        action : str
            An optional action (e.g. 'list') to print. If None, all actions are printed.

        Returns
        -------
        dict, list
            A dictionary of endpoint query parameter details or a list of parameter details if
            action is not None.  Returns None when the endpoint does not exist.

        """
        rs = self.rest_schemes
        if endpoint not in rs:
            # print returns None, signalling an unknown endpoint
            return print(f'Endpoint "{endpoint}" does not exist')

        for _action in (rs[endpoint] if action is None else [action]):
            doc = []
            pprint(_action)
            for f in rs[endpoint][_action]['fields']:
                required = ' (required): ' if f.get('required', False) else ': '
                doc.append(f'\t"{f["name"]}"{required}{f["schema"]["_type"]}'
                           f', {f["schema"]["description"]}')
            doc.sort()
            # Required parameters are listed before optional ones
            [print(d) for d in doc if '(required)' in d]
            [print(d) for d in doc if '(required)' not in d]
        return (rs[endpoint] if action is None else rs[endpoint][action]).copy()
637
-
638
    @_cache_response
    def _generic_request(self, reqfunction, rest_query, data=None, files=None):
        """Send an authenticated REST request and return the JSON-decoded response.

        Parameters
        ----------
        reqfunction : function
            A requests function, e.g. requests.get, requests.post.
        rest_query : str
            The REST query, either a relative path or a complete URL.
        data : dict, list, str, optional
            Request body; dicts and lists are JSON-encoded when `files` is None.
        files : dict, optional
            Files to upload; when given, data is sent as-is (multipart form).

        Returns
        -------
        dict, list, None
            The JSON-decoded response, or None for 204 (no content) responses.

        Raises
        ------
        requests.HTTPError
            If the response status is not 200, 201 or 204 (after one re-auth attempt
            on an invalid-token 403).
        """
        if not self.is_logged_in:
            self.authenticate(username=self.user)
        # makes sure the base url is the one from the instance
        rest_query = rest_query.replace(self.base_url, '')
        if not rest_query.startswith('/'):
            rest_query = '/' + rest_query
        _logger.debug(f'{self.base_url + rest_query}, headers: {self._headers}')
        headers = self._headers.copy()
        if files is None:
            data = json.dumps(data) if isinstance(data, dict) or isinstance(data, list) else data
            headers['Content-Type'] = 'application/json'
        if rest_query.startswith('/docs'):
            # the mixed accept application may cause errors sometimes, only necessary for the docs
            headers['Accept'] = 'application/coreapi+json'
        r = reqfunction(self.base_url + rest_query,
                        stream=True, headers=headers, data=data, files=files)
        if r and r.status_code in (200, 201):
            return json.loads(r.text)
        elif r and r.status_code == 204:
            return
        if r.status_code == 403 and '"Invalid token."' in r.text:
            _logger.debug('Token invalid; Attempting to re-authenticate...')
            # Log out in order to flush stale token. At this point we no longer have the password
            # but if the user re-instantiates with a password arg it will request a new token.
            username = self.user
            if self.silent:  # no need to log out otherwise; user will be prompted for password
                self.logout()
            self.authenticate(username=username, force=True)
            return self._generic_request(reqfunction, rest_query, data=data, files=files)
        else:
            _logger.debug('Response text raw: ' + r.text)
            try:
                message = json.loads(r.text)
                message.pop('status_code', None)  # Get status code from response object instead
                message = message.get('detail') or message  # Get details if available
                _logger.debug(message)
            except json.decoder.JSONDecodeError:
                message = r.text
            raise requests.HTTPError(r.status_code, rest_query, message, response=r)
679
-
680
    def authenticate(self, username=None, password=None, cache_token=True, force=False):
        """Fetch token from the Alyx REST API for authenticating request headers.

        Credentials are loaded via one.params.

        Parameters
        ----------
        username : str
            Alyx username. If None, token not cached and not silent, user is prompted.
        password : str
            Alyx password. If None, token not cached and not silent, user is prompted.
        cache_token : bool
            If true, the token is cached for subsequent auto-logins.
        force : bool
            If true, any cached token is ignored.

        Raises
        ------
        ConnectionError
            If the Alyx server cannot be reached.
        requests.HTTPError
            If the credentials are rejected (400) or any other auth error occurs.

        """
        # Get username: explicit arg > saved ALYX_LOGIN > current user > interactive prompt
        if username is None:
            username = getattr(self._par, 'ALYX_LOGIN', self.user)
        if username is None and not self.silent:
            username = input('Enter Alyx username:')

        # If user passes in a password, force re-authentication even if token cached
        if password is not None:
            if not force:
                _logger.debug('Forcing token request with provided password')
                force = True
        # Check if token cached
        if not force and getattr(self._par, 'TOKEN', False) and username in self._par.TOKEN:
            self._token = self._par.TOKEN[username]
            self._headers = {
                'Authorization': f'Token {list(self._token.values())[0]}',
                'Accept': 'application/json'}
            self.user = username
            return

        # Get password: explicit arg > saved ALYX_PWD > interactive prompt (unless silent)
        if password is None:
            password = getattr(self._par, 'ALYX_PWD', None)
        if password is None:
            if self.silent:
                warnings.warn(
                    'No password or cached token in silent mode. '
                    'Please run the following to re-authenticate:\n\t'
                    'AlyxClient(silent=False).authenticate'
                    '(username=<username>, force=True)', UserWarning)
            else:
                password = getpass(f'Enter Alyx password for "{username}":')
        # Remove previous token
        self._clear_token(username)
        try:
            credentials = {'username': username, 'password': password}
            rep = requests.post(self.base_url + '/auth-token', data=credentials)
        except requests.exceptions.ConnectionError:
            raise ConnectionError(
                f'Can\'t connect to {self.base_url}.\n' +
                'Check your internet connections and Alyx database firewall'
            )
        # Assign token or raise exception on auth error
        if rep.ok:
            self._token = rep.json()
            assert list(self._token.keys()) == ['token']
        else:
            if rep.status_code == 400:  # Auth error; re-raise with details
                # Never echo the password itself; show only a redacted placeholder
                redacted = '*' * len(credentials['password']) if credentials['password'] else None
                message = ('Alyx authentication failed with credentials: '
                           f'user = {credentials["username"]}, password = {redacted}')
                raise requests.HTTPError(rep.status_code, rep.url, message, response=rep)
            else:
                rep.raise_for_status()

        self._headers = {
            'Authorization': 'Token {}'.format(list(self._token.values())[0]),
            'Accept': 'application/json'}
        if cache_token:
            # Update saved pars
            par = one.params.get(client=self.base_url, silent=True)
            tokens = getattr(par, 'TOKEN', {})
            tokens[username] = self._token
            one.params.save(par.set('TOKEN', tokens), self.base_url)
            # Update current pars
            self._par = self._par.set('TOKEN', tokens)
        self.user = username
        if not self.silent:
            print(f'Connected to {self.base_url} as user "{self.user}"')
766
-
767
    def _clear_token(self, username):
        """Remove auth token from client params.

        Deletes the cached authentication token for a given user.

        Parameters
        ----------
        username : str
            The user whose cached token should be removed.
        """
        par = one.params.get(client=self.base_url, silent=True)
        # Remove token from cache
        if getattr(par, 'TOKEN', False) and username in par.TOKEN:
            del par.TOKEN[username]
            one.params.save(par, self.base_url)
        # Remove token from local pars
        if getattr(self._par, 'TOKEN', False) and username in self._par.TOKEN:
            del self._par.TOKEN[username]
        # Remove token from object
        self._token = None
        if self._headers and 'Authorization' in self._headers:
            del self._headers['Authorization']
784
-
785
    def logout(self):
        """Log out from Alyx.

        Deletes the cached authentication token for the currently logged-in user
        and clears the REST cache.
        """
        # Nothing to do if not authenticated
        if not self.is_logged_in:
            return
        self._clear_token(username := self.user)
        self.user = None
        self.clear_rest_cache()
        if not self.silent:
            print(f'{username} logged out from {self.base_url}')
798
-
799
    def delete(self, rest_query):
        """Send a DELETE request to the Alyx server.

        Will raise an exception on any HTTP status code other than 200, 201.

        Parameters
        ----------
        rest_query : str
            A REST query string, either as a relative URL path or a complete URL.

        Returns
        -------
        JSON interpreted dictionary from response.

        Examples
        --------
        >>> AlyxClient.delete('/weighings/c617562d-c107-432e-a8ee-682c17f9e698')
        >>> AlyxClient.delete(
        ...     'https://alyx.example.com/endpoint/c617562d-c107-432e-a8ee-682c17f9e698')

        """
        return self._generic_request(requests.delete, rest_query)
821
-
822
    def download_file(self, url, **kwargs):
        """Download file(s) from data server from a REST file record URL.

        Parameters
        ----------
        url : str, list
            Full url(s) of the file(s).
        **kwargs
            WebClient.http_download_file parameters.

        Returns
        -------
        pathlib.Path, list of pathlib.Path
            Local path(s) of downloaded file(s).

        """
        if isinstance(url, str):
            url = self._validate_file_url(url)
            download_fcn = http_download_file
        else:
            # Lazily validate each URL; the list download helper consumes the generator
            url = (self._validate_file_url(x) for x in url)
            download_fcn = http_download_file_list
        # Fall back on client defaults for anything not explicitly passed in
        pars = dict(
            silent=kwargs.pop('silent', self.silent),
            target_dir=kwargs.pop('target_dir', self._par.CACHE_DIR),
            username=self._par.HTTP_DATA_SERVER_LOGIN,
            password=self._par.HTTP_DATA_SERVER_PWD,
            **kwargs
        )
        try:
            files = download_fcn(url, **pars)
        except HTTPError as ex:
            if ex.code == 401:  # Unauthorized; point the user at the relevant credentials
                ex.msg += (' - please check your HTTP_DATA_SERVER_LOGIN and '
                           'HTTP_DATA_SERVER_PWD ONE params, or username/password kwargs')
            raise ex
        return files
859
-
860
def download_cache_tables(self, source=None, destination=None):
    """Download Alyx cache tables to the local data cache directory.

    Parameters
    ----------
    source : str, pathlib.Path
        The remote HTTP directory of the cache table (excluding the filename).
        Default: AlyxClient.base_url.
    destination : str, pathlib.Path
        The target directory into to which the tables will be downloaded.

    Returns
    -------
    List of parquet table file paths.

    """
    # Default source is the cache.zip endpoint on the Alyx server itself
    source = str(source or f'{self.base_url}/cache.zip')
    destination = destination or self.cache_dir
    Path(destination).mkdir(exist_ok=True, parents=True)

    headers = None
    if source.startswith(self.base_url):
        # Downloading from Alyx requires an authentication token in the headers
        if not self.is_logged_in:
            self.authenticate()
        headers = self._headers

    # Download the zip into a temp dir inside `destination` (same filesystem),
    # then extract the archive members into the destination directory
    with tempfile.TemporaryDirectory(dir=destination) as tmp:
        file = http_download_file(source,
                                  headers=headers,
                                  silent=self.silent,
                                  target_dir=tmp,
                                  clobber=True)
        with zipfile.ZipFile(file, 'r') as zipped:
            files = zipped.namelist()
            zipped.extractall(destination)
    # Return the extracted table paths (archive member names relative to destination)
    return [Path(destination, table) for table in files]
896
-
897
- def _validate_file_url(self, url):
898
- """Assert that URL matches HTTP_DATA_SERVER parameter.
899
-
900
- Currently only one remote HTTP server is supported for a given AlyxClient instance. If
901
- the URL contains only the relative path part, the full URL is returned.
902
-
903
- Parameters
904
- ----------
905
- url : str
906
- The full or partial URL to validate.
907
-
908
- Returns
909
- -------
910
- The complete URL.
911
-
912
- Examples
913
- --------
914
- >>> url = self._validate_file_url('https://webserver.net/path/to/file')
915
- 'https://webserver.net/path/to/file'
916
- >>> url = self._validate_file_url('path/to/file')
917
- 'https://webserver.net/path/to/file'
918
-
919
- """
920
- if url.startswith('http'): # A full URL
921
- assert url.startswith(self._par.HTTP_DATA_SERVER), \
922
- ('remote protocol and/or hostname does not match HTTP_DATA_SERVER parameter:\n' +
923
- f'"{url[:40]}..." should start with "{self._par.HTTP_DATA_SERVER}"')
924
- elif not url.startswith(self._par.HTTP_DATA_SERVER):
925
- url = self.rel_path2url(url)
926
- return url
927
-
928
def rel_path2url(self, path):
    """Given a relative file path, return the remote HTTP server URL.

    It is expected that the remote HTTP server has the same file tree as the local system.

    Parameters
    ----------
    path : str, pathlib.Path
        A relative ALF path (subject/date/number/etc.).

    Returns
    -------
    A URL string.

    """
    # Normalize to a string without surrounding slashes before joining
    trimmed = str(path).strip('/')
    assert not trimmed.startswith('http')
    return f'{self._par.HTTP_DATA_SERVER}/{trimmed}'
946
-
947
def get(self, rest_query, **kwargs):
    """Send a GET request to the Alyx server.

    Will raise an exception on any HTTP status code other than 200, 201.

    For the dictionary contents and list of endpoints, refer to:
    https://openalyx.internationalbrainlab.org/docs

    Parameters
    ----------
    rest_query : str
        A REST URL path, e.g. '/sessions?user=Hamish'.
    **kwargs
        Optional arguments to pass to _generic_request and _cache_response decorator.

    Returns
    -------
    JSON interpreted dictionary from response.

    """
    rep = self._generic_request(requests.get, rest_query, **kwargs)
    # A paginated response has exactly these four keys in this order
    paginated = (isinstance(rep, dict)
                 and list(rep.keys()) == ['count', 'next', 'previous', 'results'])
    if not paginated:
        return rep
    if len(rep['results']) < rep['count']:
        # More pages to fetch: wrap in a lazy list-like object
        cache_args = {k: v for k, v in kwargs.items() if k in ('clobber', 'expires')}
        return _PaginatedResponse(self, rep, cache_args)
    return rep['results']
975
-
976
def patch(self, rest_query, data=None, files=None):
    """Send a PATCH request to the Alyx server.

    For the dictionary contents, refer to:
    https://openalyx.internationalbrainlab.org/docs

    Parameters
    ----------
    rest_query : str
        The endpoint as full or relative URL.
    data : dict, str
        JSON encoded string or dictionary (c.f. requests).
    files : dict, tuple
        Files to attach (c.f. requests).

    Returns
    -------
    Response object.

    """
    # Delegate to the generic request machinery with the PATCH verb
    response = self._generic_request(requests.patch, rest_query, data=data, files=files)
    return response
997
-
998
def post(self, rest_query, data=None, files=None):
    """Send a POST request to the Alyx server.

    For the dictionary contents, refer to:
    https://openalyx.internationalbrainlab.org/docs

    Parameters
    ----------
    rest_query : str
        The endpoint as full or relative URL.
    data : dict, str
        JSON encoded string or dictionary (c.f. requests).
    files : dict, tuple
        Files to attach (c.f. requests).

    Returns
    -------
    Response object.

    """
    # Delegate to the generic request machinery with the POST verb
    response = self._generic_request(requests.post, rest_query, data=data, files=files)
    return response
1019
-
1020
def put(self, rest_query, data=None, files=None):
    """Send a PUT request to the Alyx server.

    For the dictionary contents, refer to:
    https://openalyx.internationalbrainlab.org/docs

    Parameters
    ----------
    rest_query : str
        The endpoint as full or relative URL.
    data : dict, str
        JSON encoded string or dictionary (c.f. requests).
    files : dict, tuple
        Files to attach (c.f. requests).

    Returns
    -------
    requests.Response
        Response object.

    """
    # Delegate to the generic request machinery with the PUT verb
    response = self._generic_request(requests.put, rest_query, data=data, files=files)
    return response
1042
-
1043
def rest(self, url=None, action=None, id=None, data=None, files=None,
         no_cache=False, **kwargs):
    """Alyx REST API wrapper.

    If no arguments are passed, lists available endpoints.

    Parameters
    ----------
    url : str
        Endpoint name.
    action : str
        One of 'list', 'create', 'read', 'update', 'partial_update', 'delete'.
    id : str, uuid.UUID
        Lookup string for actions 'read', 'update', 'partial_update', and 'delete'.
    data : dict
        Data dictionary for actions 'update', 'partial_update' and 'create'.
    files : dict, tuple
        Option file(s) to upload.
    no_cache : bool
        If true the `list` and `read` actions are performed without returning the cache.
    kwargs
        Filters as per the Alyx REST documentation
        c.f. https://openalyx.internationalbrainlab.org/docs/

    Returns
    -------
    list, dict
        List of queried dicts ('list') or dict (other actions).

    Examples
    --------
    List available endpoint

    >>> client = AlyxClient()
    ... client.rest()

    List available actions for the 'subjects' endpoint

    >>> client.rest('subjects')

    Example REST endpoint with all actions

    >>> client.rest('subjects', 'list')
    >>> client.rest('subjects', 'list', field_filter1='filterval')
    >>> client.rest('subjects', 'create', data=sub_dict)
    >>> client.rest('subjects', 'read', id='nickname')
    >>> client.rest('subjects', 'update', id='nickname', data=sub_dict)
    >>> client.rest('subjects', 'partial_update', id='nickname', data=sub_dict)
    >>> client.rest('subjects', 'delete', id='nickname')
    >>> client.rest('notes', 'create', data=nd, files={'image': open(image_file, 'rb')})

    """
    # if endpoint is None, list available endpoints
    if not url:
        pprint(self.list_endpoints())
        return
    # remove beginning slash if any
    if url.startswith('/'):
        url = url[1:]
    # and split to the next slash or question mark to isolate the endpoint name
    endpoint = re.findall("^/*[^?/]*", url)[0].replace('/', '')
    # make sure the queried endpoint exists, if not throw an informative error
    if endpoint not in self.rest_schemes.keys():
        av = [k for k in self.rest_schemes.keys() if not k.startswith('_') and k]
        raise ValueError('REST endpoint "' + endpoint + '" does not exist. Available ' +
                         'endpoints are \n ' + '\n '.join(av))
    endpoint_scheme = self.rest_schemes[endpoint]
    # on a filter request, override the default action parameter
    if '?' in url:
        action = 'list'
    # if action is None, list available actions for the required endpoint
    if not action:
        pprint(list(endpoint_scheme.keys()))
        self.print_endpoint_info(endpoint)
        return
    # make sure the desired action exists, if not throw an informative error
    if action not in endpoint_scheme:
        raise ValueError('Action "' + action + '" for REST endpoint "' + endpoint + '" does ' +
                         'not exist. Available actions are: ' +
                         '\n ' + '\n '.join(endpoint_scheme.keys()))
    # the actions below require an id in the URL, warn and help the user
    if action in ['read', 'update', 'partial_update', 'delete'] and not id:
        _logger.warning('REST action "' + action + '" requires an ID in the URL: ' +
                        endpoint_scheme[action]['url'])
        return
    # the actions below require a data dictionary, warn and help the user with fields list
    data_required = 'fields' in endpoint_scheme[action]
    if action in ['create', 'update', 'partial_update'] and data_required and not data:
        pprint(endpoint_scheme[action]['fields'])
        for act in endpoint_scheme[action]['fields']:
            print("'" + act['name'] + "': ...,")
        _logger.warning('REST action "' + action + '" requires a data dict with above keys')
        return

    # clobber=True means remote request always made, expires=True means response is not cached
    cache_args = {'clobber': no_cache, 'expires': kwargs.pop('expires', False) or no_cache}
    if action == 'list':
        # list doesn't require id nor
        assert endpoint_scheme[action]['action'] == 'get'
        # add to url data if it is a string
        if id:
            # this is a special case of the list where we query a uuid. Usually read is better
            if 'django' in kwargs.keys():
                kwargs['django'] = kwargs['django'] + ','
            else:
                kwargs['django'] = ''
            kwargs['django'] = f"{kwargs['django']}pk,{id}"
        # otherwise, look for a dictionary of filter terms
        if kwargs:
            # Convert all lists in query params to comma separated list
            query_params = {k: ','.join(map(str, ensure_list(v))) for k, v in kwargs.items()}
            url = update_url_params(url, query_params)
        return self.get('/' + url, **cache_args)
    if not isinstance(id, str) and id is not None:
        id = str(id)  # e.g. may be uuid.UUID
    # Dispatch remaining actions; each asserts the scheme's HTTP verb matches
    if action == 'read':
        assert endpoint_scheme[action]['action'] == 'get'
        return self.get('/' + endpoint + '/' + id.split('/')[-1], **cache_args)
    elif action == 'create':
        assert endpoint_scheme[action]['action'] == 'post'
        return self.post('/' + endpoint, data=data, files=files)
    elif action == 'delete':
        assert endpoint_scheme[action]['action'] == 'delete'
        return self.delete('/' + endpoint + '/' + id.split('/')[-1])
    elif action == 'partial_update':
        assert endpoint_scheme[action]['action'] == 'patch'
        return self.patch('/' + endpoint + '/' + id.split('/')[-1], data=data, files=files)
    elif action == 'update':
        assert endpoint_scheme[action]['action'] == 'put'
        return self.put('/' + endpoint + '/' + id.split('/')[-1], data=data, files=files)
1173
-
1174
- # JSON field interface convenience methods
1175
- def _check_inputs(self, endpoint: str) -> None:
1176
- # make sure the queried endpoint exists, if not throw an informative error
1177
- if endpoint not in self.rest_schemes.keys():
1178
- av = (k for k in self.rest_schemes.keys() if not k.startswith('_') and k)
1179
- raise ValueError('REST endpoint "' + endpoint + '" does not exist. Available ' +
1180
- 'endpoints are \n ' + '\n '.join(av))
1181
- return
1182
-
1183
def json_field_write(
    self,
    endpoint: str = None,
    uuid: str = None,
    field_name: str = None,
    data: dict = None
) -> dict:
    """Write data to JSON field.

    NOTE: Destructive write! WILL NOT CHECK IF DATA EXISTS

    Parameters
    ----------
    endpoint : str, None
        Valid alyx endpoint, defaults to None.
    uuid : str, uuid.UUID, None
        UUID or lookup name for endpoint.
    field_name : str, None
        Valid json field name, defaults to None.
    data : dict, None
        Data to write to json field, defaults to None.

    Returns
    -------
    dict
        Written data dict.

    """
    self._check_inputs(endpoint)
    # Overwrite the field wholesale via a partial update on the record
    payload = {field_name: data}
    response = self.rest(endpoint, 'partial_update', id=uuid, data=payload)
    return response[field_name]
1217
-
1218
def json_field_update(
    self,
    endpoint: str = None,
    uuid: str = None,
    field_name: str = 'json',
    data: dict = None
) -> dict:
    """Non-destructive update of JSON field of endpoint for object.

    Will update the field_name of the object with pk = uuid of given endpoint
    If data has keys with the same name of existing keys it will squash the old
    values (uses the dict.update() method).

    Parameters
    ----------
    endpoint : str
        Alyx REST endpoint to hit.
    uuid : str, uuid.UUID
        UUID or lookup name of object.
    field_name : str
        Name of the json field.
    data : dict
        A dictionary with fields to be updated.

    Returns
    -------
    dict
        New patched json field contents as dict.

    Examples
    --------
    >>> client = AlyxClient()
    >>> client.json_field_update('sessions', 'eid_str', 'extended_qc', {'key': 'value'})

    """
    self._check_inputs(endpoint)
    # Fetch the current field contents; treat a null field as an empty dict
    current = self.rest(endpoint, 'read', id=uuid)[field_name]
    current = {} if current is None else current

    if not isinstance(current, dict):
        # Non-dict contents cannot be merged; leave the record untouched
        _logger.warning(
            f'Current json field "{field_name}" does not contains a dict, aborting update'
        )
        return current

    # Merge new keys over the existing contents and write them back
    current.update(data)
    response = self.rest(endpoint, 'partial_update', id=uuid, data={field_name: current})
    return response[field_name]
1272
-
1273
def json_field_remove_key(
    self,
    endpoint: str = None,
    uuid: str = None,
    field_name: str = 'json',
    key: str = None
) -> Optional[dict]:
    """Remove inputted key from JSON field dict and re-upload it to Alyx.

    Needs endpoint, UUID and json field name.

    Parameters
    ----------
    endpoint : str
        Endpoint to hit, defaults to None.
    uuid : str, uuid.UUID
        UUID or lookup name for endpoint.
    field_name : str
        JSON field name of object, defaults to None.
    key : str
        Key name of dictionary inside object, defaults to None.

    Returns
    -------
    dict
        New content of json field.

    """
    self._check_inputs(endpoint)
    contents = self.rest(endpoint, 'read', id=uuid)[field_name]
    # Null field: nothing to remove
    if contents is None:
        return contents
    # String contents cannot hold a key to remove
    if isinstance(contents, str):
        _logger.warning(f'Cannot remove key {key} content of json field is of type str')
        return None
    # Missing key: warn and return the contents unchanged
    if contents.get(key, None) is None:
        _logger.warning(
            f'{key}: Key not found in endpoint {endpoint} field {field_name}'
        )
        return contents
    _logger.info(f'Removing key from dict: "{key}"')
    contents.pop(key)
    # Re-write contents without removed key
    return self.json_field_write(
        endpoint=endpoint, uuid=uuid, field_name=field_name, data=contents
    )
1323
-
1324
def json_field_delete(
    self, endpoint: str = None, uuid: str = None, field_name: str = None
) -> None:
    """Set an entire field to null.

    Note that this deletes all data from a given field. To delete only a single key from a
    given JSON field, use `json_field_remove_key`.

    Parameters
    ----------
    endpoint : str
        Endpoint to hit, defaults to None.
    uuid : str, uuid.UUID
        UUID or lookup name for endpoint.
    field_name : str
        The field name of object (e.g. 'json', 'name', 'extended_qc'), defaults to None.

    Returns
    -------
    None
        New content of json field.

    """
    self._check_inputs(endpoint)
    # Null out the whole field via a partial update and echo back its new value
    response = self.rest(endpoint, 'partial_update', id=uuid, data={field_name: None})
    return response[field_name]
1350
-
1351
def clear_rest_cache(self):
    """Clear all REST response cache files for the base url."""
    rest_dir = self.cache_dir.joinpath('.rest')
    for cached_file in rest_dir.glob('*'):
        cached_file.unlink()
1
+ """API for interacting with a remote Alyx instance through REST.
2
+
3
+ The AlyxClient class contains methods for making remote Alyx REST queries and downloading remote
4
+ files through Alyx.
5
+
6
+ Examples
7
+ --------
8
+ >>> alyx = AlyxClient(
9
+ ... username='test_user', password='TapetesBloc18',
10
+ ... base_url='https://test.alyx.internationalbrainlab.org')
11
+
12
+ List subjects
13
+
14
+ >>> subjects = alyx.rest('subjects', 'list')
15
+
16
+ Create a subject
17
+
18
+ >>> record = {
19
+ ... 'nickname': nickname,
20
+ ... 'responsible_user': 'olivier',
21
+ ... 'birth_date': '2019-06-15',
22
+ ... 'death_date': None,
23
+ ... 'lab': 'cortexlab',
24
+ ... }
25
+ >>> new_subj = alyx.rest('subjects', 'create', data=record)
26
+
27
+ Download a remote file, given a local path
28
+
29
+ >>> url = 'zadorlab/Subjects/flowers/2018-07-13/1/channels.probe.npy'
30
+ >>> local_path = alyx.download_file(url, target_dir='zadorlab/Subjects/flowers/2018-07-13/1/')
31
+
32
+ """
33
+ from uuid import UUID
34
+ import json
35
+ import logging
36
+ import math
37
+ import os
38
+ import re
39
+ import functools
40
+ import urllib.request
41
+ from urllib.error import HTTPError
42
+ import urllib.parse
43
+ from collections.abc import Mapping
44
+ from typing import Optional
45
+ from datetime import datetime, timedelta
46
+ from pathlib import Path
47
+ from weakref import ReferenceType
48
+ import warnings
49
+ import hashlib
50
+ import zipfile
51
+ import tempfile
52
+ from getpass import getpass
53
+ from contextlib import contextmanager
54
+
55
+ import requests
56
+ from tqdm import tqdm
57
+
58
+ from pprint import pprint
59
+ import one.params
60
+ from iblutil.io import hashfile
61
+ from iblutil.io.params import set_hidden
62
+ from iblutil.util import ensure_list
63
+ import concurrent.futures
64
+ _logger = logging.getLogger(__name__)
65
+ N_THREADS = int(os.environ.get('ONE_HTTP_DL_THREADS', 4))
66
+ """int: The number of download threads."""
67
+
68
+
69
+ class _JSONEncoder(json.JSONEncoder):
70
+ """A JSON encoder that handles UUID objects."""
71
+
72
+ def default(self, o):
73
+ """Cast UUID objects to str before serializing."""
74
+ if isinstance(o, UUID):
75
+ return str(o)
76
+ return super().default(o)
77
+
78
+
79
def _cache_response(method):
    """Decorator for the generic request method for caching REST reponses.

    Caches the result of the query and on subsequent calls, returns cache instead of hitting the
    database.

    Parameters
    ----------
    method : function
        Function to wrap (i.e. AlyxClient._generic_request).

    Returns
    -------
    function
        Handle to wrapped method.

    """

    @functools.wraps(method)
    def wrapper_decorator(alyx_client, *args, expires=None, clobber=False, **kwargs):
        """REST caching wrapper.

        Parameters
        ----------
        alyx_client : AlyxClient
            An instance of the AlyxClient class.
        args : any
            Positional arguments for applying to wrapped function.
        expires : bool
            An optional timedelta for how long cached response is valid. If True, the cached
            response will not be used on subsequent calls. If None, the default expiry is applied.
        clobber : bool
            If True any existing cached response is overwritten.
        **kwargs
            Keyword arguments for applying to wrapped function.

        Returns
        -------
        dict
            The REST response JSON either from cached file or directly from remote.

        """
        expires = expires or alyx_client.default_expiry
        mode = (alyx_client.cache_mode or '').casefold()
        # args[0] is the requests function (e.g. requests.get); only cache when its
        # name matches the client's cache mode, or when the mode is the wildcard '*'
        if args[0].__name__ != mode and mode != '*':
            return method(alyx_client, *args, **kwargs)
        # Check cache: the cache filename is the SHA-1 digest of the query string (args[1])
        rest_cache = alyx_client.cache_dir.joinpath('.rest')
        sha1 = hashlib.sha1()
        sha1.update(bytes(args[1], 'utf-8'))
        name = sha1.hexdigest()
        # Reversible but length may exceed 255 chars
        # name = base64.urlsafe_b64encode(args[2].encode('UTF-8')).decode('UTF-8')
        files = list(rest_cache.glob(name))
        cached = None
        if len(files) == 1 and not clobber:
            _logger.debug('loading REST response from cache')
            with open(files[0], 'r') as f:
                cached, when = json.load(f)
            # Return the cached response only while its expiry timestamp is in the future
            if datetime.fromisoformat(when) > datetime.now():
                return cached
        try:
            response = method(alyx_client, *args, **kwargs)
        except requests.exceptions.ConnectionError as ex:
            # Fall back on a stale cached response when the database is unreachable
            if cached and not clobber:
                warnings.warn('Failed to connect, returning cached response', RuntimeWarning)
                return cached
            raise ex  # No cache and can't connect to database; re-raise

        # Save response into cache
        if not rest_cache.exists():
            rest_cache.mkdir(parents=True)
            rest_cache = set_hidden(rest_cache, True)

        _logger.debug('caching REST response')
        # expires=True stores an already-expired timestamp so the entry is never reused
        expiry_datetime = datetime.now() + (timedelta() if expires is True else expires)
        with open(rest_cache / name, 'w') as f:
            json.dump((response, expiry_datetime.isoformat()), f, cls=_JSONEncoder)
        return response

    return wrapper_decorator
160
+
161
+
162
@contextmanager
def no_cache(ac=None):
    """Temporarily turn off the REST cache for a given Alyx instance.

    This function is particularly useful when calling ONE methods in remote mode.

    Parameters
    ----------
    ac : AlyxClient
        An instance of the AlyxClient to modify. If None, the a new object is instantiated

    Returns
    -------
    AlyxClient
        The instance of Alyx with cache disabled

    Examples
    --------
    >>> from one.api import ONE
    >>> with no_cache(ONE().alyx):
    ...     eids = ONE().search(subject='foobar', query_type='remote')

    """
    client = ac or AlyxClient()
    # Remember the current mode so it can be restored on exit, even on error
    previous_mode = client.cache_mode
    client.cache_mode = None
    try:
        yield client
    finally:
        client.cache_mode = previous_mode
192
+
193
+
194
class _PaginatedResponse(Mapping):
    """Emulate a list from a paginated response.

    Provides cache functionality.

    Examples
    --------
    >>> r = _PaginatedResponse(client, response)

    """

    def __init__(self, alyx, rep, cache_args=None):
        """Emulate a list from a paginated response.

        Parameters
        ----------
        alyx : AlyxClient
            An instance of an AlyxClient associated with the REST response
        rep : dict
            A paginated REST response JSON dictionary
        cache_args : dict
            A dict of kwargs to pass to _cache_response decorator upon subsequent requests

        """
        self.alyx = alyx
        self.count = rep['count']
        # Page size is inferred from the first page's result length
        self.limit = len(rep['results'])
        self._cache_args = cache_args or {}
        # store URL without pagination query params
        self.query = rep['next']
        # init the cache, list with None with count size
        self._cache = [None] * self.count
        # fill the cache with results of the query
        for i in range(self.limit):
            self._cache[i] = rep['results'][i]
        self._callbacks = set()

    def add_callback(self, cb):
        """Add a callback function to use each time a new page is fetched.

        The callback function will be called with the page results each time :meth:`populate`
        is called.

        Parameters
        ----------
        cb : callable
            A callable that takes the results of each paginated resonse.

        """
        if not callable(cb):
            raise TypeError(f'Expected type "callable", got "{type(cb)}" instead')
        else:
            self._callbacks.add(cb)

    def __len__(self):
        return self.count

    def __getitem__(self, item):
        if isinstance(item, slice):
            # Keep fetching pages until every entry covered by the slice is populated
            while None in self._cache[item]:
                # If slice start index is -ve, convert to +ve index
                i = self.count + item.start if item.start < 0 else item.start
                self.populate(i + self._cache[item].index(None))
        elif self._cache[item] is None:
            # If index is -ve, convert to +ve
            self.populate(self.count + item if item < 0 else item)
        return self._cache[item]

    def populate(self, idx):
        """Populate response cache with new page of results.

        Fetches the specific page of results containing the index passed and populates
        stores the results in the :prop:`_cache` property.

        Parameters
        ----------
        idx : int
            The index of a given record to fetch.

        """
        # Round idx down to the start of its page
        offset = self.limit * math.floor(idx / self.limit)
        query = update_url_params(self.query, {'limit': self.limit, 'offset': offset})
        res = self.alyx._generic_request(requests.get, query, **self._cache_args)
        if self.count != res['count']:
            # Total record count changed server-side since the first page was fetched
            warnings.warn(
                f'remote results for {urllib.parse.urlsplit(query).path} endpoint changed; '
                f'results may be inconsistent', RuntimeWarning)
        for i, r in enumerate(res['results'][:self.count - offset]):
            self._cache[i + offset] = res['results'][i]
        # Notify callbacks
        pending_removal = []
        for callback in self._callbacks:
            # Handle weak reference callbacks first
            if isinstance(callback, ReferenceType):
                wf = callback
                if (callback := wf()) is None:
                    # Referent was garbage-collected; drop this callback after iteration
                    pending_removal.append(wf)
                    continue
            callback(res['results'])
        for wf in pending_removal:
            self._callbacks.discard(wf)
        # When cache is complete, clear our callbacks
        # (reversed so the scan short-circuits on the typically-unfilled tail first)
        if all(reversed(self._cache)):
            self._callbacks.clear()

    def __iter__(self):
        for i in range(self.count):
            yield self.__getitem__(i)
302
+
303
+
304
def update_url_params(url: str, params: dict) -> str:
    """Add/update the query parameters of a URL and make url safe.

    Parameters
    ----------
    url : str
        A URL string with which to update the query parameters
    params : dict
        A dict of new parameters. For multiple values for the same query, use a list (see example)

    Returns
    -------
    str
        A new URL with said parameters updated

    Examples
    --------
    >>> update_url_params('website.com/?q=', {'pg': 5})
    'website.com/?pg=5'

    >>> update_url_params('website.com?q=xxx', {'pg': 5, 'foo': ['bar', 'baz']})
    'website.com?q=xxx&pg=5&foo=bar&foo=baz'

    """
    # Remove percent-encoding before parsing
    decoded = urllib.parse.unquote(url)
    split_url = urllib.parse.urlsplit(decoded)
    # Existing query args as a dict (blank values are dropped)
    query_args = urllib.parse.parse_qs(split_url.query, keep_blank_values=False)
    # Merge in the new parameters, then re-encode (lists become repeated keys)
    query_args.update(params)
    encoded = urllib.parse.urlencode(query_args, doseq=True)
    return split_url._replace(query=encoded).geturl()
339
+
340
+
341
def http_download_file_list(links_to_file_list, **kwargs):
    """Download a list of files from a remote HTTP server from a list of links.

    Generates up to N_THREADS separate threads to handle downloads.
    Same options behaviour as http_download_file.

    Parameters
    ----------
    links_to_file_list : list
        List of http links to files.
    **kwargs
        Optional arguments to pass to http_download_file.

    Returns
    -------
    list of pathlib.Path
        A list of the local full path of the downloaded files.

    """
    links_to_file_list = list(links_to_file_list)  # In case generator was passed
    target_dir = kwargs.pop('target_dir', None)
    # Ensure target dir the length of url list (broadcast a scalar value)
    if target_dir is None or isinstance(target_dir, (str, Path)):
        target_dir = [target_dir] * len(links_to_file_list)
    assert len(target_dir) == len(links_to_file_list)
    # using with statement to ensure threads are cleaned up promptly
    with concurrent.futures.ThreadPoolExecutor(max_workers=N_THREADS) as executor:
        # Multithreading load operations
        futures = [executor.submit(http_download_file, link, target_dir=target, **kwargs)
                   for link, target in zip(links_to_file_list, target_dir)]
        # TODO Reintroduce variable timeout value based on file size and download speed of 5 Mb/s?
        # timeout = reduce(lambda x, y: x + (y.get('file_size', 0) or 0), dsets, 0) / 625000 ?
        concurrent.futures.wait(futures, timeout=None)
        # build return list (raises any exception captured by a worker)
        outputs = [future.result() for future in futures]
    # if returning md5, separate list of tuples into two lists: (files, md5)
    return list(zip(*outputs)) if kwargs.get('return_md5', False) else outputs
382
+
383
+
384
def http_download_file(full_link_to_file, chunks=None, *, clobber=False, silent=False,
                       username='', password='', target_dir='', return_md5=False, headers=None):
    """Download a file from a remote HTTP server.

    Parameters
    ----------
    full_link_to_file : str
        HTTP link to the file
    chunks : tuple of ints
        Chunks to download
    clobber : bool
        If True, force overwrite the existing file
    silent : bool
        If True, suppress download progress bar
    username : str
        User authentication for password protected file server
    password : str
        Password authentication for password protected file server
    target_dir : str, pathlib.Path
        Directory in which files are downloaded; defaults to user's Download directory
    return_md5 : bool
        If True an MD5 hash of the file is additionally returned
    headers : list of dicts
        Additional headers to add to the request (auth tokens etc.)

    Returns
    -------
    pathlib.Path
        The full file path of the downloaded file

    Raises
    ------
    urllib.error.HTTPError
        The server returned an error status for the request.

    """
    if not full_link_to_file:
        return (None, None) if return_md5 else None

    # makes sure special characters get encoded ('#' in file names for example)
    surl = urllib.parse.urlsplit(full_link_to_file, allow_fragments=False)
    full_link_to_file = surl._replace(path=urllib.parse.quote(surl.path)).geturl()

    # default cache directory is the home dir
    if not target_dir:
        target_dir = Path.home().joinpath('Downloads')

    # This should be the base url you wanted to access.
    base_url, name = full_link_to_file.rsplit('/', 1)
    file_name = Path(target_dir, name)

    # do not overwrite an existing file unless specified
    if not clobber and file_name.exists():
        return (file_name, hashfile.md5(file_name)) if return_md5 else file_name

    # Create a password manager
    manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    if username and password:
        manager.add_password(None, base_url, username, password)

    # Create an authentication handler using the password manager
    auth = urllib.request.HTTPBasicAuthHandler(manager)

    # Create an opener that will replace the default urlopen method on further calls
    opener = urllib.request.build_opener(auth)
    urllib.request.install_opener(opener)

    # Support for partial download.
    req = urllib.request.Request(full_link_to_file)
    if chunks is not None:
        first_byte, n_bytes = chunks
        req.add_header('Range', 'bytes=%d-%d' % (first_byte, first_byte + n_bytes - 1))

    # add additional headers
    if headers is not None:
        for k in headers:
            req.add_header(k, headers[k])

    # Open the url and get the length
    try:
        u = urllib.request.urlopen(req)
    except HTTPError as e:
        _logger.error(f'{str(e)} {full_link_to_file}')
        raise e

    md5 = hashlib.md5()
    # Context managers ensure both the HTTP response and the output file are
    # closed even if the transfer fails part way through (previously the file
    # handle leaked on any exception during the read/write loop).
    with u:
        file_size = int(u.getheader('Content-length'))
        if not silent:
            print(f'Downloading: {file_name} Bytes: {file_size}')
        block_sz = 8192 * 64 * 8
        with open(file_name, 'wb') as f, \
                tqdm(total=file_size / 1024 / 1024, disable=silent) as pbar:
            while True:
                buffer = u.read(block_sz)
                if not buffer:
                    break
                f.write(buffer)
                if return_md5:
                    # hash incrementally so the file needn't be re-read afterwards
                    md5.update(buffer)
                pbar.update(len(buffer) / 1024 / 1024)

    return (file_name, md5.hexdigest()) if return_md5 else file_name
483
+
484
+
485
def file_record_to_url(file_records) -> list:
    """Translate a Json dictionary to an usable http url for downloading files.

    Parameters
    ----------
    file_records : dict
        JSON containing a 'data_url' field

    Returns
    -------
    list of str
        A list of full data urls

    """
    # Keep only the records that actually carry a URL
    return [record['data_url'] for record in file_records if record['data_url'] is not None]
504
+
505
+
506
def dataset_record_to_url(dataset_record) -> list:
    """Extract a list of files urls from a list of dataset queries.

    Parameters
    ----------
    dataset_record : list, dict
        Dataset JSON from a REST request

    Returns
    -------
    list of str
        A list of file urls corresponding to the datasets records

    """
    # A single dataset dict is treated as a one-element list
    records = [dataset_record] if isinstance(dataset_record, dict) else dataset_record
    urls = []
    for record in records:
        # Collect the non-null data URLs of each dataset's file records
        urls.extend(fr['data_url'] for fr in record['file_records'] if fr['data_url'] is not None)
    return urls
526
+
527
+
528
class AlyxClient:
    """Class that implements simple GET/POST wrappers for the Alyx REST API.

    See https://openalyx.internationalbrainlab.org/docs
    """

    # Auth token response dict; None until authentication succeeds
    _token = None
    # NOTE(review): class-level mutable default, however __init__ rebinds it to a
    # fresh dict per instance so it is not shared after construction
    _headers = {}  # Headers for REST requests only
    user = None
    """str: The Alyx username."""
    base_url = None
    """str: The Alyx database URL."""
540
+
541
    def __init__(self, base_url=None, username=None, password=None,
                 cache_dir=None, silent=False, cache_rest='GET'):
        """Create a client instance that allows to GET and POST to the Alyx server.

        For One, constructor attempts to authenticate with credentials in params.py.
        For standalone cases, AlyxClient(username='', password='', base_url='').

        Parameters
        ----------
        base_url : str
            Alyx server address, including port and protocol.
        username : str
            Alyx database user.
        password : str
            Alyx database password.
        cache_dir : str, pathlib.Path
            The default root download location.
        silent : bool
            If true, user prompts and progress bars are suppressed.
        cache_rest : str, None
            Which type of http method to apply cache to; if '*', all requests are cached.

        """
        self.silent = silent
        # Load (possibly cached) parameters for this database; explicit username takes precedence
        self._par = one.params.get(client=base_url, silent=self.silent, username=username)
        self.base_url = base_url or self._par.ALYX_URL
        self._par = self._par.set('CACHE_DIR', cache_dir or self._par.CACHE_DIR)
        # Authenticate eagerly only when explicit credentials were provided;
        # otherwise authentication is deferred until the first request
        if username or password:
            self.authenticate(username, password)
        self._rest_schemes = None  # lazily fetched by the rest_schemes property
        # the mixed accept application may cause errors sometimes, only necessary for the docs
        self._headers = {**self._headers, 'Accept': 'application/json'}
        # REST cache parameters
        # The default length of time that cache file is valid for,
        # The default expiry is overridden by the `expires` kwarg. If False, the caching is
        # turned off.
        self.default_expiry = timedelta(minutes=5)
        self.cache_mode = cache_rest
        # NOTE(review): presumably used to key this instance's cached REST
        # responses (see the _cache_response decorator) — confirm
        self._obj_id = id(self)
582
+
583
    @property
    def rest_schemes(self):
        """dict: The REST endpoints and their parameters."""
        # Delayed fetch of rest schemes speeds up instantiation.
        # NB: truthiness check means an empty schema dict would also be re-fetched.
        if not self._rest_schemes:
            # Schema changes rarely, so cache the response for a week
            self._rest_schemes = self.get('/docs', expires=timedelta(weeks=1))
        return self._rest_schemes
590
+
591
    @property
    def cache_dir(self):
        """pathlib.Path: The location of the downloaded file cache."""
        return Path(self._par.CACHE_DIR)
595
+
596
    @cache_dir.setter
    def cache_dir(self, cache_dir):
        # Setting the cache dir creates the directory tree if absent and
        # updates the in-memory parameters (not the saved ones)
        cache_dir = Path(cache_dir)
        cache_dir.mkdir(parents=True, exist_ok=True)
        self._par = self._par.set('CACHE_DIR', cache_dir)
601
+
602
    @property
    def is_logged_in(self):
        """bool: Check if user logged into Alyx database; True if user is authenticated."""
        # All three must hold: a username, a token, and the auth header installed
        return bool(self.user and self._token and 'Authorization' in self._headers)
606
+
607
+ def list_endpoints(self):
608
+ """Return a list of available REST endpoints.
609
+
610
+ Returns
611
+ -------
612
+ List of REST endpoint strings.
613
+
614
+ """
615
+ EXCLUDE = ('_type', '_meta', '', 'auth-token')
616
+ return sorted(x for x in self.rest_schemes.keys() if x not in EXCLUDE)
617
+
618
    def print_endpoint_info(self, endpoint, action=None):
        """Print the available actions and query parameters for a given REST endpoint.

        Parameters
        ----------
        endpoint : str
            An Alyx REST endpoint to query.
        action : str
            An optional action (e.g. 'list') to print. If None, all actions are printed.

        Returns
        -------
        dict, list
            A dictionary of endpoint query parameter details or a list of parameter details if
            action is not None.

        """
        rs = self.rest_schemes
        if endpoint not in rs:
            # print returns None, signalling the endpoint lookup failed
            return print(f'Endpoint "{endpoint}" does not exist')

        for _action in (rs[endpoint] if action is None else [action]):
            doc = []
            pprint(_action)
            for f in rs[endpoint][_action]['fields']:
                required = ' (required): ' if f.get('required', False) else ': '
                doc.append(f'\t"{f["name"]}"{required}{f["schema"]["_type"]}'
                           f', {f["schema"]["description"]}')
            doc.sort()
            # Print required parameters first, then the optional ones
            [print(d) for d in doc if '(required)' in d]
            [print(d) for d in doc if '(required)' not in d]
        # Return a copy so callers cannot mutate the cached schema
        return (rs[endpoint] if action is None else rs[endpoint][action]).copy()
650
+
651
    @_cache_response
    def _generic_request(self, reqfunction, rest_query, data=None, files=None):
        """Send an authenticated request to Alyx and return the decoded response.

        Parameters
        ----------
        reqfunction : function
            A requests method to call, e.g. requests.get or requests.post.
        rest_query : str
            A REST query, either as a relative path or a complete URL.
        data : dict, list, str
            Data to send with the request; dicts and lists are JSON-encoded here.
        files : dict, tuple
            Files to attach (c.f. requests).

        Returns
        -------
        dict, list, None
            The JSON-decoded response body, or None for a 204 (no content) response.

        Raises
        ------
        requests.HTTPError
            The response status code was not 200, 201 or 204.

        """
        if not self.is_logged_in:
            self.authenticate(username=self.user)
        # makes sure the base url is the one from the instance
        rest_query = rest_query.replace(self.base_url, '')
        if not rest_query.startswith('/'):
            rest_query = '/' + rest_query
        _logger.debug(f'{self.base_url + rest_query}, headers: {self._headers}')
        headers = self._headers.copy()
        if files is None:
            # _JSONEncoder handles types the stock encoder cannot
            to_json = functools.partial(json.dumps, cls=_JSONEncoder)
            data = to_json(data) if isinstance(data, dict) or isinstance(data, list) else data
            headers['Content-Type'] = 'application/json'
        if rest_query.startswith('/docs'):
            # the mixed accept application may cause errors sometimes, only necessary for the docs
            headers['Accept'] = 'application/coreapi+json'
        r = reqfunction(self.base_url + rest_query,
                        stream=True, headers=headers, data=data, files=files)
        if r and r.status_code in (200, 201):
            return json.loads(r.text)
        elif r and r.status_code == 204:
            return
        if r.status_code == 403 and '"Invalid token."' in r.text:
            _logger.debug('Token invalid; Attempting to re-authenticate...')
            # Log out in order to flush stale token. At this point we no longer have the password
            # but if the user re-instantiates with a password arg it will request a new token.
            username = self.user
            if self.silent:  # no need to log out otherwise; user will be prompted for password
                self.logout()
            self.authenticate(username=username, force=True)
            # Retry once with the fresh token (data is already JSON-encoded by now)
            return self._generic_request(reqfunction, rest_query, data=data, files=files)
        else:
            _logger.debug('Response text raw: ' + r.text)
            try:
                message = json.loads(r.text)
                message.pop('status_code', None)  # Get status code from response object instead
                message = message.get('detail') or message  # Get details if available
                _logger.debug(message)
            except json.decoder.JSONDecodeError:
                message = r.text
            raise requests.HTTPError(r.status_code, rest_query, message, response=r)
693
+
694
    def authenticate(self, username=None, password=None, cache_token=True, force=False):
        """Fetch token from the Alyx REST API for authenticating request headers.

        Credentials are loaded via one.params.

        Parameters
        ----------
        username : str
            Alyx username. If None, token not cached and not silent, user is prompted.
        password : str
            Alyx password. If None, token not cached and not silent, user is prompted.
        cache_token : bool
            If true, the token is cached for subsequent auto-logins.
        force : bool
            If true, any cached token is ignored.

        """
        # Get username
        if username is None:
            username = getattr(self._par, 'ALYX_LOGIN', self.user)
        if username is None and not self.silent:
            username = input('Enter Alyx username:')

        # If user passes in a password, force re-authentication even if token cached
        if password is not None:
            if not force:
                _logger.debug('Forcing token request with provided password')
                force = True
        # Check if token cached
        if not force and getattr(self._par, 'TOKEN', False) and username in self._par.TOKEN:
            self._token = self._par.TOKEN[username]
            self._headers = {
                'Authorization': f'Token {list(self._token.values())[0]}',
                'Accept': 'application/json'}
            self.user = username
            return

        # Get password
        if password is None:
            password = getattr(self._par, 'ALYX_PWD', None)
        if password is None:
            if self.silent:
                warnings.warn(
                    'No password or cached token in silent mode. '
                    'Please run the following to re-authenticate:\n\t'
                    'AlyxClient(silent=False).authenticate'
                    '(username=<username>, force=True)', UserWarning)
            else:
                password = getpass(f'Enter Alyx password for "{username}":')
        # NOTE(review): in silent mode the password may still be None here; the POST
        # below then fails with a 400 and raises — confirm this is intended behaviour
        # Remove previous token
        self._clear_token(username)
        try:
            credentials = {'username': username, 'password': password}
            rep = requests.post(self.base_url + '/auth-token', data=credentials)
        except requests.exceptions.ConnectionError:
            raise ConnectionError(
                f'Can\'t connect to {self.base_url}.\n' +
                'Check your internet connections and Alyx database firewall'
            )
        # Assign token or raise exception on auth error
        if rep.ok:
            self._token = rep.json()
            assert list(self._token.keys()) == ['token']
        else:
            if rep.status_code == 400:  # Auth error; re-raise with details
                redacted = '*' * len(credentials['password']) if credentials['password'] else None
                message = ('Alyx authentication failed with credentials: '
                           f'user = {credentials["username"]}, password = {redacted}')
                raise requests.HTTPError(rep.status_code, rep.url, message, response=rep)
            else:
                rep.raise_for_status()

        self._headers = {
            'Authorization': 'Token {}'.format(list(self._token.values())[0]),
            'Accept': 'application/json'}
        if cache_token:
            # Update saved pars
            par = one.params.get(client=self.base_url, silent=True)
            tokens = getattr(par, 'TOKEN', {})
            tokens[username] = self._token
            one.params.save(par.set('TOKEN', tokens), self.base_url)
            # Update current pars
            self._par = self._par.set('TOKEN', tokens)
        self.user = username
        if not self.silent:
            print(f'Connected to {self.base_url} as user "{self.user}"')
780
+
781
    def _clear_token(self, username):
        """Remove auth token from client params.

        Deletes the cached authentication token for a given user, both from the
        saved parameters on disk and from this instance's state.
        """
        par = one.params.get(client=self.base_url, silent=True)
        # Remove token from cache
        if getattr(par, 'TOKEN', False) and username in par.TOKEN:
            del par.TOKEN[username]
            one.params.save(par, self.base_url)
        # Remove token from local pars
        if getattr(self._par, 'TOKEN', False) and username in self._par.TOKEN:
            del self._par.TOKEN[username]
        # Remove token from object
        self._token = None
        if self._headers and 'Authorization' in self._headers:
            del self._headers['Authorization']
798
+
799
    def logout(self):
        """Log out from Alyx.

        Deletes the cached authentication token for the currently logged-in user
        and clears the REST cache.  A no-op when not logged in.
        """
        if not self.is_logged_in:
            return
        # Capture the username before clearing state so it can be reported below
        self._clear_token(username := self.user)
        self.user = None
        self.clear_rest_cache()
        if not self.silent:
            print(f'{username} logged out from {self.base_url}')
812
+
813
    def delete(self, rest_query):
        """Send a DELETE request to the Alyx server.

        Will raise an exception on any HTTP status code other than 200, 201.

        Parameters
        ----------
        rest_query : str
            A REST query string, either as a relative URL path or a complete URL.

        Returns
        -------
        JSON interpreted dictionary from response.

        Examples
        --------
        >>> AlyxClient.delete('/weighings/c617562d-c107-432e-a8ee-682c17f9e698')
        >>> AlyxClient.delete(
        ...     'https://alyx.example.com/endpoint/c617562d-c107-432e-a8ee-682c17f9e698')

        """
        return self._generic_request(requests.delete, rest_query)
835
+
836
+ def download_file(self, url, **kwargs):
837
+ """Download file(s) from data server from a REST file record URL.
838
+
839
+ Parameters
840
+ ----------
841
+ url : str, list
842
+ Full url(s) of the file(s).
843
+ **kwargs
844
+ WebClient.http_download_file parameters.
845
+
846
+ Returns
847
+ -------
848
+ pathlib.Path, list of pathlib.Path
849
+ Local path(s) of downloaded file(s).
850
+
851
+ """
852
+ if isinstance(url, str):
853
+ url = self._validate_file_url(url)
854
+ download_fcn = http_download_file
855
+ else:
856
+ url = (self._validate_file_url(x) for x in url)
857
+ download_fcn = http_download_file_list
858
+ pars = dict(
859
+ silent=kwargs.pop('silent', self.silent),
860
+ target_dir=kwargs.pop('target_dir', self._par.CACHE_DIR),
861
+ username=self._par.HTTP_DATA_SERVER_LOGIN,
862
+ password=self._par.HTTP_DATA_SERVER_PWD,
863
+ **kwargs
864
+ )
865
+ try:
866
+ files = download_fcn(url, **pars)
867
+ except HTTPError as ex:
868
+ if ex.code == 401:
869
+ ex.msg += (' - please check your HTTP_DATA_SERVER_LOGIN and '
870
+ 'HTTP_DATA_SERVER_PWD ONE params, or username/password kwargs')
871
+ raise ex
872
+ return files
873
+
874
    def download_cache_tables(self, source=None, destination=None):
        """Download Alyx cache tables to the local data cache directory.

        Parameters
        ----------
        source : str, pathlib.Path
            The remote HTTP directory of the cache table (excluding the filename).
            Default: AlyxClient.base_url.
        destination : str, pathlib.Path
            The target directory into to which the tables will be downloaded.

        Returns
        -------
        List of parquet table file paths.

        """
        source = str(source or f'{self.base_url}/cache.zip')
        destination = destination or self.cache_dir
        Path(destination).mkdir(exist_ok=True, parents=True)

        headers = None
        # Only send the Alyx auth headers when downloading from the Alyx server itself
        if source.startswith(self.base_url):
            if not self.is_logged_in:
                self.authenticate()
            headers = self._headers

        # Download into a temporary dir so a partial zip is never left in destination
        with tempfile.TemporaryDirectory(dir=destination) as tmp:
            file = http_download_file(source,
                                      headers=headers,
                                      silent=self.silent,
                                      target_dir=tmp,
                                      clobber=True)
            with zipfile.ZipFile(file, 'r') as zipped:
                files = zipped.namelist()
                zipped.extractall(destination)
        return [Path(destination, table) for table in files]
910
+
911
    def _validate_file_url(self, url):
        """Assert that URL matches HTTP_DATA_SERVER parameter.

        Currently only one remote HTTP server is supported for a given AlyxClient instance. If
        the URL contains only the relative path part, the full URL is returned.

        Parameters
        ----------
        url : str
            The full or partial URL to validate.

        Returns
        -------
        The complete URL.

        Raises
        ------
        AssertionError
            A full URL was given that does not start with the HTTP_DATA_SERVER parameter.

        Examples
        --------
        >>> url = self._validate_file_url('https://webserver.net/path/to/file')
        'https://webserver.net/path/to/file'
        >>> url = self._validate_file_url('path/to/file')
        'https://webserver.net/path/to/file'

        """
        if url.startswith('http'):  # A full URL
            assert url.startswith(self._par.HTTP_DATA_SERVER), \
                ('remote protocol and/or hostname does not match HTTP_DATA_SERVER parameter:\n' +
                 f'"{url[:40]}..." should start with "{self._par.HTTP_DATA_SERVER}"')
        elif not url.startswith(self._par.HTTP_DATA_SERVER):
            # Relative path: prepend the data server URL
            url = self.rel_path2url(url)
        return url
941
+
942
+ def rel_path2url(self, path):
943
+ """Given a relative file path, return the remote HTTP server URL.
944
+
945
+ It is expected that the remote HTTP server has the same file tree as the local system.
946
+
947
+ Parameters
948
+ ----------
949
+ path : str, pathlib.Path
950
+ A relative ALF path (subject/date/number/etc.).
951
+
952
+ Returns
953
+ -------
954
+ A URL string.
955
+
956
+ """
957
+ path = str(path).strip('/')
958
+ assert not path.startswith('http')
959
+ return f'{self._par.HTTP_DATA_SERVER}/{path}'
960
+
961
    def get(self, rest_query, **kwargs):
        """Send a GET request to the Alyx server.

        Will raise an exception on any HTTP status code other than 200, 201.

        For the dictionary contents and list of endpoints, refer to:
        https://openalyx.internationalbrainlab.org/docs

        Parameters
        ----------
        rest_query : str
            A REST URL path, e.g. '/sessions?user=Hamish'.
        **kwargs
            Optional arguments to pass to _generic_request and _cache_response decorator.

        Returns
        -------
        JSON interpreted dictionary from response.

        """
        rep = self._generic_request(requests.get, rest_query, **kwargs)
        # A dict with exactly these four keys is a paginated list response
        if isinstance(rep, dict) and list(rep.keys()) == ['count', 'next', 'previous', 'results']:
            if len(rep['results']) < rep['count']:
                # Wrap in a lazy paginator, forwarding only the cache-relevant kwargs
                cache_args = {k: v for k, v in kwargs.items() if k in ('clobber', 'expires')}
                rep = _PaginatedResponse(self, rep, cache_args)
            else:
                # All results fit in one page; unwrap to a plain list
                rep = rep['results']
        return rep
989
+
990
    def patch(self, rest_query, data=None, files=None):
        """Send a PATCH request to the Alyx server.

        For the dictionary contents, refer to:
        https://openalyx.internationalbrainlab.org/docs

        Parameters
        ----------
        rest_query : str
            The endpoint as full or relative URL.
        data : dict, str
            JSON encoded string or dictionary (c.f. requests).
        files : dict, tuple
            Files to attach (c.f. requests).

        Returns
        -------
        JSON interpreted dictionary from response.

        """
        return self._generic_request(requests.patch, rest_query, data=data, files=files)
1011
+
1012
    def post(self, rest_query, data=None, files=None):
        """Send a POST request to the Alyx server.

        For the dictionary contents, refer to:
        https://openalyx.internationalbrainlab.org/docs

        Parameters
        ----------
        rest_query : str
            The endpoint as full or relative URL.
        data : dict, str
            JSON encoded string or dictionary (c.f. requests).
        files : dict, tuple
            Files to attach (c.f. requests).

        Returns
        -------
        JSON interpreted dictionary from response.

        """
        return self._generic_request(requests.post, rest_query, data=data, files=files)
1033
+
1034
    def put(self, rest_query, data=None, files=None):
        """Send a PUT request to the Alyx server.

        For the dictionary contents, refer to:
        https://openalyx.internationalbrainlab.org/docs

        Parameters
        ----------
        rest_query : str
            The endpoint as full or relative URL.
        data : dict, str
            JSON encoded string or dictionary (c.f. requests).
        files : dict, tuple
            Files to attach (c.f. requests).

        Returns
        -------
        JSON interpreted dictionary from response.

        """
        return self._generic_request(requests.put, rest_query, data=data, files=files)
1056
+
1057
    def rest(self, url=None, action=None, id=None, data=None, files=None,
             no_cache=False, **kwargs):
        """Alyx REST API wrapper.

        If no arguments are passed, lists available endpoints.

        Parameters
        ----------
        url : str
            Endpoint name.
        action : str
            One of 'list', 'create', 'read', 'update', 'partial_update', 'delete'.
        id : str, uuid.UUID
            Lookup string for actions 'read', 'update', 'partial_update', and 'delete'.
        data : dict
            Data dictionary for actions 'update', 'partial_update' and 'create'.
        files : dict, tuple
            Option file(s) to upload.
        no_cache : bool
            If true the `list` and `read` actions are performed without returning the cache.
        kwargs
            Filters as per the Alyx REST documentation
            c.f. https://openalyx.internationalbrainlab.org/docs/

        Returns
        -------
        list, dict
            List of queried dicts ('list') or dict (other actions).

        Examples
        --------
        List available endpoint

        >>> client = AlyxClient()
        ... client.rest()

        List available actions for the 'subjects' endpoint

        >>> client.rest('subjects')

        Example REST endpoint with all actions

        >>> client.rest('subjects', 'list')
        >>> client.rest('subjects', 'list', field_filter1='filterval')
        >>> client.rest('subjects', 'create', data=sub_dict)
        >>> client.rest('subjects', 'read', id='nickname')
        >>> client.rest('subjects', 'update', id='nickname', data=sub_dict)
        >>> client.rest('subjects', 'partial_update', id='nickname', data=sub_dict)
        >>> client.rest('subjects', 'delete', id='nickname')
        >>> client.rest('notes', 'create', data=nd, files={'image': open(image_file, 'rb')})

        """
        # if endpoint is None, list available endpoints
        if not url:
            pprint(self.list_endpoints())
            return
        # remove beginning slash if any
        if url.startswith('/'):
            url = url[1:]
        # and split to the next slash or question mark
        endpoint = re.findall("^/*[^?/]*", url)[0].replace('/', '')
        # make sure the queried endpoint exists, if not throw an informative error
        if endpoint not in self.rest_schemes.keys():
            av = [k for k in self.rest_schemes.keys() if not k.startswith('_') and k]
            raise ValueError('REST endpoint "' + endpoint + '" does not exist. Available ' +
                             'endpoints are \n ' + '\n '.join(av))
        endpoint_scheme = self.rest_schemes[endpoint]
        # on a filter request, override the default action parameter
        if '?' in url:
            action = 'list'
        # if action is None, list available actions for the required endpoint
        if not action:
            pprint(list(endpoint_scheme.keys()))
            self.print_endpoint_info(endpoint)
            return
        # make sure the desired action exists, if not throw an informative error
        if action not in endpoint_scheme:
            raise ValueError('Action "' + action + '" for REST endpoint "' + endpoint + '" does ' +
                             'not exist. Available actions are: ' +
                             '\n ' + '\n '.join(endpoint_scheme.keys()))
        # the actions below require an id in the URL, warn and help the user
        if action in ['read', 'update', 'partial_update', 'delete'] and not id:
            _logger.warning('REST action "' + action + '" requires an ID in the URL: ' +
                            endpoint_scheme[action]['url'])
            return
        # the actions below require a data dictionary, warn and help the user with fields list
        data_required = 'fields' in endpoint_scheme[action]
        if action in ['create', 'update', 'partial_update'] and data_required and not data:
            pprint(endpoint_scheme[action]['fields'])
            for act in endpoint_scheme[action]['fields']:
                print("'" + act['name'] + "': ...,")
            _logger.warning('REST action "' + action + '" requires a data dict with above keys')
            return

        # clobber=True means remote request always made, expires=True means response is not cached
        cache_args = {'clobber': no_cache, 'expires': kwargs.pop('expires', False) or no_cache}
        if action == 'list':
            # list doesn't require id nor
            assert endpoint_scheme[action]['action'] == 'get'
            # add to url data if it is a string
            if id:
                # this is a special case of the list where we query a uuid. Usually read is better
                if 'django' in kwargs.keys():
                    kwargs['django'] = kwargs['django'] + ','
                else:
                    kwargs['django'] = ''
                kwargs['django'] = f"{kwargs['django']}pk,{id}"
            # otherwise, look for a dictionary of filter terms
            if kwargs:
                # Convert all lists in query params to comma separated list
                query_params = {k: ','.join(map(str, ensure_list(v))) for k, v in kwargs.items()}
                url = update_url_params(url, query_params)
            return self.get('/' + url, **cache_args)
        if not isinstance(id, str) and id is not None:
            id = str(id)  # e.g. may be uuid.UUID
        # Dispatch to the HTTP method matching the requested action; the id may be
        # passed as a full URL, hence only its last path segment is used
        if action == 'read':
            assert endpoint_scheme[action]['action'] == 'get'
            return self.get('/' + endpoint + '/' + id.split('/')[-1], **cache_args)
        elif action == 'create':
            assert endpoint_scheme[action]['action'] == 'post'
            return self.post('/' + endpoint, data=data, files=files)
        elif action == 'delete':
            assert endpoint_scheme[action]['action'] == 'delete'
            return self.delete('/' + endpoint + '/' + id.split('/')[-1])
        elif action == 'partial_update':
            assert endpoint_scheme[action]['action'] == 'patch'
            return self.patch('/' + endpoint + '/' + id.split('/')[-1], data=data, files=files)
        elif action == 'update':
            assert endpoint_scheme[action]['action'] == 'put'
            return self.put('/' + endpoint + '/' + id.split('/')[-1], data=data, files=files)
1187
+
1188
+ # JSON field interface convenience methods
1189
+ def _check_inputs(self, endpoint: str) -> None:
1190
+ # make sure the queried endpoint exists, if not throw an informative error
1191
+ if endpoint not in self.rest_schemes.keys():
1192
+ av = (k for k in self.rest_schemes.keys() if not k.startswith('_') and k)
1193
+ raise ValueError('REST endpoint "' + endpoint + '" does not exist. Available ' +
1194
+ 'endpoints are \n ' + '\n '.join(av))
1195
+ return
1196
+
1197
+ def json_field_write(
1198
+ self,
1199
+ endpoint: str = None,
1200
+ uuid: str = None,
1201
+ field_name: str = None,
1202
+ data: dict = None
1203
+ ) -> dict:
1204
+ """Write data to JSON field.
1205
+
1206
+ NOTE: Destructive write! WILL NOT CHECK IF DATA EXISTS
1207
+
1208
+ Parameters
1209
+ ----------
1210
+ endpoint : str, None
1211
+ Valid alyx endpoint, defaults to None.
1212
+ uuid : str, uuid.UUID, None
1213
+ UUID or lookup name for endpoint.
1214
+ field_name : str, None
1215
+ Valid json field name, defaults to None.
1216
+ data : dict, None
1217
+ Data to write to json field, defaults to None.
1218
+
1219
+ Returns
1220
+ -------
1221
+ dict
1222
+ Written data dict.
1223
+
1224
+ """
1225
+ self._check_inputs(endpoint)
1226
+ # Prepare data to patch
1227
+ patch_dict = {field_name: data}
1228
+ # Upload new extended_qc to session
1229
+ ret = self.rest(endpoint, 'partial_update', id=uuid, data=patch_dict)
1230
+ return ret[field_name]
1231
+
1232
    def json_field_update(
        self,
        endpoint: str = None,
        uuid: str = None,
        field_name: str = 'json',
        data: dict = None
    ) -> dict:
        """Non-destructive update of JSON field of endpoint for object.

        Will update the field_name of the object with pk = uuid of given endpoint
        If data has keys with the same name of existing keys it will squash the old
        values (uses the dict.update() method).

        Parameters
        ----------
        endpoint : str
            Alyx REST endpoint to hit.
        uuid : str, uuid.UUID
            UUID or lookup name of object.
        field_name : str
            Name of the json field.
        data : dict
            A dictionary with fields to be updated.

        Returns
        -------
        dict
            New patched json field contents as dict.

        Examples
        --------
        >>> client = AlyxClient()
        >>> client.json_field_update('sessions', 'eid_str', 'extended_qc', {'key': 'value'})

        """
        self._check_inputs(endpoint)
        # Load current json field contents
        current = self.rest(endpoint, 'read', id=uuid)[field_name]
        if current is None:
            # Treat a null field as an empty dict so the update can proceed
            current = {}

        if not isinstance(current, dict):
            # Refuse to clobber non-dict contents; return them unchanged
            _logger.warning(
                f'Current json field "{field_name}" does not contains a dict, aborting update'
            )
            return current

        # Patch current dict with new data
        current.update(data)
        # Prepare data to patch
        patch_dict = {field_name: current}
        # Upload new extended_qc to session
        ret = self.rest(endpoint, 'partial_update', id=uuid, data=patch_dict)
        return ret[field_name]
1286
+
1287
    def json_field_remove_key(
        self,
        endpoint: str = None,
        uuid: str = None,
        field_name: str = 'json',
        key: str = None
    ) -> Optional[dict]:
        """Remove inputted key from JSON field dict and re-upload it to Alyx.

        Needs endpoint, UUID and json field name.

        Parameters
        ----------
        endpoint : str
            Endpoint to hit, defaults to None.
        uuid : str, uuid.UUID
            UUID or lookup name for endpoint.
        field_name : str
            JSON field name of object, defaults to None.
        key : str
            Key name of dictionary inside object, defaults to None.

        Returns
        -------
        dict
            New content of json field.

        """
        self._check_inputs(endpoint)
        current = self.rest(endpoint, 'read', id=uuid)[field_name]
        # If no contents, cannot remove key, return
        if current is None:
            return current
        # if contents are not dict, cannot remove key, return contents
        # NOTE(review): despite the comment above, None (not the contents) is
        # returned for str contents — possibly intentional; confirm with callers
        if isinstance(current, str):
            _logger.warning(f'Cannot remove key {key} content of json field is of type str')
            return None
        # If key not present in contents of json field cannot remove key, return contents
        if current.get(key, None) is None:
            _logger.warning(
                f'{key}: Key not found in endpoint {endpoint} field {field_name}'
            )
            return current
        _logger.info(f'Removing key from dict: "{key}"')
        current.pop(key)
        # Re-write contents without removed key
        written = self.json_field_write(
            endpoint=endpoint, uuid=uuid, field_name=field_name, data=current
        )
        return written
1337
+
1338
+ def json_field_delete(
1339
+ self, endpoint: str = None, uuid: str = None, field_name: str = None
1340
+ ) -> None:
1341
+ """Set an entire field to null.
1342
+
1343
+ Note that this deletes all data from a given field. To delete only a single key from a
1344
+ given JSON field, use `json_field_remove_key`.
1345
+
1346
+ Parameters
1347
+ ----------
1348
+ endpoint : str
1349
+ Endpoint to hit, defaults to None.
1350
+ uuid : str, uuid.UUID
1351
+ UUID or lookup name for endpoint.
1352
+ field_name : str
1353
+ The field name of object (e.g. 'json', 'name', 'extended_qc'), defaults to None.
1354
+
1355
+ Returns
1356
+ -------
1357
+ None
1358
+ New content of json field.
1359
+
1360
+ """
1361
+ self._check_inputs(endpoint)
1362
+ _ = self.rest(endpoint, 'partial_update', id=uuid, data={field_name: None})
1363
+ return _[field_name]
1364
+
1365
+ def clear_rest_cache(self):
1366
+ """Clear all REST response cache files for the base url."""
1367
+ for file in self.cache_dir.joinpath('.rest').glob('*'):
1368
+ file.unlink()