dtlpy 1.113.10__py3-none-any.whl → 1.114.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243) hide show
  1. dtlpy/__init__.py +488 -488
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/__pycache__/__init__.cpython-38.pyc +0 -0
  5. dtlpy/assets/code_server/config.yaml +2 -2
  6. dtlpy/assets/code_server/installation.sh +24 -24
  7. dtlpy/assets/code_server/launch.json +13 -13
  8. dtlpy/assets/code_server/settings.json +2 -2
  9. dtlpy/assets/main.py +53 -53
  10. dtlpy/assets/main_partial.py +18 -18
  11. dtlpy/assets/mock.json +11 -11
  12. dtlpy/assets/model_adapter.py +83 -83
  13. dtlpy/assets/package.json +61 -61
  14. dtlpy/assets/package_catalog.json +29 -29
  15. dtlpy/assets/package_gitignore +307 -307
  16. dtlpy/assets/service_runners/__init__.py +33 -33
  17. dtlpy/assets/service_runners/converter.py +96 -96
  18. dtlpy/assets/service_runners/multi_method.py +49 -49
  19. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  20. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  21. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  22. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  23. dtlpy/assets/service_runners/single_method.py +37 -37
  24. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  25. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  26. dtlpy/assets/service_runners/single_method_item.py +41 -41
  27. dtlpy/assets/service_runners/single_method_json.py +42 -42
  28. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  29. dtlpy/assets/voc_annotation_template.xml +23 -23
  30. dtlpy/caches/base_cache.py +32 -32
  31. dtlpy/caches/cache.py +473 -473
  32. dtlpy/caches/dl_cache.py +201 -201
  33. dtlpy/caches/filesystem_cache.py +89 -89
  34. dtlpy/caches/redis_cache.py +84 -84
  35. dtlpy/dlp/__init__.py +20 -20
  36. dtlpy/dlp/cli_utilities.py +367 -367
  37. dtlpy/dlp/command_executor.py +764 -764
  38. dtlpy/dlp/dlp +1 -1
  39. dtlpy/dlp/dlp.bat +1 -1
  40. dtlpy/dlp/dlp.py +128 -128
  41. dtlpy/dlp/parser.py +651 -651
  42. dtlpy/entities/__init__.py +83 -83
  43. dtlpy/entities/analytic.py +311 -311
  44. dtlpy/entities/annotation.py +1879 -1879
  45. dtlpy/entities/annotation_collection.py +699 -699
  46. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  47. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  48. dtlpy/entities/annotation_definitions/box.py +195 -195
  49. dtlpy/entities/annotation_definitions/classification.py +67 -67
  50. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  51. dtlpy/entities/annotation_definitions/cube.py +204 -204
  52. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  53. dtlpy/entities/annotation_definitions/description.py +32 -32
  54. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  55. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  56. dtlpy/entities/annotation_definitions/gis.py +69 -69
  57. dtlpy/entities/annotation_definitions/note.py +139 -139
  58. dtlpy/entities/annotation_definitions/point.py +117 -117
  59. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  60. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  61. dtlpy/entities/annotation_definitions/pose.py +92 -92
  62. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  63. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  64. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  65. dtlpy/entities/annotation_definitions/text.py +85 -85
  66. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  67. dtlpy/entities/app.py +220 -220
  68. dtlpy/entities/app_module.py +107 -107
  69. dtlpy/entities/artifact.py +174 -174
  70. dtlpy/entities/assignment.py +399 -399
  71. dtlpy/entities/base_entity.py +214 -214
  72. dtlpy/entities/bot.py +113 -113
  73. dtlpy/entities/codebase.py +296 -296
  74. dtlpy/entities/collection.py +38 -38
  75. dtlpy/entities/command.py +169 -169
  76. dtlpy/entities/compute.py +442 -442
  77. dtlpy/entities/dataset.py +1285 -1285
  78. dtlpy/entities/directory_tree.py +44 -44
  79. dtlpy/entities/dpk.py +470 -470
  80. dtlpy/entities/driver.py +222 -222
  81. dtlpy/entities/execution.py +397 -397
  82. dtlpy/entities/feature.py +124 -124
  83. dtlpy/entities/feature_set.py +145 -145
  84. dtlpy/entities/filters.py +641 -641
  85. dtlpy/entities/gis_item.py +107 -107
  86. dtlpy/entities/integration.py +184 -184
  87. dtlpy/entities/item.py +953 -953
  88. dtlpy/entities/label.py +123 -123
  89. dtlpy/entities/links.py +85 -85
  90. dtlpy/entities/message.py +175 -175
  91. dtlpy/entities/model.py +694 -691
  92. dtlpy/entities/node.py +1005 -1005
  93. dtlpy/entities/ontology.py +803 -803
  94. dtlpy/entities/organization.py +287 -287
  95. dtlpy/entities/package.py +657 -657
  96. dtlpy/entities/package_defaults.py +5 -5
  97. dtlpy/entities/package_function.py +185 -185
  98. dtlpy/entities/package_module.py +113 -113
  99. dtlpy/entities/package_slot.py +118 -118
  100. dtlpy/entities/paged_entities.py +290 -267
  101. dtlpy/entities/pipeline.py +593 -593
  102. dtlpy/entities/pipeline_execution.py +279 -279
  103. dtlpy/entities/project.py +394 -394
  104. dtlpy/entities/prompt_item.py +499 -499
  105. dtlpy/entities/recipe.py +301 -301
  106. dtlpy/entities/reflect_dict.py +102 -102
  107. dtlpy/entities/resource_execution.py +138 -138
  108. dtlpy/entities/service.py +958 -958
  109. dtlpy/entities/service_driver.py +117 -117
  110. dtlpy/entities/setting.py +294 -294
  111. dtlpy/entities/task.py +491 -491
  112. dtlpy/entities/time_series.py +143 -143
  113. dtlpy/entities/trigger.py +426 -426
  114. dtlpy/entities/user.py +118 -118
  115. dtlpy/entities/webhook.py +124 -124
  116. dtlpy/examples/__init__.py +19 -19
  117. dtlpy/examples/add_labels.py +135 -135
  118. dtlpy/examples/add_metadata_to_item.py +21 -21
  119. dtlpy/examples/annotate_items_using_model.py +65 -65
  120. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  121. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  122. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  123. dtlpy/examples/convert_annotation_types.py +51 -51
  124. dtlpy/examples/converter.py +143 -143
  125. dtlpy/examples/copy_annotations.py +22 -22
  126. dtlpy/examples/copy_folder.py +31 -31
  127. dtlpy/examples/create_annotations.py +51 -51
  128. dtlpy/examples/create_video_annotations.py +83 -83
  129. dtlpy/examples/delete_annotations.py +26 -26
  130. dtlpy/examples/filters.py +113 -113
  131. dtlpy/examples/move_item.py +23 -23
  132. dtlpy/examples/play_video_annotation.py +13 -13
  133. dtlpy/examples/show_item_and_mask.py +53 -53
  134. dtlpy/examples/triggers.py +49 -49
  135. dtlpy/examples/upload_batch_of_items.py +20 -20
  136. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  137. dtlpy/examples/upload_items_with_modalities.py +43 -43
  138. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  139. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  140. dtlpy/exceptions.py +125 -125
  141. dtlpy/miscellaneous/__init__.py +20 -20
  142. dtlpy/miscellaneous/dict_differ.py +95 -95
  143. dtlpy/miscellaneous/git_utils.py +217 -217
  144. dtlpy/miscellaneous/json_utils.py +14 -14
  145. dtlpy/miscellaneous/list_print.py +105 -105
  146. dtlpy/miscellaneous/zipping.py +130 -130
  147. dtlpy/ml/__init__.py +20 -20
  148. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  149. dtlpy/ml/base_model_adapter.py +945 -940
  150. dtlpy/ml/metrics.py +461 -461
  151. dtlpy/ml/predictions_utils.py +274 -274
  152. dtlpy/ml/summary_writer.py +57 -57
  153. dtlpy/ml/train_utils.py +60 -60
  154. dtlpy/new_instance.py +252 -252
  155. dtlpy/repositories/__init__.py +56 -56
  156. dtlpy/repositories/analytics.py +85 -85
  157. dtlpy/repositories/annotations.py +916 -916
  158. dtlpy/repositories/apps.py +383 -383
  159. dtlpy/repositories/artifacts.py +452 -452
  160. dtlpy/repositories/assignments.py +599 -599
  161. dtlpy/repositories/bots.py +213 -213
  162. dtlpy/repositories/codebases.py +559 -559
  163. dtlpy/repositories/collections.py +332 -348
  164. dtlpy/repositories/commands.py +158 -158
  165. dtlpy/repositories/compositions.py +61 -61
  166. dtlpy/repositories/computes.py +434 -406
  167. dtlpy/repositories/datasets.py +1291 -1291
  168. dtlpy/repositories/downloader.py +895 -895
  169. dtlpy/repositories/dpks.py +433 -433
  170. dtlpy/repositories/drivers.py +266 -266
  171. dtlpy/repositories/executions.py +817 -817
  172. dtlpy/repositories/feature_sets.py +226 -226
  173. dtlpy/repositories/features.py +238 -238
  174. dtlpy/repositories/integrations.py +484 -484
  175. dtlpy/repositories/items.py +909 -915
  176. dtlpy/repositories/messages.py +94 -94
  177. dtlpy/repositories/models.py +877 -867
  178. dtlpy/repositories/nodes.py +80 -80
  179. dtlpy/repositories/ontologies.py +511 -511
  180. dtlpy/repositories/organizations.py +525 -525
  181. dtlpy/repositories/packages.py +1941 -1941
  182. dtlpy/repositories/pipeline_executions.py +448 -448
  183. dtlpy/repositories/pipelines.py +642 -642
  184. dtlpy/repositories/projects.py +539 -539
  185. dtlpy/repositories/recipes.py +399 -399
  186. dtlpy/repositories/resource_executions.py +137 -137
  187. dtlpy/repositories/schema.py +120 -120
  188. dtlpy/repositories/service_drivers.py +213 -213
  189. dtlpy/repositories/services.py +1704 -1704
  190. dtlpy/repositories/settings.py +339 -339
  191. dtlpy/repositories/tasks.py +1124 -1124
  192. dtlpy/repositories/times_series.py +278 -278
  193. dtlpy/repositories/triggers.py +536 -536
  194. dtlpy/repositories/upload_element.py +257 -257
  195. dtlpy/repositories/uploader.py +651 -651
  196. dtlpy/repositories/webhooks.py +249 -249
  197. dtlpy/services/__init__.py +22 -22
  198. dtlpy/services/aihttp_retry.py +131 -131
  199. dtlpy/services/api_client.py +1782 -1782
  200. dtlpy/services/api_reference.py +40 -40
  201. dtlpy/services/async_utils.py +133 -133
  202. dtlpy/services/calls_counter.py +44 -44
  203. dtlpy/services/check_sdk.py +68 -68
  204. dtlpy/services/cookie.py +115 -115
  205. dtlpy/services/create_logger.py +156 -156
  206. dtlpy/services/events.py +84 -84
  207. dtlpy/services/logins.py +235 -235
  208. dtlpy/services/reporter.py +256 -256
  209. dtlpy/services/service_defaults.py +91 -91
  210. dtlpy/utilities/__init__.py +20 -20
  211. dtlpy/utilities/annotations/__init__.py +16 -16
  212. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  213. dtlpy/utilities/base_package_runner.py +264 -264
  214. dtlpy/utilities/converter.py +1650 -1650
  215. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  216. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  217. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  218. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  219. dtlpy/utilities/local_development/__init__.py +1 -1
  220. dtlpy/utilities/local_development/local_session.py +179 -179
  221. dtlpy/utilities/reports/__init__.py +2 -2
  222. dtlpy/utilities/reports/figures.py +343 -343
  223. dtlpy/utilities/reports/report.py +71 -71
  224. dtlpy/utilities/videos/__init__.py +17 -17
  225. dtlpy/utilities/videos/video_player.py +598 -598
  226. dtlpy/utilities/videos/videos.py +470 -470
  227. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp +1 -1
  228. dtlpy-1.114.13.data/scripts/dlp.bat +2 -0
  229. {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp.py +128 -128
  230. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/LICENSE +200 -200
  231. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/METADATA +172 -172
  232. dtlpy-1.114.13.dist-info/RECORD +240 -0
  233. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/WHEEL +1 -1
  234. tests/features/environment.py +551 -550
  235. dtlpy-1.113.10.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.113.10.dist-info/RECORD +0 -244
  237. tests/assets/__init__.py +0 -0
  238. tests/assets/models_flow/__init__.py +0 -0
  239. tests/assets/models_flow/failedmain.py +0 -52
  240. tests/assets/models_flow/main.py +0 -62
  241. tests/assets/models_flow/main_model.py +0 -54
  242. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/entry_points.txt +0 -0
  243. {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/top_level.txt +0 -0
@@ -1,1782 +1,1782 @@
1
- """
2
- Dataloop platform calls
3
- """
4
- import aiohttp.client_exceptions
5
- import requests_toolbelt
6
- import multiprocessing
7
- import threading
8
- import traceback
9
- import datetime
10
- import requests
11
- import aiohttp
12
- import logging
13
- import asyncio
14
- import certifi
15
- import base64
16
- import enum
17
- import time
18
- import tqdm
19
- import json
20
- import sys
21
- import ssl
22
- import jwt
23
- import os
24
- import io
25
- import concurrent
26
- from concurrent.futures import ThreadPoolExecutor
27
- from requests.adapters import HTTPAdapter
28
- from urllib3.util import Retry
29
- from functools import wraps
30
- import numpy as np
31
- import inspect
32
- from requests.models import Response
33
- from dtlpy.caches.cache import CacheManger, CacheConfig
34
- from .calls_counter import CallsCounter
35
- from .cookie import CookieIO
36
- from .logins import login, logout, login_secret, login_m2m, gate_url_from_host
37
- from .async_utils import AsyncResponse, AsyncUploadStream, AsyncResponseError, AsyncThreadEventLoop
38
- from .events import Events
39
- from .service_defaults import DEFAULT_ENVIRONMENTS, DEFAULT_ENVIRONMENT
40
- from .aihttp_retry import RetryClient
41
- from .. import miscellaneous, exceptions, __version__
42
-
43
- logger = logging.getLogger(name='dtlpy')
44
- threadLock = threading.Lock()
45
-
46
-
47
- def format_message(message):
48
- if message and isinstance(message, str):
49
- return message.replace('\\n', '\n')
50
- return message
51
-
52
-
53
- class VerboseLoggingLevel:
54
- DEBUG = "debug"
55
- INFO = "info"
56
- WARNING = "warning"
57
- ERROR = "error"
58
- CRITICAL = "critical"
59
-
60
-
61
- class PlatformError(Exception):
62
- """
63
- Error handling for api calls
64
- """
65
-
66
- def __init__(self, resp):
67
- msg = ''
68
- if hasattr(resp, 'status_code'):
69
- msg += '<Response [{}]>'.format(resp.status_code)
70
- if hasattr(resp, 'reason'):
71
- msg += '<Reason [{}]>'.format(format_message(resp.reason))
72
- elif hasattr(resp, 'text'):
73
- msg += '<Reason [{}]>'.format(format_message(resp.text))
74
- super().__init__(msg)
75
-
76
-
77
- class Callbacks:
78
- def __init__(self):
79
- self._callbacks = {}
80
-
81
- class CallbackEvent(str, enum.Enum):
82
- DATASET_EXPORT = 'datasetExport'
83
- ITEMS_UPLOAD = 'itemUpload'
84
-
85
- def add(self, event, func):
86
-
87
- if not callable(func):
88
- raise ValueError(f"The provided callback for {event} is not callable")
89
- if event not in list(self.CallbackEvent):
90
- raise ValueError(f"Unknown event: {event!r}, allowed events are: {list(self.CallbackEvent)}")
91
- self._callbacks[event] = func
92
-
93
- def get(self, name):
94
- return self._callbacks.get(name)
95
-
96
- def run_on_event(self, event, context, progress):
97
- callback = self.get(event)
98
- if callback is not None:
99
- callback(progress=progress, context=context)
100
-
101
-
102
- class Verbose:
103
- __DEFAULT_LOGGING_LEVEL = 'warning'
104
- __DEFAULT_DISABLE_PROGRESS_BAR = False
105
- __DEFAULT_PRINT_ALL_RESPONSES = False
106
- __PRINT_ERROR_LOGS = False
107
- __DEFAULT_PROGRESS_BAR_SETTINGS = {
108
- 'Iterate Pages': False,
109
- 'Command Progress': False,
110
- 'Download Dataset': False,
111
- 'Download Item': False,
112
- 'Upload Items': False,
113
- 'Download Annotations': False,
114
- 'Upload Annotations': False,
115
- 'Convert Annotations': False
116
- }
117
-
118
- def __init__(self, cookie):
119
- self.cookie = cookie
120
- dictionary = self.cookie.get('verbose')
121
- if isinstance(dictionary, dict):
122
- self.from_cookie(dictionary)
123
- else:
124
- self._logging_level = self.__DEFAULT_LOGGING_LEVEL
125
- self._disable_progress_bar = self.__DEFAULT_DISABLE_PROGRESS_BAR
126
- self._print_all_responses = self.__DEFAULT_PRINT_ALL_RESPONSES
127
- self._print_error_logs = self.__PRINT_ERROR_LOGS
128
- self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
129
- if os.getenv('DTLPY_REFRESH_TOKEN_METHOD', "") == "proxy":
130
- self._print_error_logs = True
131
- self.to_cookie()
132
-
133
- def to_cookie(self):
134
- dictionary = {'logging_level': self._logging_level,
135
- 'disable_progress_bar': self._disable_progress_bar,
136
- 'print_all_responses': self._print_all_responses,
137
- 'print_error_logs': self._print_error_logs,
138
- 'progress_bar_setting': json.dumps(self._progress_bar_settings)
139
- }
140
- self.cookie.put(key='verbose', value=dictionary)
141
-
142
- def from_cookie(self, dictionary):
143
- self._logging_level = dictionary.get('logging_level', self.__DEFAULT_LOGGING_LEVEL)
144
- self._disable_progress_bar = dictionary.get('disable_progress_bar', self.__DEFAULT_DISABLE_PROGRESS_BAR)
145
- self._print_all_responses = dictionary.get('print_all_responses', self.__DEFAULT_PRINT_ALL_RESPONSES)
146
- self._print_error_logs = dictionary.get('print_error_logs', self.__PRINT_ERROR_LOGS)
147
- progress_bar_settings = dictionary.get('progress_bar_setting', None)
148
- if progress_bar_settings is None:
149
- self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
150
- else:
151
- self._progress_bar_settings = json.loads(progress_bar_settings)
152
-
153
- @property
154
- def disable_progress_bar_iterate_pages(self):
155
- return self._disable_progress_bar or self._progress_bar_settings.get('Iterate Pages', False)
156
-
157
- @disable_progress_bar_iterate_pages.setter
158
- def disable_progress_bar_iterate_pages(self, val):
159
- self._progress_bar_settings['Iterate Pages'] = val
160
- self.to_cookie()
161
-
162
- @property
163
- def disable_progress_bar_command_progress(self):
164
- return self._disable_progress_bar or self._progress_bar_settings.get('Command Progress', False)
165
-
166
- @disable_progress_bar_command_progress.setter
167
- def disable_progress_bar_command_progress(self, val):
168
- self._progress_bar_settings['Command Progress'] = val
169
- self.to_cookie()
170
-
171
- @property
172
- def disable_progress_bar_download_item(self):
173
- return self._disable_progress_bar or self._progress_bar_settings.get('Download Item', False)
174
-
175
- @disable_progress_bar_download_item.setter
176
- def disable_progress_bar_download_item(self, val):
177
- self._progress_bar_settings['Download Item'] = val
178
- self.to_cookie()
179
-
180
- @property
181
- def disable_progress_bar_download_dataset(self):
182
- return self._disable_progress_bar or self._progress_bar_settings.get('Download Dataset', False)
183
-
184
- @disable_progress_bar_download_dataset.setter
185
- def disable_progress_bar_download_dataset(self, val):
186
- self._progress_bar_settings['Download Dataset'] = val
187
- self.to_cookie()
188
-
189
- @property
190
- def disable_progress_bar_upload_items(self):
191
- return self._disable_progress_bar or self._progress_bar_settings.get('Upload Items', False)
192
-
193
- @disable_progress_bar_upload_items.setter
194
- def disable_progress_bar_upload_items(self, val):
195
- self._progress_bar_settings['Upload Items'] = val
196
- self.to_cookie()
197
-
198
- @property
199
- def disable_progress_bar_download_annotations(self):
200
- return self._disable_progress_bar or self._progress_bar_settings.get('Download Annotations', False)
201
-
202
- @disable_progress_bar_download_annotations.setter
203
- def disable_progress_bar_download_annotations(self, val):
204
- self._progress_bar_settings['Download Annotations'] = val
205
- self.to_cookie()
206
-
207
- @property
208
- def disable_progress_bar_upload_annotations(self):
209
- return self._disable_progress_bar or self._progress_bar_settings.get('Upload Annotations', False)
210
-
211
- @disable_progress_bar_upload_annotations.setter
212
- def disable_progress_bar_upload_annotations(self, val):
213
- self._progress_bar_settings['Upload Annotations'] = val
214
- self.to_cookie()
215
-
216
- @property
217
- def disable_progress_bar_convert_annotations(self):
218
- return self._disable_progress_bar or self._progress_bar_settings.get('Convert Annotations', False)
219
-
220
- @disable_progress_bar_convert_annotations.setter
221
- def disable_progress_bar_convert_annotations(self, val):
222
- self._progress_bar_settings['Convert Annotations'] = val
223
- self.to_cookie()
224
-
225
- @property
226
- def disable_progress_bar(self):
227
- return self._disable_progress_bar
228
-
229
- @disable_progress_bar.setter
230
- def disable_progress_bar(self, val):
231
- self._disable_progress_bar = val
232
- self.to_cookie()
233
-
234
- @property
235
- def logging_level(self):
236
- return self._logging_level
237
-
238
- @logging_level.setter
239
- def logging_level(self, val):
240
- self._logging_level = val
241
- # set log level
242
- logging.getLogger(name='dtlpy').handlers[0].setLevel(logging._nameToLevel[self._logging_level.upper()])
243
- # write to cookie
244
- self.to_cookie()
245
-
246
- @property
247
- def print_all_responses(self):
248
- return self._print_all_responses
249
-
250
- @print_all_responses.setter
251
- def print_all_responses(self, val):
252
- self._print_all_responses = val
253
- self.to_cookie()
254
-
255
- @property
256
- def print_error_logs(self):
257
- return self._print_error_logs
258
-
259
- @print_error_logs.setter
260
- def print_error_logs(self, val):
261
- self._print_error_logs = val
262
- self.to_cookie()
263
-
264
-
265
- class CacheMode:
266
- __DEFAULT_ENABLE_CACHE = True
267
- __DEFAULT_CHUNK_CACHE = 200000
268
-
269
- def __init__(self, cookie):
270
- self.cookie = cookie
271
- dictionary = self.cookie.get('cache_mode')
272
- if isinstance(dictionary, dict):
273
- self.from_cookie(dictionary)
274
- else:
275
- self._enable_cache = self.__DEFAULT_ENABLE_CACHE
276
- self._chunk_cache = self.__DEFAULT_CHUNK_CACHE
277
- self.to_cookie()
278
-
279
- def to_cookie(self):
280
- dictionary = {'enable_cache': self._enable_cache,
281
- 'chunk_cache': self._chunk_cache}
282
- self.cookie.put(key='cache_mode', value=dictionary)
283
-
284
- def from_cookie(self, dictionary):
285
- self._enable_cache = dictionary.get('enable_cache', self.__DEFAULT_ENABLE_CACHE)
286
- self._chunk_cache = dictionary.get('chunk_cache', self.__DEFAULT_CHUNK_CACHE)
287
-
288
- @property
289
- def enable_cache(self):
290
- return self._enable_cache
291
-
292
- @enable_cache.setter
293
- def enable_cache(self, val: bool):
294
- if not isinstance(val, bool):
295
- raise exceptions.PlatformException(error=400,
296
- message="input must be of type bool")
297
- self._enable_cache = val
298
- self.to_cookie()
299
-
300
- @property
301
- def chunk_cache(self):
302
- return self._chunk_cache
303
-
304
- @chunk_cache.setter
305
- def chunk_cache(self, val):
306
- self._chunk_cache = val
307
- self.to_cookie()
308
-
309
-
310
- class SDKCache:
311
- __DEFAULT_USE_CACHE = False
312
- __DEFAULT_CACHE_PATH = os.path.join(os.path.expanduser('~'), '.dataloop', 'obj_cache')
313
- __DEFAULT_CACHE_PATH_BIN = os.path.join(os.path.expanduser('~'), '.dataloop')
314
- __DEFAULT_CONFIGS_CACHE = CacheConfig().to_string()
315
- __DEFAULT_BINARY_CACHE_SIZE = 1000
316
-
317
- def __init__(self, cookie):
318
- self.cookie = cookie
319
- dictionary = self.cookie.get('cache_configs')
320
- if isinstance(dictionary, dict):
321
- self.from_cookie(dictionary)
322
- else:
323
- self._cache_path = self.__DEFAULT_CACHE_PATH
324
- self._cache_path_bin = self.__DEFAULT_CACHE_PATH_BIN
325
- self._configs = self.__DEFAULT_CONFIGS_CACHE
326
- self._bin_size = self.__DEFAULT_BINARY_CACHE_SIZE
327
- self._use_cache = self.__DEFAULT_USE_CACHE
328
- self.to_cookie()
329
-
330
- def to_cookie(self):
331
- dictionary = {'cache_path': self._cache_path,
332
- 'cache_path_bin': self._cache_path_bin,
333
- 'configs': self._configs,
334
- 'bin_size': self._bin_size,
335
- 'use_cache': self._use_cache}
336
- self.cookie.put(key='cache_configs', value=dictionary)
337
-
338
- def from_cookie(self, dictionary):
339
- self._cache_path = dictionary.get('cache_path', self.__DEFAULT_CACHE_PATH)
340
- self._cache_path_bin = dictionary.get('cache_path_bin', self.__DEFAULT_CACHE_PATH_BIN)
341
- self._configs = dictionary.get('configs', self.__DEFAULT_CONFIGS_CACHE)
342
- self._bin_size = dictionary.get('bin_size', self.__DEFAULT_BINARY_CACHE_SIZE)
343
- self._use_cache = dictionary.get('use_cache', self.__DEFAULT_USE_CACHE)
344
-
345
- @property
346
- def cache_path(self):
347
- return self._cache_path
348
-
349
- @property
350
- def cache_path_bin(self):
351
- return self._cache_path_bin
352
-
353
- @cache_path_bin.setter
354
- def cache_path_bin(self, val: str):
355
- if not isinstance(val, str):
356
- raise exceptions.PlatformException(error=400,
357
- message="input must be of type str")
358
- self._cache_path_bin = val
359
- os.environ['DEFAULT_CACHE_PATH'] = val
360
- self.to_cookie()
361
-
362
- @property
363
- def use_cache(self):
364
- return self._use_cache
365
-
366
- @use_cache.setter
367
- def use_cache(self, val: bool):
368
- if not isinstance(val, bool):
369
- raise exceptions.PlatformException(error=400,
370
- message="input must be of type bool")
371
- self._use_cache = val
372
- self.to_cookie()
373
-
374
- @property
375
- def configs(self):
376
- return self._configs
377
-
378
- @configs.setter
379
- def configs(self, val):
380
- if isinstance(val, CacheConfig):
381
- val = val.to_string()
382
- if not isinstance(val, str):
383
- raise exceptions.PlatformException(error=400,
384
- message="input must be of type str or CacheConfig")
385
- self._configs = val
386
- self.to_cookie()
387
-
388
- @property
389
- def bin_size(self):
390
- return self._bin_size
391
-
392
- @bin_size.setter
393
- def bin_size(self, val: int):
394
- if not isinstance(val, int):
395
- raise exceptions.PlatformException(error=400,
396
- message="input must be of type int")
397
- self._bin_size = val
398
- self.to_cookie()
399
-
400
-
401
- class Attributes2:
402
- __DEFAULT_USE_ATTRIBUTE = False
403
-
404
- def __init__(self, cookie):
405
- self.cookie = cookie
406
- dictionary = self.cookie.get('use_attributes_2')
407
- if isinstance(dictionary, dict):
408
- self.from_cookie(dictionary)
409
- else:
410
- self._use_attributes_2 = self.__DEFAULT_USE_ATTRIBUTE
411
- self.to_cookie()
412
-
413
- def to_cookie(self):
414
- dictionary = {'use_attributes_2': self._use_attributes_2}
415
- self.cookie.put(key='use_attributes_2', value=dictionary)
416
-
417
- def from_cookie(self, dictionary):
418
- self._use_attributes_2 = dictionary.get('use_attributes_2', self.__DEFAULT_USE_ATTRIBUTE)
419
-
420
- @property
421
- def use_attributes_2(self):
422
- return self._use_attributes_2
423
-
424
- @use_attributes_2.setter
425
- def use_attributes_2(self, val: bool):
426
- if not isinstance(val, bool):
427
- raise exceptions.PlatformException(error=400,
428
- message="input must be of type bool")
429
- self._use_attributes_2 = val
430
- os.environ["USE_ATTRIBUTE_2"] = json.dumps(val)
431
- self.to_cookie()
432
-
433
- class Decorators:
434
- @staticmethod
435
- def token_expired_decorator(method):
436
- @wraps(method)
437
- def decorated_method(inst, *args, **kwargs):
438
- # save event
439
- frm = inspect.stack()[1]
440
-
441
- # before the method call
442
- kwargs.update({'stack': frm})
443
- if inst.token_expired():
444
- if inst.renew_token_method() is False:
445
- raise exceptions.PlatformException('600', 'Token expired, Please login.'
446
- '\nSDK login options: dl.login(), dl.login_token(), '
447
- 'dl.login_m2m()'
448
- '\nCLI login options: dlp login, dlp login-token, '
449
- 'dlp login-m2m')
450
- # the actual method call
451
- result = method(inst, *args, **kwargs)
452
- # after the method call
453
- return result
454
-
455
- return decorated_method
456
-
457
-
458
- class ApiClient:
459
- """
460
- API calls to Dataloop gate
461
- """
462
-
463
- def __init__(self, token=None, num_processes=None, cookie_filepath=None):
464
- ############
465
- # Initiate #
466
- ############
467
- # define local params - read only once from cookie file
468
- self.lock = threading.Lock()
469
- self.renew_token_method = self.renew_token
470
- self.is_cli = False
471
- self.session = None
472
- self.default_headers = dict()
473
- self._token = None
474
- self._environments = None
475
- self._environment = None
476
- self._verbose = None
477
- self._callbacks = None
478
- self._cache_state = None
479
- self._attributes_mode = None
480
- self._cache_configs = None
481
- self._sdk_cache = None
482
- self._fetch_entities = None
483
- # define other params
484
- self.last_response = None
485
- self.last_request = None
486
- self.platform_exception = None
487
- self.last_curl = None
488
- self.minimal_print = True
489
- # start refresh token
490
- self.refresh_token_active = True
491
- # event and pools
492
- self._thread_pools = dict()
493
- self._event_loop = None
494
- self._login_domain = None
495
- self.__gate_url_for_requests = None
496
-
497
- # TODO- remove before release - only for debugging
498
- self._stopped_pools = list()
499
-
500
- if cookie_filepath is None:
501
- self.cookie_io = CookieIO.init()
502
- else:
503
- self.cookie_io = CookieIO(path=cookie_filepath)
504
- assert isinstance(self.cookie_io, CookieIO)
505
- self.state_io = CookieIO.init_local_cookie(create=False)
506
- assert isinstance(self.state_io, CookieIO)
507
-
508
- ##################
509
- # configurations #
510
- ##################
511
- # check for proxies in connection
512
- self.check_proxy()
513
-
514
- # set token if input
515
- if token is not None:
516
- self.token = token
517
-
518
- # STDOUT
519
- self.remove_keys_list = ['contributors', 'url', 'annotations', 'items', 'export', 'directoryTree',
520
- 'attributes', 'partitions', 'metadata', 'stream', 'createdAt', 'updatedAt', 'arch']
521
-
522
- # API calls counter
523
- counter_filepath = os.path.join(os.path.dirname(self.cookie_io.COOKIE), 'calls_counter.json')
524
- self.calls_counter = CallsCounter(filepath=counter_filepath)
525
-
526
- # create a global thread pool to run multi threading
527
- if num_processes is None:
528
- num_processes = 3 * multiprocessing.cpu_count()
529
- self._num_processes = num_processes
530
- self._thread_pools_names = {'item.download': num_processes,
531
- 'item.status_update': num_processes,
532
- 'item.page': num_processes,
533
- 'annotation.upload': num_processes,
534
- 'annotation.download': num_processes,
535
- 'annotation.update': num_processes,
536
- 'entity.create': num_processes,
537
- 'dataset.download': num_processes}
538
- # set logging level
539
- logging.getLogger(name='dtlpy').handlers[0].setLevel(logging._nameToLevel[self.verbose.logging_level.upper()])
540
- os.environ["USE_ATTRIBUTE_2"] = json.dumps(self.attributes_mode.use_attributes_2)
541
-
542
- self.cache = None
543
- #######################
544
- # start event tracker #
545
- self.event_tracker = Events(client_api=self)
546
- self.event_tracker.daemon = True
547
- self.event_tracker.start()
548
-
549
- @property
550
- def event_loop(self):
551
- self.lock.acquire()
552
- if self._event_loop is None:
553
- self._event_loop = self.create_event_loop_thread()
554
- elif not self._event_loop.loop.is_running():
555
- if self._event_loop.is_alive():
556
- self._event_loop.stop()
557
- self._event_loop = self.create_event_loop_thread()
558
- self.lock.release()
559
- return self._event_loop
560
-
561
- def build_cache(self, cache_config=None):
562
- if cache_config is None:
563
- cache_config_json = os.environ.get('CACHE_CONFIG', None)
564
- if cache_config_json is None:
565
- if self.sdk_cache.use_cache:
566
- cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=self.sdk_cache.configs)
567
- else:
568
- cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=cache_config_json)
569
- if cache_config:
570
- # cache paths
571
- if os.environ.get('DEFAULT_CACHE_PATH', None) is None:
572
- os.environ['DEFAULT_CACHE_PATH'] = self.sdk_cache.cache_path_bin
573
- else:
574
- self.sdk_cache.cache_path_bin = os.environ['DEFAULT_CACHE_PATH']
575
-
576
- if not os.path.isdir(self.sdk_cache.cache_path_bin):
577
- os.makedirs(self.sdk_cache.cache_path_bin, exist_ok=True)
578
-
579
- if not os.path.isfile(os.path.join(self.sdk_cache.cache_path_bin, 'cacheConfig.json')):
580
- os.makedirs(self.sdk_cache.cache_path_bin, exist_ok=True)
581
-
582
- if isinstance(cache_config, str):
583
- self.sdk_cache.configs = cache_config
584
- cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=cache_config)
585
- elif isinstance(cache_config, CacheConfig):
586
- self.sdk_cache.configs = cache_config.to_string()
587
- else:
588
- raise Exception("config should be of type str or CacheConfig")
589
- try:
590
- self.cache = CacheManger(cache_configs=[cache_config], bin_cache_size=self.sdk_cache.bin_size)
591
- self.cache.ping()
592
- self.sdk_cache.use_cache = True
593
- except Exception as e:
594
- logger.warning("Cache build error {}".format(e))
595
- self.cache = None
596
-
597
- def __del__(self):
598
- for name, pool in self._thread_pools.items():
599
- pool.shutdown()
600
- self.event_loop.stop()
601
-
602
- def _build_request_headers(self, headers=None):
603
- if headers is None:
604
- headers = dict()
605
- if not isinstance(headers, dict):
606
- raise exceptions.PlatformException(
607
- error='400',
608
- message="Input 'headers' must be a dictionary, got: {}".format(type(headers)))
609
- headers.update(self.default_headers)
610
- headers.update(self.auth)
611
- headers.update({'User-Agent': requests_toolbelt.user_agent('dtlpy', __version__)})
612
- return headers
613
-
614
- @property
615
- def num_processes(self):
616
- return self._num_processes
617
-
618
- @num_processes.setter
619
- def num_processes(self, num_processes):
620
- if num_processes == self._num_processes:
621
- # same number. no need to do anything
622
- return
623
- self._num_processes = num_processes
624
- for pool_name in self._thread_pools_names:
625
- self._thread_pools_names[pool_name] = num_processes
626
-
627
- for pool in self._thread_pools:
628
- self._thread_pools[pool].shutdown()
629
- self._thread_pools = dict()
630
-
631
- def create_event_loop_thread(self):
632
- loop = asyncio.new_event_loop()
633
- event_loop = AsyncThreadEventLoop(loop=loop,
634
- n=self._num_processes)
635
- event_loop.daemon = True
636
- event_loop.start()
637
- time.sleep(1)
638
- return event_loop
639
-
640
- def thread_pools(self, pool_name):
641
- if pool_name not in self._thread_pools_names:
642
- raise ValueError('unknown thread pool name: {}. known name: {}'.format(
643
- pool_name,
644
- list(self._thread_pools_names.keys())))
645
- num_processes = self._thread_pools_names[pool_name]
646
- if pool_name not in self._thread_pools or self._thread_pools[pool_name]._shutdown:
647
- self._thread_pools[pool_name] = ThreadPoolExecutor(max_workers=num_processes)
648
- pool = self._thread_pools[pool_name]
649
- assert isinstance(pool, concurrent.futures.ThreadPoolExecutor)
650
- return pool
651
-
652
- @property
653
- def verify(self):
654
- environments = self.environments
655
- verify = True
656
- if self.environment in environments:
657
- if 'verify_ssl' in environments[self.environment]:
658
- verify = environments[self.environment]['verify_ssl']
659
- return verify
660
-
661
- @property
662
- def use_ssl_context(self):
663
- environments = self.environments
664
- use_ssl_context = False
665
- if self.environment in environments:
666
- if 'use_ssl_context' in environments[self.environment]:
667
- use_ssl_context = environments[self.environment]['use_ssl_context']
668
- return use_ssl_context
669
-
670
- @property
671
- def auth(self):
672
- return {'authorization': 'Bearer ' + self.token}
673
-
674
- @property
675
- def environment(self):
676
- _environment = self._environment
677
- if _environment is None:
678
- _environment = self.cookie_io.get('url')
679
- if _environment is None:
680
- _environment = DEFAULT_ENVIRONMENT
681
- self._environment = _environment
682
- return _environment
683
-
684
- @environment.setter
685
- def environment(self, env):
686
- self._environment = env
687
- self.cookie_io.put('url', env)
688
-
689
- @property
690
- def fetch_entities(self):
691
- if self._fetch_entities is None:
692
- self._fetch_entities = self.cookie_io.get('fetch_entities')
693
- if self._fetch_entities is None:
694
- self.fetch_entities = True # default
695
- return self._fetch_entities
696
-
697
- @fetch_entities.setter
698
- def fetch_entities(self, val):
699
- self._fetch_entities = val
700
- self.cookie_io.put('fetch_entities', val)
701
-
702
- @property
703
- def environments(self):
704
- """
705
- List of known environments
706
- :return:
707
- """
708
- # get environment login parameters
709
- _environments = self._environments
710
- if _environments is None:
711
- # take from cookie
712
- _environments = self.cookie_io.get('login_parameters')
713
- # if cookie is None - init with defaults
714
- if _environments is None:
715
- # default
716
- _environments = DEFAULT_ENVIRONMENTS
717
- # save to local variable
718
- self.environments = _environments
719
- else:
720
- # save from cookie to ram
721
- self._environments = _environments
722
- return _environments
723
-
724
- @environments.setter
725
- def environments(self, env_dict):
726
- self._environments = env_dict
727
- self.cookie_io.put(key='login_parameters', value=self._environments)
728
-
729
- @property
730
- def verbose(self):
731
- if self._verbose is None:
732
- self._verbose = Verbose(cookie=self.cookie_io)
733
- assert isinstance(self._verbose, Verbose)
734
- return self._verbose
735
-
736
- @property
737
- def cache_state(self):
738
- if self._cache_state is None:
739
- self._cache_state = CacheMode(cookie=self.cookie_io)
740
- assert isinstance(self._cache_state, CacheMode)
741
- return self._cache_state
742
-
743
- @property
744
- def attributes_mode(self):
745
- if self._attributes_mode is None:
746
- self._attributes_mode = Attributes2(cookie=self.cookie_io)
747
- assert isinstance(self._attributes_mode, Attributes2)
748
- return self._attributes_mode
749
-
750
- @property
751
- def sdk_cache(self):
752
- if self._sdk_cache is None:
753
- self._sdk_cache = SDKCache(cookie=self.cookie_io)
754
- assert isinstance(self._sdk_cache, SDKCache)
755
- return self._sdk_cache
756
-
757
- @property
758
- def callbacks(self):
759
- if self._callbacks is None:
760
- self._callbacks = Callbacks()
761
- assert isinstance(self._callbacks, Callbacks)
762
- return self._callbacks
763
-
764
- def add_callback(self, event, func):
765
- """
766
- function to add callback to the client
767
- :param event: dl.CallbackEvent enum, name of the callback
768
- :param func: function to call with 2 arguments: progress and context
769
- """
770
- self.callbacks.add(event, func)
771
-
772
- @property
773
- def token(self):
774
- _token = self._token
775
- if _token is None:
776
- environments = self.environments
777
- if self.environment in environments:
778
- if 'token' in environments[self.environment]:
779
- _token = environments[self.environment]['token']
780
- return _token
781
-
782
- @token.setter
783
- def token(self, token):
784
- # set to variable
785
- self._token = token
786
- self.refresh_token = None
787
- # set to cookie file
788
- environments = self.environments
789
- if self.environment in environments:
790
- environments[self.environment]['token'] = token
791
- else:
792
- environments[self.environment] = {'token': token}
793
- self.environments = environments
794
-
795
- @property
796
- def refresh_token(self):
797
- environments = self.environments
798
- refresh_token = None
799
- if self.environment in environments:
800
- if 'refresh_token' in environments[self.environment]:
801
- refresh_token = environments[self.environment]['refresh_token']
802
- return refresh_token
803
-
804
- @refresh_token.setter
805
- def refresh_token(self, token):
806
- environments = self.environments
807
- if self.environment in environments:
808
- environments[self.environment]['refresh_token'] = token
809
- else:
810
- environments[self.environment] = {'refresh_token': token}
811
- self.refresh_token_active = True
812
- self.environments = environments
813
-
814
- def add_environment(self, environment,
815
- audience=None,
816
- client_id=None,
817
- auth0_url=None,
818
- verify_ssl=True,
819
- token=None,
820
- refresh_token=None,
821
- alias=None,
822
- use_ssl_context=False,
823
- gate_url=None,
824
- url=None,
825
- login_domain=None
826
- ):
827
- environments = self.environments
828
- if environment in environments:
829
- logger.warning('Environment exists. Overwriting. env: {}'.format(environment))
830
- if token is None:
831
- token = None
832
- if alias is None:
833
- alias = None
834
- environments[environment] = {'audience': audience,
835
- 'client_id': client_id,
836
- 'auth0_url': auth0_url,
837
- 'alias': alias,
838
- 'token': token,
839
- 'gate_url': gate_url,
840
- 'refresh_token': refresh_token,
841
- 'verify_ssl': verify_ssl,
842
- 'use_ssl_context': use_ssl_context,
843
- 'url': url,
844
- 'login_domain': login_domain}
845
- self.environments = environments
846
-
847
- def info(self, with_token=True):
848
- """
849
- Return a dictionary with current information: env, user, token
850
- :param with_token:
851
- :return:
852
- """
853
- user_email = 'null'
854
- if self.token is not None:
855
- payload = jwt.decode(self.token, algorithms=['HS256'],
856
- verify=False, options={'verify_signature': False})
857
- user_email = payload['email']
858
- information = {'environment': self.environment,
859
- 'user_email': user_email}
860
- if with_token:
861
- information['token'] = self.token
862
- return information
863
-
864
- @property
865
- def base_gate_url(self):
866
- if self.__gate_url_for_requests is None:
867
- self.__gate_url_for_requests = self.environment
868
- internal_requests_url = os.environ.get('INTERNAL_REQUESTS_URL', None)
869
- if internal_requests_url is not None:
870
- self.__gate_url_for_requests = internal_requests_url
871
- return self.__gate_url_for_requests
872
-
873
- def export_curl_request(self, req_type, path, headers=None, json_req=None, files=None, data=None):
874
- curl, prepared = self._build_gen_request(req_type=req_type,
875
- path=path,
876
- headers=headers,
877
- json_req=json_req,
878
- files=files,
879
- data=data)
880
- return curl
881
-
882
- def _build_gen_request(self, req_type, path, headers, json_req, files, data):
883
- req_type = req_type.upper()
884
- valid_request_type = ['GET', 'DELETE', 'POST', 'PUT', 'PATCH']
885
- assert req_type in valid_request_type, '[ERROR] type: %s NOT in valid requests' % req_type
886
-
887
- # prepare request
888
- req = requests.Request(method=req_type,
889
- url=self.base_gate_url + path,
890
- json=json_req,
891
- files=files,
892
- data=data,
893
- headers=self._build_request_headers(headers=headers))
894
- # prepare to send
895
- prepared = req.prepare()
896
- # save curl for debug
897
- command = "curl -X {method} -H {headers} -d '{data}' '{uri}'"
898
- method = prepared.method
899
- uri = prepared.url
900
- data = prepared.body
901
- headers = ['"{0}: {1}"'.format(k, v) for k, v in prepared.headers.items()]
902
- headers = " -H ".join(headers)
903
- curl = command.format(method=method, headers=headers, data=data, uri=uri)
904
- return curl, prepared
905
-
906
- def _convert_json_to_response(self, response_json):
907
- the_response = Response()
908
- the_response._content = json.dumps(response_json).encode('utf-8')
909
- return the_response
910
-
911
- def _cache_on(self, request):
912
- if self.cache is not None and self.sdk_cache.use_cache:
913
- pure_request = request.split('?')[0]
914
- valid_req = ['annotation', 'item', 'dataset', 'project', 'task', 'assignment']
915
- for req_type in valid_req:
916
- if req_type in pure_request:
917
- return True
918
- return False
919
-
920
- @Decorators.token_expired_decorator
921
- def gen_request(self, req_type, path, data=None, json_req=None, files=None, stream=False, headers=None,
922
- log_error=True, dataset_id=None, **kwargs):
923
- """
924
- Generic request from platform
925
- :param req_type: type of the request: GET, POST etc
926
- :param path: url (without host header - take from environment)
927
- :param data: data to pass to request
928
- :param json_req: json to pass to request
929
- :param files: files to pass to request
930
- :param stream: stream to pass the request
931
- :param headers: headers to pass to request. auth will be added to it
932
- :param log_error: if true - print the error log of the request
933
- :param dataset_id: dataset id needed in stream True
934
- :param kwargs: kwargs
935
- :return:
936
- """
937
- success, resp, cache_values = False, None, []
938
- if self.cache is None and 'sdk' not in path:
939
- self.build_cache()
940
- if req_type.lower() not in ['patch', 'put', 'post', 'delete'] and self._cache_on(request=path):
941
- try:
942
- if stream:
943
- if dataset_id is None:
944
- raise ValueError("must provide a dataset id")
945
- success, cache_values = self.cache.read_stream(request_path=path, dataset_id=dataset_id)
946
-
947
- else:
948
- success, cache_values = self.cache.read(request_path=path)
949
- if success:
950
- resp = self._convert_json_to_response(cache_values)
951
- except Exception as e:
952
- logger.warning("Cache error {}".format(e))
953
- success, resp = False, None
954
-
955
- if not success and not resp:
956
- success, resp = self._gen_request(req_type=req_type,
957
- path=path,
958
- data=data,
959
- json_req=json_req,
960
- files=files,
961
- stream=stream,
962
- headers=headers,
963
- log_error=log_error)
964
-
965
- if success and self._cache_on(request=path):
966
- try:
967
- if stream:
968
- res = self.cache.write_stream(request_path=path,
969
- response=resp,
970
- dataset_id=dataset_id)
971
- if res != '':
972
- resp = self._convert_json_to_response(res)
973
- else:
974
- if req_type == 'delete':
975
- self.cache.invalidate(path=path)
976
- else:
977
- try:
978
- resp_list = resp.json()
979
- write = True
980
- if isinstance(resp_list, list):
981
- pass
982
- elif isinstance(resp_list, dict):
983
- if 'hasNextPage' in resp_list:
984
- resp_list = resp_list['items']
985
- elif 'id' in resp_list:
986
- resp_list = [resp_list]
987
- else:
988
- write = False
989
- else:
990
- raise exceptions.PlatformException(error='400', message="unsupported return type")
991
- if write:
992
- self.cache.write(list_entities_json=resp_list)
993
- except:
994
- raise exceptions.PlatformException(error='400', message="failed to set cache")
995
- except Exception as e:
996
- logger.warning("Cache error {}".format(e))
997
- self.cache = None
998
- # only for projects events
999
- if success:
1000
- if 'stack' in kwargs:
1001
- self.event_tracker.put(event=kwargs.get('stack'), resp=resp, path=path)
1002
- return success, resp
1003
-
1004
- def _gen_request(self, req_type, path, data=None, json_req=None, files=None, stream=False, headers=None,
1005
- log_error=True):
1006
- """
1007
- Generic request from platform
1008
- :param req_type: type of the request: GET, POST etc
1009
- :param path: url (without host header - take from environment)
1010
- :param data: data to pass to request
1011
- :param json_req: json to pass to request
1012
- :param files: files to pass to request
1013
- :param stream: stream to pass the request
1014
- :param headers: headers to pass to request. auth will be added to it
1015
- :param log_error: if true - print the error log of the request
1016
- :return:
1017
- """
1018
- curl, prepared = self._build_gen_request(req_type=req_type,
1019
- path=path,
1020
- headers=headers,
1021
- json_req=json_req,
1022
- files=files,
1023
- data=data)
1024
- self.last_curl = curl
1025
- self.last_request = prepared
1026
- # send request
1027
- try:
1028
- resp = self.send_session(prepared=prepared, stream=stream)
1029
- except Exception:
1030
- logger.error(self.print_request(req=prepared, to_return=True))
1031
- raise
1032
- self.last_response = resp
1033
- # handle output
1034
- if not resp.ok:
1035
- self.print_bad_response(resp, log_error=log_error and not self.is_cli)
1036
- return_type = False
1037
- else:
1038
- try:
1039
- # print only what is printable (dont print get steam etc..)
1040
- if not stream:
1041
- self.print_response(resp)
1042
- except ValueError:
1043
- # no JSON returned
1044
- pass
1045
- return_type = True
1046
- return return_type, resp
1047
-
1048
- @Decorators.token_expired_decorator
1049
- async def gen_async_request(self,
1050
- req_type,
1051
- path,
1052
- data=None,
1053
- json_req=None,
1054
- files=None,
1055
- stream=None,
1056
- headers=None,
1057
- log_error=True,
1058
- filepath=None,
1059
- chunk_size=8192,
1060
- pbar=None,
1061
- is_dataloop=True,
1062
- **kwargs):
1063
- req_type = req_type.upper()
1064
- valid_request_type = ['GET', 'DELETE', 'POST', 'PUT', 'PATCH']
1065
- assert req_type in valid_request_type, '[ERROR] type: %s NOT in valid requests' % req_type
1066
-
1067
- # prepare request
1068
- if is_dataloop:
1069
- full_url = self.base_gate_url + path
1070
- headers_req = self._build_request_headers(headers=headers)
1071
- else:
1072
- full_url = path
1073
- headers = dict()
1074
- headers_req = headers
1075
-
1076
- if headers is not None:
1077
- if not isinstance(headers, dict):
1078
- raise exceptions.PlatformException(error='400', message="Input 'headers' must be a dictionary")
1079
- for k, v in headers.items():
1080
- headers_req[k] = v
1081
- req = requests.Request(method=req_type,
1082
- url=full_url,
1083
- json=json_req,
1084
- files=files,
1085
- data=data,
1086
- headers=headers_req)
1087
- # prepare to send
1088
- prepared = req.prepare()
1089
- # save curl for debug
1090
- command = "curl -X {method} -H {headers} -d '{data}' '{uri}'"
1091
- headers = ['"{0}: {1}"'.format(k, v) for k, v in prepared.headers.items()]
1092
- headers = " -H ".join(headers)
1093
- curl = command.format(method=prepared.method,
1094
- headers=headers,
1095
- data=prepared.body,
1096
- uri=prepared.url)
1097
- self.last_curl = curl
1098
- self.last_request = prepared
1099
- # send request
1100
- try:
1101
- timeout = aiohttp.ClientTimeout(total=0)
1102
- async with RetryClient(headers=headers_req,
1103
- timeout=timeout) as session:
1104
- try:
1105
- async with session._request(request=session._client.request,
1106
- url=self.base_gate_url + path,
1107
- method=req_type,
1108
- json=json_req,
1109
- data=data,
1110
- headers=headers_req,
1111
- chunked=stream,
1112
- retry_attempts=5,
1113
- ssl=self.verify,
1114
- retry_exceptions={aiohttp.client_exceptions.ClientOSError,
1115
- aiohttp.client_exceptions.ServerDisconnectedError,
1116
- aiohttp.client_exceptions.ClientPayloadError},
1117
- raise_for_status=False) as request:
1118
- if stream:
1119
- pbar = self.__get_pbar(pbar=pbar,
1120
- total_length=request.headers.get("content-length"))
1121
- if filepath is not None:
1122
- to_close = False
1123
- if isinstance(filepath, str):
1124
- to_close = True
1125
- buffer = open(filepath, 'wb')
1126
- elif isinstance(filepath, io.BytesIO):
1127
- pass
1128
- else:
1129
- raise ValueError('unknown data type to write file: {}'.format(type(filepath)))
1130
- try:
1131
- while True:
1132
- chunk = await request.content.read(chunk_size)
1133
- await asyncio.sleep(0)
1134
- if not chunk:
1135
- break
1136
- buffer.write(chunk)
1137
- if pbar is not None:
1138
- pbar.update(len(chunk))
1139
- finally:
1140
- if to_close:
1141
- buffer.close()
1142
-
1143
- if pbar is not None:
1144
- pbar.close()
1145
- text = await request.text()
1146
- try:
1147
- _json = await request.json()
1148
- except Exception:
1149
- _json = dict()
1150
- response = AsyncResponse(text=text,
1151
- _json=_json,
1152
- async_resp=request)
1153
- except Exception as err:
1154
- response = AsyncResponseError(error=err, trace=traceback.format_exc())
1155
- finally:
1156
- with threadLock:
1157
- self.calls_counter.add()
1158
- except Exception:
1159
- logger.error(self.print_request(req=prepared, to_return=True))
1160
- raise
1161
- self.last_response = response
1162
- # handle output
1163
- if not response.ok:
1164
- self.print_bad_response(response, log_error=log_error and not self.is_cli)
1165
- return_type = False
1166
- else:
1167
- try:
1168
- # print only what is printable (dont print get steam etc..)
1169
- if not stream:
1170
- self.print_response(response)
1171
- except ValueError:
1172
- # no JSON returned
1173
- pass
1174
- return_type = True
1175
- return return_type, response
1176
-
1177
- @Decorators.token_expired_decorator
1178
- async def upload_file_async(self,
1179
- to_upload,
1180
- item_type,
1181
- item_size,
1182
- remote_url,
1183
- uploaded_filename,
1184
- remote_path=None,
1185
- callback=None,
1186
- mode='skip',
1187
- item_metadata=None,
1188
- headers=None,
1189
- item_description=None,
1190
- **kwargs):
1191
- headers = self._build_request_headers(headers=headers)
1192
- pbar = None
1193
- if callback is None:
1194
- if item_size > 10e6:
1195
- # size larger than 10MB
1196
- pbar = tqdm.tqdm(total=item_size,
1197
- unit="B",
1198
- unit_scale=True,
1199
- unit_divisor=1024,
1200
- position=1,
1201
- file=sys.stdout,
1202
- disable=self.verbose.disable_progress_bar_upload_items,
1203
- desc='Upload Items')
1204
-
1205
- def callback(bytes_read):
1206
- pbar.update(bytes_read)
1207
- else:
1208
- def callback(bytes_read):
1209
- pass
1210
-
1211
- timeout = aiohttp.ClientTimeout(total=0)
1212
- async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
1213
- try:
1214
- form = aiohttp.FormData({})
1215
- form.add_field('type', item_type)
1216
- form.add_field('path', os.path.join(remote_path, uploaded_filename).replace('\\', '/'))
1217
- if item_metadata is not None:
1218
- form.add_field('metadata', json.dumps(item_metadata))
1219
- if item_description is not None:
1220
- form.add_field('description', item_description)
1221
- form.add_field('file', AsyncUploadStream(buffer=to_upload,
1222
- callback=callback,
1223
- name=uploaded_filename,
1224
- chunk_timeout=2 * 60))
1225
- url = '{}?mode={}'.format(self.base_gate_url + remote_url, mode)
1226
-
1227
- # use SSL context
1228
- ssl_context = None
1229
- if self.use_ssl_context:
1230
- ssl_context = ssl.create_default_context(cafile=certifi.where())
1231
- async with session.post(url,
1232
- data=form,
1233
- verify_ssl=self.verify,
1234
- ssl=ssl_context) as resp:
1235
- self.last_request = resp.request_info
1236
- command = "curl -X {method} -H {headers} -d '{uri}'"
1237
- headers = ['"{0}: {1}"'.format(k, v) for k, v in resp.request_info.headers.items()]
1238
- headers = " -H ".join(headers)
1239
- self.last_curl = command.format(method=resp.request_info.method,
1240
- headers=headers,
1241
- uri=resp.request_info.url)
1242
- text = await resp.text()
1243
- try:
1244
- _json = await resp.json()
1245
- except:
1246
- _json = dict()
1247
- response = AsyncResponse(text=text,
1248
- _json=_json,
1249
- async_resp=resp)
1250
- except Exception as err:
1251
- response = AsyncResponseError(error=err, trace=traceback.format_exc())
1252
- finally:
1253
- if pbar is not None:
1254
- pbar.close()
1255
- with threadLock:
1256
- self.calls_counter.add()
1257
- if response.ok and self.cache is not None:
1258
- try:
1259
- self.cache.write(list_entities_json=[response.json()])
1260
- dataset_id = url.split('/')[-2]
1261
- self.cache.write_stream(request_path=url,
1262
- buffer=to_upload,
1263
- file_name=uploaded_filename,
1264
- entity_id=response.json()['id'],
1265
- dataset_id=dataset_id)
1266
- except:
1267
- logger.warning("Failed to add the file to the cache")
1268
- return response
1269
-
1270
- def __get_pbar(self, pbar, total_length):
1271
- # decide if create progress bar for item
1272
- if pbar:
1273
- try:
1274
- if total_length is not None and int(total_length) > 10e6: # size larger than 10 MB:
1275
- pbar = tqdm.tqdm(total=int(total_length),
1276
- unit='B',
1277
- unit_scale=True,
1278
- unit_divisor=1024,
1279
- position=1,
1280
- file=sys.stdout,
1281
- disable=self.verbose.disable_progress_bar)
1282
- else:
1283
- pbar = None
1284
- except Exception as err:
1285
- pbar = None
1286
- logger.debug('Cant decide downloaded file length, bar will not be presented: {}'.format(err))
1287
- return pbar
1288
-
1289
- def send_session(self, prepared, stream=None):
1290
- if self.session is None:
1291
- self.session = requests.Session()
1292
- retry = Retry(
1293
- total=5,
1294
- read=5,
1295
- connect=5,
1296
- backoff_factor=1,
1297
- # use on any request type
1298
- allowed_methods=False,
1299
- # force retry on those status responses
1300
- status_forcelist=(501, 502, 503, 504, 505, 506, 507, 508, 510, 511),
1301
- raise_on_status=False
1302
- )
1303
- adapter = HTTPAdapter(max_retries=retry,
1304
- pool_maxsize=np.sum(list(self._thread_pools_names.values())),
1305
- pool_connections=np.sum(list(self._thread_pools_names.values())))
1306
- self.session.mount('http://', adapter)
1307
- self.session.mount('https://', adapter)
1308
- resp = self.session.send(request=prepared, stream=stream, verify=self.verify, timeout=120)
1309
-
1310
- with threadLock:
1311
- self.calls_counter.add()
1312
-
1313
- return resp
1314
-
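# Illustrative sketch (not part of the package diff): a stand-alone requests.Session
# configured with the same retry policy that send_session() builds above; the pool
# sizes and target URL are placeholders.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util import Retry

retry = Retry(
    total=5, read=5, connect=5, backoff_factor=1,
    allowed_methods=False,  # False means: retry regardless of the HTTP method
    status_forcelist=(501, 502, 503, 504, 505, 506, 507, 508, 510, 511),
    raise_on_status=False,
)
session = requests.Session()
adapter = HTTPAdapter(max_retries=retry, pool_maxsize=32, pool_connections=32)
session.mount('http://', adapter)
session.mount('https://', adapter)
resp = session.send(requests.Request('GET', 'https://example.com').prepare(), timeout=120)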
1315
- @staticmethod
1316
- def check_proxy():
1317
- """
1318
- Verify that dataloop urls are not blocked
1319
- :return:
1320
- """
1321
- proxy_envs = ['HTTP', 'HTTPS', 'http', 'https']
1322
- dataloop_urls = ['dev-gate.dataloop.ai',
1323
- 'gate.dataloop.ai',
1324
- 'dataloop-development.auth0.com',
1325
- 'dataloop-production.auth0.com']
1326
- if True in [env in os.environ for env in proxy_envs]:
1327
- # check if proxy exists
1328
- if True in [env in os.environ for env in ['no_proxy', 'NO_PROXY']]:
1329
- # check if no_proxy exists
1330
- if 'no_proxy' in os.environ:
1331
- # check if dataloop urls in no_proxy
1332
- if True not in [url in os.environ['no_proxy'] for url in dataloop_urls]:
1333
- # no dataloop url exists in no_proxy
1334
- logger.warning('Proxy is used, make sure dataloop urls are in "no_proxy" environment variable')
1335
- else:
1336
- # check if dataloop urls in no_proxy
1337
- if True not in [url in os.environ['NO_PROXY'] for url in dataloop_urls]:
1338
- # no dataloop url exists in no_proxy
1339
- logger.warning('Proxy is used, make sure dataloop urls are in "no_proxy" environment variable')
1340
- else:
1341
- logger.warning('Proxy is used, make sure dataloop urls are in "no_proxy" environment variable')
1342
-
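# Illustrative sketch (not part of the package diff): when working behind a corporate
# proxy, listing the Dataloop gateways in NO_PROXY keeps SDK traffic off the proxy,
# which is exactly what check_proxy() above warns about. The proxy address is a placeholder.
import os

os.environ['HTTPS_PROXY'] = 'http://proxy.internal:3128'
os.environ['NO_PROXY'] = 'gate.dataloop.ai,dataloop-production.auth0.com'

import dtlpy as dl  # imported only after the environment variables are in place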
1343
- def token_expired(self, t=60):
1344
- """
1345
- Check token validation
1346
- :param t: time ahead interval in seconds
1347
- """
1348
- try:
1349
- if self.token is None or self.token == '':
1350
- expired = True
1351
- else:
1352
- payload = jwt.decode(self.token, algorithms=['HS256'],
1353
- options={'verify_signature': False}, verify=False)
1354
- d = datetime.datetime.now(datetime.timezone.utc)
1355
- epoch = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
1356
- now = (d - epoch).total_seconds()
1357
- exp = payload['exp']
1358
- if now < (exp - t):
1359
- expired = False
1360
- else:
1361
- expired = True
1362
- except jwt.exceptions.DecodeError:
1363
- logger.exception('Invalid token.')
1364
- expired = True
1365
- except Exception:
1366
- logger.exception('Unknown error:')
1367
- expired = True
1368
- if expired:
1369
- if self.renew_token_method():
1370
- expired = False
1371
- return expired
1372
-
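# Illustrative sketch (not part of the package diff): token_expired() only inspects the
# JWT "exp" claim, it does not verify the signature. The same check with PyJWT directly:
import datetime
import jwt  # PyJWT

def seconds_until_expiry(token: str) -> float:
    payload = jwt.decode(token, options={'verify_signature': False})
    now = datetime.datetime.now(datetime.timezone.utc).timestamp()
    return payload['exp'] - now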
1373
- @staticmethod
1374
- def is_json_serializable(response):
1375
- try:
1376
- response_json = response.json()
1377
- return True, response_json
1378
- except ValueError:
1379
- return False, None
1380
-
1381
- ##########
1382
- # STDOUT #
1383
- ##########
1384
- def print_response(self, resp=None):
1385
- """
1386
- Print tabulate response
1387
- :param resp: response from requests
1388
- :return:
1389
- """
1390
- try:
1391
- if resp is None:
1392
- resp = self.last_response
1393
- is_json_serializable, results = self.is_json_serializable(response=resp)
1394
- if self.verbose.print_all_responses and is_json_serializable:
1395
- if isinstance(results, dict):
1396
- to_print = miscellaneous.List([results])
1397
- elif isinstance(results, list):
1398
- to_print = miscellaneous.List(results)
1399
- else:
1400
- logger.debug('Unknown response type: {}. cant print'.format(type(results)))
1401
- return
1402
- request_id = resp.headers.get('x-request-id', 'na')
1403
- logger.debug('--- [Request] Start ---')
1404
- logger.debug(self.print_request(req=resp.request, to_return=True))
1405
- logger.debug('--- [Request] End ---')
1406
- logger.debug('--- [Response][x-request-id:{}] Start ---'.format(request_id))
1407
- to_print.print(show_all=False, level='debug')
1408
- logger.debug('--- [Response][x-request-id:{}] End ---'.format(request_id))
1409
- except Exception:
1410
- logger.exception('Printing response from gate:')
1411
-
1412
- def print_bad_response(self, resp=None, log_error=True):
1413
- """
1414
- Print error from platform
1415
- :param resp:
1416
- :param log_error: print error log (to use when trying request more than once)
1417
- :return:
1418
- """
1419
- if resp is None:
1420
- resp = self.last_response
1421
- msg = ''
1422
- if hasattr(resp, 'status_code'):
1423
- msg += '[Response <{val}>]'.format(val=resp.status_code)
1424
- if hasattr(resp, 'reason'):
1425
- msg += '[Reason: {val}]'.format(val=resp.reason)
1426
- if hasattr(resp, 'text') and isinstance(resp.text, str):
1427
- msg += '[Text: {val}]'.format(val=format_message(resp.text))
1428
-
1429
- request_id = resp.headers.get('x-request-id', 'na')
1430
- logger.debug('--- [Request] Start ---')
1431
- logger.debug(self.print_request(req=resp.request, to_return=True))
1432
- logger.debug('--- [Request] End ---')
1433
- logger.debug('--- [Response][x-request-id:{}] Start ---'.format(request_id))
1434
- if log_error:
1435
- logger.error(msg)
1436
- else:
1437
- logger.debug(msg)
1438
- logger.debug('--- [Response][x-request-id:{}] End ---'.format(request_id))
1439
- self.platform_exception = PlatformError(resp)
1440
-
1441
- def print_request(self, req=None, to_return=False, with_auth=False):
1442
- """
1443
- Print a request to the platform
1444
- :param req:
1445
- :param to_return: return string instead of printing
1446
- :param with_auth: print authentication
1447
- :return:
1448
- """
1449
- if not req:
1450
- req = self.last_request
1451
-
1452
- headers = list()
1453
- for k, v in req.headers.items():
1454
- if k == 'authorization' and not with_auth:
1455
- continue
1456
- headers.append('{}: {}'.format(k, v))
1457
- if hasattr(req, 'body'):
1458
- body = req.body
1459
- elif isinstance(req, aiohttp.RequestInfo):
1460
- body = {'multipart': 'true'}
1461
- else:
1462
- body = dict()
1463
-
1464
- # remove secrets and passwords
1465
- try:
1466
- body = json.loads(body)
1467
- if isinstance(body, dict):
1468
- for key, value in body.items():
1469
- hide = any([field in key for field in ['secret', 'password']])
1470
- if hide:
1471
- body[key] = '*' * len(value)
1472
- except Exception:
1473
- pass
1474
-
1475
- msg = '{}\n{}\n{}'.format(
1476
- req.method + ' ' + str(req.url),
1477
- '\n'.join(headers),
1478
- body,
1479
- )
1480
- if to_return:
1481
- return msg
1482
- else:
1483
- print(msg)
1484
-
1485
- ################
1486
- # Environments #
1487
- ################
1488
- def setenv(self, env):
1489
- """
1490
- Set environment
1491
- :param env:
1492
- :return:
1493
- """
1494
-
1495
- environments = self.environments
1496
- if env.startswith('http'):
1497
- if env not in environments.keys():
1498
- msg = 'Unknown environment. Please add environment to SDK ("add_environment" method)'
1499
- logger.error(msg)
1500
- raise ConnectionError(msg)
1501
- elif env == 'custom':
1502
- custom_env = os.environ.get('DTLPY_CUSTOM_ENV', None)
1503
- environment = json.loads(base64.b64decode(custom_env.encode()).decode())
1504
- env = environment.pop('url')
1505
- token = None
1506
- if self.environments.get(env):
1507
- token = self.environments[env].get('token', None)
1508
- self.environments[env] = environment.get(env, environment)
1509
- self.environments[env]['token'] = token
1510
- verify_ssl = self.environments[env].get('verify_ssl', None)
1511
- if verify_ssl is not None and isinstance(verify_ssl, str):
1512
- self.environments[env]['verify_ssl'] = True if verify_ssl.lower() == 'true' else False
1513
- else:
1514
- matched_env = [env_url for env_url, env_dict in environments.items() if env_dict['alias'] == env]
1515
- if len(matched_env) != 1:
1516
- known_aliases = [env_dict['alias'] for env_url, env_dict in environments.items()]
1517
- raise ConnectionError(
1518
- 'Unknown platform environment: "{}". Known: {}'.format(env, ', '.join(known_aliases)))
1519
- env = matched_env[0]
1520
- if self.environment != env:
1521
- self.environment = env
1522
- self.__gate_url_for_requests = None
1523
- # reset local token
1524
- self._token = None
1525
- self.refresh_token_active = True
1526
- logger.info('Platform environment: {}'.format(self.environment))
1527
- if self.token_expired():
1528
- logger.info('Token expired, Please login.')
1529
-
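# Illustrative sketch (not part of the package diff): switching environments by alias
# through the SDK entry point, which routes to setenv() above; 'prod' and 'rc' are
# aliases referenced elsewhere in this file.
import dtlpy as dl

dl.setenv('rc')
dl.setenv('prod')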
1530
- ##########
1531
- # Log in #
1532
- ##########
1533
- def login_secret(self, email, password, client_id, client_secret=None, force=False):
1534
- """
1535
- Login with email and password from environment variables.
1536
- If already logged in with same user - login will NOT happen. see "force"
1537
-
1538
- :param email: user email.
1539
- :param password: user password
1540
- :param client_id: auth0 client id
1541
- :param client_secret: secret that match the client id
1542
- :param force: force login. in case login with same user but want to get a new JWT
1543
- :return:
1544
- """
1545
- logger.warning('dl.login_secret is deprecated. Please use dl.login_m2m instead.')
1546
- return login_secret(api_client=self,
1547
- email=email,
1548
- password=password,
1549
- client_id=client_id,
1550
- client_secret=client_secret,
1551
- force=force)
1552
-
1553
- def login_m2m(self, email, password, client_id=None, client_secret=None, force=False):
1554
- """
1555
- Login with email and password from environment variables
1556
- :param email: user email. if already logged in with same user - login will NOT happen. see "force"
1557
- :param password: user password
1558
- :param client_id:
1559
- :param client_secret:
1560
- :param force: force login. in case login with same user but want to get a new JWT
1561
- :return:
1562
- """
1563
- res = login_m2m(api_client=self,
1564
- email=email,
1565
- password=password,
1566
- client_id=client_id,
1567
- client_secret=client_secret,
1568
- force=force)
1569
- if res:
1570
- self._send_login_event(user_type='human', login_type='m2m')
1571
- return res
1572
-
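# Illustrative sketch (not part of the package diff): machine-to-machine login; the
# environment variable names below are examples, not an SDK convention.
import os
import dtlpy as dl

dl.login_m2m(email=os.environ['BOT_EMAIL'],
             password=os.environ['BOT_PASSWORD'])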
1573
- def login_token(self, token):
1574
- """
1575
- Login using existing token
1576
- :param token: a valid token
1577
- :return:
1578
- """
1579
- current_token = self.token
1580
- self.token = token
1581
- success, response = self.gen_request(req_type='get', path='/users/me')
1582
- if not response.ok:
1583
- # switch back to before
1584
- self.token = current_token
1585
- raise ValueError(f"Invalid API key provided. Error: {response.text}")
1586
-
1587
- def login_api_key(self, api_key):
1588
- """
1589
- Login using API key
1590
- :param api_key: a valid API key
1591
- :return:
1592
- """
1593
- current_token = self.token
1594
- self.token = api_key
1595
- success, response = self.gen_request(req_type='get', path='/users/me')
1596
- if not response.ok:
1597
- # switch back to before
1598
- self.token = current_token
1599
- raise ValueError(f"Invalid API key provided. Error: {response.text}")
1600
-
1601
- @property
1602
- def login_domain(self):
1603
- if self._login_domain is None:
1604
- self._login_domain = self.environments[self.environment].get('login_domain', None)
1605
- return self._login_domain
1606
-
1607
- @login_domain.setter
1608
- def login_domain(self, domain: str):
1609
- if domain is not None and not isinstance(domain, str):
1610
- raise exceptions.PlatformException('400', 'domain should be a string value')
1611
- self._login_domain = domain
1612
- self.environments[self.environment]['login_domain'] = domain
1613
- self.cookie_io.put('login_parameters', self.environments)
1614
-
1615
- def login(self, audience=None, auth0_url=None, client_id=None, callback_port=None):
1616
- """
1617
- Login using Auth0.
1618
- :return:
1619
- """
1620
- res = login(
1621
- api_client=self,
1622
- audience=audience,
1623
- auth0_url=auth0_url,
1624
- client_id=client_id,
1625
- login_domain=self.login_domain,
1626
- callback_port=callback_port
1627
- )
1628
- if res:
1629
- self._send_login_event(user_type='human', login_type='interactive')
1630
- return res
1631
-
1632
- def _send_login_event(self, user_type, login_type):
1633
- event_payload = {
1634
- 'event': 'dtlpy:login',
1635
- 'properties': {
1636
- 'login_type': login_type,
1637
- 'user_type': user_type
1638
- }
1639
- }
1640
- self.event_tracker.put(event=event_payload)
1641
-
1642
- def logout(self):
1643
- """
1644
- Logout.
1645
- :return:
1646
- """
1647
- return logout(api_client=self)
1648
-
1649
- def _renew_token_in_dual_agent(self):
1650
- renewed = False
1651
- try:
1652
- proxy_port = os.environ.get('AGENT_PROXY_MAIN_PORT') or "1001"
1653
- resp = requests.get('http://localhost:{port}/get_jwt'.format(port=proxy_port))
1654
- if resp.ok:
1655
- self.token = resp.json()['jwt']
1656
- renewed = True
1657
- else:
1658
- self.print_bad_response(resp)
1659
- except Exception:
1660
- logger.exception('Failed to get token from proxy')
1661
-
1662
- return renewed
1663
-
1664
- def renew_token(self):
1665
- refresh_method = os.environ.get('DTLPY_REFRESH_TOKEN_METHOD', None)
1666
- if refresh_method is not None and refresh_method == 'proxy':
1667
- res = self._renew_token_in_dual_agent()
1668
- else:
1669
- res = self._renew_token_with_refresh_token()
1670
- if res:
1671
- self._send_login_event(user_type='human', login_type='refresh')
1672
- return res
1673
-
1674
- def generate_api_key(self, description: str = None, login: bool = False):
1675
- """
1676
- Generate an API key for a user
1677
- :param description: description for the API key
1678
- :param login: if True, login with the new API key
1679
- :return: User token
1680
- """
1681
- user_email = self.info()['user_email']
1682
- payload = {
1683
- 'userId': user_email
1684
- }
1685
- if description:
1686
- if not isinstance(description, str):
1687
- raise ValueError('description should be a string')
1688
- payload['description'] = description
1689
- success, response = self.gen_request(req_type='post', path='/apiKeys', json_req=payload)
1690
- if not success:
1691
- raise exceptions.PlatformException(response)
1692
- if login:
1693
- self.login_api_key(response.json()['jwt'])
1694
- return True
1695
-
1696
- return response.json()['jwt']
1697
-
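# Illustrative sketch (not part of the package diff): creating an API key for the current
# user and authenticating with it later; dl.client_api is the module-level ApiClient
# instance created at the bottom of this file, and the description text is arbitrary.
import dtlpy as dl

api_key = dl.client_api.generate_api_key(description='ci runner key')
# on another machine / in another session:
dl.client_api.login_api_key(api_key)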
1698
- def _renew_token_with_refresh_token(self):
1699
- renewed = False
1700
- if self.refresh_token_active is False:
1701
- return renewed
1702
- logger.debug('RefreshToken: Started')
1703
- if self.token is None or self.token == '':
1704
- # token is missing
1705
- logger.debug('RefreshToken: Missing token.')
1706
- self.refresh_token_active = False
1707
- if self.refresh_token is None or self.refresh_token == '':
1708
- # missing refresh token
1709
- logger.debug('RefreshToken: Missing "refresh_token"')
1710
- self.refresh_token_active = False
1711
- if self.environment not in self.environments.keys():
1712
- # env params missing
1713
- logger.debug('RefreshToken: Missing environments params for refreshing token')
1714
- self.refresh_token_active = False
1715
-
1716
- if self.refresh_token_active is False:
1717
- return renewed
1718
-
1719
- refresh_token = self.refresh_token
1720
-
1721
- env_params = self.environments[self.environment]
1722
- if 'gate_url' not in env_params:
1723
- env_params['gate_url'] = gate_url_from_host(environment=self.environment)
1724
- self.environments[self.environment] = env_params
1725
- token_endpoint = "{}/token?default".format(env_params['gate_url'])
1726
-
1727
- payload = {
1728
- 'type': 'refresh_token',
1729
- 'refresh_token': refresh_token
1730
- }
1731
- logger.debug("RefreshToken: Refreshing token via {}".format(token_endpoint))
1732
- resp = requests.request(
1733
- "POST",
1734
- token_endpoint,
1735
- json=payload,
1736
- headers={'content-type': 'application/json'},
1737
- verify=self.verify
1738
- )
1739
- if not resp.ok:
1740
- logger.debug('RefreshToken: Failed')
1741
- self.print_bad_response(resp)
1742
- else:
1743
- response_dict = resp.json()
1744
- # get new token
1745
- final_token = response_dict['id_token']
1746
- self.token = final_token
1747
- self.refresh_token = refresh_token
1748
- # set status back to pending
1749
- logger.debug('RefreshToken: Success')
1750
- renewed = True
1751
- return renewed
1752
-
1753
- def set_api_counter(self, filepath):
1754
- self.calls_counter = CallsCounter(filepath=filepath)
1755
-
1756
- def _get_resource_url(self, url):
1757
-
1758
- env = self._environments[self._environment]['alias']
1759
- head = self._environments[self._environment].get('url', None)
1760
- # TODO need to deprecate somehow (the following)
1761
- if head is None:
1762
- if env == 'prod':
1763
- head = 'https://console.dataloop.ai/'
1764
- elif env == 'dev':
1765
- head = 'https://dev-con.dataloop.ai/'
1766
- elif env == 'rc':
1767
- head = 'https://rc-con.dataloop.ai/'
1768
- elif env in ['local', 'minikube_local_mac']:
1769
- head = 'https://localhost:8443/'
1770
- elif env == 'new-dev':
1771
- head = 'https://custom1-gate.dataloop.ai/'
1772
- else:
1773
- raise exceptions.PlatformException(error='400', message='Unknown environment: {}'.format(env))
1774
-
1775
- return head + url
1776
-
1777
- def _open_in_web(self, url):
1778
- import webbrowser
1779
- webbrowser.open(url=url, new=2, autoraise=True)
1780
-
1781
-
1782
- client = ApiClient()
1
+ """
2
+ Dataloop platform calls
3
+ """
4
+ import aiohttp.client_exceptions
5
+ import requests_toolbelt
6
+ import multiprocessing
7
+ import threading
8
+ import traceback
9
+ import datetime
10
+ import requests
11
+ import aiohttp
12
+ import logging
13
+ import asyncio
14
+ import certifi
15
+ import base64
16
+ import enum
17
+ import time
18
+ import tqdm
19
+ import json
20
+ import sys
21
+ import ssl
22
+ import jwt
23
+ import os
24
+ import io
25
+ import concurrent
26
+ from concurrent.futures import ThreadPoolExecutor
27
+ from requests.adapters import HTTPAdapter
28
+ from urllib3.util import Retry
29
+ from functools import wraps
30
+ import numpy as np
31
+ import inspect
32
+ from requests.models import Response
33
+ from dtlpy.caches.cache import CacheManger, CacheConfig
34
+ from .calls_counter import CallsCounter
35
+ from .cookie import CookieIO
36
+ from .logins import login, logout, login_secret, login_m2m, gate_url_from_host
37
+ from .async_utils import AsyncResponse, AsyncUploadStream, AsyncResponseError, AsyncThreadEventLoop
38
+ from .events import Events
39
+ from .service_defaults import DEFAULT_ENVIRONMENTS, DEFAULT_ENVIRONMENT
40
+ from .aihttp_retry import RetryClient
41
+ from .. import miscellaneous, exceptions, __version__
42
+
43
+ logger = logging.getLogger(name='dtlpy')
44
+ threadLock = threading.Lock()
45
+
46
+
47
+ def format_message(message):
48
+ if message and isinstance(message, str):
49
+ return message.replace('\\n', '\n')
50
+ return message
51
+
52
+
53
+ class VerboseLoggingLevel:
54
+ DEBUG = "debug"
55
+ INFO = "info"
56
+ WARNING = "warning"
57
+ ERROR = "error"
58
+ CRITICAL = "critical"
59
+
60
+
61
+ class PlatformError(Exception):
62
+ """
63
+ Error handling for api calls
64
+ """
65
+
66
+ def __init__(self, resp):
67
+ msg = ''
68
+ if hasattr(resp, 'status_code'):
69
+ msg += '<Response [{}]>'.format(resp.status_code)
70
+ if hasattr(resp, 'reason'):
71
+ msg += '<Reason [{}]>'.format(format_message(resp.reason))
72
+ elif hasattr(resp, 'text'):
73
+ msg += '<Reason [{}]>'.format(format_message(resp.text))
74
+ super().__init__(msg)
75
+
76
+
77
+ class Callbacks:
78
+ def __init__(self):
79
+ self._callbacks = {}
80
+
81
+ class CallbackEvent(str, enum.Enum):
82
+ DATASET_EXPORT = 'datasetExport'
83
+ ITEMS_UPLOAD = 'itemUpload'
84
+
85
+ def add(self, event, func):
86
+
87
+ if not callable(func):
88
+ raise ValueError(f"The provided callback for {event} is not callable")
89
+ if event not in list(self.CallbackEvent):
90
+ raise ValueError(f"Unknown event: {event!r}, allowed events are: {list(self.CallbackEvent)}")
91
+ self._callbacks[event] = func
92
+
93
+ def get(self, name):
94
+ return self._callbacks.get(name)
95
+
96
+ def run_on_event(self, event, context, progress):
97
+ callback = self.get(event)
98
+ if callback is not None:
99
+ callback(progress=progress, context=context)
100
+
101
+
102
+ class Verbose:
103
+ __DEFAULT_LOGGING_LEVEL = 'warning'
104
+ __DEFAULT_DISABLE_PROGRESS_BAR = False
105
+ __DEFAULT_PRINT_ALL_RESPONSES = False
106
+ __PRINT_ERROR_LOGS = False
107
+ __DEFAULT_PROGRESS_BAR_SETTINGS = {
108
+ 'Iterate Pages': False,
109
+ 'Command Progress': False,
110
+ 'Download Dataset': False,
111
+ 'Download Item': False,
112
+ 'Upload Items': False,
113
+ 'Download Annotations': False,
114
+ 'Upload Annotations': False,
115
+ 'Convert Annotations': False
116
+ }
117
+
118
+ def __init__(self, cookie):
119
+ self.cookie = cookie
120
+ dictionary = self.cookie.get('verbose')
121
+ if isinstance(dictionary, dict):
122
+ self.from_cookie(dictionary)
123
+ else:
124
+ self._logging_level = self.__DEFAULT_LOGGING_LEVEL
125
+ self._disable_progress_bar = self.__DEFAULT_DISABLE_PROGRESS_BAR
126
+ self._print_all_responses = self.__DEFAULT_PRINT_ALL_RESPONSES
127
+ self._print_error_logs = self.__PRINT_ERROR_LOGS
128
+ self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
129
+ if os.getenv('DTLPY_REFRESH_TOKEN_METHOD', "") == "proxy":
130
+ self._print_error_logs = True
131
+ self.to_cookie()
132
+
133
+ def to_cookie(self):
134
+ dictionary = {'logging_level': self._logging_level,
135
+ 'disable_progress_bar': self._disable_progress_bar,
136
+ 'print_all_responses': self._print_all_responses,
137
+ 'print_error_logs': self._print_error_logs,
138
+ 'progress_bar_setting': json.dumps(self._progress_bar_settings)
139
+ }
140
+ self.cookie.put(key='verbose', value=dictionary)
141
+
142
+ def from_cookie(self, dictionary):
143
+ self._logging_level = dictionary.get('logging_level', self.__DEFAULT_LOGGING_LEVEL)
144
+ self._disable_progress_bar = dictionary.get('disable_progress_bar', self.__DEFAULT_DISABLE_PROGRESS_BAR)
145
+ self._print_all_responses = dictionary.get('print_all_responses', self.__DEFAULT_PRINT_ALL_RESPONSES)
146
+ self._print_error_logs = dictionary.get('print_error_logs', self.__PRINT_ERROR_LOGS)
147
+ progress_bar_settings = dictionary.get('progress_bar_setting', None)
148
+ if progress_bar_settings is None:
149
+ self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
150
+ else:
151
+ self._progress_bar_settings = json.loads(progress_bar_settings)
152
+
153
+ @property
154
+ def disable_progress_bar_iterate_pages(self):
155
+ return self._disable_progress_bar or self._progress_bar_settings.get('Iterate Pages', False)
156
+
157
+ @disable_progress_bar_iterate_pages.setter
158
+ def disable_progress_bar_iterate_pages(self, val):
159
+ self._progress_bar_settings['Iterate Pages'] = val
160
+ self.to_cookie()
161
+
162
+ @property
163
+ def disable_progress_bar_command_progress(self):
164
+ return self._disable_progress_bar or self._progress_bar_settings.get('Command Progress', False)
165
+
166
+ @disable_progress_bar_command_progress.setter
167
+ def disable_progress_bar_command_progress(self, val):
168
+ self._progress_bar_settings['Command Progress'] = val
169
+ self.to_cookie()
170
+
171
+ @property
172
+ def disable_progress_bar_download_item(self):
173
+ return self._disable_progress_bar or self._progress_bar_settings.get('Download Item', False)
174
+
175
+ @disable_progress_bar_download_item.setter
176
+ def disable_progress_bar_download_item(self, val):
177
+ self._progress_bar_settings['Download Item'] = val
178
+ self.to_cookie()
179
+
180
+ @property
181
+ def disable_progress_bar_download_dataset(self):
182
+ return self._disable_progress_bar or self._progress_bar_settings.get('Download Dataset', False)
183
+
184
+ @disable_progress_bar_download_dataset.setter
185
+ def disable_progress_bar_download_dataset(self, val):
186
+ self._progress_bar_settings['Download Dataset'] = val
187
+ self.to_cookie()
188
+
189
+ @property
190
+ def disable_progress_bar_upload_items(self):
191
+ return self._disable_progress_bar or self._progress_bar_settings.get('Upload Items', False)
192
+
193
+ @disable_progress_bar_upload_items.setter
194
+ def disable_progress_bar_upload_items(self, val):
195
+ self._progress_bar_settings['Upload Items'] = val
196
+ self.to_cookie()
197
+
198
+ @property
199
+ def disable_progress_bar_download_annotations(self):
200
+ return self._disable_progress_bar or self._progress_bar_settings.get('Download Annotations', False)
201
+
202
+ @disable_progress_bar_download_annotations.setter
203
+ def disable_progress_bar_download_annotations(self, val):
204
+ self._progress_bar_settings['Download Annotations'] = val
205
+ self.to_cookie()
206
+
207
+ @property
208
+ def disable_progress_bar_upload_annotations(self):
209
+ return self._disable_progress_bar or self._progress_bar_settings.get('Upload Annotations', False)
210
+
211
+ @disable_progress_bar_upload_annotations.setter
212
+ def disable_progress_bar_upload_annotations(self, val):
213
+ self._progress_bar_settings['Upload Annotations'] = val
214
+ self.to_cookie()
215
+
216
+ @property
217
+ def disable_progress_bar_convert_annotations(self):
218
+ return self._disable_progress_bar or self._progress_bar_settings.get('Convert Annotations', False)
219
+
220
+ @disable_progress_bar_convert_annotations.setter
221
+ def disable_progress_bar_convert_annotations(self, val):
222
+ self._progress_bar_settings['Convert Annotations'] = val
223
+ self.to_cookie()
224
+
225
+ @property
226
+ def disable_progress_bar(self):
227
+ return self._disable_progress_bar
228
+
229
+ @disable_progress_bar.setter
230
+ def disable_progress_bar(self, val):
231
+ self._disable_progress_bar = val
232
+ self.to_cookie()
233
+
234
+ @property
235
+ def logging_level(self):
236
+ return self._logging_level
237
+
238
+ @logging_level.setter
239
+ def logging_level(self, val):
240
+ self._logging_level = val
241
+ # set log level
242
+ logging.getLogger(name='dtlpy').handlers[0].setLevel(logging._nameToLevel[self._logging_level.upper()])
243
+ # write to cookie
244
+ self.to_cookie()
245
+
246
+ @property
247
+ def print_all_responses(self):
248
+ return self._print_all_responses
249
+
250
+ @print_all_responses.setter
251
+ def print_all_responses(self, val):
252
+ self._print_all_responses = val
253
+ self.to_cookie()
254
+
255
+ @property
256
+ def print_error_logs(self):
257
+ return self._print_error_logs
258
+
259
+ @print_error_logs.setter
260
+ def print_error_logs(self, val):
261
+ self._print_error_logs = val
262
+ self.to_cookie()
263
+
264
+
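# Illustrative sketch (not part of the package diff): the Verbose settings above are
# persisted to the SDK cookie, so changes survive between Python sessions.
import dtlpy as dl

dl.client_api.verbose.logging_level = 'info'                    # raises the 'dtlpy' logger level
dl.client_api.verbose.disable_progress_bar_upload_items = True  # hide only the upload bar
dl.client_api.verbose.print_all_responses = False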
265
+ class CacheMode:
266
+ __DEFAULT_ENABLE_CACHE = True
267
+ __DEFAULT_CHUNK_CACHE = 200000
268
+
269
+ def __init__(self, cookie):
270
+ self.cookie = cookie
271
+ dictionary = self.cookie.get('cache_mode')
272
+ if isinstance(dictionary, dict):
273
+ self.from_cookie(dictionary)
274
+ else:
275
+ self._enable_cache = self.__DEFAULT_ENABLE_CACHE
276
+ self._chunk_cache = self.__DEFAULT_CHUNK_CACHE
277
+ self.to_cookie()
278
+
279
+ def to_cookie(self):
280
+ dictionary = {'enable_cache': self._enable_cache,
281
+ 'chunk_cache': self._chunk_cache}
282
+ self.cookie.put(key='cache_mode', value=dictionary)
283
+
284
+ def from_cookie(self, dictionary):
285
+ self._enable_cache = dictionary.get('enable_cache', self.__DEFAULT_ENABLE_CACHE)
286
+ self._chunk_cache = dictionary.get('chunk_cache', self.__DEFAULT_CHUNK_CACHE)
287
+
288
+ @property
289
+ def enable_cache(self):
290
+ return self._enable_cache
291
+
292
+ @enable_cache.setter
293
+ def enable_cache(self, val: bool):
294
+ if not isinstance(val, bool):
295
+ raise exceptions.PlatformException(error=400,
296
+ message="input must be of type bool")
297
+ self._enable_cache = val
298
+ self.to_cookie()
299
+
300
+ @property
301
+ def chunk_cache(self):
302
+ return self._chunk_cache
303
+
304
+ @chunk_cache.setter
305
+ def chunk_cache(self, val):
306
+ self._chunk_cache = val
307
+ self.to_cookie()
308
+
309
+
310
+ class SDKCache:
311
+ __DEFAULT_USE_CACHE = False
312
+ __DEFAULT_CACHE_PATH = os.path.join(os.path.expanduser('~'), '.dataloop', 'obj_cache')
313
+ __DEFAULT_CACHE_PATH_BIN = os.path.join(os.path.expanduser('~'), '.dataloop')
314
+ __DEFAULT_CONFIGS_CACHE = CacheConfig().to_string()
315
+ __DEFAULT_BINARY_CACHE_SIZE = 1000
316
+
317
+ def __init__(self, cookie):
318
+ self.cookie = cookie
319
+ dictionary = self.cookie.get('cache_configs')
320
+ if isinstance(dictionary, dict):
321
+ self.from_cookie(dictionary)
322
+ else:
323
+ self._cache_path = self.__DEFAULT_CACHE_PATH
324
+ self._cache_path_bin = self.__DEFAULT_CACHE_PATH_BIN
325
+ self._configs = self.__DEFAULT_CONFIGS_CACHE
326
+ self._bin_size = self.__DEFAULT_BINARY_CACHE_SIZE
327
+ self._use_cache = self.__DEFAULT_USE_CACHE
328
+ self.to_cookie()
329
+
330
+ def to_cookie(self):
331
+ dictionary = {'cache_path': self._cache_path,
332
+ 'cache_path_bin': self._cache_path_bin,
333
+ 'configs': self._configs,
334
+ 'bin_size': self._bin_size,
335
+ 'use_cache': self._use_cache}
336
+ self.cookie.put(key='cache_configs', value=dictionary)
337
+
338
+ def from_cookie(self, dictionary):
339
+ self._cache_path = dictionary.get('cache_path', self.__DEFAULT_CACHE_PATH)
340
+ self._cache_path_bin = dictionary.get('cache_path_bin', self.__DEFAULT_CACHE_PATH_BIN)
341
+ self._configs = dictionary.get('configs', self.__DEFAULT_CONFIGS_CACHE)
342
+ self._bin_size = dictionary.get('bin_size', self.__DEFAULT_BINARY_CACHE_SIZE)
343
+ self._use_cache = dictionary.get('use_cache', self.__DEFAULT_USE_CACHE)
344
+
345
+ @property
346
+ def cache_path(self):
347
+ return self._cache_path
348
+
349
+ @property
350
+ def cache_path_bin(self):
351
+ return self._cache_path_bin
352
+
353
+ @cache_path_bin.setter
354
+ def cache_path_bin(self, val: str):
355
+ if not isinstance(val, str):
356
+ raise exceptions.PlatformException(error=400,
357
+ message="input must be of type str")
358
+ self._cache_path_bin = val
359
+ os.environ['DEFAULT_CACHE_PATH'] = val
360
+ self.to_cookie()
361
+
362
+ @property
363
+ def use_cache(self):
364
+ return self._use_cache
365
+
366
+ @use_cache.setter
367
+ def use_cache(self, val: bool):
368
+ if not isinstance(val, bool):
369
+ raise exceptions.PlatformException(error=400,
370
+ message="input must be of type bool")
371
+ self._use_cache = val
372
+ self.to_cookie()
373
+
374
+ @property
375
+ def configs(self):
376
+ return self._configs
377
+
378
+ @configs.setter
379
+ def configs(self, val):
380
+ if isinstance(val, CacheConfig):
381
+ val = val.to_string()
382
+ if not isinstance(val, str):
383
+ raise exceptions.PlatformException(error=400,
384
+ message="input must be of type str or CacheConfig")
385
+ self._configs = val
386
+ self.to_cookie()
387
+
388
+ @property
389
+ def bin_size(self):
390
+ return self._bin_size
391
+
392
+ @bin_size.setter
393
+ def bin_size(self, val: int):
394
+ if not isinstance(val, int):
395
+ raise exceptions.PlatformException(error=400,
396
+ message="input must be of type int")
397
+ self._bin_size = val
398
+ self.to_cookie()
399
+
400
+
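# Illustrative sketch (not part of the package diff): opting in to the local object cache;
# build_cache() (defined further down in ApiClient) reads these SDKCache settings when no
# explicit CacheConfig is passed.
import dtlpy as dl

dl.client_api.sdk_cache.use_cache = True
dl.client_api.build_cache()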
401
+ class Attributes2:
402
+ __DEFAULT_USE_ATTRIBUTE = False
403
+
404
+ def __init__(self, cookie):
405
+ self.cookie = cookie
406
+ dictionary = self.cookie.get('use_attributes_2')
407
+ if isinstance(dictionary, dict):
408
+ self.from_cookie(dictionary)
409
+ else:
410
+ self._use_attributes_2 = self.__DEFAULT_USE_ATTRIBUTE
411
+ self.to_cookie()
412
+
413
+ def to_cookie(self):
414
+ dictionary = {'use_attributes_2': self._use_attributes_2}
415
+ self.cookie.put(key='use_attributes_2', value=dictionary)
416
+
417
+ def from_cookie(self, dictionary):
418
+ self._use_attributes_2 = dictionary.get('use_attributes_2', self.__DEFAULT_USE_ATTRIBUTE)
419
+
420
+ @property
421
+ def use_attributes_2(self):
422
+ return self._use_attributes_2
423
+
424
+ @use_attributes_2.setter
425
+ def use_attributes_2(self, val: bool):
426
+ if not isinstance(val, bool):
427
+ raise exceptions.PlatformException(error=400,
428
+ message="input must be of type bool")
429
+ self._use_attributes_2 = val
430
+ os.environ["USE_ATTRIBUTE_2"] = json.dumps(val)
431
+ self.to_cookie()
432
+
433
+ class Decorators:
434
+ @staticmethod
435
+ def token_expired_decorator(method):
436
+ @wraps(method)
437
+ def decorated_method(inst, *args, **kwargs):
438
+ # save event
439
+ frm = inspect.stack()[1]
440
+
441
+ # before the method call
442
+ kwargs.update({'stack': frm})
443
+ if inst.token_expired():
444
+ if inst.renew_token_method() is False:
445
+ raise exceptions.PlatformException('600', 'Token expired, Please login.'
446
+ '\nSDK login options: dl.login(), dl.login_token(), '
447
+ 'dl.login_m2m()'
448
+ '\nCLI login options: dlp login, dlp login-token, '
449
+ 'dlp login-m2m')
450
+ # the actual method call
451
+ result = method(inst, *args, **kwargs)
452
+ # after the method call
453
+ return result
454
+
455
+ return decorated_method
456
+
457
+
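# Illustrative sketch (not part of the package diff): the decorator above, reduced to its
# core idea -- refresh the JWT before running the wrapped API call. The stack-frame
# capture used for event tracking is omitted here.
from functools import wraps

def token_expired_decorator(method):
    @wraps(method)
    def decorated_method(inst, *args, **kwargs):
        if inst.token_expired() and inst.renew_token_method() is False:
            raise RuntimeError('Token expired, please login')
        return method(inst, *args, **kwargs)
    return decorated_method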
458
+ class ApiClient:
459
+ """
460
+ API calls to Dataloop gate
461
+ """
462
+
463
+ def __init__(self, token=None, num_processes=None, cookie_filepath=None):
464
+ ############
465
+ # Initiate #
466
+ ############
467
+ # define local params - read only once from cookie file
468
+ self.lock = threading.Lock()
469
+ self.renew_token_method = self.renew_token
470
+ self.is_cli = False
471
+ self.session = None
472
+ self.default_headers = dict()
473
+ self._token = None
474
+ self._environments = None
475
+ self._environment = None
476
+ self._verbose = None
477
+ self._callbacks = None
478
+ self._cache_state = None
479
+ self._attributes_mode = None
480
+ self._cache_configs = None
481
+ self._sdk_cache = None
482
+ self._fetch_entities = None
483
+ # define other params
484
+ self.last_response = None
485
+ self.last_request = None
486
+ self.platform_exception = None
487
+ self.last_curl = None
488
+ self.minimal_print = True
489
+ # start refresh token
490
+ self.refresh_token_active = True
491
+ # event and pools
492
+ self._thread_pools = dict()
493
+ self._event_loop = None
494
+ self._login_domain = None
495
+ self.__gate_url_for_requests = None
496
+
497
+ # TODO- remove before release - only for debugging
498
+ self._stopped_pools = list()
499
+
500
+ if cookie_filepath is None:
501
+ self.cookie_io = CookieIO.init()
502
+ else:
503
+ self.cookie_io = CookieIO(path=cookie_filepath)
504
+ assert isinstance(self.cookie_io, CookieIO)
505
+ self.state_io = CookieIO.init_local_cookie(create=False)
506
+ assert isinstance(self.state_io, CookieIO)
507
+
508
+ ##################
509
+ # configurations #
510
+ ##################
511
+ # check for proxies in connection
512
+ self.check_proxy()
513
+
514
+ # set token if input
515
+ if token is not None:
516
+ self.token = token
517
+
518
+ # STDOUT
519
+ self.remove_keys_list = ['contributors', 'url', 'annotations', 'items', 'export', 'directoryTree',
520
+ 'attributes', 'partitions', 'metadata', 'stream', 'createdAt', 'updatedAt', 'arch']
521
+
522
+ # API calls counter
523
+ counter_filepath = os.path.join(os.path.dirname(self.cookie_io.COOKIE), 'calls_counter.json')
524
+ self.calls_counter = CallsCounter(filepath=counter_filepath)
525
+
526
+ # create a global thread pool to run multi threading
527
+ if num_processes is None:
528
+ num_processes = 3 * multiprocessing.cpu_count()
529
+ self._num_processes = num_processes
530
+ self._thread_pools_names = {'item.download': num_processes,
531
+ 'item.status_update': num_processes,
532
+ 'item.page': num_processes,
533
+ 'annotation.upload': num_processes,
534
+ 'annotation.download': num_processes,
535
+ 'annotation.update': num_processes,
536
+ 'entity.create': num_processes,
537
+ 'dataset.download': num_processes}
538
+ # set logging level
539
+ logging.getLogger(name='dtlpy').handlers[0].setLevel(logging._nameToLevel[self.verbose.logging_level.upper()])
540
+ os.environ["USE_ATTRIBUTE_2"] = json.dumps(self.attributes_mode.use_attributes_2)
541
+
542
+ self.cache = None
543
+ #######################
544
+ # start event tracker #
545
+ self.event_tracker = Events(client_api=self)
546
+ self.event_tracker.daemon = True
547
+ self.event_tracker.start()
548
+
549
+ @property
550
+ def event_loop(self):
551
+ self.lock.acquire()
552
+ if self._event_loop is None:
553
+ self._event_loop = self.create_event_loop_thread()
554
+ elif not self._event_loop.loop.is_running():
555
+ if self._event_loop.is_alive():
556
+ self._event_loop.stop()
557
+ self._event_loop = self.create_event_loop_thread()
558
+ self.lock.release()
559
+ return self._event_loop
560
+
561
+ def build_cache(self, cache_config=None):
562
+ if cache_config is None:
563
+ cache_config_json = os.environ.get('CACHE_CONFIG', None)
564
+ if cache_config_json is None:
565
+ if self.sdk_cache.use_cache:
566
+ cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=self.sdk_cache.configs)
567
+ else:
568
+ cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=cache_config_json)
569
+ if cache_config:
570
+ # cache paths
571
+ if os.environ.get('DEFAULT_CACHE_PATH', None) is None:
572
+ os.environ['DEFAULT_CACHE_PATH'] = self.sdk_cache.cache_path_bin
573
+ else:
574
+ self.sdk_cache.cache_path_bin = os.environ['DEFAULT_CACHE_PATH']
575
+
576
+ if not os.path.isdir(self.sdk_cache.cache_path_bin):
577
+ os.makedirs(self.sdk_cache.cache_path_bin, exist_ok=True)
578
+
579
+ if not os.path.isfile(os.path.join(self.sdk_cache.cache_path_bin, 'cacheConfig.json')):
580
+ os.makedirs(self.sdk_cache.cache_path_bin, exist_ok=True)
581
+
582
+ if isinstance(cache_config, str):
583
+ self.sdk_cache.configs = cache_config
584
+ cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=cache_config)
585
+ elif isinstance(cache_config, CacheConfig):
586
+ self.sdk_cache.configs = cache_config.to_string()
587
+ else:
588
+ raise Exception("config should be of type str or CacheConfig")
589
+ try:
590
+ self.cache = CacheManger(cache_configs=[cache_config], bin_cache_size=self.sdk_cache.bin_size)
591
+ self.cache.ping()
592
+ self.sdk_cache.use_cache = True
593
+ except Exception as e:
594
+ logger.warning("Cache build error {}".format(e))
595
+ self.cache = None
596
+
597
+ def __del__(self):
598
+ for name, pool in self._thread_pools.items():
599
+ pool.shutdown()
600
+ self.event_loop.stop()
601
+
602
+ def _build_request_headers(self, headers=None):
603
+ if headers is None:
604
+ headers = dict()
605
+ if not isinstance(headers, dict):
606
+ raise exceptions.PlatformException(
607
+ error='400',
608
+ message="Input 'headers' must be a dictionary, got: {}".format(type(headers)))
609
+ headers.update(self.default_headers)
610
+ headers.update(self.auth)
611
+ headers.update({'User-Agent': requests_toolbelt.user_agent('dtlpy', __version__)})
612
+ return headers
613
+
614
+ @property
615
+ def num_processes(self):
616
+ return self._num_processes
617
+
618
+ @num_processes.setter
619
+ def num_processes(self, num_processes):
620
+ if num_processes == self._num_processes:
621
+ # same number. no need to do anything
622
+ return
623
+ self._num_processes = num_processes
624
+ for pool_name in self._thread_pools_names:
625
+ self._thread_pools_names[pool_name] = num_processes
626
+
627
+ for pool in self._thread_pools:
628
+ self._thread_pools[pool].shutdown()
629
+ self._thread_pools = dict()
630
+
631
+ def create_event_loop_thread(self):
632
+ loop = asyncio.new_event_loop()
633
+ event_loop = AsyncThreadEventLoop(loop=loop,
634
+ n=self._num_processes)
635
+ event_loop.daemon = True
636
+ event_loop.start()
637
+ time.sleep(1)
638
+ return event_loop
639
+
640
+ def thread_pools(self, pool_name):
641
+ if pool_name not in self._thread_pools_names:
642
+ raise ValueError('unknown thread pool name: {}. known name: {}'.format(
643
+ pool_name,
644
+ list(self._thread_pools_names.keys())))
645
+ num_processes = self._thread_pools_names[pool_name]
646
+ if pool_name not in self._thread_pools or self._thread_pools[pool_name]._shutdown:
647
+ self._thread_pools[pool_name] = ThreadPoolExecutor(max_workers=num_processes)
648
+ pool = self._thread_pools[pool_name]
649
+ assert isinstance(pool, concurrent.futures.ThreadPoolExecutor)
650
+ return pool
651
+
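# Illustrative sketch (not part of the package diff): borrowing one of the named thread
# pools; unknown pool names raise ValueError as shown above, and the submitted work here
# is a placeholder.
import dtlpy as dl

pool = dl.client_api.thread_pools(pool_name='item.download')
futures = [pool.submit(pow, 2, i) for i in range(4)]
results = [f.result() for f in futures]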
652
+ @property
653
+ def verify(self):
654
+ environments = self.environments
655
+ verify = True
656
+ if self.environment in environments:
657
+ if 'verify_ssl' in environments[self.environment]:
658
+ verify = environments[self.environment]['verify_ssl']
659
+ return verify
660
+
661
+ @property
662
+ def use_ssl_context(self):
663
+ environments = self.environments
664
+ use_ssl_context = False
665
+ if self.environment in environments:
666
+ if 'use_ssl_context' in environments[self.environment]:
667
+ use_ssl_context = environments[self.environment]['use_ssl_context']
668
+ return use_ssl_context
669
+
670
+ @property
671
+ def auth(self):
672
+ return {'authorization': 'Bearer ' + self.token}
673
+
674
+ @property
675
+ def environment(self):
676
+ _environment = self._environment
677
+ if _environment is None:
678
+ _environment = self.cookie_io.get('url')
679
+ if _environment is None:
680
+ _environment = DEFAULT_ENVIRONMENT
681
+ self._environment = _environment
682
+ return _environment
683
+
684
+ @environment.setter
685
+ def environment(self, env):
686
+ self._environment = env
687
+ self.cookie_io.put('url', env)
688
+
689
+ @property
690
+ def fetch_entities(self):
691
+ if self._fetch_entities is None:
692
+ self._fetch_entities = self.cookie_io.get('fetch_entities')
693
+ if self._fetch_entities is None:
694
+ self.fetch_entities = True # default
695
+ return self._fetch_entities
696
+
697
+ @fetch_entities.setter
698
+ def fetch_entities(self, val):
699
+ self._fetch_entities = val
700
+ self.cookie_io.put('fetch_entities', val)
701
+
702
+ @property
703
+ def environments(self):
704
+ """
705
+ List of known environments
706
+ :return:
707
+ """
708
+ # get environment login parameters
709
+ _environments = self._environments
710
+ if _environments is None:
711
+ # take from cookie
712
+ _environments = self.cookie_io.get('login_parameters')
713
+ # if cookie is None - init with defaults
714
+ if _environments is None:
715
+ # default
716
+ _environments = DEFAULT_ENVIRONMENTS
717
+ # save to local variable
718
+ self.environments = _environments
719
+ else:
720
+ # save from cookie to ram
721
+ self._environments = _environments
722
+ return _environments
723
+
724
+ @environments.setter
725
+ def environments(self, env_dict):
726
+ self._environments = env_dict
727
+ self.cookie_io.put(key='login_parameters', value=self._environments)
728
+
729
+ @property
730
+ def verbose(self):
731
+ if self._verbose is None:
732
+ self._verbose = Verbose(cookie=self.cookie_io)
733
+ assert isinstance(self._verbose, Verbose)
734
+ return self._verbose
735
+
736
+ @property
737
+ def cache_state(self):
738
+ if self._cache_state is None:
739
+ self._cache_state = CacheMode(cookie=self.cookie_io)
740
+ assert isinstance(self._cache_state, CacheMode)
741
+ return self._cache_state
742
+
743
+ @property
744
+ def attributes_mode(self):
745
+ if self._attributes_mode is None:
746
+ self._attributes_mode = Attributes2(cookie=self.cookie_io)
747
+ assert isinstance(self._attributes_mode, Attributes2)
748
+ return self._attributes_mode
749
+
750
+ @property
751
+ def sdk_cache(self):
752
+ if self._sdk_cache is None:
753
+ self._sdk_cache = SDKCache(cookie=self.cookie_io)
754
+ assert isinstance(self._sdk_cache, SDKCache)
755
+ return self._sdk_cache
756
+
757
+ @property
758
+ def callbacks(self):
759
+ if self._callbacks is None:
760
+ self._callbacks = Callbacks()
761
+ assert isinstance(self._callbacks, Callbacks)
762
+ return self._callbacks
763
+
764
+ def add_callback(self, event, func):
765
+ """
766
+ function to add callback to the client
767
+ :param event: dl.CallbackEvent enum, name of the callback
768
+ :param func: function to call with 2 arguments: progress and context
769
+ """
770
+ self.callbacks.add(event, func)
771
+
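# Illustrative sketch (not part of the package diff): registering a progress callback for
# dataset-export events; run_on_event() above invokes it with progress and context.
import dtlpy as dl

def on_export_progress(progress, context):
    print('dataset export progress:', progress)

event = dl.client_api.callbacks.CallbackEvent.DATASET_EXPORT  # enum defined in Callbacks above
dl.client_api.add_callback(event=event, func=on_export_progress)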
772
+ @property
773
+ def token(self):
774
+ _token = self._token
775
+ if _token is None:
776
+ environments = self.environments
777
+ if self.environment in environments:
778
+ if 'token' in environments[self.environment]:
779
+ _token = environments[self.environment]['token']
780
+ return _token
781
+
782
+ @token.setter
783
+ def token(self, token):
784
+ # set to variable
785
+ self._token = token
786
+ self.refresh_token = None
787
+ # set to cookie file
788
+ environments = self.environments
789
+ if self.environment in environments:
790
+ environments[self.environment]['token'] = token
791
+ else:
792
+ environments[self.environment] = {'token': token}
793
+ self.environments = environments
794
+
795
+ @property
796
+ def refresh_token(self):
797
+ environments = self.environments
798
+ refresh_token = None
799
+ if self.environment in environments:
800
+ if 'refresh_token' in environments[self.environment]:
801
+ refresh_token = environments[self.environment]['refresh_token']
802
+ return refresh_token
803
+
804
+ @refresh_token.setter
805
+ def refresh_token(self, token):
806
+ environments = self.environments
807
+ if self.environment in environments:
808
+ environments[self.environment]['refresh_token'] = token
809
+ else:
810
+ environments[self.environment] = {'refresh_token': token}
811
+ self.refresh_token_active = True
812
+ self.environments = environments
813
+
814
+ def add_environment(self, environment,
815
+ audience=None,
816
+ client_id=None,
817
+ auth0_url=None,
818
+ verify_ssl=True,
819
+ token=None,
820
+ refresh_token=None,
821
+ alias=None,
822
+ use_ssl_context=False,
823
+ gate_url=None,
824
+ url=None,
825
+ login_domain=None
826
+ ):
827
+ environments = self.environments
828
+ if environment in environments:
829
+ logger.warning('Environment exists. Overwriting. env: {}'.format(environment))
830
+ if token is None:
831
+ token = None
832
+ if alias is None:
833
+ alias = None
834
+ environments[environment] = {'audience': audience,
835
+ 'client_id': client_id,
836
+ 'auth0_url': auth0_url,
837
+ 'alias': alias,
838
+ 'token': token,
839
+ 'gate_url': gate_url,
840
+ 'refresh_token': refresh_token,
841
+ 'verify_ssl': verify_ssl,
842
+ 'use_ssl_context': use_ssl_context,
843
+ 'url': url,
844
+ 'login_domain': login_domain}
845
+ self.environments = environments
846
+
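# Illustrative sketch (not part of the package diff): registering a custom (e.g. on-prem)
# gate and switching to it; the URL and alias below are placeholders.
import dtlpy as dl

dl.client_api.add_environment(environment='https://my-gate.example.com/api/v1',
                              alias='onprem',
                              verify_ssl=False)
dl.setenv('onprem')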
847
+ def info(self, with_token=True):
848
+ """
849
+ Return a dictionary with current information: env, user, token
850
+ :param with_token:
851
+ :return:
852
+ """
853
+ user_email = 'null'
854
+ if self.token is not None:
855
+ payload = jwt.decode(self.token, algorithms=['HS256'],
856
+ verify=False, options={'verify_signature': False})
857
+ user_email = payload['email']
858
+ information = {'environment': self.environment,
859
+ 'user_email': user_email}
860
+ if with_token:
861
+ information['token'] = self.token
862
+ return information
863
+
864
+ @property
865
+ def base_gate_url(self):
866
+ if self.__gate_url_for_requests is None:
867
+ self.__gate_url_for_requests = self.environment
868
+ internal_requests_url = os.environ.get('INTERNAL_REQUESTS_URL', None)
869
+ if internal_requests_url is not None:
870
+ self.__gate_url_for_requests = internal_requests_url
871
+ return self.__gate_url_for_requests
872
+
873
+ def export_curl_request(self, req_type, path, headers=None, json_req=None, files=None, data=None):
874
+ curl, prepared = self._build_gen_request(req_type=req_type,
875
+ path=path,
876
+ headers=headers,
877
+ json_req=json_req,
878
+ files=files,
879
+ data=data)
880
+ return curl
881
+
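# Illustrative sketch (not part of the package diff): getting the equivalent curl command
# for a request without sending it, useful when reproducing a call outside Python;
# '/projects' is an example path.
import dtlpy as dl

print(dl.client_api.export_curl_request(req_type='get', path='/projects'))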
882
+ def _build_gen_request(self, req_type, path, headers, json_req, files, data):
883
+ req_type = req_type.upper()
884
+ valid_request_type = ['GET', 'DELETE', 'POST', 'PUT', 'PATCH']
885
+ assert req_type in valid_request_type, '[ERROR] type: %s NOT in valid requests' % req_type
886
+
887
+ # prepare request
888
+ req = requests.Request(method=req_type,
889
+ url=self.base_gate_url + path,
890
+ json=json_req,
891
+ files=files,
892
+ data=data,
893
+ headers=self._build_request_headers(headers=headers))
894
+ # prepare to send
895
+ prepared = req.prepare()
896
+ # save curl for debug
897
+ command = "curl -X {method} -H {headers} -d '{data}' '{uri}'"
898
+ method = prepared.method
899
+ uri = prepared.url
900
+ data = prepared.body
901
+ headers = ['"{0}: {1}"'.format(k, v) for k, v in prepared.headers.items()]
902
+ headers = " -H ".join(headers)
903
+ curl = command.format(method=method, headers=headers, data=data, uri=uri)
904
+ return curl, prepared
905
+
906
+ def _convert_json_to_response(self, response_json):
907
+ the_response = Response()
908
+ the_response._content = json.dumps(response_json).encode('utf-8')
909
+ return the_response
910
+
911
+ def _cache_on(self, request):
912
+ if self.cache is not None and self.sdk_cache.use_cache:
913
+ pure_request = request.split('?')[0]
914
+ valid_req = ['annotation', 'item', 'dataset', 'project', 'task', 'assignment']
915
+ for req_type in valid_req:
916
+ if req_type in pure_request:
917
+ return True
918
+ return False
919
+
920
+ @Decorators.token_expired_decorator
921
+ def gen_request(self, req_type, path, data=None, json_req=None, files=None, stream=False, headers=None,
922
+ log_error=True, dataset_id=None, **kwargs):
923
+ """
924
+ Generic request from platform
925
+ :param req_type: type of the request: GET, POST etc
926
+ :param path: url (without host header - take from environment)
927
+ :param data: data to pass to request
928
+ :param json_req: json to pass to request
929
+ :param files: files to pass to request
930
+ :param stream: stream to pass the request
931
+ :param headers: headers to pass to request. auth will be added to it
932
+ :param log_error: if true - print the error log of the request
933
+ :param dataset_id: dataset id needed in stream True
934
+ :param kwargs: kwargs
935
+ :return:
936
+ """
937
+ success, resp, cache_values = False, None, []
938
+ if self.cache is None and 'sdk' not in path:
939
+ self.build_cache()
940
+ if req_type.lower() not in ['patch', 'put', 'post', 'delete'] and self._cache_on(request=path):
941
+ try:
942
+ if stream:
943
+ if dataset_id is None:
944
+ raise ValueError("must provide a dataset id")
945
+ success, cache_values = self.cache.read_stream(request_path=path, dataset_id=dataset_id)
946
+
947
+ else:
948
+ success, cache_values = self.cache.read(request_path=path)
949
+ if success:
950
+ resp = self._convert_json_to_response(cache_values)
951
+ except Exception as e:
952
+ logger.warning("Cache error {}".format(e))
953
+ success, resp = False, None
954
+
955
+ if not success and not resp:
956
+ success, resp = self._gen_request(req_type=req_type,
957
+ path=path,
958
+ data=data,
959
+ json_req=json_req,
960
+ files=files,
961
+ stream=stream,
962
+ headers=headers,
963
+ log_error=log_error)
964
+
965
+ if success and self._cache_on(request=path):
966
+ try:
967
+ if stream:
968
+ res = self.cache.write_stream(request_path=path,
969
+ response=resp,
970
+ dataset_id=dataset_id)
971
+ if res != '':
972
+ resp = self._convert_json_to_response(res)
973
+ else:
974
+ if req_type == 'delete':
975
+ self.cache.invalidate(path=path)
976
+ else:
977
+ try:
978
+ resp_list = resp.json()
979
+ write = True
980
+ if isinstance(resp_list, list):
981
+ pass
982
+ elif isinstance(resp_list, dict):
983
+ if 'hasNextPage' in resp_list:
984
+ resp_list = resp_list['items']
985
+ elif 'id' in resp_list:
986
+ resp_list = [resp_list]
987
+ else:
988
+ write = False
989
+ else:
990
+ raise exceptions.PlatformException(error='400', message="unsupported return type")
991
+ if write:
992
+ self.cache.write(list_entities_json=resp_list)
993
+ except:
994
+ raise exceptions.PlatformException(error='400', message="failed to set cache")
995
+ except Exception as e:
996
+ logger.warning("Cache error {}".format(e))
997
+ self.cache = None
998
+ # only for projects events
999
+ if success:
1000
+ if 'stack' in kwargs:
1001
+ self.event_tracker.put(event=kwargs.get('stack'), resp=resp, path=path)
1002
+ return success, resp
1003
+
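# Illustrative sketch (not part of the package diff): issuing a raw platform call through
# gen_request(); the path is relative to the configured gate URL, and '/users/me' is the
# same path used by login_token() and login_api_key() in this file.
import dtlpy as dl

success, response = dl.client_api.gen_request(req_type='get', path='/users/me')
if success:
    print(response.json())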
1004
+ def _gen_request(self, req_type, path, data=None, json_req=None, files=None, stream=False, headers=None,
1005
+ log_error=True):
1006
+ """
1007
+ Generic request from platform
1008
+ :param req_type: type of the request: GET, POST etc
1009
+ :param path: url (without host header - take from environment)
1010
+ :param data: data to pass to request
1011
+ :param json_req: json to pass to request
1012
+ :param files: files to pass to request
1013
+ :param stream: stream to pass the request
1014
+ :param headers: headers to pass to request. auth will be added to it
1015
+ :param log_error: if true - print the error log of the request
1016
+ :return:
1017
+ """
1018
+ curl, prepared = self._build_gen_request(req_type=req_type,
1019
+ path=path,
1020
+ headers=headers,
1021
+ json_req=json_req,
1022
+ files=files,
1023
+ data=data)
1024
+ self.last_curl = curl
1025
+ self.last_request = prepared
1026
+ # send request
1027
+ try:
1028
+ resp = self.send_session(prepared=prepared, stream=stream)
1029
+ except Exception:
1030
+ logger.error(self.print_request(req=prepared, to_return=True))
1031
+ raise
1032
+ self.last_response = resp
1033
+ # handle output
1034
+ if not resp.ok:
1035
+ self.print_bad_response(resp, log_error=log_error and not self.is_cli)
1036
+ return_type = False
1037
+ else:
1038
+ try:
1039
+ # print only what is printable (don't print GET stream responses, etc.)
1040
+ if not stream:
1041
+ self.print_response(resp)
1042
+ except ValueError:
1043
+ # no JSON returned
1044
+ pass
1045
+ return_type = True
1046
+ return return_type, resp
1047
+
+ @Decorators.token_expired_decorator
+ async def gen_async_request(self,
+ req_type,
+ path,
+ data=None,
+ json_req=None,
+ files=None,
+ stream=None,
+ headers=None,
+ log_error=True,
+ filepath=None,
+ chunk_size=8192,
+ pbar=None,
+ is_dataloop=True,
+ **kwargs):
+ """
+ Send an async request to the platform gate (or to an external URL when is_dataloop=False)
+ :return: (success flag, AsyncResponse)
+ """
+ req_type = req_type.upper()
+ valid_request_type = ['GET', 'DELETE', 'POST', 'PUT', 'PATCH']
+ assert req_type in valid_request_type, '[ERROR] type: %s NOT in valid requests' % req_type
+
+ # prepare request
+ if is_dataloop:
+ full_url = self.base_gate_url + path
+ headers_req = self._build_request_headers(headers=headers)
+ else:
+ full_url = path
+ headers = dict()
+ headers_req = headers
+
+ if headers is not None:
+ if not isinstance(headers, dict):
+ raise exceptions.PlatformException(error='400', message="Input 'headers' must be a dictionary")
+ for k, v in headers.items():
+ headers_req[k] = v
+ req = requests.Request(method=req_type,
+ url=full_url,
+ json=json_req,
+ files=files,
+ data=data,
+ headers=headers_req)
+ # prepare to send
+ prepared = req.prepare()
+ # save curl for debug
+ command = "curl -X {method} -H {headers} -d '{data}' '{uri}'"
+ headers = ['"{0}: {1}"'.format(k, v) for k, v in prepared.headers.items()]
+ headers = " -H ".join(headers)
+ curl = command.format(method=prepared.method,
+ headers=headers,
+ data=prepared.body,
+ uri=prepared.url)
+ self.last_curl = curl
+ self.last_request = prepared
+ # send request
+ try:
+ timeout = aiohttp.ClientTimeout(total=0)
+ async with RetryClient(headers=headers_req,
+ timeout=timeout) as session:
+ try:
+ async with session._request(request=session._client.request,
+ url=self.base_gate_url + path,
+ method=req_type,
+ json=json_req,
+ data=data,
+ headers=headers_req,
+ chunked=stream,
+ retry_attempts=5,
+ ssl=self.verify,
+ retry_exceptions={aiohttp.client_exceptions.ClientOSError,
+ aiohttp.client_exceptions.ServerDisconnectedError,
+ aiohttp.client_exceptions.ClientPayloadError},
+ raise_for_status=False) as request:
+ if stream:
+ pbar = self.__get_pbar(pbar=pbar,
+ total_length=request.headers.get("content-length"))
+ if filepath is not None:
+ to_close = False
+ if isinstance(filepath, str):
+ to_close = True
+ buffer = open(filepath, 'wb')
+ elif isinstance(filepath, io.BytesIO):
+ buffer = filepath
+ else:
+ raise ValueError('unknown data type to write file: {}'.format(type(filepath)))
+ try:
+ while True:
+ chunk = await request.content.read(chunk_size)
+ await asyncio.sleep(0)
+ if not chunk:
+ break
+ buffer.write(chunk)
+ if pbar is not None:
+ pbar.update(len(chunk))
+ finally:
+ if to_close:
+ buffer.close()
+
+ if pbar is not None:
+ pbar.close()
+ text = await request.text()
+ try:
+ _json = await request.json()
+ except Exception:
+ _json = dict()
+ response = AsyncResponse(text=text,
+ _json=_json,
+ async_resp=request)
+ except Exception as err:
+ response = AsyncResponseError(error=err, trace=traceback.format_exc())
+ finally:
+ with threadLock:
+ self.calls_counter.add()
+ except Exception:
+ logger.error(self.print_request(req=prepared, to_return=True))
+ raise
+ self.last_response = response
+ # handle output
+ if not response.ok:
+ self.print_bad_response(response, log_error=log_error and not self.is_cli)
+ return_type = False
+ else:
+ try:
+ # print only what is printable (don't print GET stream responses, etc.)
+ if not stream:
+ self.print_response(response)
+ except ValueError:
+ # no JSON returned
+ pass
+ return_type = True
+ return return_type, response
+
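A minimal sketch of driving the coroutine above from synchronous code; it assumes `dl.client_api` is the shared ApiClient instance and that the decorated coroutine can be awaited directly (endpoint path is illustrative):

    import asyncio
    import dtlpy as dl

    async def fetch_projects():
        # returns (success: bool, response: AsyncResponse)
        return await dl.client_api.gen_async_request(req_type='get',
                                                     path='/projects')  # illustrative endpoint

    success, response = asyncio.run(fetch_projects())
    print(success)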
+ @Decorators.token_expired_decorator
+ async def upload_file_async(self,
+ to_upload,
+ item_type,
+ item_size,
+ remote_url,
+ uploaded_filename,
+ remote_path=None,
+ callback=None,
+ mode='skip',
+ item_metadata=None,
+ headers=None,
+ item_description=None,
+ **kwargs):
+ headers = self._build_request_headers(headers=headers)
+ pbar = None
+ if callback is None:
+ if item_size > 10e6:
+ # size larger than 10 MB
+ pbar = tqdm.tqdm(total=item_size,
+ unit="B",
+ unit_scale=True,
+ unit_divisor=1024,
+ position=1,
+ file=sys.stdout,
+ disable=self.verbose.disable_progress_bar_upload_items,
+ desc='Upload Items')
+
+ def callback(bytes_read):
+ pbar.update(bytes_read)
+ else:
+ def callback(bytes_read):
+ pass
+
+ timeout = aiohttp.ClientTimeout(total=0)
+ async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
+ try:
+ form = aiohttp.FormData({})
+ form.add_field('type', item_type)
+ form.add_field('path', os.path.join(remote_path, uploaded_filename).replace('\\', '/'))
+ if item_metadata is not None:
+ form.add_field('metadata', json.dumps(item_metadata))
+ if item_description is not None:
+ form.add_field('description', item_description)
+ form.add_field('file', AsyncUploadStream(buffer=to_upload,
+ callback=callback,
+ name=uploaded_filename,
+ chunk_timeout=2 * 60))
+ url = '{}?mode={}'.format(self.base_gate_url + remote_url, mode)
+
+ # use SSL context
+ ssl_context = None
+ if self.use_ssl_context:
+ ssl_context = ssl.create_default_context(cafile=certifi.where())
+ async with session.post(url,
+ data=form,
+ verify_ssl=self.verify,
+ ssl=ssl_context) as resp:
+ self.last_request = resp.request_info
+ command = "curl -X {method} -H {headers} '{uri}'"
+ headers = ['"{0}: {1}"'.format(k, v) for k, v in resp.request_info.headers.items()]
+ headers = " -H ".join(headers)
+ self.last_curl = command.format(method=resp.request_info.method,
+ headers=headers,
+ uri=resp.request_info.url)
+ text = await resp.text()
+ try:
+ _json = await resp.json()
+ except Exception:
+ _json = dict()
+ response = AsyncResponse(text=text,
+ _json=_json,
+ async_resp=resp)
+ except Exception as err:
+ response = AsyncResponseError(error=err, trace=traceback.format_exc())
+ finally:
+ if pbar is not None:
+ pbar.close()
+ with threadLock:
+ self.calls_counter.add()
+ if response.ok and self.cache is not None:
+ try:
+ self.cache.write(list_entities_json=[response.json()])
+ dataset_id = url.split('/')[-2]
+ self.cache.write_stream(request_path=url,
+ buffer=to_upload,
+ file_name=uploaded_filename,
+ entity_id=response.json()['id'],
+ dataset_id=dataset_id)
+ except Exception:
+ logger.warning("Failed to add the file to the cache")
+ return response
+
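The default callback above simply feeds byte counts into a tqdm bar, and a caller-supplied callback follows the same shape. A standalone sketch of that pattern (sizes and chunking are illustrative):

    import tqdm

    item_size = 24 * 1024 * 1024  # illustrative item size in bytes
    pbar = tqdm.tqdm(total=item_size, unit='B', unit_scale=True, unit_divisor=1024, desc='Upload Items')

    def callback(bytes_read):
        # called with the number of bytes consumed from the upload stream
        pbar.update(bytes_read)

    for _ in range(3):  # simulate three 8 MB chunk reads
        callback(8 * 1024 * 1024)
    pbar.close()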
+ def __get_pbar(self, pbar, total_length):
+ # decide whether to create a progress bar for the item
+ if pbar:
+ try:
+ if total_length is not None and int(total_length) > 10e6: # size larger than 10 MB
+ pbar = tqdm.tqdm(total=int(total_length),
+ unit='B',
+ unit_scale=True,
+ unit_divisor=1024,
+ position=1,
+ file=sys.stdout,
+ disable=self.verbose.disable_progress_bar)
+ else:
+ pbar = None
+ except Exception as err:
+ pbar = None
+ logger.debug('Cannot determine downloaded file length, progress bar will not be presented: {}'.format(err))
+ return pbar
+
+ def send_session(self, prepared, stream=None):
+ if self.session is None:
+ self.session = requests.Session()
+ retry = Retry(
+ total=5,
+ read=5,
+ connect=5,
+ backoff_factor=1,
+ # retry on any request method
+ allowed_methods=False,
+ # force retry on these status codes
+ status_forcelist=(501, 502, 503, 504, 505, 506, 507, 508, 510, 511),
+ raise_on_status=False
+ )
+ adapter = HTTPAdapter(max_retries=retry,
+ pool_maxsize=np.sum(list(self._thread_pools_names.values())),
+ pool_connections=np.sum(list(self._thread_pools_names.values())))
+ self.session.mount('http://', adapter)
+ self.session.mount('https://', adapter)
+ resp = self.session.send(request=prepared, stream=stream, verify=self.verify, timeout=120)
+
+ with threadLock:
+ self.calls_counter.add()
+
+ return resp
+
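For reference, the retry policy mounted above can be reproduced on a standalone requests.Session as sketched below; pool sizes and the target URL are illustrative, and `allowed_methods=False` tells urllib3 to retry regardless of HTTP method:

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    retry = Retry(total=5, read=5, connect=5, backoff_factor=1,
                  allowed_methods=False,  # retry on any request method
                  status_forcelist=(501, 502, 503, 504, 505, 506, 507, 508, 510, 511),
                  raise_on_status=False)
    adapter = HTTPAdapter(max_retries=retry, pool_maxsize=32, pool_connections=32)

    session = requests.Session()
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    resp = session.get('https://gate.dataloop.ai', timeout=120)  # illustrative request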
+ @staticmethod
+ def check_proxy():
+ """
+ Verify that dataloop urls are not blocked
+ :return:
+ """
+ proxy_envs = ['HTTP', 'HTTPS', 'http', 'https']
+ dataloop_urls = ['dev-gate.dataloop.ai',
+ 'gate.dataloop.ai',
+ 'dataloop-development.auth0.com',
+ 'dataloop-production.auth0.com']
+ if True in [env in os.environ for env in proxy_envs]:
+ # check if proxy exists
+ if True in [env in os.environ for env in ['no_proxy', 'NO_PROXY']]:
+ # check if no_proxy exists
+ if 'no_proxy' in os.environ:
+ # check if dataloop urls in no_proxy
+ if True not in [url in os.environ['no_proxy'] for url in dataloop_urls]:
+ # no dataloop url exists in no_proxy
+ logger.warning('Proxy is used, make sure dataloop urls are in "no_proxy" environment variable')
+ else:
+ # check if dataloop urls in no_proxy
+ if True not in [url in os.environ['NO_PROXY'] for url in dataloop_urls]:
+ # no dataloop url exists in no_proxy
+ logger.warning('Proxy is used, make sure dataloop urls are in "no_proxy" environment variable')
+ else:
+ logger.warning('Proxy is used, make sure dataloop urls are in "no_proxy" environment variable')
+
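When a proxy is configured, the check above expects the Dataloop hosts to be excluded via no_proxy. A sketch of doing that from Python before using the SDK (host list taken from the check above):

    import os

    # exclude Dataloop hosts from the proxy, appending to any existing value
    existing = os.environ.get('no_proxy', '')
    hosts = 'gate.dataloop.ai,dataloop-production.auth0.com'
    os.environ['no_proxy'] = ','.join(filter(None, [existing, hosts]))
    os.environ['NO_PROXY'] = os.environ['no_proxy']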
+ def token_expired(self, t=60):
+ """
+ Check whether the token is expired or about to expire
+ :param t: time-ahead interval in seconds: treat the token as expired if it expires within t seconds
+ """
+ try:
+ if self.token is None or self.token == '':
+ expired = True
+ else:
+ payload = jwt.decode(self.token, algorithms=['HS256'],
+ options={'verify_signature': False}, verify=False)
+ d = datetime.datetime.now(datetime.timezone.utc)
+ epoch = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
+ now = (d - epoch).total_seconds()
+ exp = payload['exp']
+ if now < (exp - t):
+ expired = False
+ else:
+ expired = True
+ except jwt.exceptions.DecodeError:
+ logger.exception('Invalid token.')
+ expired = True
+ except Exception:
+ logger.exception('Unknown error:')
+ expired = True
+ if expired:
+ if self.renew_token_method():
+ expired = False
+ return expired
+
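The expiry check above only reads the unverified `exp` claim. A self-contained sketch of the same idea with PyJWT (the token and the 60-second window are illustrative):

    import datetime
    import jwt

    # build a throwaway token that expires in 30 seconds
    exp = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(seconds=30)
    token = jwt.encode({'exp': exp}, key='secret', algorithm='HS256')

    payload = jwt.decode(token, algorithms=['HS256'], options={'verify_signature': False})
    now = datetime.datetime.now(datetime.timezone.utc).timestamp()
    print('expired within 60s window:', now >= payload['exp'] - 60)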
+ @staticmethod
+ def is_json_serializable(response):
+ try:
+ response_json = response.json()
+ return True, response_json
+ except ValueError:
+ return False, None
+
+ ##########
+ # STDOUT #
+ ##########
+ def print_response(self, resp=None):
+ """
+ Print a tabulated response
+ :param resp: response from requests
+ :return:
+ """
+ try:
+ if resp is None:
+ resp = self.last_response
+ is_json_serializable, results = self.is_json_serializable(response=resp)
+ if self.verbose.print_all_responses and is_json_serializable:
+ if isinstance(results, dict):
+ to_print = miscellaneous.List([results])
+ elif isinstance(results, list):
+ to_print = miscellaneous.List(results)
+ else:
+ logger.debug('Unknown response type: {}. Cannot print'.format(type(results)))
+ return
+ request_id = resp.headers.get('x-request-id', 'na')
+ logger.debug('--- [Request] Start ---')
+ logger.debug(self.print_request(req=resp.request, to_return=True))
+ logger.debug('--- [Request] End ---')
+ logger.debug('--- [Response][x-request-id:{}] Start ---'.format(request_id))
+ to_print.print(show_all=False, level='debug')
+ logger.debug('--- [Response][x-request-id:{}] End ---'.format(request_id))
+ except Exception:
+ logger.exception('Printing response from gate:')
+
+ def print_bad_response(self, resp=None, log_error=True):
+ """
+ Print error from platform
+ :param resp:
+ :param log_error: print the error log (set to False when the request will be retried)
+ :return:
+ """
+ if resp is None:
+ resp = self.last_response
+ msg = ''
+ if hasattr(resp, 'status_code'):
+ msg += '[Response <{val}>]'.format(val=resp.status_code)
+ if hasattr(resp, 'reason'):
+ msg += '[Reason: {val}]'.format(val=resp.reason)
+ if hasattr(resp, 'text') and isinstance(resp.text, str):
+ msg += '[Text: {val}]'.format(val=format_message(resp.text))
+
+ request_id = resp.headers.get('x-request-id', 'na')
+ logger.debug('--- [Request] Start ---')
+ logger.debug(self.print_request(req=resp.request, to_return=True))
+ logger.debug('--- [Request] End ---')
+ logger.debug('--- [Response][x-request-id:{}] Start ---'.format(request_id))
+ if log_error:
+ logger.error(msg)
+ else:
+ logger.debug(msg)
+ logger.debug('--- [Response][x-request-id:{}] End ---'.format(request_id))
+ self.platform_exception = PlatformError(resp)
+
+ def print_request(self, req=None, to_return=False, with_auth=False):
+ """
+ Print a request to the platform
+ :param req:
+ :param to_return: return string instead of printing
+ :param with_auth: include the authorization header
+ :return:
+ """
+ if not req:
+ req = self.last_request
+
+ headers = list()
+ for k, v in req.headers.items():
+ if k == 'authorization' and not with_auth:
+ continue
+ headers.append('{}: {}'.format(k, v))
+ if hasattr(req, 'body'):
+ body = req.body
+ elif isinstance(req, aiohttp.RequestInfo):
+ body = {'multipart': 'true'}
+ else:
+ body = dict()
+
+ # remove secrets and passwords
+ try:
+ body = json.loads(body)
+ if isinstance(body, dict):
+ for key, value in body.items():
+ hide = any([field in key for field in ['secret', 'password']])
+ if hide:
+ body[key] = '*' * len(value)
+ except Exception:
+ pass
+
+ msg = '{}\n{}\n{}'.format(
+ req.method + ' ' + str(req.url),
+ '\n'.join(headers),
+ body,
+ )
+ if to_return:
+ return msg
+ else:
+ print(msg)
+
+ ################
+ # Environments #
+ ################
+ def setenv(self, env):
+ """
+ Set the active platform environment
+ :param env:
+ :return:
+ """
+
+ environments = self.environments
+ if env.startswith('http'):
+ if env not in environments.keys():
+ msg = 'Unknown environment. Please add environment to SDK ("add_environment" method)'
+ logger.error(msg)
+ raise ConnectionError(msg)
+ elif env == 'custom':
+ custom_env = os.environ.get('DTLPY_CUSTOM_ENV', None)
+ environment = json.loads(base64.b64decode(custom_env.encode()).decode())
+ env = environment.pop('url')
+ token = None
+ if self.environments.get(env):
+ token = self.environments[env].get('token', None)
+ self.environments[env] = environment.get(env, environment)
+ self.environments[env]['token'] = token
+ verify_ssl = self.environments[env].get('verify_ssl', None)
+ if verify_ssl is not None and isinstance(verify_ssl, str):
+ self.environments[env]['verify_ssl'] = True if verify_ssl.lower() == 'true' else False
+ else:
+ matched_env = [env_url for env_url, env_dict in environments.items() if env_dict['alias'] == env]
+ if len(matched_env) != 1:
+ known_aliases = [env_dict['alias'] for env_url, env_dict in environments.items()]
+ raise ConnectionError(
+ 'Unknown platform environment: "{}". Known: {}'.format(env, ', '.join(known_aliases)))
+ env = matched_env[0]
+ if self.environment != env:
+ self.environment = env
+ self.__gate_url_for_requests = None
+ # reset local token
+ self._token = None
+ self.refresh_token_active = True
+ logger.info('Platform environment: {}'.format(self.environment))
+ if self.token_expired():
+ logger.info('Token expired. Please log in.')
+
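Typical usage of the environment switch above goes through the package-level helpers, for example:

    import dtlpy as dl

    # switch by alias or by full gate URL (both forms are accepted by setenv)
    dl.setenv('rc')
    if dl.token_expired():
        dl.login()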
+ ##########
+ # Log in #
+ ##########
+ def login_secret(self, email, password, client_id, client_secret=None, force=False):
+ """
+ Login with email and password from environment variables.
+ If already logged in with the same user, login will NOT happen. See "force".
+
+ :param email: user email.
+ :param password: user password
+ :param client_id: auth0 client id
+ :param client_secret: secret that matches the client id
+ :param force: force login; use when already logged in with the same user but a new JWT is needed
+ :return:
+ """
+ logger.warning('dl.login_secret is deprecated. Please use dl.login_m2m instead.')
+ return login_secret(api_client=self,
+ email=email,
+ password=password,
+ client_id=client_id,
+ client_secret=client_secret,
+ force=force)
+
+ def login_m2m(self, email, password, client_id=None, client_secret=None, force=False):
+ """
+ Login with email and password from environment variables
+ :param email: user email. If already logged in with the same user, login will NOT happen. See "force"
+ :param password: user password
+ :param client_id:
+ :param client_secret:
+ :param force: force login; use when already logged in with the same user but a new JWT is needed
+ :return:
+ """
+ res = login_m2m(api_client=self,
+ email=email,
+ password=password,
+ client_id=client_id,
+ client_secret=client_secret,
+ force=force)
+ if res:
+ self._send_login_event(user_type='human', login_type='m2m')
+ return res
+
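A minimal sketch of machine-to-machine login via the package-level wrapper; the environment variable names are placeholders chosen for the example:

    import os
    import dtlpy as dl

    dl.login_m2m(email=os.environ['DTLPY_USERNAME'],      # placeholder env var names
                 password=os.environ['DTLPY_PASSWORD'])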
+ def login_token(self, token):
+ """
+ Login using an existing token
+ :param token: a valid token
+ :return:
+ """
+ current_token = self.token
+ self.token = token
+ success, response = self.gen_request(req_type='get', path='/users/me')
+ if not response.ok:
+ # restore the previous token
+ self.token = current_token
+ raise ValueError(f"Invalid token provided. Error: {response.text}")
+
+ def login_api_key(self, api_key):
+ """
+ Login using an API key
+ :param api_key: a valid API key
+ :return:
+ """
+ current_token = self.token
+ self.token = api_key
+ success, response = self.gen_request(req_type='get', path='/users/me')
+ if not response.ok:
+ # restore the previous token
+ self.token = current_token
+ raise ValueError(f"Invalid API key provided. Error: {response.text}")
+
+ @property
+ def login_domain(self):
+ if self._login_domain is None:
+ self._login_domain = self.environments[self.environment].get('login_domain', None)
+ return self._login_domain
+
+ @login_domain.setter
+ def login_domain(self, domain: str):
+ if domain is not None and not isinstance(domain, str):
+ raise exceptions.PlatformException('400', 'domain should be a string value')
+ self._login_domain = domain
+ self.environments[self.environment]['login_domain'] = domain
+ self.cookie_io.put('login_parameters', self.environments)
+
+ def login(self, audience=None, auth0_url=None, client_id=None, callback_port=None):
+ """
+ Login using Auth0.
+ :return:
+ """
+ res = login(
+ api_client=self,
+ audience=audience,
+ auth0_url=auth0_url,
+ client_id=client_id,
+ login_domain=self.login_domain,
+ callback_port=callback_port
+ )
+ if res:
+ self._send_login_event(user_type='human', login_type='interactive')
+ return res
+
+ def _send_login_event(self, user_type, login_type):
+ event_payload = {
+ 'event': 'dtlpy:login',
+ 'properties': {
+ 'login_type': login_type,
+ 'user_type': user_type
+ }
+ }
+ self.event_tracker.put(event=event_payload)
+
+ def logout(self):
+ """
+ Logout.
+ :return:
+ """
+ return logout(api_client=self)
+
+ def _renew_token_in_dual_agent(self):
+ renewed = False
+ try:
+ proxy_port = os.environ.get('AGENT_PROXY_MAIN_PORT') or "1001"
+ resp = requests.get('http://localhost:{port}/get_jwt'.format(port=proxy_port))
+ if resp.ok:
+ self.token = resp.json()['jwt']
+ renewed = True
+ else:
+ self.print_bad_response(resp)
+ except Exception:
+ logger.exception('Failed to get token from proxy')
+
+ return renewed
+
+ def renew_token(self):
+ refresh_method = os.environ.get('DTLPY_REFRESH_TOKEN_METHOD', None)
+ if refresh_method is not None and refresh_method == 'proxy':
+ res = self._renew_token_in_dual_agent()
+ else:
+ res = self._renew_token_with_refresh_token()
+ if res:
+ self._send_login_event(user_type='human', login_type='refresh')
+ return res
+
+ def generate_api_key(self, description: str = None, login: bool = False):
+ """
+ Generate an API key for the current user
+ :param description: description for the API key
+ :param login: if True, login with the new API key
+ :return: the new API key (JWT); returns True when login=True
+ """
+ user_email = self.info()['user_email']
+ payload = {
+ 'userId': user_email
+ }
+ if description:
+ if not isinstance(description, str):
+ raise ValueError('description should be a string')
+ payload['description'] = description
+ success, response = self.gen_request(req_type='post', path='/apiKeys', json_req=payload)
+ if not success:
+ raise exceptions.PlatformException(response)
+ if login:
+ self.login_api_key(response.json()['jwt'])
+ return True
+
+ return response.json()['jwt']
+
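A sketch of generating an API key and authenticating with it, assuming `dl.client_api` exposes the shared ApiClient instance (the description string is illustrative):

    import dtlpy as dl

    api_key = dl.client_api.generate_api_key(description='ci token')  # returns the key when login=False
    dl.client_api.login_api_key(api_key)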
+ def _renew_token_with_refresh_token(self):
+ renewed = False
+ if self.refresh_token_active is False:
+ return renewed
+ logger.debug('RefreshToken: Started')
+ if self.token is None or self.token == '':
+ # token is missing
+ logger.debug('RefreshToken: Missing token.')
+ self.refresh_token_active = False
+ if self.refresh_token is None or self.refresh_token == '':
+ # missing refresh token
+ logger.debug('RefreshToken: Missing "refresh_token"')
+ self.refresh_token_active = False
+ if self.environment not in self.environments.keys():
+ # env params missing
+ logger.debug('RefreshToken: Missing environments params for refreshing token')
+ self.refresh_token_active = False
+
+ if self.refresh_token_active is False:
+ return renewed
+
+ refresh_token = self.refresh_token
+
+ env_params = self.environments[self.environment]
+ if 'gate_url' not in env_params:
+ env_params['gate_url'] = gate_url_from_host(environment=self.environment)
+ self.environments[self.environment] = env_params
+ token_endpoint = "{}/token?default".format(env_params['gate_url'])
+
+ payload = {
+ 'type': 'refresh_token',
+ 'refresh_token': refresh_token
+ }
+ logger.debug("RefreshToken: Refreshing token via {}".format(token_endpoint))
+ resp = requests.request(
+ "POST",
+ token_endpoint,
+ json=payload,
+ headers={'content-type': 'application/json'},
+ verify=self.verify
+ )
+ if not resp.ok:
+ logger.debug('RefreshToken: Failed')
+ self.print_bad_response(resp)
+ else:
+ response_dict = resp.json()
+ # get new token
+ final_token = response_dict['id_token']
+ self.token = final_token
+ self.refresh_token = refresh_token
+ # set status back to pending
+ logger.debug('RefreshToken: Success')
+ renewed = True
+ return renewed
+
+ def set_api_counter(self, filepath):
+ self.calls_counter = CallsCounter(filepath=filepath)
+
+ def _get_resource_url(self, url):
+
+ env = self._environments[self._environment]['alias']
+ head = self._environments[self._environment].get('url', None)
+ # TODO need to deprecate somehow (the following)
+ if head is None:
+ if env == 'prod':
+ head = 'https://console.dataloop.ai/'
+ elif env == 'dev':
+ head = 'https://dev-con.dataloop.ai/'
+ elif env == 'rc':
+ head = 'https://rc-con.dataloop.ai/'
+ elif env in ['local', 'minikube_local_mac']:
+ head = 'https://localhost:8443/'
+ elif env == 'new-dev':
+ head = 'https://custom1-gate.dataloop.ai/'
+ else:
+ raise exceptions.PlatformException(error='400', message='Unknown environment: {}'.format(env))
+
+ return head + url
+
+ def _open_in_web(self, url):
+ import webbrowser
+ webbrowser.open(url=url, new=2, autoraise=True)
+
+
+ client = ApiClient()