dtlpy 1.115.44__py3-none-any.whl → 1.116.6__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registries. It is provided for informational purposes only.
- dtlpy/__init__.py +491 -491
- dtlpy/__version__.py +1 -1
- dtlpy/assets/__init__.py +26 -26
- dtlpy/assets/code_server/config.yaml +2 -2
- dtlpy/assets/code_server/installation.sh +24 -24
- dtlpy/assets/code_server/launch.json +13 -13
- dtlpy/assets/code_server/settings.json +2 -2
- dtlpy/assets/main.py +53 -53
- dtlpy/assets/main_partial.py +18 -18
- dtlpy/assets/mock.json +11 -11
- dtlpy/assets/model_adapter.py +83 -83
- dtlpy/assets/package.json +61 -61
- dtlpy/assets/package_catalog.json +29 -29
- dtlpy/assets/package_gitignore +307 -307
- dtlpy/assets/service_runners/__init__.py +33 -33
- dtlpy/assets/service_runners/converter.py +96 -96
- dtlpy/assets/service_runners/multi_method.py +49 -49
- dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
- dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
- dtlpy/assets/service_runners/multi_method_item.py +52 -52
- dtlpy/assets/service_runners/multi_method_json.py +52 -52
- dtlpy/assets/service_runners/single_method.py +37 -37
- dtlpy/assets/service_runners/single_method_annotation.py +43 -43
- dtlpy/assets/service_runners/single_method_dataset.py +43 -43
- dtlpy/assets/service_runners/single_method_item.py +41 -41
- dtlpy/assets/service_runners/single_method_json.py +42 -42
- dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
- dtlpy/assets/voc_annotation_template.xml +23 -23
- dtlpy/caches/base_cache.py +32 -32
- dtlpy/caches/cache.py +473 -473
- dtlpy/caches/dl_cache.py +201 -201
- dtlpy/caches/filesystem_cache.py +89 -89
- dtlpy/caches/redis_cache.py +84 -84
- dtlpy/dlp/__init__.py +20 -20
- dtlpy/dlp/cli_utilities.py +367 -367
- dtlpy/dlp/command_executor.py +764 -764
- dtlpy/dlp/dlp +1 -1
- dtlpy/dlp/dlp.bat +1 -1
- dtlpy/dlp/dlp.py +128 -128
- dtlpy/dlp/parser.py +651 -651
- dtlpy/entities/__init__.py +83 -83
- dtlpy/entities/analytic.py +347 -347
- dtlpy/entities/annotation.py +1879 -1879
- dtlpy/entities/annotation_collection.py +699 -699
- dtlpy/entities/annotation_definitions/__init__.py +20 -20
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
- dtlpy/entities/annotation_definitions/box.py +195 -195
- dtlpy/entities/annotation_definitions/classification.py +67 -67
- dtlpy/entities/annotation_definitions/comparison.py +72 -72
- dtlpy/entities/annotation_definitions/cube.py +204 -204
- dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
- dtlpy/entities/annotation_definitions/description.py +32 -32
- dtlpy/entities/annotation_definitions/ellipse.py +124 -124
- dtlpy/entities/annotation_definitions/free_text.py +62 -62
- dtlpy/entities/annotation_definitions/gis.py +69 -69
- dtlpy/entities/annotation_definitions/note.py +139 -139
- dtlpy/entities/annotation_definitions/point.py +117 -117
- dtlpy/entities/annotation_definitions/polygon.py +182 -182
- dtlpy/entities/annotation_definitions/polyline.py +111 -111
- dtlpy/entities/annotation_definitions/pose.py +92 -92
- dtlpy/entities/annotation_definitions/ref_image.py +86 -86
- dtlpy/entities/annotation_definitions/segmentation.py +240 -240
- dtlpy/entities/annotation_definitions/subtitle.py +34 -34
- dtlpy/entities/annotation_definitions/text.py +85 -85
- dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
- dtlpy/entities/app.py +220 -220
- dtlpy/entities/app_module.py +107 -107
- dtlpy/entities/artifact.py +174 -174
- dtlpy/entities/assignment.py +399 -399
- dtlpy/entities/base_entity.py +214 -214
- dtlpy/entities/bot.py +113 -113
- dtlpy/entities/codebase.py +292 -292
- dtlpy/entities/collection.py +38 -38
- dtlpy/entities/command.py +169 -169
- dtlpy/entities/compute.py +449 -449
- dtlpy/entities/dataset.py +1299 -1299
- dtlpy/entities/directory_tree.py +44 -44
- dtlpy/entities/dpk.py +470 -470
- dtlpy/entities/driver.py +235 -235
- dtlpy/entities/execution.py +397 -397
- dtlpy/entities/feature.py +124 -124
- dtlpy/entities/feature_set.py +145 -145
- dtlpy/entities/filters.py +798 -798
- dtlpy/entities/gis_item.py +107 -107
- dtlpy/entities/integration.py +184 -184
- dtlpy/entities/item.py +959 -959
- dtlpy/entities/label.py +123 -123
- dtlpy/entities/links.py +85 -85
- dtlpy/entities/message.py +175 -175
- dtlpy/entities/model.py +684 -684
- dtlpy/entities/node.py +1005 -1005
- dtlpy/entities/ontology.py +810 -803
- dtlpy/entities/organization.py +287 -287
- dtlpy/entities/package.py +657 -657
- dtlpy/entities/package_defaults.py +5 -5
- dtlpy/entities/package_function.py +185 -185
- dtlpy/entities/package_module.py +113 -113
- dtlpy/entities/package_slot.py +118 -118
- dtlpy/entities/paged_entities.py +299 -299
- dtlpy/entities/pipeline.py +624 -624
- dtlpy/entities/pipeline_execution.py +279 -279
- dtlpy/entities/project.py +394 -394
- dtlpy/entities/prompt_item.py +505 -505
- dtlpy/entities/recipe.py +301 -301
- dtlpy/entities/reflect_dict.py +102 -102
- dtlpy/entities/resource_execution.py +138 -138
- dtlpy/entities/service.py +963 -963
- dtlpy/entities/service_driver.py +117 -117
- dtlpy/entities/setting.py +294 -294
- dtlpy/entities/task.py +495 -495
- dtlpy/entities/time_series.py +143 -143
- dtlpy/entities/trigger.py +426 -426
- dtlpy/entities/user.py +118 -118
- dtlpy/entities/webhook.py +124 -124
- dtlpy/examples/__init__.py +19 -19
- dtlpy/examples/add_labels.py +135 -135
- dtlpy/examples/add_metadata_to_item.py +21 -21
- dtlpy/examples/annotate_items_using_model.py +65 -65
- dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
- dtlpy/examples/annotations_convert_to_voc.py +9 -9
- dtlpy/examples/annotations_convert_to_yolo.py +9 -9
- dtlpy/examples/convert_annotation_types.py +51 -51
- dtlpy/examples/converter.py +143 -143
- dtlpy/examples/copy_annotations.py +22 -22
- dtlpy/examples/copy_folder.py +31 -31
- dtlpy/examples/create_annotations.py +51 -51
- dtlpy/examples/create_video_annotations.py +83 -83
- dtlpy/examples/delete_annotations.py +26 -26
- dtlpy/examples/filters.py +113 -113
- dtlpy/examples/move_item.py +23 -23
- dtlpy/examples/play_video_annotation.py +13 -13
- dtlpy/examples/show_item_and_mask.py +53 -53
- dtlpy/examples/triggers.py +49 -49
- dtlpy/examples/upload_batch_of_items.py +20 -20
- dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
- dtlpy/examples/upload_items_with_modalities.py +43 -43
- dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
- dtlpy/examples/upload_yolo_format_annotations.py +70 -70
- dtlpy/exceptions.py +125 -125
- dtlpy/miscellaneous/__init__.py +20 -20
- dtlpy/miscellaneous/dict_differ.py +95 -95
- dtlpy/miscellaneous/git_utils.py +217 -217
- dtlpy/miscellaneous/json_utils.py +14 -14
- dtlpy/miscellaneous/list_print.py +105 -105
- dtlpy/miscellaneous/zipping.py +130 -130
- dtlpy/ml/__init__.py +20 -20
- dtlpy/ml/base_feature_extractor_adapter.py +27 -27
- dtlpy/ml/base_model_adapter.py +1257 -1230
- dtlpy/ml/metrics.py +461 -461
- dtlpy/ml/predictions_utils.py +274 -274
- dtlpy/ml/summary_writer.py +57 -57
- dtlpy/ml/train_utils.py +60 -60
- dtlpy/new_instance.py +252 -252
- dtlpy/repositories/__init__.py +56 -56
- dtlpy/repositories/analytics.py +85 -85
- dtlpy/repositories/annotations.py +916 -916
- dtlpy/repositories/apps.py +383 -383
- dtlpy/repositories/artifacts.py +452 -452
- dtlpy/repositories/assignments.py +599 -599
- dtlpy/repositories/bots.py +213 -213
- dtlpy/repositories/codebases.py +559 -559
- dtlpy/repositories/collections.py +332 -332
- dtlpy/repositories/commands.py +152 -152
- dtlpy/repositories/compositions.py +61 -61
- dtlpy/repositories/computes.py +439 -439
- dtlpy/repositories/datasets.py +1504 -1504
- dtlpy/repositories/downloader.py +976 -923
- dtlpy/repositories/dpks.py +433 -433
- dtlpy/repositories/drivers.py +482 -482
- dtlpy/repositories/executions.py +815 -815
- dtlpy/repositories/feature_sets.py +226 -226
- dtlpy/repositories/features.py +255 -255
- dtlpy/repositories/integrations.py +484 -484
- dtlpy/repositories/items.py +912 -912
- dtlpy/repositories/messages.py +94 -94
- dtlpy/repositories/models.py +1000 -1000
- dtlpy/repositories/nodes.py +80 -80
- dtlpy/repositories/ontologies.py +511 -511
- dtlpy/repositories/organizations.py +525 -525
- dtlpy/repositories/packages.py +1941 -1941
- dtlpy/repositories/pipeline_executions.py +451 -451
- dtlpy/repositories/pipelines.py +640 -640
- dtlpy/repositories/projects.py +539 -539
- dtlpy/repositories/recipes.py +419 -399
- dtlpy/repositories/resource_executions.py +137 -137
- dtlpy/repositories/schema.py +120 -120
- dtlpy/repositories/service_drivers.py +213 -213
- dtlpy/repositories/services.py +1704 -1704
- dtlpy/repositories/settings.py +339 -339
- dtlpy/repositories/tasks.py +1477 -1477
- dtlpy/repositories/times_series.py +278 -278
- dtlpy/repositories/triggers.py +536 -536
- dtlpy/repositories/upload_element.py +257 -257
- dtlpy/repositories/uploader.py +661 -661
- dtlpy/repositories/webhooks.py +249 -249
- dtlpy/services/__init__.py +22 -22
- dtlpy/services/aihttp_retry.py +131 -131
- dtlpy/services/api_client.py +1785 -1785
- dtlpy/services/api_reference.py +40 -40
- dtlpy/services/async_utils.py +133 -133
- dtlpy/services/calls_counter.py +44 -44
- dtlpy/services/check_sdk.py +68 -68
- dtlpy/services/cookie.py +115 -115
- dtlpy/services/create_logger.py +156 -156
- dtlpy/services/events.py +84 -84
- dtlpy/services/logins.py +235 -235
- dtlpy/services/reporter.py +256 -256
- dtlpy/services/service_defaults.py +91 -91
- dtlpy/utilities/__init__.py +20 -20
- dtlpy/utilities/annotations/__init__.py +16 -16
- dtlpy/utilities/annotations/annotation_converters.py +269 -269
- dtlpy/utilities/base_package_runner.py +285 -264
- dtlpy/utilities/converter.py +1650 -1650
- dtlpy/utilities/dataset_generators/__init__.py +1 -1
- dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
- dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
- dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
- dtlpy/utilities/local_development/__init__.py +1 -1
- dtlpy/utilities/local_development/local_session.py +179 -179
- dtlpy/utilities/reports/__init__.py +2 -2
- dtlpy/utilities/reports/figures.py +343 -343
- dtlpy/utilities/reports/report.py +71 -71
- dtlpy/utilities/videos/__init__.py +17 -17
- dtlpy/utilities/videos/video_player.py +598 -598
- dtlpy/utilities/videos/videos.py +470 -470
- {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp +1 -1
- dtlpy-1.116.6.data/scripts/dlp.bat +2 -0
- {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp.py +128 -128
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/METADATA +186 -186
- dtlpy-1.116.6.dist-info/RECORD +239 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/WHEEL +1 -1
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/licenses/LICENSE +200 -200
- tests/features/environment.py +551 -551
- dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
- dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
- dtlpy-1.115.44.dist-info/RECORD +0 -240
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/top_level.txt +0 -0
dtlpy/services/api_client.py
CHANGED
|
@@ -1,1785 +1,1785 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Dataloop platform calls
|
|
3
|
-
"""
|
|
4
|
-
import aiohttp.client_exceptions
|
|
5
|
-
import requests_toolbelt
|
|
6
|
-
import multiprocessing
|
|
7
|
-
import threading
|
|
8
|
-
import traceback
|
|
9
|
-
import datetime
|
|
10
|
-
import requests
|
|
11
|
-
import aiohttp
|
|
12
|
-
import logging
|
|
13
|
-
import asyncio
|
|
14
|
-
import certifi
|
|
15
|
-
import base64
|
|
16
|
-
import enum
|
|
17
|
-
import time
|
|
18
|
-
import tqdm
|
|
19
|
-
import json
|
|
20
|
-
import sys
|
|
21
|
-
import ssl
|
|
22
|
-
import jwt
|
|
23
|
-
import os
|
|
24
|
-
import io
|
|
25
|
-
import concurrent
|
|
26
|
-
from concurrent.futures import ThreadPoolExecutor
|
|
27
|
-
from requests.adapters import HTTPAdapter
|
|
28
|
-
from urllib3.util import Retry
|
|
29
|
-
from functools import wraps
|
|
30
|
-
import numpy as np
|
|
31
|
-
import inspect
|
|
32
|
-
from requests.models import Response
|
|
33
|
-
from dtlpy.caches.cache import CacheManger, CacheConfig
|
|
34
|
-
from .calls_counter import CallsCounter
|
|
35
|
-
from .cookie import CookieIO
|
|
36
|
-
from .logins import login, logout, login_secret, login_m2m, gate_url_from_host
|
|
37
|
-
from .async_utils import AsyncResponse, AsyncUploadStream, AsyncResponseError, AsyncThreadEventLoop
|
|
38
|
-
from .events import Events
|
|
39
|
-
from .service_defaults import DEFAULT_ENVIRONMENTS, DEFAULT_ENVIRONMENT
|
|
40
|
-
from .aihttp_retry import RetryClient
|
|
41
|
-
from .. import miscellaneous, exceptions, __version__
|
|
42
|
-
|
|
43
|
-
logger = logging.getLogger(name='dtlpy')
|
|
44
|
-
threadLock = threading.Lock()
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
def format_message(message):
|
|
48
|
-
if message and isinstance(message, str):
|
|
49
|
-
return message.replace('\\n', '\n')
|
|
50
|
-
return message
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
class VerboseLoggingLevel:
|
|
54
|
-
DEBUG = "debug"
|
|
55
|
-
INFO = "info"
|
|
56
|
-
WARNING = "warning"
|
|
57
|
-
ERROR = "error"
|
|
58
|
-
CRITICAL = "critical"
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
class PlatformError(Exception):
|
|
62
|
-
"""
|
|
63
|
-
Error handling for api calls
|
|
64
|
-
"""
|
|
65
|
-
|
|
66
|
-
def __init__(self, resp):
|
|
67
|
-
msg = ''
|
|
68
|
-
if hasattr(resp, 'status_code'):
|
|
69
|
-
msg += '<Response [{}]>'.format(resp.status_code)
|
|
70
|
-
if hasattr(resp, 'reason'):
|
|
71
|
-
msg += '<Reason [{}]>'.format(format_message(resp.reason))
|
|
72
|
-
elif hasattr(resp, 'text'):
|
|
73
|
-
msg += '<Reason [{}]>'.format(format_message(resp.text))
|
|
74
|
-
super().__init__(msg)
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
class Callbacks:
|
|
78
|
-
def __init__(self):
|
|
79
|
-
self._callbacks = {}
|
|
80
|
-
|
|
81
|
-
class CallbackEvent(str, enum.Enum):
|
|
82
|
-
DATASET_EXPORT = 'datasetExport'
|
|
83
|
-
ITEMS_UPLOAD = 'itemUpload'
|
|
84
|
-
|
|
85
|
-
def add(self, event, func):
|
|
86
|
-
|
|
87
|
-
if not callable(func):
|
|
88
|
-
raise ValueError(f"The provided callback for {event} is not callable")
|
|
89
|
-
if event not in list(self.CallbackEvent):
|
|
90
|
-
raise ValueError(f"Unknown event: {event!r}, allowed events are: {list(self.CallbackEvent)}")
|
|
91
|
-
self._callbacks[event] = func
|
|
92
|
-
|
|
93
|
-
def get(self, name):
|
|
94
|
-
return self._callbacks.get(name)
|
|
95
|
-
|
|
96
|
-
def run_on_event(self, event, context, progress):
|
|
97
|
-
callback = self.get(event)
|
|
98
|
-
if callback is not None:
|
|
99
|
-
callback(progress=progress, context=context)
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
class Verbose:
|
|
103
|
-
__DEFAULT_LOGGING_LEVEL = 'warning'
|
|
104
|
-
__DEFAULT_DISABLE_PROGRESS_BAR = False
|
|
105
|
-
__DEFAULT_PRINT_ALL_RESPONSES = False
|
|
106
|
-
__PRINT_ERROR_LOGS = False
|
|
107
|
-
__DEFAULT_PROGRESS_BAR_SETTINGS = {
|
|
108
|
-
'Iterate Pages': False,
|
|
109
|
-
'Command Progress': False,
|
|
110
|
-
'Download Dataset': False,
|
|
111
|
-
'Download Item': False,
|
|
112
|
-
'Upload Items': False,
|
|
113
|
-
'Download Annotations': False,
|
|
114
|
-
'Upload Annotations': False,
|
|
115
|
-
'Convert Annotations': False
|
|
116
|
-
}
|
|
117
|
-
|
|
118
|
-
def __init__(self, cookie):
|
|
119
|
-
self.cookie = cookie
|
|
120
|
-
dictionary = self.cookie.get('verbose')
|
|
121
|
-
if isinstance(dictionary, dict):
|
|
122
|
-
self.from_cookie(dictionary)
|
|
123
|
-
else:
|
|
124
|
-
self._logging_level = self.__DEFAULT_LOGGING_LEVEL
|
|
125
|
-
self._disable_progress_bar = self.__DEFAULT_DISABLE_PROGRESS_BAR
|
|
126
|
-
self._print_all_responses = self.__DEFAULT_PRINT_ALL_RESPONSES
|
|
127
|
-
self._print_error_logs = self.__PRINT_ERROR_LOGS
|
|
128
|
-
self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
|
|
129
|
-
if os.getenv('DTLPY_REFRESH_TOKEN_METHOD', "") == "proxy":
|
|
130
|
-
self._print_error_logs = True
|
|
131
|
-
self.to_cookie()
|
|
132
|
-
|
|
133
|
-
def to_cookie(self):
|
|
134
|
-
dictionary = {'logging_level': self._logging_level,
|
|
135
|
-
'disable_progress_bar': self._disable_progress_bar,
|
|
136
|
-
'print_all_responses': self._print_all_responses,
|
|
137
|
-
'print_error_logs': self._print_error_logs,
|
|
138
|
-
'progress_bar_setting': json.dumps(self._progress_bar_settings)
|
|
139
|
-
}
|
|
140
|
-
self.cookie.put(key='verbose', value=dictionary)
|
|
141
|
-
|
|
142
|
-
def from_cookie(self, dictionary):
|
|
143
|
-
self._logging_level = dictionary.get('logging_level', self.__DEFAULT_LOGGING_LEVEL)
|
|
144
|
-
self._disable_progress_bar = dictionary.get('disable_progress_bar', self.__DEFAULT_DISABLE_PROGRESS_BAR)
|
|
145
|
-
self._print_all_responses = dictionary.get('print_all_responses', self.__DEFAULT_PRINT_ALL_RESPONSES)
|
|
146
|
-
self._print_error_logs = dictionary.get('print_error_logs', self.__PRINT_ERROR_LOGS)
|
|
147
|
-
progress_bar_settings = dictionary.get('progress_bar_setting', None)
|
|
148
|
-
if progress_bar_settings is None:
|
|
149
|
-
self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
|
|
150
|
-
else:
|
|
151
|
-
self._progress_bar_settings = json.loads(progress_bar_settings)
|
|
152
|
-
|
|
153
|
-
@property
|
|
154
|
-
def disable_progress_bar_iterate_pages(self):
|
|
155
|
-
return self._disable_progress_bar or self._progress_bar_settings.get('Iterate Pages', False)
|
|
156
|
-
|
|
157
|
-
@disable_progress_bar_iterate_pages.setter
|
|
158
|
-
def disable_progress_bar_iterate_pages(self, val):
|
|
159
|
-
self._progress_bar_settings['Iterate Pages'] = val
|
|
160
|
-
self.to_cookie()
|
|
161
|
-
|
|
162
|
-
@property
|
|
163
|
-
def disable_progress_bar_command_progress(self):
|
|
164
|
-
return self._disable_progress_bar or self._progress_bar_settings.get('Command Progress', False)
|
|
165
|
-
|
|
166
|
-
@disable_progress_bar_command_progress.setter
|
|
167
|
-
def disable_progress_bar_command_progress(self, val):
|
|
168
|
-
self._progress_bar_settings['Command Progress'] = val
|
|
169
|
-
self.to_cookie()
|
|
170
|
-
|
|
171
|
-
@property
|
|
172
|
-
def disable_progress_bar_download_item(self):
|
|
173
|
-
return self._disable_progress_bar or self._progress_bar_settings.get('Download Item', False)
|
|
174
|
-
|
|
175
|
-
@disable_progress_bar_download_item.setter
|
|
176
|
-
def disable_progress_bar_download_item(self, val):
|
|
177
|
-
self._progress_bar_settings['Download Item'] = val
|
|
178
|
-
self.to_cookie()
|
|
179
|
-
|
|
180
|
-
@property
|
|
181
|
-
def disable_progress_bar_download_dataset(self):
|
|
182
|
-
return self._disable_progress_bar or self._progress_bar_settings.get('Download Dataset', False)
|
|
183
|
-
|
|
184
|
-
@disable_progress_bar_download_dataset.setter
|
|
185
|
-
def disable_progress_bar_download_dataset(self, val):
|
|
186
|
-
self._progress_bar_settings['Download Dataset'] = val
|
|
187
|
-
self.to_cookie()
|
|
188
|
-
|
|
189
|
-
@property
|
|
190
|
-
def disable_progress_bar_upload_items(self):
|
|
191
|
-
return self._disable_progress_bar or self._progress_bar_settings.get('Upload Items', False)
|
|
192
|
-
|
|
193
|
-
@disable_progress_bar_upload_items.setter
|
|
194
|
-
def disable_progress_bar_upload_items(self, val):
|
|
195
|
-
self._progress_bar_settings['Upload Items'] = val
|
|
196
|
-
self.to_cookie()
|
|
197
|
-
|
|
198
|
-
@property
|
|
199
|
-
def disable_progress_bar_download_annotations(self):
|
|
200
|
-
return self._disable_progress_bar or self._progress_bar_settings.get('Download Annotations', False)
|
|
201
|
-
|
|
202
|
-
@disable_progress_bar_download_annotations.setter
|
|
203
|
-
def disable_progress_bar_download_annotations(self, val):
|
|
204
|
-
self._progress_bar_settings['Download Annotations'] = val
|
|
205
|
-
self.to_cookie()
|
|
206
|
-
|
|
207
|
-
@property
|
|
208
|
-
def disable_progress_bar_upload_annotations(self):
|
|
209
|
-
return self._disable_progress_bar or self._progress_bar_settings.get('Upload Annotations', False)
|
|
210
|
-
|
|
211
|
-
@disable_progress_bar_upload_annotations.setter
|
|
212
|
-
def disable_progress_bar_upload_annotations(self, val):
|
|
213
|
-
self._progress_bar_settings['Upload Annotations'] = val
|
|
214
|
-
self.to_cookie()
|
|
215
|
-
|
|
216
|
-
@property
|
|
217
|
-
def disable_progress_bar_convert_annotations(self):
|
|
218
|
-
return self._disable_progress_bar or self._progress_bar_settings.get('Convert Annotations', False)
|
|
219
|
-
|
|
220
|
-
@disable_progress_bar_convert_annotations.setter
|
|
221
|
-
def disable_progress_bar_convert_annotations(self, val):
|
|
222
|
-
self._progress_bar_settings['Convert Annotations'] = val
|
|
223
|
-
self.to_cookie()
|
|
224
|
-
|
|
225
|
-
@property
|
|
226
|
-
def disable_progress_bar(self):
|
|
227
|
-
return self._disable_progress_bar
|
|
228
|
-
|
|
229
|
-
@disable_progress_bar.setter
|
|
230
|
-
def disable_progress_bar(self, val):
|
|
231
|
-
self._disable_progress_bar = val
|
|
232
|
-
self.to_cookie()
|
|
233
|
-
|
|
234
|
-
@property
|
|
235
|
-
def logging_level(self):
|
|
236
|
-
return self._logging_level
|
|
237
|
-
|
|
238
|
-
@logging_level.setter
|
|
239
|
-
def logging_level(self, val):
|
|
240
|
-
self._logging_level = val
|
|
241
|
-
# set log level
|
|
242
|
-
logging.getLogger(name='dtlpy').handlers[0].setLevel(logging._nameToLevel[self._logging_level.upper()])
|
|
243
|
-
# write to cookie
|
|
244
|
-
self.to_cookie()
|
|
245
|
-
|
|
246
|
-
@property
|
|
247
|
-
def print_all_responses(self):
|
|
248
|
-
return self._print_all_responses
|
|
249
|
-
|
|
250
|
-
@print_all_responses.setter
|
|
251
|
-
def print_all_responses(self, val):
|
|
252
|
-
self._print_all_responses = val
|
|
253
|
-
self.to_cookie()
|
|
254
|
-
|
|
255
|
-
@property
|
|
256
|
-
def print_error_logs(self):
|
|
257
|
-
return self._print_error_logs
|
|
258
|
-
|
|
259
|
-
@print_error_logs.setter
|
|
260
|
-
def print_error_logs(self, val):
|
|
261
|
-
self._print_error_logs = val
|
|
262
|
-
self.to_cookie()
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
class CacheMode:
|
|
266
|
-
__DEFAULT_ENABLE_CACHE = True
|
|
267
|
-
__DEFAULT_CHUNK_CACHE = 200000
|
|
268
|
-
|
|
269
|
-
def __init__(self, cookie):
|
|
270
|
-
self.cookie = cookie
|
|
271
|
-
dictionary = self.cookie.get('cache_mode')
|
|
272
|
-
if isinstance(dictionary, dict):
|
|
273
|
-
self.from_cookie(dictionary)
|
|
274
|
-
else:
|
|
275
|
-
self._enable_cache = self.__DEFAULT_ENABLE_CACHE
|
|
276
|
-
self._chunk_cache = self.__DEFAULT_CHUNK_CACHE
|
|
277
|
-
self.to_cookie()
|
|
278
|
-
|
|
279
|
-
def to_cookie(self):
|
|
280
|
-
dictionary = {'enable_cache': self._enable_cache,
|
|
281
|
-
'chunk_cache': self._chunk_cache}
|
|
282
|
-
self.cookie.put(key='cache_mode', value=dictionary)
|
|
283
|
-
|
|
284
|
-
def from_cookie(self, dictionary):
|
|
285
|
-
self._enable_cache = dictionary.get('enable_cache', self.__DEFAULT_ENABLE_CACHE)
|
|
286
|
-
self._chunk_cache = dictionary.get('chunk_cache', self.__DEFAULT_CHUNK_CACHE)
|
|
287
|
-
|
|
288
|
-
@property
|
|
289
|
-
def enable_cache(self):
|
|
290
|
-
return self._enable_cache
|
|
291
|
-
|
|
292
|
-
@enable_cache.setter
|
|
293
|
-
def enable_cache(self, val: bool):
|
|
294
|
-
if not isinstance(val, bool):
|
|
295
|
-
raise exceptions.PlatformException(error=400,
|
|
296
|
-
message="input must be of type bool")
|
|
297
|
-
self._enable_cache = val
|
|
298
|
-
self.to_cookie()
|
|
299
|
-
|
|
300
|
-
@property
|
|
301
|
-
def chunk_cache(self):
|
|
302
|
-
return self._chunk_cache
|
|
303
|
-
|
|
304
|
-
@chunk_cache.setter
|
|
305
|
-
def chunk_cache(self, val):
|
|
306
|
-
self._chunk_cache = val
|
|
307
|
-
self.to_cookie()
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
class SDKCache:
|
|
311
|
-
__DEFAULT_USE_CACHE = False
|
|
312
|
-
__DEFAULT_CACHE_PATH = os.path.join(os.path.expanduser('~'), '.dataloop', 'obj_cache')
|
|
313
|
-
__DEFAULT_CACHE_PATH_BIN = os.path.join(os.path.expanduser('~'), '.dataloop')
|
|
314
|
-
__DEFAULT_CONFIGS_CACHE = CacheConfig().to_string()
|
|
315
|
-
__DEFAULT_BINARY_CACHE_SIZE = 1000
|
|
316
|
-
|
|
317
|
-
def __init__(self, cookie):
|
|
318
|
-
self.cookie = cookie
|
|
319
|
-
dictionary = self.cookie.get('cache_configs')
|
|
320
|
-
if isinstance(dictionary, dict):
|
|
321
|
-
self.from_cookie(dictionary)
|
|
322
|
-
else:
|
|
323
|
-
self._cache_path = self.__DEFAULT_CACHE_PATH
|
|
324
|
-
self._cache_path_bin = self.__DEFAULT_CACHE_PATH_BIN
|
|
325
|
-
self._configs = self.__DEFAULT_CONFIGS_CACHE
|
|
326
|
-
self._bin_size = self.__DEFAULT_BINARY_CACHE_SIZE
|
|
327
|
-
self._use_cache = self.__DEFAULT_USE_CACHE
|
|
328
|
-
self.to_cookie()
|
|
329
|
-
|
|
330
|
-
def to_cookie(self):
|
|
331
|
-
dictionary = {'cache_path': self._cache_path,
|
|
332
|
-
'cache_path_bin': self._cache_path_bin,
|
|
333
|
-
'configs': self._configs,
|
|
334
|
-
'bin_size': self._bin_size,
|
|
335
|
-
'use_cache': self._use_cache}
|
|
336
|
-
self.cookie.put(key='cache_configs', value=dictionary)
|
|
337
|
-
|
|
338
|
-
def from_cookie(self, dictionary):
|
|
339
|
-
self._cache_path = dictionary.get('cache_path', self.__DEFAULT_CACHE_PATH)
|
|
340
|
-
self._cache_path_bin = dictionary.get('cache_path_bin', self.__DEFAULT_CACHE_PATH_BIN)
|
|
341
|
-
self._configs = dictionary.get('configs', self.__DEFAULT_CONFIGS_CACHE)
|
|
342
|
-
self._bin_size = dictionary.get('bin_size', self.__DEFAULT_BINARY_CACHE_SIZE)
|
|
343
|
-
self._use_cache = dictionary.get('use_cache', self.__DEFAULT_USE_CACHE)
|
|
344
|
-
|
|
345
|
-
@property
|
|
346
|
-
def cache_path(self):
|
|
347
|
-
return self._cache_path
|
|
348
|
-
|
|
349
|
-
@property
|
|
350
|
-
def cache_path_bin(self):
|
|
351
|
-
return self._cache_path_bin
|
|
352
|
-
|
|
353
|
-
@cache_path_bin.setter
|
|
354
|
-
def cache_path_bin(self, val: str):
|
|
355
|
-
if not isinstance(val, str):
|
|
356
|
-
raise exceptions.PlatformException(error=400,
|
|
357
|
-
message="input must be of type str")
|
|
358
|
-
self._cache_path_bin = val
|
|
359
|
-
os.environ['DEFAULT_CACHE_PATH'] = val
|
|
360
|
-
self.to_cookie()
|
|
361
|
-
|
|
362
|
-
@property
|
|
363
|
-
def use_cache(self):
|
|
364
|
-
return self._use_cache
|
|
365
|
-
|
|
366
|
-
@use_cache.setter
|
|
367
|
-
def use_cache(self, val: bool):
|
|
368
|
-
if not isinstance(val, bool):
|
|
369
|
-
raise exceptions.PlatformException(error=400,
|
|
370
|
-
message="input must be of type bool")
|
|
371
|
-
self._use_cache = val
|
|
372
|
-
self.to_cookie()
|
|
373
|
-
|
|
374
|
-
@property
|
|
375
|
-
def configs(self):
|
|
376
|
-
return self._configs
|
|
377
|
-
|
|
378
|
-
@configs.setter
|
|
379
|
-
def configs(self, val):
|
|
380
|
-
if isinstance(val, CacheConfig):
|
|
381
|
-
val = val.to_string()
|
|
382
|
-
if not isinstance(val, str):
|
|
383
|
-
raise exceptions.PlatformException(error=400,
|
|
384
|
-
message="input must be of type str or CacheConfig")
|
|
385
|
-
self._configs = val
|
|
386
|
-
self.to_cookie()
|
|
387
|
-
|
|
388
|
-
@property
|
|
389
|
-
def bin_size(self):
|
|
390
|
-
return self._bin_size
|
|
391
|
-
|
|
392
|
-
@bin_size.setter
|
|
393
|
-
def bin_size(self, val: int):
|
|
394
|
-
if not isinstance(val, int):
|
|
395
|
-
raise exceptions.PlatformException(error=400,
|
|
396
|
-
message="input must be of type int")
|
|
397
|
-
self._bin_size = val
|
|
398
|
-
self.to_cookie()
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
class Attributes2:
|
|
402
|
-
__DEFAULT_USE_ATTRIBUTE = False
|
|
403
|
-
|
|
404
|
-
def __init__(self, cookie):
|
|
405
|
-
self.cookie = cookie
|
|
406
|
-
dictionary = self.cookie.get('use_attributes_2')
|
|
407
|
-
if isinstance(dictionary, dict):
|
|
408
|
-
self.from_cookie(dictionary)
|
|
409
|
-
else:
|
|
410
|
-
self._use_attributes_2 = self.__DEFAULT_USE_ATTRIBUTE
|
|
411
|
-
self.to_cookie()
|
|
412
|
-
|
|
413
|
-
def to_cookie(self):
|
|
414
|
-
dictionary = {'use_attributes_2': self._use_attributes_2}
|
|
415
|
-
self.cookie.put(key='use_attributes_2', value=dictionary)
|
|
416
|
-
|
|
417
|
-
def from_cookie(self, dictionary):
|
|
418
|
-
self._use_attributes_2 = dictionary.get('use_attributes_2', self.__DEFAULT_USE_ATTRIBUTE)
|
|
419
|
-
|
|
420
|
-
@property
|
|
421
|
-
def use_attributes_2(self):
|
|
422
|
-
return self._use_attributes_2
|
|
423
|
-
|
|
424
|
-
@use_attributes_2.setter
|
|
425
|
-
def use_attributes_2(self, val: bool):
|
|
426
|
-
if not isinstance(val, bool):
|
|
427
|
-
raise exceptions.PlatformException(error=400,
|
|
428
|
-
message="input must be of type bool")
|
|
429
|
-
self._use_attributes_2 = val
|
|
430
|
-
os.environ["USE_ATTRIBUTE_2"] = json.dumps(val)
|
|
431
|
-
self.to_cookie()
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
class Decorators:
|
|
435
|
-
@staticmethod
|
|
436
|
-
def token_expired_decorator(method):
|
|
437
|
-
@wraps(method)
|
|
438
|
-
def decorated_method(inst, *args, **kwargs):
|
|
439
|
-
# save event
|
|
440
|
-
frm = inspect.stack()[1]
|
|
441
|
-
|
|
442
|
-
# before the method call
|
|
443
|
-
kwargs.update({'stack': frm})
|
|
444
|
-
if inst.token_expired():
|
|
445
|
-
if inst.renew_token_method() is False:
|
|
446
|
-
raise exceptions.PlatformException('600', 'Token expired, Please login.'
|
|
447
|
-
'\nSDK login options: dl.login(), dl.login_token(), '
|
|
448
|
-
'dl.login_m2m()'
|
|
449
|
-
'\nCLI login options: dlp login, dlp login-token, '
|
|
450
|
-
'dlp login-m2m')
|
|
451
|
-
# the actual method call
|
|
452
|
-
result = method(inst, *args, **kwargs)
|
|
453
|
-
# after the method call
|
|
454
|
-
return result
|
|
455
|
-
|
|
456
|
-
return decorated_method
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
class ApiClient:
|
|
460
|
-
"""
|
|
461
|
-
API calls to Dataloop gate
|
|
462
|
-
"""
|
|
463
|
-
|
|
464
|
-
def __init__(self, token=None, num_processes=None, cookie_filepath=None):
|
|
465
|
-
############
|
|
466
|
-
# Initiate #
|
|
467
|
-
############
|
|
468
|
-
# define local params - read only once from cookie file
|
|
469
|
-
self.lock = threading.Lock()
|
|
470
|
-
self.renew_token_method = self.renew_token
|
|
471
|
-
self.is_cli = False
|
|
472
|
-
self.session = None
|
|
473
|
-
self.default_headers = dict()
|
|
474
|
-
self._token = None
|
|
475
|
-
self._environments = None
|
|
476
|
-
self._environment = None
|
|
477
|
-
self._verbose = None
|
|
478
|
-
self._callbacks = None
|
|
479
|
-
self._cache_state = None
|
|
480
|
-
self._attributes_mode = None
|
|
481
|
-
self._cache_configs = None
|
|
482
|
-
self._sdk_cache = None
|
|
483
|
-
self._fetch_entities = None
|
|
484
|
-
# define other params
|
|
485
|
-
self.last_response = None
|
|
486
|
-
self.last_request = None
|
|
487
|
-
self.platform_exception = None
|
|
488
|
-
self.last_curl = None
|
|
489
|
-
self.minimal_print = True
|
|
490
|
-
# start refresh token
|
|
491
|
-
self.refresh_token_active = True
|
|
492
|
-
# event and pools
|
|
493
|
-
self._thread_pools = dict()
|
|
494
|
-
self._event_loop = None
|
|
495
|
-
self._login_domain = None
|
|
496
|
-
self.__gate_url_for_requests = None
|
|
497
|
-
|
|
498
|
-
# TODO- remove before release - only for debugging
|
|
499
|
-
self._stopped_pools = list()
|
|
500
|
-
|
|
501
|
-
if cookie_filepath is None:
|
|
502
|
-
self.cookie_io = CookieIO.init()
|
|
503
|
-
else:
|
|
504
|
-
self.cookie_io = CookieIO(path=cookie_filepath)
|
|
505
|
-
assert isinstance(self.cookie_io, CookieIO)
|
|
506
|
-
self.state_io = CookieIO.init_local_cookie(create=False)
|
|
507
|
-
assert isinstance(self.state_io, CookieIO)
|
|
508
|
-
|
|
509
|
-
##################
|
|
510
|
-
# configurations #
|
|
511
|
-
##################
|
|
512
|
-
# check for proxies in connection
|
|
513
|
-
self.check_proxy()
|
|
514
|
-
|
|
515
|
-
# set token if input
|
|
516
|
-
if token is not None:
|
|
517
|
-
self.token = token
|
|
518
|
-
|
|
519
|
-
# STDOUT
|
|
520
|
-
self.remove_keys_list = ['contributors', 'url', 'annotations', 'items', 'export', 'directoryTree',
|
|
521
|
-
'attributes', 'partitions', 'metadata', 'stream', 'createdAt', 'updatedAt', 'arch']
|
|
522
|
-
|
|
523
|
-
# API calls counter
|
|
524
|
-
counter_filepath = os.path.join(os.path.dirname(self.cookie_io.COOKIE), 'calls_counter.json')
|
|
525
|
-
self.calls_counter = CallsCounter(filepath=counter_filepath)
|
|
526
|
-
|
|
527
|
-
# create a global thread pool to run multi threading
|
|
528
|
-
if num_processes is None:
|
|
529
|
-
num_processes = 3 * multiprocessing.cpu_count()
|
|
530
|
-
self._num_processes = num_processes
|
|
531
|
-
self._thread_pools_names = {'item.download': num_processes,
|
|
532
|
-
'item.status_update': num_processes,
|
|
533
|
-
'item.page': num_processes,
|
|
534
|
-
'annotation.upload': num_processes,
|
|
535
|
-
'annotation.download': num_processes,
|
|
536
|
-
'annotation.update': num_processes,
|
|
537
|
-
'entity.create': num_processes,
|
|
538
|
-
'dataset.download': num_processes}
|
|
539
|
-
# set logging level
|
|
540
|
-
logging.getLogger(name='dtlpy').handlers[0].setLevel(logging._nameToLevel[self.verbose.logging_level.upper()])
|
|
541
|
-
os.environ["USE_ATTRIBUTE_2"] = json.dumps(self.attributes_mode.use_attributes_2)
|
|
542
|
-
|
|
543
|
-
self.cache = None
|
|
544
|
-
#######################
|
|
545
|
-
# start event tracker #
|
|
546
|
-
self.event_tracker = Events(client_api=self)
|
|
547
|
-
self.event_tracker.daemon = True
|
|
548
|
-
self.event_tracker.start()
|
|
549
|
-
self.upload_session_timeout = int(os.environ.get('UPLOAD_SESSION_TIMEOUT', 0))
|
|
550
|
-
self.upload_chunk_timeout = int(os.environ.get('UPLOAD_CHUNK_TIMEOUT', 2 * 60))
|
|
551
|
-
|
|
552
|
-
@property
|
|
553
|
-
def event_loop(self):
|
|
554
|
-
self.lock.acquire()
|
|
555
|
-
if self._event_loop is None:
|
|
556
|
-
self._event_loop = self.create_event_loop_thread()
|
|
557
|
-
elif not self._event_loop.loop.is_running():
|
|
558
|
-
if self._event_loop.is_alive():
|
|
559
|
-
self._event_loop.stop()
|
|
560
|
-
self._event_loop = self.create_event_loop_thread()
|
|
561
|
-
self.lock.release()
|
|
562
|
-
return self._event_loop
|
|
563
|
-
|
|
564
|
-
def build_cache(self, cache_config=None):
|
|
565
|
-
if cache_config is None:
|
|
566
|
-
cache_config_json = os.environ.get('CACHE_CONFIG', None)
|
|
567
|
-
if cache_config_json is None:
|
|
568
|
-
if self.sdk_cache.use_cache:
|
|
569
|
-
cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=self.sdk_cache.configs)
|
|
570
|
-
else:
|
|
571
|
-
cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=cache_config_json)
|
|
572
|
-
if cache_config:
|
|
573
|
-
# cache paths
|
|
574
|
-
if os.environ.get('DEFAULT_CACHE_PATH', None) is None:
|
|
575
|
-
os.environ['DEFAULT_CACHE_PATH'] = self.sdk_cache.cache_path_bin
|
|
576
|
-
else:
|
|
577
|
-
self.sdk_cache.cache_path_bin = os.environ['DEFAULT_CACHE_PATH']
|
|
578
|
-
|
|
579
|
-
if not os.path.isdir(self.sdk_cache.cache_path_bin):
|
|
580
|
-
os.makedirs(self.sdk_cache.cache_path_bin, exist_ok=True)
|
|
581
|
-
|
|
582
|
-
if not os.path.isfile(os.path.join(self.sdk_cache.cache_path_bin, 'cacheConfig.json')):
|
|
583
|
-
os.makedirs(self.sdk_cache.cache_path_bin, exist_ok=True)
|
|
584
|
-
|
|
585
|
-
if isinstance(cache_config, str):
|
|
586
|
-
self.sdk_cache.configs = cache_config
|
|
587
|
-
cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=cache_config)
|
|
588
|
-
elif isinstance(cache_config, CacheConfig):
|
|
589
|
-
self.sdk_cache.configs = cache_config.to_string()
|
|
590
|
-
else:
|
|
591
|
-
raise Exception("config should be of type str or CacheConfig")
|
|
592
|
-
try:
|
|
593
|
-
self.cache = CacheManger(cache_configs=[cache_config], bin_cache_size=self.sdk_cache.bin_size)
|
|
594
|
-
self.cache.ping()
|
|
595
|
-
self.sdk_cache.use_cache = True
|
|
596
|
-
except Exception as e:
|
|
597
|
-
logger.warning("Cache build error {}".format(e))
|
|
598
|
-
self.cache = None
|
|
599
|
-
|
|
600
|
-
def __del__(self):
|
|
601
|
-
for name, pool in self._thread_pools.items():
|
|
602
|
-
pool.shutdown()
|
|
603
|
-
self.event_loop.stop()
|
|
604
|
-
|
|
605
|
-
def _build_request_headers(self, headers=None):
|
|
606
|
-
if headers is None:
|
|
607
|
-
headers = dict()
|
|
608
|
-
if not isinstance(headers, dict):
|
|
609
|
-
raise exceptions.PlatformException(
|
|
610
|
-
error='400',
|
|
611
|
-
message="Input 'headers' must be a dictionary, got: {}".format(type(headers)))
|
|
612
|
-
headers.update(self.default_headers)
|
|
613
|
-
headers.update(self.auth)
|
|
614
|
-
headers.update({'User-Agent': requests_toolbelt.user_agent('dtlpy', __version__)})
|
|
615
|
-
return headers
|
|
616
|
-
|
|
617
|
-
@property
|
|
618
|
-
def num_processes(self):
|
|
619
|
-
return self._num_processes
|
|
620
|
-
|
|
621
|
-
@num_processes.setter
|
|
622
|
-
def num_processes(self, num_processes):
|
|
623
|
-
if num_processes == self._num_processes:
|
|
624
|
-
# same number. no need to do anything
|
|
625
|
-
return
|
|
626
|
-
self._num_processes = num_processes
|
|
627
|
-
for pool_name in self._thread_pools_names:
|
|
628
|
-
self._thread_pools_names[pool_name] = num_processes
|
|
629
|
-
|
|
630
|
-
for pool in self._thread_pools:
|
|
631
|
-
self._thread_pools[pool].shutdown()
|
|
632
|
-
self._thread_pools = dict()
|
|
633
|
-
|
|
634
|
-
def create_event_loop_thread(self):
|
|
635
|
-
loop = asyncio.new_event_loop()
|
|
636
|
-
event_loop = AsyncThreadEventLoop(loop=loop,
|
|
637
|
-
n=self._num_processes)
|
|
638
|
-
event_loop.daemon = True
|
|
639
|
-
event_loop.start()
|
|
640
|
-
time.sleep(1)
|
|
641
|
-
return event_loop
|
|
642
|
-
|
|
643
|
-
def thread_pools(self, pool_name):
|
|
644
|
-
if pool_name not in self._thread_pools_names:
|
|
645
|
-
raise ValueError('unknown thread pool name: {}. known name: {}'.format(
|
|
646
|
-
pool_name,
|
|
647
|
-
list(self._thread_pools_names.keys())))
|
|
648
|
-
num_processes = self._thread_pools_names[pool_name]
|
|
649
|
-
if pool_name not in self._thread_pools or self._thread_pools[pool_name]._shutdown:
|
|
650
|
-
self._thread_pools[pool_name] = ThreadPoolExecutor(max_workers=num_processes)
|
|
651
|
-
pool = self._thread_pools[pool_name]
|
|
652
|
-
assert isinstance(pool, concurrent.futures.ThreadPoolExecutor)
|
|
653
|
-
return pool
|
|
654
|
-
|
|
655
|
-
@property
|
|
656
|
-
def verify(self):
|
|
657
|
-
environments = self.environments
|
|
658
|
-
verify = True
|
|
659
|
-
if self.environment in environments:
|
|
660
|
-
if 'verify_ssl' in environments[self.environment]:
|
|
661
|
-
verify = environments[self.environment]['verify_ssl']
|
|
662
|
-
return verify
|
|
663
|
-
|
|
664
|
-
@property
|
|
665
|
-
def use_ssl_context(self):
|
|
666
|
-
environments = self.environments
|
|
667
|
-
use_ssl_context = False
|
|
668
|
-
if self.environment in environments:
|
|
669
|
-
if 'use_ssl_context' in environments[self.environment]:
|
|
670
|
-
use_ssl_context = environments[self.environment]['use_ssl_context']
|
|
671
|
-
return use_ssl_context
|
|
672
|
-
|
|
673
|
-
@property
|
|
674
|
-
def auth(self):
|
|
675
|
-
return {'authorization': 'Bearer ' + self.token}
|
|
676
|
-
|
|
677
|
-
@property
|
|
678
|
-
def environment(self):
|
|
679
|
-
_environment = self._environment
|
|
680
|
-
if _environment is None:
|
|
681
|
-
_environment = self.cookie_io.get('url')
|
|
682
|
-
if _environment is None:
|
|
683
|
-
_environment = DEFAULT_ENVIRONMENT
|
|
684
|
-
self._environment = _environment
|
|
685
|
-
return _environment
|
|
686
|
-
|
|
687
|
-
@environment.setter
|
|
688
|
-
def environment(self, env):
|
|
689
|
-
self._environment = env
|
|
690
|
-
self.cookie_io.put('url', env)
|
|
691
|
-
|
|
692
|
-
@property
|
|
693
|
-
def fetch_entities(self):
|
|
694
|
-
if self._fetch_entities is None:
|
|
695
|
-
self._fetch_entities = self.cookie_io.get('fetch_entities')
|
|
696
|
-
if self._fetch_entities is None:
|
|
697
|
-
self.fetch_entities = True # default
|
|
698
|
-
return self._fetch_entities
|
|
699
|
-
|
|
700
|
-
@fetch_entities.setter
|
|
701
|
-
def fetch_entities(self, val):
|
|
702
|
-
self._fetch_entities = val
|
|
703
|
-
self.cookie_io.put('fetch_entities', val)
|
|
704
|
-
|
|
705
|
-
@property
|
|
706
|
-
def environments(self):
|
|
707
|
-
"""
|
|
708
|
-
List of known environments
|
|
709
|
-
:return:
|
|
710
|
-
"""
|
|
711
|
-
# get environment login parameters
|
|
712
|
-
_environments = self._environments
|
|
713
|
-
if _environments is None:
|
|
714
|
-
# take from cookie
|
|
715
|
-
_environments = self.cookie_io.get('login_parameters')
|
|
716
|
-
# if cookie is None - init with defaults
|
|
717
|
-
if _environments is None:
|
|
718
|
-
# default
|
|
719
|
-
_environments = DEFAULT_ENVIRONMENTS
|
|
720
|
-
# save to local variable
|
|
721
|
-
self.environments = _environments
|
|
722
|
-
else:
|
|
723
|
-
# save from cookie to ram
|
|
724
|
-
self._environments = _environments
|
|
725
|
-
return _environments
|
|
726
|
-
|
|
727
|
-
@environments.setter
|
|
728
|
-
def environments(self, env_dict):
|
|
729
|
-
self._environments = env_dict
|
|
730
|
-
self.cookie_io.put(key='login_parameters', value=self._environments)
|
|
731
|
-
|
|
732
|
-
@property
|
|
733
|
-
def verbose(self):
|
|
734
|
-
if self._verbose is None:
|
|
735
|
-
self._verbose = Verbose(cookie=self.cookie_io)
|
|
736
|
-
assert isinstance(self._verbose, Verbose)
|
|
737
|
-
return self._verbose
|
|
738
|
-
|
|
739
|
-
@property
|
|
740
|
-
def cache_state(self):
|
|
741
|
-
if self._cache_state is None:
|
|
742
|
-
self._cache_state = CacheMode(cookie=self.cookie_io)
|
|
743
|
-
assert isinstance(self._cache_state, CacheMode)
|
|
744
|
-
return self._cache_state
|
|
745
|
-
|
|
746
|
-
@property
|
|
747
|
-
def attributes_mode(self):
|
|
748
|
-
if self._attributes_mode is None:
|
|
749
|
-
self._attributes_mode = Attributes2(cookie=self.cookie_io)
|
|
750
|
-
assert isinstance(self._attributes_mode, Attributes2)
|
|
751
|
-
return self._attributes_mode
|
|
752
|
-
|
|
753
|
-
@property
|
|
754
|
-
def sdk_cache(self):
|
|
755
|
-
if self._sdk_cache is None:
|
|
756
|
-
self._sdk_cache = SDKCache(cookie=self.cookie_io)
|
|
757
|
-
assert isinstance(self._sdk_cache, SDKCache)
|
|
758
|
-
return self._sdk_cache
|
|
759
|
-
|
|
760
|
-
@property
|
|
761
|
-
def callbacks(self):
|
|
762
|
-
if self._callbacks is None:
|
|
763
|
-
self._callbacks = Callbacks()
|
|
764
|
-
assert isinstance(self._callbacks, Callbacks)
|
|
765
|
-
return self._callbacks
|
|
766
|
-
|
|
767
|
-
def add_callback(self, event, func):
|
|
768
|
-
"""
|
|
769
|
-
function to add callback to the client
|
|
770
|
-
:param event: dl.CallbackEvent enum, name of the callback
|
|
771
|
-
:param func: function to call with 2 arguments: progress and context
|
|
772
|
-
"""
|
|
773
|
-
self.callbacks.add(event, func)
|
|
774
|
-
|
|
775
|
-
@property
|
|
776
|
-
def token(self):
|
|
777
|
-
_token = self._token
|
|
778
|
-
if _token is None:
|
|
779
|
-
environments = self.environments
|
|
780
|
-
if self.environment in environments:
|
|
781
|
-
if 'token' in environments[self.environment]:
|
|
782
|
-
_token = environments[self.environment]['token']
|
|
783
|
-
return _token
|
|
784
|
-
|
|
785
|
-
@token.setter
|
|
786
|
-
def token(self, token):
|
|
787
|
-
# set to variable
|
|
788
|
-
self._token = token
|
|
789
|
-
self.refresh_token = None
|
|
790
|
-
# set to cookie file
|
|
791
|
-
environments = self.environments
|
|
792
|
-
if self.environment in environments:
|
|
793
|
-
environments[self.environment]['token'] = token
|
|
794
|
-
else:
|
|
795
|
-
environments[self.environment] = {'token': token}
|
|
796
|
-
self.environments = environments
|
|
797
|
-
|
|
798
|
-
@property
|
|
799
|
-
def refresh_token(self):
|
|
800
|
-
environments = self.environments
|
|
801
|
-
refresh_token = None
|
|
802
|
-
if self.environment in environments:
|
|
803
|
-
if 'refresh_token' in environments[self.environment]:
|
|
804
|
-
refresh_token = environments[self.environment]['refresh_token']
|
|
805
|
-
return refresh_token
|
|
806
|
-
|
|
807
|
-
@refresh_token.setter
|
|
808
|
-
def refresh_token(self, token):
|
|
809
|
-
environments = self.environments
|
|
810
|
-
if self.environment in environments:
|
|
811
|
-
environments[self.environment]['refresh_token'] = token
|
|
812
|
-
else:
|
|
813
|
-
environments[self.environment] = {'refresh_token': token}
|
|
814
|
-
self.refresh_token_active = True
|
|
815
|
-
self.environments = environments
|
|
816
|
-
|
|
817
|
-
def add_environment(self, environment,
|
|
818
|
-
audience=None,
|
|
819
|
-
client_id=None,
|
|
820
|
-
auth0_url=None,
|
|
821
|
-
verify_ssl=True,
|
|
822
|
-
token=None,
|
|
823
|
-
refresh_token=None,
|
|
824
|
-
alias=None,
|
|
825
|
-
use_ssl_context=False,
|
|
826
|
-
gate_url=None,
|
|
827
|
-
url=None,
|
|
828
|
-
login_domain=None
|
|
829
|
-
):
|
|
830
|
-
environments = self.environments
|
|
831
|
-
if environment in environments:
|
|
832
|
-
logger.warning('Environment exists. Overwriting. env: {}'.format(environment))
|
|
833
|
-
if token is None:
|
|
834
|
-
token = None
|
|
835
|
-
if alias is None:
|
|
836
|
-
alias = None
|
|
837
|
-
environments[environment] = {'audience': audience,
|
|
838
|
-
'client_id': client_id,
|
|
839
|
-
'auth0_url': auth0_url,
|
|
840
|
-
'alias': alias,
|
|
841
|
-
'token': token,
|
|
842
|
-
'gate_url': gate_url,
|
|
843
|
-
'refresh_token': refresh_token,
|
|
844
|
-
'verify_ssl': verify_ssl,
|
|
845
|
-
'use_ssl_context': use_ssl_context,
|
|
846
|
-
'url': url,
|
|
847
|
-
'login_domain': login_domain}
|
|
848
|
-
self.environments = environments
|
|
849
|
-
|
|
850
|
-
def info(self, with_token=True):
|
|
851
|
-
"""
|
|
852
|
-
Return a dictionary with current information: env, user, token
|
|
853
|
-
:param with_token:
|
|
854
|
-
:return:
|
|
855
|
-
"""
|
|
856
|
-
user_email = 'null'
|
|
857
|
-
if self.token is not None:
|
|
858
|
-
payload = jwt.decode(self.token, algorithms=['HS256'],
|
|
859
|
-
verify=False, options={'verify_signature': False})
|
|
860
|
-
user_email = payload['email']
|
|
861
|
-
information = {'environment': self.environment,
|
|
862
|
-
'user_email': user_email}
|
|
863
|
-
if with_token:
|
|
864
|
-
information['token'] = self.token
|
|
865
|
-
return information
|
|
866
|
-
|
|
867
|
-
@property
|
|
868
|
-
def base_gate_url(self):
|
|
869
|
-
if self.__gate_url_for_requests is None:
|
|
870
|
-
self.__gate_url_for_requests = self.environment
|
|
871
|
-
internal_requests_url = os.environ.get('INTERNAL_REQUESTS_URL', None)
|
|
872
|
-
if internal_requests_url is not None:
|
|
873
|
-
self.__gate_url_for_requests = internal_requests_url
|
|
874
|
-
return self.__gate_url_for_requests
|
|
875
|
-
|
|
876
|
-
def export_curl_request(self, req_type, path, headers=None, json_req=None, files=None, data=None):
|
|
877
|
-
curl, prepared = self._build_gen_request(req_type=req_type,
|
|
878
|
-
path=path,
|
|
879
|
-
headers=headers,
|
|
880
|
-
json_req=json_req,
|
|
881
|
-
files=files,
|
|
882
|
-
data=data)
|
|
883
|
-
return curl
|
|
884
|
-
|
|
885
|
-
def _build_gen_request(self, req_type, path, headers, json_req, files, data):
|
|
886
|
-
req_type = req_type.upper()
|
|
887
|
-
valid_request_type = ['GET', 'DELETE', 'POST', 'PUT', 'PATCH']
|
|
888
|
-
assert req_type in valid_request_type, '[ERROR] type: %s NOT in valid requests' % req_type
|
|
889
|
-
|
|
890
|
-
# prepare request
|
|
891
|
-
req = requests.Request(method=req_type,
|
|
892
|
-
url=self.base_gate_url + path,
|
|
893
|
-
json=json_req,
|
|
894
|
-
files=files,
|
|
895
|
-
data=data,
|
|
896
|
-
headers=self._build_request_headers(headers=headers))
|
|
897
|
-
# prepare to send
|
|
898
|
-
prepared = req.prepare()
|
|
899
|
-
# save curl for debug
|
|
900
|
-
command = "curl -X {method} -H {headers} -d '{data}' '{uri}'"
|
|
901
|
-
method = prepared.method
|
|
902
|
-
uri = prepared.url
|
|
903
|
-
data = prepared.body
|
|
904
|
-
headers = ['"{0}: {1}"'.format(k, v) for k, v in prepared.headers.items()]
|
|
905
|
-
headers = " -H ".join(headers)
|
|
906
|
-
curl = command.format(method=method, headers=headers, data=data, uri=uri)
|
|
907
|
-
return curl, prepared
|
|
908
|
-
|
|
909
|
-
def _convert_json_to_response(self, response_json):
|
|
910
|
-
the_response = Response()
|
|
911
|
-
the_response._content = json.dumps(response_json).encode('utf-8')
|
|
912
|
-
return the_response
|
|
913
|
-
|
|
914
|
-
def _cache_on(self, request):
|
|
915
|
-
if self.cache is not None and self.sdk_cache.use_cache:
|
|
916
|
-
pure_request = request.split('?')[0]
|
|
917
|
-
valid_req = ['annotation', 'item', 'dataset', 'project', 'task', 'assignment']
|
|
918
|
-
for req_type in valid_req:
|
|
919
|
-
if req_type in pure_request:
|
|
920
|
-
return True
|
|
921
|
-
return False
|
|
922
|
-
|
|
923
|
-
@Decorators.token_expired_decorator
|
|
924
|
-
def gen_request(self, req_type, path, data=None, json_req=None, files=None, stream=False, headers=None,
|
|
925
|
-
log_error=True, dataset_id=None, **kwargs):
|
|
926
|
-
"""
|
|
927
|
-
Generic request from platform
|
|
928
|
-
:param req_type: type of the request: GET, POST etc
|
|
929
|
-
:param path: url (without host header - take from environment)
|
|
930
|
-
:param data: data to pass to request
|
|
931
|
-
:param json_req: json to pass to request
|
|
932
|
-
:param files: files to pass to request
|
|
933
|
-
:param stream: stream to pass the request
|
|
934
|
-
:param headers: headers to pass to request. auth will be added to it
|
|
935
|
-
:param log_error: if true - print the error log of the request
|
|
936
|
-
:param dataset_id: dataset id needed in stream True
|
|
937
|
-
:param kwargs: kwargs
|
|
938
|
-
:return:
|
|
939
|
-
"""
|
|
940
|
-
success, resp, cache_values = False, None, []
|
|
941
|
-
if self.cache is None and 'sdk' not in path:
|
|
942
|
-
self.build_cache()
|
|
943
|
-
if req_type.lower() not in ['patch', 'put', 'post', 'delete'] and self._cache_on(request=path):
|
|
944
|
-
try:
|
|
945
|
-
if stream:
|
|
946
|
-
if dataset_id is None:
|
|
947
|
-
raise ValueError("must provide a dataset id")
|
|
948
|
-
success, cache_values = self.cache.read_stream(request_path=path, dataset_id=dataset_id)
|
|
949
|
-
|
|
950
|
-
else:
|
|
951
|
-
success, cache_values = self.cache.read(request_path=path)
|
|
952
|
-
if success:
|
|
953
|
-
resp = self._convert_json_to_response(cache_values)
|
|
954
|
-
except Exception as e:
|
|
955
|
-
logger.warning("Cache error {}".format(e))
|
|
956
|
-
success, resp = False, None
|
|
957
|
-
|
|
958
|
-
if not success and not resp:
|
|
959
|
-
success, resp = self._gen_request(req_type=req_type,
|
|
960
|
-
path=path,
|
|
961
|
-
data=data,
|
|
962
|
-
json_req=json_req,
|
|
963
|
-
files=files,
|
|
964
|
-
stream=stream,
|
|
965
|
-
headers=headers,
|
|
966
|
-
log_error=log_error)
|
|
967
|
-
|
|
968
|
-
if success and self._cache_on(request=path):
|
|
969
|
-
try:
|
|
970
|
-
if stream:
|
|
971
|
-
res = self.cache.write_stream(request_path=path,
|
|
972
|
-
response=resp,
|
|
973
|
-
dataset_id=dataset_id)
|
|
974
|
-
if res != '':
|
|
975
|
-
resp = self._convert_json_to_response(res)
|
|
976
|
-
else:
|
|
977
|
-
if req_type == 'delete':
|
|
978
|
-
self.cache.invalidate(path=path)
|
|
979
|
-
else:
|
|
980
|
-
try:
|
|
981
|
-
resp_list = resp.json()
|
|
982
|
-
write = True
|
|
983
|
-
if isinstance(resp_list, list):
|
|
984
|
-
pass
|
|
985
|
-
elif isinstance(resp_list, dict):
|
|
986
|
-
if 'hasNextPage' in resp_list:
|
|
987
|
-
resp_list = resp_list['items']
|
|
988
|
-
elif 'id' in resp_list:
|
|
989
|
-
resp_list = [resp_list]
|
|
990
|
-
else:
|
|
991
|
-
write = False
|
|
992
|
-
else:
|
|
993
|
-
raise exceptions.PlatformException(error='400', message="unsupported return type")
|
|
994
|
-
if write:
|
|
995
|
-
self.cache.write(list_entities_json=resp_list)
|
|
996
|
-
except:
|
|
997
|
-
raise exceptions.PlatformException(error='400', message="failed to set cache")
|
|
998
|
-
except Exception as e:
|
|
999
|
-
logger.warning("Cache error {}".format(e))
|
|
1000
|
-
self.cache = None
|
|
1001
|
-
# only for projects events
|
|
1002
|
-
if success:
|
|
1003
|
-
|
|
1004
|
-
|
|
1005
|
-
return success, resp
|
|
1006
|
-
|
|
1007
|
-
def _gen_request(self, req_type, path, data=None, json_req=None, files=None, stream=False, headers=None,
|
|
1008
|
-
log_error=True):
|
|
1009
|
-
"""
|
|
1010
|
-
Generic request from platform
|
|
1011
|
-
:param req_type: type of the request: GET, POST etc
|
|
1012
|
-
:param path: url (without host header - take from environment)
|
|
1013
|
-
:param data: data to pass to request
|
|
1014
|
-
:param json_req: json to pass to request
|
|
1015
|
-
:param files: files to pass to request
|
|
1016
|
-
:param stream: stream to pass the request
|
|
1017
|
-
:param headers: headers to pass to request. auth will be added to it
|
|
1018
|
-
:param log_error: if true - print the error log of the request
|
|
1019
|
-
:return:
|
|
1020
|
-
"""
|
|
1021
|
-
curl, prepared = self._build_gen_request(req_type=req_type,
|
|
1022
|
-
path=path,
|
|
1023
|
-
headers=headers,
|
|
1024
|
-
json_req=json_req,
|
|
1025
|
-
files=files,
|
|
1026
|
-
data=data)
|
|
1027
|
-
self.last_curl = curl
|
|
1028
|
-
self.last_request = prepared
|
|
1029
|
-
# send request
|
|
1030
|
-
try:
|
|
1031
|
-
resp = self.send_session(prepared=prepared, stream=stream)
|
|
1032
|
-
except Exception:
|
|
1033
|
-
logger.error(self.print_request(req=prepared, to_return=True))
|
|
1034
|
-
raise
|
|
1035
|
-
self.last_response = resp
|
|
1036
|
-
# handle output
|
|
1037
|
-
if not resp.ok:
|
|
1038
|
-
self.print_bad_response(resp, log_error=log_error and not self.is_cli)
|
|
1039
|
-
return_type = False
|
|
1040
|
-
else:
|
|
1041
|
-
try:
|
|
1042
|
-
# print only what is printable (dont print get steam etc..)
|
|
1043
|
-
if not stream:
|
|
1044
|
-
self.print_response(resp)
|
|
1045
|
-
except ValueError:
|
|
1046
|
-
# no JSON returned
|
|
1047
|
-
pass
|
|
1048
|
-
return_type = True
|
|
1049
|
-
return return_type, resp
|
|
1050
|
-
|
|
1051
|
-
    @Decorators.token_expired_decorator
    async def gen_async_request(self,
                                req_type,
                                path,
                                data=None,
                                json_req=None,
                                files=None,
                                stream=None,
                                headers=None,
                                log_error=True,
                                filepath=None,
                                chunk_size=8192,
                                pbar=None,
                                is_dataloop=True,
                                **kwargs):
        req_type = req_type.upper()
        valid_request_type = ['GET', 'DELETE', 'POST', 'PUT', 'PATCH']
        assert req_type in valid_request_type, '[ERROR] type: %s NOT in valid requests' % req_type

        # prepare request
        if is_dataloop:
            full_url = self.base_gate_url + path
            headers_req = self._build_request_headers(headers=headers)
        else:
            full_url = path
            headers = dict()
            headers_req = headers

        if headers is not None:
            if not isinstance(headers, dict):
                raise exceptions.PlatformException(error='400', message="Input 'headers' must be a dictionary")
            for k, v in headers.items():
                headers_req[k] = v
        req = requests.Request(method=req_type,
                               url=full_url,
                               json=json_req,
                               files=files,
                               data=data,
                               headers=headers_req)
        # prepare to send
        prepared = req.prepare()
        # save curl for debug
        command = "curl -X {method} -H {headers} -d '{data}' '{uri}'"
        headers = ['"{0}: {1}"'.format(k, v) for k, v in prepared.headers.items()]
        headers = " -H ".join(headers)
        curl = command.format(method=prepared.method,
                              headers=headers,
                              data=prepared.body,
                              uri=prepared.url)
        self.last_curl = curl
        self.last_request = prepared
        # send request
        try:
            timeout = aiohttp.ClientTimeout(total=0)
            async with RetryClient(headers=headers_req,
                                   timeout=timeout) as session:
                try:
                    async with session._request(request=session._client.request,
                                                url=self.base_gate_url + path,
                                                method=req_type,
                                                json=json_req,
                                                data=data,
                                                headers=headers_req,
                                                chunked=stream,
                                                retry_attempts=5,
                                                ssl=self.verify,
                                                retry_exceptions={aiohttp.client_exceptions.ClientOSError,
                                                                  aiohttp.client_exceptions.ServerDisconnectedError,
                                                                  aiohttp.client_exceptions.ClientPayloadError},
                                                raise_for_status=False) as request:
                        if stream:
                            pbar = self.__get_pbar(pbar=pbar,
                                                   total_length=request.headers.get("content-length"))
                            if filepath is not None:
                                to_close = False
                                if isinstance(filepath, str):
                                    to_close = True
                                    buffer = open(filepath, 'wb')
                                elif isinstance(filepath, io.BytesIO):
                                    # use the caller-provided buffer directly (this branch was a bare
                                    # "pass" in the rendered diff, which would leave "buffer" unbound)
                                    buffer = filepath
                                else:
                                    raise ValueError('unknown data type to write file: {}'.format(type(filepath)))
                                try:
                                    while True:
                                        chunk = await request.content.read(chunk_size)
                                        await asyncio.sleep(0)
                                        if not chunk:
                                            break
                                        buffer.write(chunk)
                                        if pbar is not None:
                                            pbar.update(len(chunk))
                                finally:
                                    if to_close:
                                        buffer.close()

                            if pbar is not None:
                                pbar.close()
                        text = await request.text()
                        try:
                            _json = await request.json()
                        except Exception:
                            _json = dict()
                        response = AsyncResponse(text=text,
                                                 _json=_json,
                                                 async_resp=request)
                except Exception as err:
                    response = AsyncResponseError(error=err, trace=traceback.format_exc())
                finally:
                    with threadLock:
                        self.calls_counter.add()
        except Exception:
            logger.error(self.print_request(req=prepared, to_return=True))
            raise
        self.last_response = response
        # handle output
        if not response.ok:
            self.print_bad_response(response, log_error=log_error and not self.is_cli)
            return_type = False
        else:
            try:
                # print only what is printable (don't print streams, etc.)
                if not stream:
                    self.print_response(response)
            except ValueError:
                # no JSON returned
                pass
            return_type = True
        return return_type, response
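The streaming branch above reads the body in fixed-size chunks and yields control back to the event loop between reads, so large downloads do not starve other coroutines. The same pattern in a self-contained sketch (plain aiohttp; the URL and path are placeholders):

# --- usage sketch (illustrative, not part of the package) ---
import asyncio
import aiohttp

async def stream_to_file(url: str, filepath: str, chunk_size: int = 8192):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            with open(filepath, 'wb') as buffer:
                while True:
                    chunk = await resp.content.read(chunk_size)
                    if not chunk:
                        break
                    buffer.write(chunk)
                    # let other coroutines run between chunks
                    await asyncio.sleep(0)

asyncio.run(stream_to_file('https://example.com/file.bin', 'file.bin'))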
    @Decorators.token_expired_decorator
    async def upload_file_async(self,
                                to_upload,
                                item_type,
                                item_size,
                                remote_url,
                                uploaded_filename,
                                remote_path=None,
                                callback=None,
                                mode='skip',
                                item_metadata=None,
                                headers=None,
                                item_description=None,
                                **kwargs):
        headers = self._build_request_headers(headers=headers)
        pbar = None
        if callback is None:
            if item_size > 10e6:
                # size larger than 10MB
                pbar = tqdm.tqdm(total=item_size,
                                 unit="B",
                                 unit_scale=True,
                                 unit_divisor=1024,
                                 position=1,
                                 file=sys.stdout,
                                 disable=self.verbose.disable_progress_bar_upload_items,
                                 desc='Upload Items')

                def callback(bytes_read):
                    pbar.update(bytes_read)
            else:
                def callback(bytes_read):
                    pass

        timeout = aiohttp.ClientTimeout(total=self.upload_session_timeout)
        async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
            try:
                form = aiohttp.FormData({})
                form.add_field('type', item_type)
                form.add_field('path', os.path.join(remote_path, uploaded_filename).replace('\\', '/'))
                if item_metadata is not None:
                    form.add_field('metadata', json.dumps(item_metadata))
                if item_description is not None:
                    form.add_field('description', item_description)
                form.add_field('file', AsyncUploadStream(buffer=to_upload,
                                                         callback=callback,
                                                         name=uploaded_filename,
                                                         chunk_timeout=self.upload_chunk_timeout))
                url = '{}?mode={}'.format(self.base_gate_url + remote_url, mode)

                # use SSL context
                ssl_context = None
                if self.use_ssl_context:
                    ssl_context = ssl.create_default_context(cafile=certifi.where())
                async with session.post(url,
                                        data=form,
                                        verify_ssl=self.verify,
                                        ssl=ssl_context) as resp:
                    self.last_request = resp.request_info
                    command = "curl -X {method} -H {headers} -d '{uri}'"
                    headers = ['"{0}: {1}"'.format(k, v) for k, v in resp.request_info.headers.items()]
                    headers = " -H ".join(headers)
                    self.last_curl = command.format(method=resp.request_info.method,
                                                    headers=headers,
                                                    uri=resp.request_info.url)
                    text = await resp.text()
                    try:
                        _json = await resp.json()
                    except Exception:
                        _json = dict()
                    response = AsyncResponse(text=text,
                                             _json=_json,
                                             async_resp=resp)
            except Exception as err:
                response = AsyncResponseError(error=err, trace=traceback.format_exc())
            finally:
                if pbar is not None:
                    pbar.close()
                with threadLock:
                    self.calls_counter.add()
        if response.ok and self.cache is not None:
            try:
                self.cache.write(list_entities_json=[response.json()])
                dataset_id = url.split('/')[-2]
                self.cache.write_stream(request_path=url,
                                        buffer=to_upload,
                                        file_name=uploaded_filename,
                                        entity_id=response.json()['id'],
                                        dataset_id=dataset_id)
            except Exception:
                logger.warning("Failed to add the file to the cache")
        return response
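Uploads are sent as one multipart form: the metadata fields go in first, then the file part, so the server can validate the item before the binary arrives. A minimal standalone version of that form construction (field names mirror the code above; the endpoint, remote path, and metadata values are placeholders):

# --- usage sketch (illustrative, not part of the package) ---
import json
import aiohttp

async def upload_item(session: aiohttp.ClientSession, url: str, local_path: str):
    form = aiohttp.FormData()
    form.add_field('type', 'file')
    form.add_field('path', '/remote/dir/item.jpg')
    form.add_field('metadata', json.dumps({'user': {'tag': 'example'}}))
    # aiohttp streams file objects instead of loading them into memory
    form.add_field('file', open(local_path, 'rb'), filename='item.jpg')
    async with session.post(url, data=form) as resp:
        return await resp.json()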
    def __get_pbar(self, pbar, total_length):
        # decide whether to create a progress bar for the item
        if pbar:
            try:
                if total_length is not None and int(total_length) > 10e6:  # size larger than 10MB
                    pbar = tqdm.tqdm(total=int(total_length),
                                     unit='B',
                                     unit_scale=True,
                                     unit_divisor=1024,
                                     position=1,
                                     file=sys.stdout,
                                     disable=self.verbose.disable_progress_bar)
                else:
                    pbar = None
            except Exception as err:
                pbar = None
                logger.debug("Can't determine downloaded file length, bar will not be presented: {}".format(err))
        return pbar
    def send_session(self, prepared, stream=None):
        if self.session is None:
            self.session = requests.Session()
            retry = Retry(
                total=5,
                read=5,
                connect=5,
                backoff_factor=1,
                # use on any request type
                allowed_methods=False,
                # force retry on those status responses
                status_forcelist=(501, 502, 503, 504, 505, 506, 507, 508, 510, 511),
                raise_on_status=False
            )
            adapter = HTTPAdapter(max_retries=retry,
                                  pool_maxsize=np.sum(list(self._thread_pools_names.values())),
                                  pool_connections=np.sum(list(self._thread_pools_names.values())))
            self.session.mount('http://', adapter)
            self.session.mount('https://', adapter)
        resp = self.session.send(request=prepared, stream=stream, verify=self.verify, timeout=120)

        with threadLock:
            self.calls_counter.add()

        return resp
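The lazily-built session above is a standard urllib3 retry recipe: exponential backoff, retries regardless of HTTP verb, and an allow-list of server-error statuses. The same setup in isolation (pool sizes here are arbitrary examples, not the package's values):

# --- usage sketch (illustrative, not part of the package) ---
import requests
from requests.adapters import HTTPAdapter
from urllib3.util import Retry

session = requests.Session()
retry = Retry(total=5, read=5, connect=5,
              backoff_factor=1,            # exponential backoff between attempts
              allowed_methods=False,       # retry regardless of HTTP verb
              status_forcelist=(502, 503, 504),
              raise_on_status=False)
adapter = HTTPAdapter(max_retries=retry, pool_maxsize=32, pool_connections=32)
session.mount('http://', adapter)
session.mount('https://', adapter)
resp = session.get('https://example.com', timeout=120)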
    @staticmethod
    def check_proxy():
        """
        Verify that Dataloop URLs are not blocked by a proxy
        :return:
        """
        proxy_envs = ['HTTP', 'HTTPS', 'http', 'https']
        dataloop_urls = ['dev-gate.dataloop.ai',
                         'gate.dataloop.ai',
                         'dataloop-development.auth0.com',
                         'dataloop-production.auth0.com']
        if True in [env in os.environ for env in proxy_envs]:
            # a proxy is configured
            if True in [env in os.environ for env in ['no_proxy', 'NO_PROXY']]:
                # a no-proxy list exists
                if 'no_proxy' in os.environ:
                    # check if dataloop urls are in no_proxy
                    if True not in [url in os.environ['no_proxy'] for url in dataloop_urls]:
                        # no dataloop url exists in no_proxy
                        logger.warning('Proxy is used, make sure dataloop urls are in "no_proxy" environment variable')
                else:
                    # check if dataloop urls are in NO_PROXY
                    if True not in [url in os.environ['NO_PROXY'] for url in dataloop_urls]:
                        # no dataloop url exists in NO_PROXY
                        logger.warning('Proxy is used, make sure dataloop urls are in "no_proxy" environment variable')
            else:
                logger.warning('Proxy is used, make sure dataloop urls are in "no_proxy" environment variable')
    def token_expired(self, t=60):
        """
        Check token validity
        :param t: time-ahead interval in seconds
        """
        try:
            if self.token is None or self.token == '':
                expired = True
            else:
                payload = jwt.decode(self.token, algorithms=['HS256'],
                                     options={'verify_signature': False}, verify=False)
                d = datetime.datetime.now(datetime.timezone.utc)
                epoch = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
                now = (d - epoch).total_seconds()
                exp = payload['exp']
                if now < (exp - t):
                    expired = False
                else:
                    expired = True
        except jwt.exceptions.DecodeError:
            logger.exception('Invalid token.')
            expired = True
        except Exception:
            logger.exception('Unknown error:')
            expired = True
        if expired:
            if self.renew_token_method():
                expired = False
        return expired
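The expiry check decodes the JWT without verifying its signature: only the `exp` claim is needed client-side, and verification happens at the gate. A compact standalone equivalent using PyJWT:

# --- usage sketch (illustrative, not part of the package) ---
import datetime
import jwt

def is_expired(token: str, ahead_seconds: int = 60) -> bool:
    # decode claims only; signature verification is left to the server
    payload = jwt.decode(token, options={'verify_signature': False})
    now = datetime.datetime.now(datetime.timezone.utc).timestamp()
    return now >= payload['exp'] - ahead_seconds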
    @staticmethod
    def is_json_serializable(response):
        try:
            response_json = response.json()
            return True, response_json
        except ValueError:
            return False, None
    ##########
    # STDOUT #
    ##########
    def print_response(self, resp=None):
        """
        Print tabulated response
        :param resp: response from requests
        :return:
        """
        try:
            if resp is None:
                resp = self.last_response
            is_json_serializable, results = self.is_json_serializable(response=resp)
            if self.verbose.print_all_responses and is_json_serializable:
                if isinstance(results, dict):
                    to_print = miscellaneous.List([results])
                elif isinstance(results, list):
                    to_print = miscellaneous.List(results)
                else:
                    logger.debug("Unknown response type: {}. Can't print".format(type(results)))
                    return
                request_id = resp.headers.get('x-request-id', 'na')
                logger.debug('--- [Request] Start ---')
                logger.debug(self.print_request(req=resp.request, to_return=True))
                logger.debug('--- [Request] End ---')
                logger.debug('--- [Response][x-request-id:{}] Start ---'.format(request_id))
                to_print.print(show_all=False, level='debug')
                logger.debug('--- [Response][x-request-id:{}] End ---'.format(request_id))
        except Exception:
            logger.exception('Printing response from gate:')
    def print_bad_response(self, resp=None, log_error=True):
        """
        Print error from platform
        :param resp:
        :param log_error: print error log (use when trying a request more than once)
        :return:
        """
        if resp is None:
            resp = self.last_response
        msg = ''
        if hasattr(resp, 'status_code'):
            msg += '[Response <{val}>]'.format(val=resp.status_code)
        if hasattr(resp, 'reason'):
            msg += '[Reason: {val}]'.format(val=resp.reason)
        if hasattr(resp, 'text') and isinstance(resp.text, str):
            msg += '[Text: {val}]'.format(val=format_message(resp.text))

        request_id = resp.headers.get('x-request-id', 'na')
        logger.debug('--- [Request] Start ---')
        logger.debug(self.print_request(req=resp.request, to_return=True))
        logger.debug('--- [Request] End ---')
        logger.debug('--- [Response][x-request-id:{}] Start ---'.format(request_id))
        if log_error:
            logger.error(msg)
        else:
            logger.debug(msg)
        logger.debug('--- [Response][x-request-id:{}] End ---'.format(request_id))
        self.platform_exception = PlatformError(resp)
    def print_request(self, req=None, to_return=False, with_auth=False):
        """
        Print a request to the platform
        :param req:
        :param to_return: return the string instead of printing it
        :param with_auth: include the authorization header
        :return:
        """
        if not req:
            req = self.last_request

        headers = list()
        for k, v in req.headers.items():
            if k == 'authorization' and not with_auth:
                continue
            headers.append('{}: {}'.format(k, v))
        if hasattr(req, 'body'):
            body = req.body
        elif isinstance(req, aiohttp.RequestInfo):
            body = {'multipart': 'true'}
        else:
            body = dict()

        # remove secrets and passwords
        try:
            body = json.loads(body)
            if isinstance(body, dict):
                for key, value in body.items():
                    hide = any([field in key for field in ['secret', 'password']])
                    if hide:
                        body[key] = '*' * len(value)
        except Exception:
            pass

        msg = '{}\n{}\n{}'.format(
            req.method + ' ' + str(req.url),
            '\n'.join(headers),
            body,
        )
        if to_return:
            return msg
        else:
            print(msg)
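`print_request` masks secret-like body fields and skips the authorization header; the `last_curl` strings built in the request methods follow a similar recipe. Reconstructing a curl command from a prepared request, in isolation (URL and payload are placeholders):

# --- usage sketch (illustrative, not part of the package) ---
import requests

req = requests.Request(method='POST', url='https://example.com/api',
                       json={'name': 'demo'}).prepare()
headers = ' -H '.join('"{}: {}"'.format(k, v) for k, v in req.headers.items())
curl = "curl -X {method} -H {headers} -d '{data}' '{uri}'".format(
    method=req.method, headers=headers, data=req.body, uri=req.url)
print(curl)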
    ################
    # Environments #
    ################
    def setenv(self, env):
        """
        Set environment
        :param env:
        :return:
        """

        environments = self.environments
        if env.startswith('http'):
            if env not in environments.keys():
                msg = 'Unknown environment. Please add environment to SDK ("add_environment" method)'
                logger.error(msg)
                raise ConnectionError(msg)
        elif env == 'custom':
            custom_env = os.environ.get('DTLPY_CUSTOM_ENV', None)
            environment = json.loads(base64.b64decode(custom_env.encode()).decode())
            env = environment.pop('url')
            token = None
            if self.environments.get(env):
                token = self.environments[env].get('token', None)
            self.environments[env] = environment.get(env, environment)
            self.environments[env]['token'] = token
            verify_ssl = self.environments[env].get('verify_ssl', None)
            if verify_ssl is not None and isinstance(verify_ssl, str):
                self.environments[env]['verify_ssl'] = True if verify_ssl.lower() == 'true' else False
        else:
            matched_env = [env_url for env_url, env_dict in environments.items() if env_dict['alias'] == env]
            if len(matched_env) != 1:
                known_aliases = [env_dict['alias'] for env_url, env_dict in environments.items()]
                raise ConnectionError(
                    'Unknown platform environment: "{}". Known: {}'.format(env, ', '.join(known_aliases)))
            env = matched_env[0]
        if self.environment != env:
            self.environment = env
            self.__gate_url_for_requests = None
            # reset local token
            self._token = None
            self.refresh_token_active = True
        logger.info('Platform environment: {}'.format(self.environment))
        if self.token_expired():
            logger.info('Token expired, Please login.')
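In the custom branch, `DTLPY_CUSTOM_ENV` carries a base64-encoded JSON blob whose `url` key becomes the environment key. Building such a value by hand might look like the following; the fields beyond `url` are assumptions based only on what `setenv` reads back:

# --- usage sketch (illustrative, not part of the package) ---
import base64
import json
import os

custom = {'url': 'https://my-gate.example.com',
          'alias': 'custom',        # assumed field, used by alias matching
          'verify_ssl': 'true'}     # parsed back into a bool by setenv
os.environ['DTLPY_CUSTOM_ENV'] = base64.b64encode(json.dumps(custom).encode()).decode()
client.setenv('custom')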
    ##########
    # Log in #
    ##########
    def login_secret(self, email, password, client_id, client_secret=None, force=False):
        """
        Login with email and password from environment variables.
        If already logged in with the same user, login will NOT happen. See "force".

        :param email: user email
        :param password: user password
        :param client_id: auth0 client id
        :param client_secret: secret that matches the client id
        :param force: force login, for getting a new JWT while the same user is logged in
        :return:
        """
        logger.warning('dl.login_secret is deprecated. Please use dl.login_m2m instead.')
        return login_secret(api_client=self,
                            email=email,
                            password=password,
                            client_id=client_id,
                            client_secret=client_secret,
                            force=force)
    def login_m2m(self, email, password, client_id=None, client_secret=None, force=False):
        """
        Login with email and password from environment variables
        :param email: user email. If already logged in with the same user, login will NOT happen. See "force"
        :param password: user password
        :param client_id:
        :param client_secret:
        :param force: force login, for getting a new JWT while the same user is logged in
        :return:
        """
        res = login_m2m(api_client=self,
                        email=email,
                        password=password,
                        client_id=client_id,
                        client_secret=client_secret,
                        force=force)
        if res:
            pass  # post-login hook not captured in this diff rendering
        return res
    def login_token(self, token):
        """
        Login using an existing token
        :param token: a valid token
        :return:
        """
        current_token = self.token
        self.token = token
        success, response = self.gen_request(req_type='get', path='/users/me')
        if not response.ok:
            # switch back to the previous token
            self.token = current_token
            raise ValueError(f"Invalid API key provided. Error: {response.text}")
    def login_api_key(self, api_key):
        """
        Login using an API key
        :param api_key: a valid API key
        :return:
        """
        current_token = self.token
        self.token = api_key
        success, response = self.gen_request(req_type='get', path='/users/me')
        if not response.ok:
            # switch back to the previous token
            self.token = current_token
            raise ValueError(f"Invalid API key provided. Error: {response.text}")
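Both token-based logins share one validation flow: swap the token in, probe `/users/me`, and roll back on failure. Typical calls against the module-level client (all values are placeholders):

# --- usage sketch (illustrative, not part of the package) ---
client.login_token('<jwt copied from the platform>')       # raises ValueError if rejected
client.login_api_key('<api key>')                          # same probe against /users/me
client.login_m2m(email='bot@example.com', password='...')  # machine-to-machine login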
    @property
    def login_domain(self):
        if self._login_domain is None:
            self._login_domain = self.environments[self.environment].get('login_domain', None)
        return self._login_domain

    @login_domain.setter
    def login_domain(self, domain: str):
        if domain is not None and not isinstance(domain, str):
            raise exceptions.PlatformException('400', 'domain should be a string value')
        self._login_domain = domain
        self.environments[self.environment]['login_domain'] = domain
        self.cookie_io.put('login_parameters', self.environments)
    def login(self, audience=None, auth0_url=None, client_id=None, callback_port=None):
        """
        Login using Auth0.
        :return:
        """
        res = login(
            api_client=self,
            audience=audience,
            auth0_url=auth0_url,
            client_id=client_id,
            login_domain=self.login_domain,
            callback_port=callback_port
        )
        if res:
            pass  # post-login hook not captured in this diff rendering
        return res

    def _send_login_event(self, user_type, login_type):
        # method body not captured in this diff rendering
        pass
    def logout(self):
        """
        Logout.
        :return:
        """
        return logout(api_client=self)

    def _renew_token_in_dual_agent(self):
        renewed = False
        try:
            proxy_port = os.environ.get('AGENT_PROXY_MAIN_PORT') or "1001"
            resp = requests.get('http://localhost:{port}/get_jwt'.format(port=proxy_port))
            if resp.ok:
                self.token = resp.json()['jwt']
                renewed = True
            else:
                self.print_bad_response(resp)
        except Exception:
            logger.exception('Failed to get token from proxy')

        return renewed
    def renew_token(self):
        refresh_method = os.environ.get('DTLPY_REFRESH_TOKEN_METHOD', None)
        if refresh_method is not None and refresh_method == 'proxy':
            res = self._renew_token_in_dual_agent()
        else:
            res = self._renew_token_with_refresh_token()
        if res:
            pass  # post-renewal hook not captured in this diff rendering
        return res
    def generate_api_key(self, description: str = None, login: bool = False):
        """
        Generate an API key for a user
        :param description: description for the API key
        :param login: if True, login with the new API key
        :return: user token
        """
        user_email = self.info()['user_email']
        payload = {
            'userId': user_email
        }
        if description:
            if not isinstance(description, str):
                raise ValueError('description should be a string')
            payload['description'] = description
        success, response = self.gen_request(req_type='post', path='/apiKeys', json_req=payload)
        if not success:
            raise exceptions.PlatformException(response)
        if login:
            self.login_api_key(response.json()['jwt'])
            return True

        return response.json()['jwt']
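Generating a key and immediately switching the session to it is a one-liner; the returned JWT can also be stored for later `login_api_key` calls. For example:

# --- usage sketch (illustrative, not part of the package) ---
api_key = client.generate_api_key(description='ci-runner key')
# or create the key and switch the current session to it in one call:
client.generate_api_key(description='ci-runner key', login=True)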
    def _renew_token_with_refresh_token(self):
        renewed = False
        if self.refresh_token_active is False:
            return renewed
        logger.debug('RefreshToken: Started')
        if self.token is None or self.token == '':
            # token is missing
            logger.debug('RefreshToken: Missing token.')
            self.refresh_token_active = False
        if self.refresh_token is None or self.refresh_token == '':
            # missing refresh token
            logger.debug('RefreshToken: Missing "refresh_token"')
            self.refresh_token_active = False
        if self.environment not in self.environments.keys():
            # env params missing
            logger.debug('RefreshToken: Missing environments params for refreshing token')
            self.refresh_token_active = False

        if self.refresh_token_active is False:
            return renewed

        refresh_token = self.refresh_token

        env_params = self.environments[self.environment]
        if 'gate_url' not in env_params:
            env_params['gate_url'] = gate_url_from_host(environment=self.environment)
            self.environments[self.environment] = env_params
        token_endpoint = "{}/token?default".format(env_params['gate_url'])

        payload = {
            'type': 'refresh_token',
            'refresh_token': refresh_token
        }
        logger.debug("RefreshToken: Refreshing token via {}".format(token_endpoint))
        resp = requests.request(
            "POST",
            token_endpoint,
            json=payload,
            headers={'content-type': 'application/json'},
            verify=self.verify
        )
        if not resp.ok:
            logger.debug('RefreshToken: Failed')
            self.print_bad_response(resp)
        else:
            response_dict = resp.json()
            # get new token
            final_token = response_dict['id_token']
            self.token = final_token
            self.refresh_token = refresh_token
            # set status back to pending
            logger.debug('RefreshToken: Success')
            renewed = True
        return renewed
    def set_api_counter(self, filepath):
        self.calls_counter = CallsCounter(filepath=filepath)
    def _get_resource_url(self, url):

        env = self._environments[self._environment]['alias']
        head = self._environments[self._environment].get('url', None)
        # TODO: the following fallback table needs to be deprecated somehow
        if head is None:
            if env == 'prod':
                head = 'https://console.dataloop.ai/'
            elif env == 'dev':
                head = 'https://dev-con.dataloop.ai/'
            elif env == 'rc':
                head = 'https://rc-con.dataloop.ai/'
            elif env in ['local', 'minikube_local_mac']:
                head = 'https://localhost:8443/'
            elif env == 'new-dev':
                head = 'https://custom1-gate.dataloop.ai/'
            else:
                raise exceptions.PlatformException(error='400', message='Unknown environment: {}'.format(env))

        return head + url

    def _open_in_web(self, url):
        import webbrowser
        webbrowser.open(url=url, new=2, autoraise=True)
client = ApiClient()

# ----- new version (added lines) -----
"""
|
|
2
|
+
Dataloop platform calls
|
|
3
|
+
"""
|
|
4
|
+
import aiohttp.client_exceptions
|
|
5
|
+
import requests_toolbelt
|
|
6
|
+
import multiprocessing
|
|
7
|
+
import threading
|
|
8
|
+
import traceback
|
|
9
|
+
import datetime
|
|
10
|
+
import requests
|
|
11
|
+
import aiohttp
|
|
12
|
+
import logging
|
|
13
|
+
import asyncio
|
|
14
|
+
import certifi
|
|
15
|
+
import base64
|
|
16
|
+
import enum
|
|
17
|
+
import time
|
|
18
|
+
import tqdm
|
|
19
|
+
import json
|
|
20
|
+
import sys
|
|
21
|
+
import ssl
|
|
22
|
+
import jwt
|
|
23
|
+
import os
|
|
24
|
+
import io
|
|
25
|
+
import concurrent
|
|
26
|
+
from concurrent.futures import ThreadPoolExecutor
|
|
27
|
+
from requests.adapters import HTTPAdapter
|
|
28
|
+
from urllib3.util import Retry
|
|
29
|
+
from functools import wraps
|
|
30
|
+
import numpy as np
|
|
31
|
+
import inspect
|
|
32
|
+
from requests.models import Response
|
|
33
|
+
from dtlpy.caches.cache import CacheManger, CacheConfig
|
|
34
|
+
from .calls_counter import CallsCounter
|
|
35
|
+
from .cookie import CookieIO
|
|
36
|
+
from .logins import login, logout, login_secret, login_m2m, gate_url_from_host
|
|
37
|
+
from .async_utils import AsyncResponse, AsyncUploadStream, AsyncResponseError, AsyncThreadEventLoop
|
|
38
|
+
# from .events import Events
|
|
39
|
+
from .service_defaults import DEFAULT_ENVIRONMENTS, DEFAULT_ENVIRONMENT
|
|
40
|
+
from .aihttp_retry import RetryClient
|
|
41
|
+
from .. import miscellaneous, exceptions, __version__
|
|
42
|
+
|
|
43
|
+
logger = logging.getLogger(name='dtlpy')
threadLock = threading.Lock()


def format_message(message):
    if message and isinstance(message, str):
        return message.replace('\\n', '\n')
    return message
class VerboseLoggingLevel:
    DEBUG = "debug"
    INFO = "info"
    WARNING = "warning"
    ERROR = "error"
    CRITICAL = "critical"
class PlatformError(Exception):
    """
    Error handling for api calls
    """

    def __init__(self, resp):
        msg = ''
        if hasattr(resp, 'status_code'):
            msg += '<Response [{}]>'.format(resp.status_code)
        if hasattr(resp, 'reason'):
            msg += '<Reason [{}]>'.format(format_message(resp.reason))
        elif hasattr(resp, 'text'):
            msg += '<Reason [{}]>'.format(format_message(resp.text))
        super().__init__(msg)
class Callbacks:
    def __init__(self):
        self._callbacks = {}

    class CallbackEvent(str, enum.Enum):
        DATASET_EXPORT = 'datasetExport'
        ITEMS_UPLOAD = 'itemUpload'

    def add(self, event, func):
        if not callable(func):
            raise ValueError(f"The provided callback for {event} is not callable")
        if event not in list(self.CallbackEvent):
            raise ValueError(f"Unknown event: {event!r}, allowed events are: {list(self.CallbackEvent)}")
        self._callbacks[event] = func

    def get(self, name):
        return self._callbacks.get(name)

    def run_on_event(self, event, context, progress):
        callback = self.get(event)
        if callback is not None:
            callback(progress=progress, context=context)
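Registration is validated twice: the function must be callable and the event must be a known `CallbackEvent`. A minimal registration, where the `(progress, context)` signature comes from `run_on_event` above:

# --- usage sketch (illustrative, not part of the package) ---
def on_export(progress, context):
    print('export progress:', progress, 'context:', context)

client.callbacks.add(event=Callbacks.CallbackEvent.DATASET_EXPORT, func=on_export)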
class Verbose:
    __DEFAULT_LOGGING_LEVEL = 'warning'
    __DEFAULT_DISABLE_PROGRESS_BAR = False
    __DEFAULT_PRINT_ALL_RESPONSES = False
    __PRINT_ERROR_LOGS = False
    __DEFAULT_PROGRESS_BAR_SETTINGS = {
        'Iterate Pages': False,
        'Command Progress': False,
        'Download Dataset': False,
        'Download Item': False,
        'Upload Items': False,
        'Download Annotations': False,
        'Upload Annotations': False,
        'Convert Annotations': False
    }

    def __init__(self, cookie):
        self.cookie = cookie
        dictionary = self.cookie.get('verbose')
        if isinstance(dictionary, dict):
            self.from_cookie(dictionary)
        else:
            self._logging_level = self.__DEFAULT_LOGGING_LEVEL
            self._disable_progress_bar = self.__DEFAULT_DISABLE_PROGRESS_BAR
            self._print_all_responses = self.__DEFAULT_PRINT_ALL_RESPONSES
            self._print_error_logs = self.__PRINT_ERROR_LOGS
            self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
            if os.getenv('DTLPY_REFRESH_TOKEN_METHOD', "") == "proxy":
                self._print_error_logs = True
            self.to_cookie()

    def to_cookie(self):
        dictionary = {'logging_level': self._logging_level,
                      'disable_progress_bar': self._disable_progress_bar,
                      'print_all_responses': self._print_all_responses,
                      'print_error_logs': self._print_error_logs,
                      'progress_bar_setting': json.dumps(self._progress_bar_settings)
                      }
        self.cookie.put(key='verbose', value=dictionary)

    def from_cookie(self, dictionary):
        self._logging_level = dictionary.get('logging_level', self.__DEFAULT_LOGGING_LEVEL)
        self._disable_progress_bar = dictionary.get('disable_progress_bar', self.__DEFAULT_DISABLE_PROGRESS_BAR)
        self._print_all_responses = dictionary.get('print_all_responses', self.__DEFAULT_PRINT_ALL_RESPONSES)
        self._print_error_logs = dictionary.get('print_error_logs', self.__PRINT_ERROR_LOGS)
        progress_bar_settings = dictionary.get('progress_bar_setting', None)
        if progress_bar_settings is None:
            self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
        else:
            self._progress_bar_settings = json.loads(progress_bar_settings)

    @property
    def disable_progress_bar_iterate_pages(self):
        return self._disable_progress_bar or self._progress_bar_settings.get('Iterate Pages', False)

    @disable_progress_bar_iterate_pages.setter
    def disable_progress_bar_iterate_pages(self, val):
        self._progress_bar_settings['Iterate Pages'] = val
        self.to_cookie()

    @property
    def disable_progress_bar_command_progress(self):
        return self._disable_progress_bar or self._progress_bar_settings.get('Command Progress', False)

    @disable_progress_bar_command_progress.setter
    def disable_progress_bar_command_progress(self, val):
        self._progress_bar_settings['Command Progress'] = val
        self.to_cookie()

    @property
    def disable_progress_bar_download_item(self):
        return self._disable_progress_bar or self._progress_bar_settings.get('Download Item', False)

    @disable_progress_bar_download_item.setter
    def disable_progress_bar_download_item(self, val):
        self._progress_bar_settings['Download Item'] = val
        self.to_cookie()

    @property
    def disable_progress_bar_download_dataset(self):
        return self._disable_progress_bar or self._progress_bar_settings.get('Download Dataset', False)

    @disable_progress_bar_download_dataset.setter
    def disable_progress_bar_download_dataset(self, val):
        self._progress_bar_settings['Download Dataset'] = val
        self.to_cookie()

    @property
    def disable_progress_bar_upload_items(self):
        return self._disable_progress_bar or self._progress_bar_settings.get('Upload Items', False)

    @disable_progress_bar_upload_items.setter
    def disable_progress_bar_upload_items(self, val):
        self._progress_bar_settings['Upload Items'] = val
        self.to_cookie()

    @property
    def disable_progress_bar_download_annotations(self):
        return self._disable_progress_bar or self._progress_bar_settings.get('Download Annotations', False)

    @disable_progress_bar_download_annotations.setter
    def disable_progress_bar_download_annotations(self, val):
        self._progress_bar_settings['Download Annotations'] = val
        self.to_cookie()

    @property
    def disable_progress_bar_upload_annotations(self):
        return self._disable_progress_bar or self._progress_bar_settings.get('Upload Annotations', False)

    @disable_progress_bar_upload_annotations.setter
    def disable_progress_bar_upload_annotations(self, val):
        self._progress_bar_settings['Upload Annotations'] = val
        self.to_cookie()

    @property
    def disable_progress_bar_convert_annotations(self):
        return self._disable_progress_bar or self._progress_bar_settings.get('Convert Annotations', False)

    @disable_progress_bar_convert_annotations.setter
    def disable_progress_bar_convert_annotations(self, val):
        self._progress_bar_settings['Convert Annotations'] = val
        self.to_cookie()

    @property
    def disable_progress_bar(self):
        return self._disable_progress_bar

    @disable_progress_bar.setter
    def disable_progress_bar(self, val):
        self._disable_progress_bar = val
        self.to_cookie()

    @property
    def logging_level(self):
        return self._logging_level

    @logging_level.setter
    def logging_level(self, val):
        self._logging_level = val
        # set log level
        logging.getLogger(name='dtlpy').handlers[0].setLevel(logging._nameToLevel[self._logging_level.upper()])
        # write to cookie
        self.to_cookie()

    @property
    def print_all_responses(self):
        return self._print_all_responses

    @print_all_responses.setter
    def print_all_responses(self, val):
        self._print_all_responses = val
        self.to_cookie()

    @property
    def print_error_logs(self):
        return self._print_error_logs

    @print_error_logs.setter
    def print_error_logs(self, val):
        self._print_error_logs = val
        self.to_cookie()
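`Verbose` and the `CacheMode`, `SDKCache`, and `Attributes2` classes below all follow one pattern: load state from a cookie section on construction, write the whole section back on every setter so other processes pick up the change. The skeleton, reduced to a single field (all names here are illustrative):

# --- pattern sketch (illustrative, not part of the package) ---
class CookieBackedSetting:
    __DEFAULT_VALUE = False

    def __init__(self, cookie):
        self.cookie = cookie
        stored = self.cookie.get('my_setting')
        if isinstance(stored, dict):
            self._value = stored.get('value', self.__DEFAULT_VALUE)
        else:
            self._value = self.__DEFAULT_VALUE
        self.to_cookie()

    def to_cookie(self):
        self.cookie.put(key='my_setting', value={'value': self._value})

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, val):
        self._value = val
        self.to_cookie()  # persist immediately so new processes see it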
class CacheMode:
    __DEFAULT_ENABLE_CACHE = True
    __DEFAULT_CHUNK_CACHE = 200000

    def __init__(self, cookie):
        self.cookie = cookie
        dictionary = self.cookie.get('cache_mode')
        if isinstance(dictionary, dict):
            self.from_cookie(dictionary)
        else:
            self._enable_cache = self.__DEFAULT_ENABLE_CACHE
            self._chunk_cache = self.__DEFAULT_CHUNK_CACHE
            self.to_cookie()

    def to_cookie(self):
        dictionary = {'enable_cache': self._enable_cache,
                      'chunk_cache': self._chunk_cache}
        self.cookie.put(key='cache_mode', value=dictionary)

    def from_cookie(self, dictionary):
        self._enable_cache = dictionary.get('enable_cache', self.__DEFAULT_ENABLE_CACHE)
        self._chunk_cache = dictionary.get('chunk_cache', self.__DEFAULT_CHUNK_CACHE)

    @property
    def enable_cache(self):
        return self._enable_cache

    @enable_cache.setter
    def enable_cache(self, val: bool):
        if not isinstance(val, bool):
            raise exceptions.PlatformException(error=400,
                                               message="input must be of type bool")
        self._enable_cache = val
        self.to_cookie()

    @property
    def chunk_cache(self):
        return self._chunk_cache

    @chunk_cache.setter
    def chunk_cache(self, val):
        self._chunk_cache = val
        self.to_cookie()
class SDKCache:
    __DEFAULT_USE_CACHE = False
    __DEFAULT_CACHE_PATH = os.path.join(os.path.expanduser('~'), '.dataloop', 'obj_cache')
    __DEFAULT_CACHE_PATH_BIN = os.path.join(os.path.expanduser('~'), '.dataloop')
    __DEFAULT_CONFIGS_CACHE = CacheConfig().to_string()
    __DEFAULT_BINARY_CACHE_SIZE = 1000

    def __init__(self, cookie):
        self.cookie = cookie
        dictionary = self.cookie.get('cache_configs')
        if isinstance(dictionary, dict):
            self.from_cookie(dictionary)
        else:
            self._cache_path = self.__DEFAULT_CACHE_PATH
            self._cache_path_bin = self.__DEFAULT_CACHE_PATH_BIN
            self._configs = self.__DEFAULT_CONFIGS_CACHE
            self._bin_size = self.__DEFAULT_BINARY_CACHE_SIZE
            self._use_cache = self.__DEFAULT_USE_CACHE
            self.to_cookie()

    def to_cookie(self):
        dictionary = {'cache_path': self._cache_path,
                      'cache_path_bin': self._cache_path_bin,
                      'configs': self._configs,
                      'bin_size': self._bin_size,
                      'use_cache': self._use_cache}
        self.cookie.put(key='cache_configs', value=dictionary)

    def from_cookie(self, dictionary):
        self._cache_path = dictionary.get('cache_path', self.__DEFAULT_CACHE_PATH)
        self._cache_path_bin = dictionary.get('cache_path_bin', self.__DEFAULT_CACHE_PATH_BIN)
        self._configs = dictionary.get('configs', self.__DEFAULT_CONFIGS_CACHE)
        self._bin_size = dictionary.get('bin_size', self.__DEFAULT_BINARY_CACHE_SIZE)
        self._use_cache = dictionary.get('use_cache', self.__DEFAULT_USE_CACHE)

    @property
    def cache_path(self):
        return self._cache_path

    @property
    def cache_path_bin(self):
        return self._cache_path_bin

    @cache_path_bin.setter
    def cache_path_bin(self, val: str):
        if not isinstance(val, str):
            raise exceptions.PlatformException(error=400,
                                               message="input must be of type str")
        self._cache_path_bin = val
        os.environ['DEFAULT_CACHE_PATH'] = val
        self.to_cookie()

    @property
    def use_cache(self):
        return self._use_cache

    @use_cache.setter
    def use_cache(self, val: bool):
        if not isinstance(val, bool):
            raise exceptions.PlatformException(error=400,
                                               message="input must be of type bool")
        self._use_cache = val
        self.to_cookie()

    @property
    def configs(self):
        return self._configs

    @configs.setter
    def configs(self, val):
        if isinstance(val, CacheConfig):
            val = val.to_string()
        if not isinstance(val, str):
            raise exceptions.PlatformException(error=400,
                                               message="input must be of type str or CacheConfig")
        self._configs = val
        self.to_cookie()

    @property
    def bin_size(self):
        return self._bin_size

    @bin_size.setter
    def bin_size(self, val: int):
        if not isinstance(val, int):
            raise exceptions.PlatformException(error=400,
                                               message="input must be of type int")
        self._bin_size = val
        self.to_cookie()
class Attributes2:
    __DEFAULT_USE_ATTRIBUTE = False

    def __init__(self, cookie):
        self.cookie = cookie
        dictionary = self.cookie.get('use_attributes_2')
        if isinstance(dictionary, dict):
            self.from_cookie(dictionary)
        else:
            self._use_attributes_2 = self.__DEFAULT_USE_ATTRIBUTE
            self.to_cookie()

    def to_cookie(self):
        dictionary = {'use_attributes_2': self._use_attributes_2}
        self.cookie.put(key='use_attributes_2', value=dictionary)

    def from_cookie(self, dictionary):
        self._use_attributes_2 = dictionary.get('use_attributes_2', self.__DEFAULT_USE_ATTRIBUTE)

    @property
    def use_attributes_2(self):
        return self._use_attributes_2

    @use_attributes_2.setter
    def use_attributes_2(self, val: bool):
        if not isinstance(val, bool):
            raise exceptions.PlatformException(error=400,
                                               message="input must be of type bool")
        self._use_attributes_2 = val
        os.environ["USE_ATTRIBUTE_2"] = json.dumps(val)
        self.to_cookie()
class Decorators:
    @staticmethod
    def token_expired_decorator(method):
        @wraps(method)
        def decorated_method(inst, *args, **kwargs):
            # save the calling frame for event reporting
            frm = inspect.stack()[1]

            # before the method call
            kwargs.update({'stack': frm})
            if inst.token_expired():
                if inst.renew_token_method() is False:
                    raise exceptions.PlatformException('600', 'Token expired, Please login.'
                                                              '\nSDK login options: dl.login(), dl.login_token(), '
                                                              'dl.login_m2m()'
                                                              '\nCLI login options: dlp login, dlp login-token, '
                                                              'dlp login-m2m')
            # the actual method call
            result = method(inst, *args, **kwargs)
            # after the method call
            return result

        return decorated_method
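The decorator wraps every API-calling method so an expired token triggers one silent renewal attempt before the call proceeds. The same shape in a self-contained sketch (names are illustrative):

# --- pattern sketch (illustrative, not part of the package) ---
from functools import wraps

def ensure_fresh_token(method):
    @wraps(method)                  # keep the wrapped method's name and docstring
    def decorated(inst, *args, **kwargs):
        if inst.token_expired():    # token_expired() itself already tries a renewal
            if inst.renew_token_method() is False:
                raise RuntimeError('Token expired, please login.')
        return method(inst, *args, **kwargs)
    return decorated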
class ApiClient:
    """
    API calls to Dataloop gate
    """

    def __init__(self, token=None, num_processes=None, cookie_filepath=None):
        ############
        # Initiate #
        ############
        # define local params - read only once from cookie file
        self.lock = threading.Lock()
        self.renew_token_method = self.renew_token
        self.is_cli = False
        self.session = None
        self.default_headers = dict()
        self._token = None
        self._environments = None
        self._environment = None
        self._verbose = None
        self._callbacks = None
        self._cache_state = None
        self._attributes_mode = None
        self._cache_configs = None
        self._sdk_cache = None
        self._fetch_entities = None
        # define other params
        self.last_response = None
        self.last_request = None
        self.platform_exception = None
        self.last_curl = None
        self.minimal_print = True
        # start refresh token
        self.refresh_token_active = True
        # event and pools
        self._thread_pools = dict()
        self._event_loop = None
        self._login_domain = None
        self.__gate_url_for_requests = None

        # TODO: remove before release - only for debugging
        self._stopped_pools = list()

        if cookie_filepath is None:
            self.cookie_io = CookieIO.init()
        else:
            self.cookie_io = CookieIO(path=cookie_filepath)
        assert isinstance(self.cookie_io, CookieIO)
        self.state_io = CookieIO.init_local_cookie(create=False)
        assert isinstance(self.state_io, CookieIO)

        ##################
        # configurations #
        ##################
        # check for proxies in connection
        self.check_proxy()

        # set token if input
        if token is not None:
            self.token = token

        # STDOUT
        self.remove_keys_list = ['contributors', 'url', 'annotations', 'items', 'export', 'directoryTree',
                                 'attributes', 'partitions', 'metadata', 'stream', 'createdAt', 'updatedAt', 'arch']

        # API calls counter
        counter_filepath = os.path.join(os.path.dirname(self.cookie_io.COOKIE), 'calls_counter.json')
        self.calls_counter = CallsCounter(filepath=counter_filepath)

        # create a global thread pool to run multi threading
        if num_processes is None:
            num_processes = 3 * multiprocessing.cpu_count()
        self._num_processes = num_processes
        self._thread_pools_names = {'item.download': num_processes,
                                    'item.status_update': num_processes,
                                    'item.page': num_processes,
                                    'annotation.upload': num_processes,
                                    'annotation.download': num_processes,
                                    'annotation.update': num_processes,
                                    'entity.create': num_processes,
                                    'dataset.download': num_processes}
        # set logging level
        logging.getLogger(name='dtlpy').handlers[0].setLevel(logging._nameToLevel[self.verbose.logging_level.upper()])
        os.environ["USE_ATTRIBUTE_2"] = json.dumps(self.attributes_mode.use_attributes_2)

        self.cache = None
        #######################
        # start event tracker #
        # self.event_tracker = Events(client_api=self)
        # self.event_tracker.daemon = True
        # self.event_tracker.start()
        self.upload_session_timeout = int(os.environ.get('UPLOAD_SESSION_TIMEOUT', 0))
        self.upload_chunk_timeout = int(os.environ.get('UPLOAD_CHUNK_TIMEOUT', 2 * 60))
    @property
    def event_loop(self):
        self.lock.acquire()
        if self._event_loop is None:
            self._event_loop = self.create_event_loop_thread()
        elif not self._event_loop.loop.is_running():
            if self._event_loop.is_alive():
                self._event_loop.stop()
            self._event_loop = self.create_event_loop_thread()
        self.lock.release()
        return self._event_loop
    def build_cache(self, cache_config=None):
        if cache_config is None:
            cache_config_json = os.environ.get('CACHE_CONFIG', None)
            if cache_config_json is None:
                if self.sdk_cache.use_cache:
                    cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=self.sdk_cache.configs)
            else:
                cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=cache_config_json)
        if cache_config:
            # cache paths
            if os.environ.get('DEFAULT_CACHE_PATH', None) is None:
                os.environ['DEFAULT_CACHE_PATH'] = self.sdk_cache.cache_path_bin
            else:
                self.sdk_cache.cache_path_bin = os.environ['DEFAULT_CACHE_PATH']

            if not os.path.isdir(self.sdk_cache.cache_path_bin):
                os.makedirs(self.sdk_cache.cache_path_bin, exist_ok=True)

            if not os.path.isfile(os.path.join(self.sdk_cache.cache_path_bin, 'cacheConfig.json')):
                os.makedirs(self.sdk_cache.cache_path_bin, exist_ok=True)

            if isinstance(cache_config, str):
                self.sdk_cache.configs = cache_config
                cache_config = CacheConfig.from_string(cls=CacheConfig, base64_string=cache_config)
            elif isinstance(cache_config, CacheConfig):
                self.sdk_cache.configs = cache_config.to_string()
            else:
                raise Exception("config should be of type str or CacheConfig")
            try:
                self.cache = CacheManger(cache_configs=[cache_config], bin_cache_size=self.sdk_cache.bin_size)
                self.cache.ping()
                self.sdk_cache.use_cache = True
            except Exception as e:
                logger.warning("Cache build error {}".format(e))
                self.cache = None
    def __del__(self):
        for name, pool in self._thread_pools.items():
            pool.shutdown()
        self.event_loop.stop()

    def _build_request_headers(self, headers=None):
        if headers is None:
            headers = dict()
        if not isinstance(headers, dict):
            raise exceptions.PlatformException(
                error='400',
                message="Input 'headers' must be a dictionary, got: {}".format(type(headers)))
        headers.update(self.default_headers)
        headers.update(self.auth)
        headers.update({'User-Agent': requests_toolbelt.user_agent('dtlpy', __version__)})
        return headers
    @property
    def num_processes(self):
        return self._num_processes

    @num_processes.setter
    def num_processes(self, num_processes):
        if num_processes == self._num_processes:
            # same number, no need to do anything
            return
        self._num_processes = num_processes
        for pool_name in self._thread_pools_names:
            self._thread_pools_names[pool_name] = num_processes

        for pool in self._thread_pools:
            self._thread_pools[pool].shutdown()
        self._thread_pools = dict()
    def create_event_loop_thread(self):
        loop = asyncio.new_event_loop()
        event_loop = AsyncThreadEventLoop(loop=loop,
                                          n=self._num_processes)
        event_loop.daemon = True
        event_loop.start()
        time.sleep(1)
        return event_loop

    def thread_pools(self, pool_name):
        if pool_name not in self._thread_pools_names:
            raise ValueError('unknown thread pool name: {}. known names: {}'.format(
                pool_name,
                list(self._thread_pools_names.keys())))
        num_processes = self._thread_pools_names[pool_name]
        if pool_name not in self._thread_pools or self._thread_pools[pool_name]._shutdown:
            self._thread_pools[pool_name] = ThreadPoolExecutor(max_workers=num_processes)
        pool = self._thread_pools[pool_name]
        assert isinstance(pool, concurrent.futures.ThreadPoolExecutor)
        return pool
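`thread_pools` keeps one named `ThreadPoolExecutor` per workload and transparently rebuilds any pool that was shut down. The core of that pattern, standalone (pool names and sizes are examples; the `_shutdown` check mirrors the private attribute used above):

# --- pattern sketch (illustrative, not part of the package) ---
from concurrent.futures import ThreadPoolExecutor

_pools = {}
_pool_sizes = {'item.download': 8, 'annotation.upload': 8}

def get_pool(name: str) -> ThreadPoolExecutor:
    if name not in _pool_sizes:
        raise ValueError('unknown thread pool name: {}'.format(name))
    # recreate the pool if it was never built or has been shut down
    if name not in _pools or _pools[name]._shutdown:
        _pools[name] = ThreadPoolExecutor(max_workers=_pool_sizes[name])
    return _pools[name]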
    @property
    def verify(self):
        environments = self.environments
        verify = True
        if self.environment in environments:
            if 'verify_ssl' in environments[self.environment]:
                verify = environments[self.environment]['verify_ssl']
        return verify

    @property
    def use_ssl_context(self):
        environments = self.environments
        use_ssl_context = False
        if self.environment in environments:
            if 'use_ssl_context' in environments[self.environment]:
                use_ssl_context = environments[self.environment]['use_ssl_context']
        return use_ssl_context

    @property
    def auth(self):
        return {'authorization': 'Bearer ' + self.token}
    @property
    def environment(self):
        _environment = self._environment
        if _environment is None:
            _environment = self.cookie_io.get('url')
            if _environment is None:
                _environment = DEFAULT_ENVIRONMENT
            self._environment = _environment
        return _environment

    @environment.setter
    def environment(self, env):
        self._environment = env
        self.cookie_io.put('url', env)

    @property
    def fetch_entities(self):
        if self._fetch_entities is None:
            self._fetch_entities = self.cookie_io.get('fetch_entities')
            if self._fetch_entities is None:
                self.fetch_entities = True  # default
        return self._fetch_entities

    @fetch_entities.setter
    def fetch_entities(self, val):
        self._fetch_entities = val
        self.cookie_io.put('fetch_entities', val)
@property
|
|
706
|
+
def environments(self):
|
|
707
|
+
"""
|
|
708
|
+
List of known environments
|
|
709
|
+
:return:
|
|
710
|
+
"""
|
|
711
|
+
# get environment login parameters
|
|
712
|
+
_environments = self._environments
|
|
713
|
+
if _environments is None:
|
|
714
|
+
# take from cookie
|
|
715
|
+
_environments = self.cookie_io.get('login_parameters')
|
|
716
|
+
# if cookie is None - init with defaults
|
|
717
|
+
if _environments is None:
|
|
718
|
+
# default
|
|
719
|
+
_environments = DEFAULT_ENVIRONMENTS
|
|
720
|
+
# save to local variable
|
|
721
|
+
self.environments = _environments
|
|
722
|
+
else:
|
|
723
|
+
# save from cookie to ram
|
|
724
|
+
self._environments = _environments
|
|
725
|
+
return _environments
|
|
726
|
+
|
|
727
|
+
@environments.setter
|
|
728
|
+
def environments(self, env_dict):
|
|
729
|
+
self._environments = env_dict
|
|
730
|
+
self.cookie_io.put(key='login_parameters', value=self._environments)
|
|
731
|
+
|
|
732
|
+
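Most of the properties in this class follow the same three-tier lookup: in-memory value first, then the on-disk cookie file, then a hard-coded default, with setters writing through to the cookie. A minimal sketch of the pattern, using a plain dict as a stand-in for CookieIO (the class and default URL are illustrative):

class CookieBacked:
    DEFAULT_URL = 'https://gate.example.com'  # hypothetical default

    def __init__(self, cookie):
        self._cookie = cookie      # persisted key/value store
        self._environment = None   # in-memory copy

    @property
    def environment(self):
        if self._environment is None:
            self._environment = self._cookie.get('url') or self.DEFAULT_URL
        return self._environment

    @environment.setter
    def environment(self, env):
        self._environment = env
        self._cookie['url'] = env  # write-through so the value survives restarts

c = CookieBacked(cookie={})
c.environment = 'https://rc-gate.example.com'
print(c.environment)
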
    @property
    def verbose(self):
        if self._verbose is None:
            self._verbose = Verbose(cookie=self.cookie_io)
        assert isinstance(self._verbose, Verbose)
        return self._verbose

    @property
    def cache_state(self):
        if self._cache_state is None:
            self._cache_state = CacheMode(cookie=self.cookie_io)
        assert isinstance(self._cache_state, CacheMode)
        return self._cache_state

    @property
    def attributes_mode(self):
        if self._attributes_mode is None:
            self._attributes_mode = Attributes2(cookie=self.cookie_io)
        assert isinstance(self._attributes_mode, Attributes2)
        return self._attributes_mode

    @property
    def sdk_cache(self):
        if self._sdk_cache is None:
            self._sdk_cache = SDKCache(cookie=self.cookie_io)
        assert isinstance(self._sdk_cache, SDKCache)
        return self._sdk_cache

    @property
    def callbacks(self):
        if self._callbacks is None:
            self._callbacks = Callbacks()
        assert isinstance(self._callbacks, Callbacks)
        return self._callbacks

    def add_callback(self, event, func):
        """
        function to add a callback to the client
        :param event: dl.CallbackEvent enum, name of the callback
        :param func: function to call with 2 arguments: progress and context
        """
        self.callbacks.add(event, func)

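A small standalone sketch of the event/callback registry that add_callback feeds; the class and event names here are illustrative, not the SDK's actual Callbacks implementation:

from collections import defaultdict

class Callbacks:
    def __init__(self):
        self._registry = defaultdict(list)

    def add(self, event, func):
        self._registry[event].append(func)

    def trigger(self, event, progress=None, context=None):
        # call every function registered for this event with (progress, context)
        for func in self._registry[event]:
            func(progress, context)

cbs = Callbacks()
cbs.add('itemUploaded', lambda progress, context: print('done:', progress))
cbs.trigger('itemUploaded', progress=100)
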
    @property
    def token(self):
        _token = self._token
        if _token is None:
            environments = self.environments
            if self.environment in environments:
                if 'token' in environments[self.environment]:
                    _token = environments[self.environment]['token']
        return _token

    @token.setter
    def token(self, token):
        # set to variable
        self._token = token
        self.refresh_token = None
        # set to cookie file
        environments = self.environments
        if self.environment in environments:
            environments[self.environment]['token'] = token
        else:
            environments[self.environment] = {'token': token}
        self.environments = environments

    @property
    def refresh_token(self):
        environments = self.environments
        refresh_token = None
        if self.environment in environments:
            if 'refresh_token' in environments[self.environment]:
                refresh_token = environments[self.environment]['refresh_token']
        return refresh_token

    @refresh_token.setter
    def refresh_token(self, token):
        environments = self.environments
        if self.environment in environments:
            environments[self.environment]['refresh_token'] = token
        else:
            environments[self.environment] = {'refresh_token': token}
        self.refresh_token_active = True
        self.environments = environments

    def add_environment(self, environment,
                        audience=None,
                        client_id=None,
                        auth0_url=None,
                        verify_ssl=True,
                        token=None,
                        refresh_token=None,
                        alias=None,
                        use_ssl_context=False,
                        gate_url=None,
                        url=None,
                        login_domain=None
                        ):
        environments = self.environments
        if environment in environments:
            logger.warning('Environment exists. Overwriting. env: {}'.format(environment))
        environments[environment] = {'audience': audience,
                                     'client_id': client_id,
                                     'auth0_url': auth0_url,
                                     'alias': alias,
                                     'token': token,
                                     'gate_url': gate_url,
                                     'refresh_token': refresh_token,
                                     'verify_ssl': verify_ssl,
                                     'use_ssl_context': use_ssl_context,
                                     'url': url,
                                     'login_domain': login_domain}
        self.environments = environments

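Hypothetical usage of add_environment for a self-hosted gateway; the URL and alias below are made up for illustration, and the snippet assumes the module-level client_api instance that this file creates at the bottom:

import dtlpy as dl

dl.client_api.add_environment(environment='https://mygate.example.com/api/v1',
                              alias='onprem',
                              verify_ssl=False)
dl.setenv('onprem')  # switch to it by alias
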
    def info(self, with_token=True):
        """
        Return a dictionary with current information: env, user, token
        :param with_token:
        :return:
        """
        user_email = 'null'
        if self.token is not None:
            payload = jwt.decode(self.token, algorithms=['HS256'],
                                 verify=False, options={'verify_signature': False})
            user_email = payload['email']
        information = {'environment': self.environment,
                       'user_email': user_email}
        if with_token:
            information['token'] = self.token
        return information

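info() recovers the user email by decoding the JWT payload without verifying the signature. The same trick with PyJWT, minting a token first so the snippet is self-contained (the claims and key are made up):

import jwt  # PyJWT

token = jwt.encode({'email': 'user@example.com', 'exp': 2000000000},
                   key='secret', algorithm='HS256')
# decode the payload only; no signature verification, exactly as above
payload = jwt.decode(token, options={'verify_signature': False})
print(payload['email'])
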
    @property
    def base_gate_url(self):
        if self.__gate_url_for_requests is None:
            self.__gate_url_for_requests = self.environment
            internal_requests_url = os.environ.get('INTERNAL_REQUESTS_URL', None)
            if internal_requests_url is not None:
                self.__gate_url_for_requests = internal_requests_url
        return self.__gate_url_for_requests

    def export_curl_request(self, req_type, path, headers=None, json_req=None, files=None, data=None):
        curl, prepared = self._build_gen_request(req_type=req_type,
                                                 path=path,
                                                 headers=headers,
                                                 json_req=json_req,
                                                 files=files,
                                                 data=data)
        return curl

    def _build_gen_request(self, req_type, path, headers, json_req, files, data):
        req_type = req_type.upper()
        valid_request_type = ['GET', 'DELETE', 'POST', 'PUT', 'PATCH']
        assert req_type in valid_request_type, '[ERROR] type: %s NOT in valid requests' % req_type

        # prepare request
        req = requests.Request(method=req_type,
                               url=self.base_gate_url + path,
                               json=json_req,
                               files=files,
                               data=data,
                               headers=self._build_request_headers(headers=headers))
        # prepare to send
        prepared = req.prepare()
        # save curl for debug
        command = "curl -X {method} -H {headers} -d '{data}' '{uri}'"
        method = prepared.method
        uri = prepared.url
        data = prepared.body
        headers = ['"{0}: {1}"'.format(k, v) for k, v in prepared.headers.items()]
        headers = " -H ".join(headers)
        curl = command.format(method=method, headers=headers, data=data, uri=uri)
        return curl, prepared

    def _convert_json_to_response(self, response_json):
        the_response = Response()
        the_response._content = json.dumps(response_json).encode('utf-8')
        return the_response

    def _cache_on(self, request):
        if self.cache is not None and self.sdk_cache.use_cache:
            pure_request = request.split('?')[0]
            valid_req = ['annotation', 'item', 'dataset', 'project', 'task', 'assignment']
            for req_type in valid_req:
                if req_type in pure_request:
                    return True
        return False

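The curl string produced by _build_gen_request is handy for reproducing a failing call outside the SDK. A standalone version of the same trick with plain requests; the URL and header are placeholders:

import requests

req = requests.Request(method='GET',
                       url='https://httpbin.org/get',
                       headers={'authorization': 'Bearer <token>'})
prepared = req.prepare()
# render the prepared request as a copy-pasteable curl command
headers = ' -H '.join('"{0}: {1}"'.format(k, v) for k, v in prepared.headers.items())
curl = "curl -X {method} -H {headers} -d '{data}' '{uri}'".format(
    method=prepared.method, headers=headers, data=prepared.body, uri=prepared.url)
print(curl)
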
    @Decorators.token_expired_decorator
    def gen_request(self, req_type, path, data=None, json_req=None, files=None, stream=False, headers=None,
                    log_error=True, dataset_id=None, **kwargs):
        """
        Generic request from platform
        :param req_type: type of the request: GET, POST etc
        :param path: url (without host header - take from environment)
        :param data: data to pass to request
        :param json_req: json to pass to request
        :param files: files to pass to request
        :param stream: stream to pass the request
        :param headers: headers to pass to request. auth will be added to it
        :param log_error: if true - print the error log of the request
        :param dataset_id: dataset id, needed when stream is True
        :param kwargs: kwargs
        :return:
        """
        success, resp, cache_values = False, None, []
        if self.cache is None and 'sdk' not in path:
            self.build_cache()
        if req_type.lower() not in ['patch', 'put', 'post', 'delete'] and self._cache_on(request=path):
            try:
                if stream:
                    if dataset_id is None:
                        raise ValueError("must provide a dataset id")
                    success, cache_values = self.cache.read_stream(request_path=path, dataset_id=dataset_id)
                else:
                    success, cache_values = self.cache.read(request_path=path)
                if success:
                    resp = self._convert_json_to_response(cache_values)
            except Exception as e:
                logger.warning("Cache error {}".format(e))
                success, resp = False, None

        if not success and not resp:
            success, resp = self._gen_request(req_type=req_type,
                                              path=path,
                                              data=data,
                                              json_req=json_req,
                                              files=files,
                                              stream=stream,
                                              headers=headers,
                                              log_error=log_error)

        if success and self._cache_on(request=path):
            try:
                if stream:
                    res = self.cache.write_stream(request_path=path,
                                                  response=resp,
                                                  dataset_id=dataset_id)
                    if res != '':
                        resp = self._convert_json_to_response(res)
                else:
                    if req_type == 'delete':
                        self.cache.invalidate(path=path)
                    else:
                        try:
                            resp_list = resp.json()
                            write = True
                            if isinstance(resp_list, list):
                                pass
                            elif isinstance(resp_list, dict):
                                if 'hasNextPage' in resp_list:
                                    resp_list = resp_list['items']
                                elif 'id' in resp_list:
                                    resp_list = [resp_list]
                                else:
                                    write = False
                            else:
                                raise exceptions.PlatformException(error='400', message="unsupported return type")
                            if write:
                                self.cache.write(list_entities_json=resp_list)
                        except Exception:
                            raise exceptions.PlatformException(error='400', message="failed to set cache")
            except Exception as e:
                logger.warning("Cache error {}".format(e))
                self.cache = None
        # only for projects events
        # if success:
        #     if 'stack' in kwargs:
        #         self.event_tracker.put(event=kwargs.get('stack'), resp=resp, path=path)
        return success, resp

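Hypothetical direct use of gen_request; the SDK's repositories call it internally, and '/projects' is assumed here to be a valid platform path:

import dtlpy as dl

success, response = dl.client_api.gen_request(req_type='get', path='/projects')
if success:
    print(response.json())
else:
    print(response.text)
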
    def _gen_request(self, req_type, path, data=None, json_req=None, files=None, stream=False, headers=None,
                     log_error=True):
        """
        Generic request from platform
        :param req_type: type of the request: GET, POST etc
        :param path: url (without host header - take from environment)
        :param data: data to pass to request
        :param json_req: json to pass to request
        :param files: files to pass to request
        :param stream: stream to pass the request
        :param headers: headers to pass to request. auth will be added to it
        :param log_error: if true - print the error log of the request
        :return:
        """
        curl, prepared = self._build_gen_request(req_type=req_type,
                                                 path=path,
                                                 headers=headers,
                                                 json_req=json_req,
                                                 files=files,
                                                 data=data)
        self.last_curl = curl
        self.last_request = prepared
        # send request
        try:
            resp = self.send_session(prepared=prepared, stream=stream)
        except Exception:
            logger.error(self.print_request(req=prepared, to_return=True))
            raise
        self.last_response = resp
        # handle output
        if not resp.ok:
            self.print_bad_response(resp, log_error=log_error and not self.is_cli)
            return_type = False
        else:
            try:
                # print only what is printable (don't print a get-stream response, etc.)
                if not stream:
                    self.print_response(resp)
            except ValueError:
                # no JSON returned
                pass
            return_type = True
        return return_type, resp

    @Decorators.token_expired_decorator
    async def gen_async_request(self,
                                req_type,
                                path,
                                data=None,
                                json_req=None,
                                files=None,
                                stream=None,
                                headers=None,
                                log_error=True,
                                filepath=None,
                                chunk_size=8192,
                                pbar=None,
                                is_dataloop=True,
                                **kwargs):
        req_type = req_type.upper()
        valid_request_type = ['GET', 'DELETE', 'POST', 'PUT', 'PATCH']
        assert req_type in valid_request_type, '[ERROR] type: %s NOT in valid requests' % req_type

        # prepare request
        if is_dataloop:
            full_url = self.base_gate_url + path
            headers_req = self._build_request_headers(headers=headers)
        else:
            full_url = path
            headers = dict()
            headers_req = headers

        if headers is not None:
            if not isinstance(headers, dict):
                raise exceptions.PlatformException(error='400', message="Input 'headers' must be a dictionary")
            for k, v in headers.items():
                headers_req[k] = v
        req = requests.Request(method=req_type,
                               url=full_url,
                               json=json_req,
                               files=files,
                               data=data,
                               headers=headers_req)
        # prepare to send
        prepared = req.prepare()
        # save curl for debug
        command = "curl -X {method} -H {headers} -d '{data}' '{uri}'"
        headers = ['"{0}: {1}"'.format(k, v) for k, v in prepared.headers.items()]
        headers = " -H ".join(headers)
        curl = command.format(method=prepared.method,
                              headers=headers,
                              data=prepared.body,
                              uri=prepared.url)
        self.last_curl = curl
        self.last_request = prepared
        # send request
        try:
            timeout = aiohttp.ClientTimeout(total=0)
            async with RetryClient(headers=headers_req,
                                   timeout=timeout) as session:
                try:
                    async with session._request(request=session._client.request,
                                                url=full_url,
                                                method=req_type,
                                                json=json_req,
                                                data=data,
                                                headers=headers_req,
                                                chunked=stream,
                                                retry_attempts=5,
                                                ssl=self.verify,
                                                retry_exceptions={aiohttp.client_exceptions.ClientOSError,
                                                                  aiohttp.client_exceptions.ServerDisconnectedError,
                                                                  aiohttp.client_exceptions.ClientPayloadError},
                                                raise_for_status=False) as request:
                        if stream:
                            pbar = self.__get_pbar(pbar=pbar,
                                                   total_length=request.headers.get("content-length"))
                            if filepath is not None:
                                to_close = False
                                if isinstance(filepath, str):
                                    to_close = True
                                    buffer = open(filepath, 'wb')
                                elif isinstance(filepath, io.BytesIO):
                                    buffer = filepath
                                else:
                                    raise ValueError('unknown data type to write file: {}'.format(type(filepath)))
                                try:
                                    while True:
                                        chunk = await request.content.read(chunk_size)
                                        await asyncio.sleep(0)
                                        if not chunk:
                                            break
                                        buffer.write(chunk)
                                        if pbar is not None:
                                            pbar.update(len(chunk))
                                finally:
                                    if to_close:
                                        buffer.close()

                            if pbar is not None:
                                pbar.close()
                        text = await request.text()
                        try:
                            _json = await request.json()
                        except Exception:
                            _json = dict()
                        response = AsyncResponse(text=text,
                                                 _json=_json,
                                                 async_resp=request)
                except Exception as err:
                    response = AsyncResponseError(error=err, trace=traceback.format_exc())
                finally:
                    with threadLock:
                        self.calls_counter.add()
        except Exception:
            logger.error(self.print_request(req=prepared, to_return=True))
            raise
        self.last_response = response
        # handle output
        if not response.ok:
            self.print_bad_response(response, log_error=log_error and not self.is_cli)
            return_type = False
        else:
            try:
                # print only what is printable (don't print a get-stream response, etc.)
                if not stream:
                    self.print_response(response)
            except ValueError:
                # no JSON returned
                pass
            return_type = True
        return return_type, response

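A minimal sketch of the chunked async download loop above, reduced to plain aiohttp against a public test URL (no Dataloop gateway, retries, or progress bar):

import asyncio
import aiohttp

async def download(url, filepath, chunk_size=8192):
    timeout = aiohttp.ClientTimeout(total=0)  # no overall timeout, as above
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(url) as resp:
            with open(filepath, 'wb') as buffer:
                while True:
                    chunk = await resp.content.read(chunk_size)
                    if not chunk:
                        break
                    buffer.write(chunk)

asyncio.run(download('https://httpbin.org/bytes/100000', 'downloaded.bin'))
print('done')
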
    @Decorators.token_expired_decorator
    async def upload_file_async(self,
                                to_upload,
                                item_type,
                                item_size,
                                remote_url,
                                uploaded_filename,
                                remote_path=None,
                                callback=None,
                                mode='skip',
                                item_metadata=None,
                                headers=None,
                                item_description=None,
                                **kwargs):
        headers = self._build_request_headers(headers=headers)
        pbar = None
        if callback is None:
            if item_size > 10e6:
                # size larger than 10MB
                pbar = tqdm.tqdm(total=item_size,
                                 unit="B",
                                 unit_scale=True,
                                 unit_divisor=1024,
                                 position=1,
                                 file=sys.stdout,
                                 disable=self.verbose.disable_progress_bar_upload_items,
                                 desc='Upload Items')

                def callback(bytes_read):
                    pbar.update(bytes_read)
            else:
                def callback(bytes_read):
                    pass

        timeout = aiohttp.ClientTimeout(total=self.upload_session_timeout)
        async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
            try:
                form = aiohttp.FormData({})
                form.add_field('type', item_type)
                form.add_field('path', os.path.join(remote_path, uploaded_filename).replace('\\', '/'))
                if item_metadata is not None:
                    form.add_field('metadata', json.dumps(item_metadata))
                if item_description is not None:
                    form.add_field('description', item_description)
                form.add_field('file', AsyncUploadStream(buffer=to_upload,
                                                         callback=callback,
                                                         name=uploaded_filename,
                                                         chunk_timeout=self.upload_chunk_timeout))
                url = '{}?mode={}'.format(self.base_gate_url + remote_url, mode)

                # use SSL context
                ssl_context = None
                if self.use_ssl_context:
                    ssl_context = ssl.create_default_context(cafile=certifi.where())
                async with session.post(url,
                                        data=form,
                                        verify_ssl=self.verify,
                                        ssl=ssl_context) as resp:
                    self.last_request = resp.request_info
                    command = "curl -X {method} -H {headers} -d '{uri}'"
                    headers = ['"{0}: {1}"'.format(k, v) for k, v in resp.request_info.headers.items()]
                    headers = " -H ".join(headers)
                    self.last_curl = command.format(method=resp.request_info.method,
                                                    headers=headers,
                                                    uri=resp.request_info.url)
                    text = await resp.text()
                    try:
                        _json = await resp.json()
                    except Exception:
                        _json = dict()
                    response = AsyncResponse(text=text,
                                             _json=_json,
                                             async_resp=resp)
            except Exception as err:
                response = AsyncResponseError(error=err, trace=traceback.format_exc())
            finally:
                if pbar is not None:
                    pbar.close()
                with threadLock:
                    self.calls_counter.add()
        if response.ok and self.cache is not None:
            try:
                self.cache.write(list_entities_json=[response.json()])
                dataset_id = url.split('/')[-2]
                self.cache.write_stream(request_path=url,
                                        buffer=to_upload,
                                        file_name=uploaded_filename,
                                        entity_id=response.json()['id'],
                                        dataset_id=dataset_id)
            except Exception:
                logger.warning("Failed to add the file to the cache")
        return response

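A minimal sketch of the multipart upload that upload_file_async performs, with aiohttp.FormData posting to httpbin as a stand-in for the Dataloop gateway; the metadata payload is illustrative:

import asyncio
import json
import aiohttp

async def upload(buffer, filename):
    form = aiohttp.FormData()
    form.add_field('type', 'file')
    form.add_field('metadata', json.dumps({'user': {'origin': 'sdk'}}))  # made-up metadata
    form.add_field('file', buffer, filename=filename)
    async with aiohttp.ClientSession() as session:
        async with session.post('https://httpbin.org/post', data=form) as resp:
            return await resp.json()

echoed = asyncio.run(upload(b'hello', 'hello.txt'))
print(echoed['files'])  # httpbin echoes the uploaded file back
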
    def __get_pbar(self, pbar, total_length):
        # decide if create progress bar for item
        if pbar:
            try:
                if total_length is not None and int(total_length) > 10e6:  # size larger than 10 MB
                    pbar = tqdm.tqdm(total=int(total_length),
                                     unit='B',
                                     unit_scale=True,
                                     unit_divisor=1024,
                                     position=1,
                                     file=sys.stdout,
                                     disable=self.verbose.disable_progress_bar)
                else:
                    pbar = None
            except Exception as err:
                pbar = None
                logger.debug('Cannot determine downloaded file length, bar will not be presented: {}'.format(err))
        return pbar

    def send_session(self, prepared, stream=None):
        if self.session is None:
            self.session = requests.Session()
            retry = Retry(
                total=5,
                read=5,
                connect=5,
                backoff_factor=1,
                # use on any request type
                allowed_methods=False,
                # force retry on those status responses
                status_forcelist=(501, 502, 503, 504, 505, 506, 507, 508, 510, 511),
                raise_on_status=False
            )
            adapter = HTTPAdapter(max_retries=retry,
                                  pool_maxsize=np.sum(list(self._thread_pools_names.values())),
                                  pool_connections=np.sum(list(self._thread_pools_names.values())))
            self.session.mount('http://', adapter)
            self.session.mount('https://', adapter)
        resp = self.session.send(request=prepared, stream=stream, verify=self.verify, timeout=120)

        with threadLock:
            self.calls_counter.add()

        return resp

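A standalone equivalent of the retrying session built above. `allowed_methods=False` tells urllib3 to retry on any verb, including non-idempotent POSTs; the pool sizes here are arbitrary stand-ins for the thread-pool totals used by the SDK:

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retry = Retry(total=5, read=5, connect=5, backoff_factor=1,
              allowed_methods=False,  # retry every HTTP method
              status_forcelist=(501, 502, 503, 504, 505, 506, 507, 508, 510, 511),
              raise_on_status=False)
adapter = HTTPAdapter(max_retries=retry, pool_maxsize=32, pool_connections=32)
session = requests.Session()
session.mount('http://', adapter)
session.mount('https://', adapter)
print(session.get('https://httpbin.org/status/200', timeout=120).status_code)
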
    @staticmethod
    def check_proxy():
        """
        Verify that dataloop urls are not blocked
        :return:
        """
        proxy_envs = ['HTTP', 'HTTPS', 'http', 'https']
        dataloop_urls = ['dev-gate.dataloop.ai',
                         'gate.dataloop.ai',
                         'dataloop-development.auth0.com',
                         'dataloop-production.auth0.com']
        if any(env in os.environ for env in proxy_envs):
            # a proxy is configured - the dataloop urls must be excluded from it
            no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY'))
            if no_proxy is None or not any(url in no_proxy for url in dataloop_urls):
                # no dataloop url exists in no_proxy
                logger.warning('Proxy is used, make sure dataloop urls are in "no_proxy" environment variable')

    def token_expired(self, t=60):
        """
        Check token validation
        :param t: time ahead interval in seconds
        """
        try:
            if self.token is None or self.token == '':
                expired = True
            else:
                payload = jwt.decode(self.token, algorithms=['HS256'],
                                     options={'verify_signature': False}, verify=False)
                d = datetime.datetime.now(datetime.timezone.utc)
                epoch = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
                now = (d - epoch).total_seconds()
                exp = payload['exp']
                if now < (exp - t):
                    expired = False
                else:
                    expired = True
        except jwt.exceptions.DecodeError:
            logger.exception('Invalid token.')
            expired = True
        except Exception:
            logger.exception('Unknown error:')
            expired = True
        if expired:
            if self.renew_token_method():
                expired = False
        return expired

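The expiry test reduces to comparing the token's "exp" claim against the current epoch time with a safety margin of t seconds. A self-contained sketch with PyJWT:

import time
import jwt  # PyJWT

def token_expired(token, t=60):
    # decode the payload only, then compare "exp" with an early-renewal margin
    payload = jwt.decode(token, options={'verify_signature': False})
    return time.time() >= (payload['exp'] - t)

tok = jwt.encode({'exp': int(time.time()) + 3600}, key='secret', algorithm='HS256')
print(token_expired(tok))  # False - still valid for about an hour
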
    @staticmethod
    def is_json_serializable(response):
        try:
            response_json = response.json()
            return True, response_json
        except ValueError:
            return False, None

    ##########
    # STDOUT #
    ##########
    def print_response(self, resp=None):
        """
        Print tabulate response
        :param resp: response from requests
        :return:
        """
        try:
            if resp is None:
                resp = self.last_response
            is_json_serializable, results = self.is_json_serializable(response=resp)
            if self.verbose.print_all_responses and is_json_serializable:
                if isinstance(results, dict):
                    to_print = miscellaneous.List([results])
                elif isinstance(results, list):
                    to_print = miscellaneous.List(results)
                else:
                    logger.debug('Unknown response type: {}. cannot print'.format(type(results)))
                    return
                request_id = resp.headers.get('x-request-id', 'na')
                logger.debug('--- [Request] Start ---')
                logger.debug(self.print_request(req=resp.request, to_return=True))
                logger.debug('--- [Request] End ---')
                logger.debug('--- [Response][x-request-id:{}] Start ---'.format(request_id))
                to_print.print(show_all=False, level='debug')
                logger.debug('--- [Response][x-request-id:{}] End ---'.format(request_id))
        except Exception:
            logger.exception('Printing response from gate:')

    def print_bad_response(self, resp=None, log_error=True):
        """
        Print error from platform
        :param resp:
        :param log_error: print error log (to use when trying request more than once)
        :return:
        """
        if resp is None:
            resp = self.last_response
        msg = ''
        if hasattr(resp, 'status_code'):
            msg += '[Response <{val}>]'.format(val=resp.status_code)
        if hasattr(resp, 'reason'):
            msg += '[Reason: {val}]'.format(val=resp.reason)
        if hasattr(resp, 'text') and isinstance(resp.text, str):
            msg += '[Text: {val}]'.format(val=format_message(resp.text))

        request_id = resp.headers.get('x-request-id', 'na')
        logger.debug('--- [Request] Start ---')
        logger.debug(self.print_request(req=resp.request, to_return=True))
        logger.debug('--- [Request] End ---')
        logger.debug('--- [Response][x-request-id:{}] Start ---'.format(request_id))
        if log_error:
            logger.error(msg)
        else:
            logger.debug(msg)
        logger.debug('--- [Response][x-request-id:{}] End ---'.format(request_id))
        self.platform_exception = PlatformError(resp)

    def print_request(self, req=None, to_return=False, with_auth=False):
        """
        Print a request to the platform
        :param req:
        :param to_return: return string instead of printing
        :param with_auth: print authentication
        :return:
        """
        if not req:
            req = self.last_request

        headers = list()
        for k, v in req.headers.items():
            if k == 'authorization' and not with_auth:
                continue
            headers.append('{}: {}'.format(k, v))
        if hasattr(req, 'body'):
            body = req.body
        elif isinstance(req, aiohttp.RequestInfo):
            body = {'multipart': 'true'}
        else:
            body = dict()

        # remove secrets and passwords
        try:
            body = json.loads(body)
            if isinstance(body, dict):
                for key, value in body.items():
                    hide = any([field in key for field in ['secret', 'password']])
                    if hide:
                        body[key] = '*' * len(value)
        except Exception:
            pass

        msg = '{}\n{}\n{}'.format(
            req.method + ' ' + str(req.url),
            '\n'.join(headers),
            body,
        )
        if to_return:
            return msg
        else:
            print(msg)

    ################
    # Environments #
    ################
    def setenv(self, env):
        """
        Set environment
        :param env:
        :return:
        """

        environments = self.environments
        if env.startswith('http'):
            if env not in environments.keys():
                msg = 'Unknown environment. Please add environment to SDK ("add_environment" method)'
                logger.error(msg)
                raise ConnectionError(msg)
        elif env == 'custom':
            custom_env = os.environ.get('DTLPY_CUSTOM_ENV', None)
            environment = json.loads(base64.b64decode(custom_env.encode()).decode())
            env = environment.pop('url')
            token = None
            if self.environments.get(env):
                token = self.environments[env].get('token', None)
            self.environments[env] = environment.get(env, environment)
            self.environments[env]['token'] = token
            verify_ssl = self.environments[env].get('verify_ssl', None)
            if verify_ssl is not None and isinstance(verify_ssl, str):
                self.environments[env]['verify_ssl'] = True if verify_ssl.lower() == 'true' else False
        else:
            matched_env = [env_url for env_url, env_dict in environments.items() if env_dict['alias'] == env]
            if len(matched_env) != 1:
                known_aliases = [env_dict['alias'] for env_url, env_dict in environments.items()]
                raise ConnectionError(
                    'Unknown platform environment: "{}". Known: {}'.format(env, ', '.join(known_aliases)))
            env = matched_env[0]
        if self.environment != env:
            self.environment = env
            self.__gate_url_for_requests = None
            # reset local token
            self._token = None
            self.refresh_token_active = True
        logger.info('Platform environment: {}'.format(self.environment))
        if self.token_expired():
            logger.info('Token expired, Please login.')

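Hypothetical environment switch through setenv; 'rc' resolves via the alias lookup above, assuming that alias exists in the local login-parameters cookie:

import dtlpy as dl

dl.setenv('rc')  # switch by alias
# dl.setenv('https://mygate.example.com/api/v1')  # or by URL, once added via add_environment
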
    ##########
    # Log in #
    ##########
    def login_secret(self, email, password, client_id, client_secret=None, force=False):
        """
        Login with email and password from environment variables.
        If already logged in with the same user - login will NOT happen. see "force"

        :param email: user email.
        :param password: user password
        :param client_id: auth0 client id
        :param client_secret: secret that matches the client id
        :param force: force login. in case login with same user but want to get a new JWT
        :return:
        """
        logger.warning('dl.login_secret is deprecated. Please use dl.login_m2m instead.')
        return login_secret(api_client=self,
                            email=email,
                            password=password,
                            client_id=client_id,
                            client_secret=client_secret,
                            force=force)

    def login_m2m(self, email, password, client_id=None, client_secret=None, force=False):
        """
        Login with email and password from environment variables
        :param email: user email. if already logged in with same user - login will NOT happen. see "force"
        :param password: user password
        :param client_id:
        :param client_secret:
        :param force: force login. in case login with same user but want to get a new JWT
        :return:
        """
        res = login_m2m(api_client=self,
                        email=email,
                        password=password,
                        client_id=client_id,
                        client_secret=client_secret,
                        force=force)
        # if res:
        #     self._send_login_event(user_type='human', login_type='m2m')
        return res

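Hypothetical machine-to-machine login; the environment-variable names below are illustrative, following the docstring's suggestion to keep credentials out of code:

import os
import dtlpy as dl

dl.login_m2m(email=os.environ['DL_EMAIL'], password=os.environ['DL_PASSWORD'])
print(dl.client_api.info(with_token=False))
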
    def login_token(self, token):
        """
        Login using an existing token
        :param token: a valid token
        :return:
        """
        current_token = self.token
        self.token = token
        success, response = self.gen_request(req_type='get', path='/users/me')
        if not response.ok:
            # switch back to before
            self.token = current_token
            raise ValueError(f"Invalid token provided. Error: {response.text}")

    def login_api_key(self, api_key):
        """
        Login using API key
        :param api_key: a valid API key
        :return:
        """
        current_token = self.token
        self.token = api_key
        success, response = self.gen_request(req_type='get', path='/users/me')
        if not response.ok:
            # switch back to before
            self.token = current_token
            raise ValueError(f"Invalid API key provided. Error: {response.text}")

    @property
    def login_domain(self):
        if self._login_domain is None:
            self._login_domain = self.environments[self.environment].get('login_domain', None)
        return self._login_domain

    @login_domain.setter
    def login_domain(self, domain: str):
        if domain is not None and not isinstance(domain, str):
            raise exceptions.PlatformException('400', 'domain should be a string value')
        self._login_domain = domain
        self.environments[self.environment]['login_domain'] = domain
        self.cookie_io.put('login_parameters', self.environments)

    def login(self, audience=None, auth0_url=None, client_id=None, callback_port=None):
        """
        Login using Auth0.
        :return:
        """
        res = login(
            api_client=self,
            audience=audience,
            auth0_url=auth0_url,
            client_id=client_id,
            login_domain=self.login_domain,
            callback_port=callback_port
        )
        # if res:
        #     self._send_login_event(user_type='human', login_type='interactive')
        return res

    # def _send_login_event(self, user_type, login_type):
    #     event_payload = {
    #         'event': 'dtlpy:login',
    #         'properties': {
    #             'login_type': login_type,
    #             'user_type': user_type
    #         }
    #     }
    #     self.event_tracker.put(event=event_payload)

    def logout(self):
        """
        Logout.
        :return:
        """
        return logout(api_client=self)

    def _renew_token_in_dual_agent(self):
        renewed = False
        try:
            proxy_port = os.environ.get('AGENT_PROXY_MAIN_PORT') or "1001"
            resp = requests.get('http://localhost:{port}/get_jwt'.format(port=proxy_port))
            if resp.ok:
                self.token = resp.json()['jwt']
                renewed = True
            else:
                self.print_bad_response(resp)
        except Exception:
            logger.exception('Failed to get token from proxy')

        return renewed

    def renew_token(self):
        refresh_method = os.environ.get('DTLPY_REFRESH_TOKEN_METHOD', None)
        if refresh_method is not None and refresh_method == 'proxy':
            res = self._renew_token_in_dual_agent()
        else:
            res = self._renew_token_with_refresh_token()
        # if res:
        #     self._send_login_event(user_type='human', login_type='refresh')
        return res

    def generate_api_key(self, description: str = None, login: bool = False):
        """
        Generate an API key for a user
        :param description: description for the API key
        :param login: if True, login with the new API key
        :return: User token
        """
        user_email = self.info()['user_email']
        payload = {
            'userId': user_email
        }
        if description:
            if not isinstance(description, str):
                raise ValueError('description should be a string')
            payload['description'] = description
        success, response = self.gen_request(req_type='post', path='/apiKeys', json_req=payload)
        if not success:
            raise exceptions.PlatformException(response)
        if login:
            self.login_api_key(response.json()['jwt'])
            return True

        return response.json()['jwt']

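Hypothetical use of the new generate_api_key flow for a headless job: mint a key for the logged-in user, then reconnect with it. The description text and the client_api attribute access are assumptions for illustration:

import dtlpy as dl

api_key = dl.client_api.generate_api_key(description='ci-runner key')
dl.client_api.login_api_key(api_key)
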
    def _renew_token_with_refresh_token(self):
        renewed = False
        if self.refresh_token_active is False:
            return renewed
        logger.debug('RefreshToken: Started')
        if self.token is None or self.token == '':
            # token is missing
            logger.debug('RefreshToken: Missing token.')
            self.refresh_token_active = False
        if self.refresh_token is None or self.refresh_token == '':
            # missing refresh token
            logger.debug('RefreshToken: Missing "refresh_token"')
            self.refresh_token_active = False
        if self.environment not in self.environments.keys():
            # env params missing
            logger.debug('RefreshToken: Missing environments params for refreshing token')
            self.refresh_token_active = False

        if self.refresh_token_active is False:
            return renewed

        refresh_token = self.refresh_token

        env_params = self.environments[self.environment]
        if 'gate_url' not in env_params:
            env_params['gate_url'] = gate_url_from_host(environment=self.environment)
            self.environments[self.environment] = env_params
        token_endpoint = "{}/token?default".format(env_params['gate_url'])

        payload = {
            'type': 'refresh_token',
            'refresh_token': refresh_token
        }
        logger.debug("RefreshToken: Refreshing token via {}".format(token_endpoint))
        resp = requests.request(
            "POST",
            token_endpoint,
            json=payload,
            headers={'content-type': 'application/json'},
            verify=self.verify
        )
        if not resp.ok:
            logger.debug('RefreshToken: Failed')
            self.print_bad_response(resp)
        else:
            response_dict = resp.json()
            # get new token
            final_token = response_dict['id_token']
            self.token = final_token
            self.refresh_token = refresh_token
            # set status back to pending
            logger.debug('RefreshToken: Success')
            renewed = True
        return renewed

    def set_api_counter(self, filepath):
        self.calls_counter = CallsCounter(filepath=filepath)

    def _get_resource_url(self, url):

        env = self._environments[self._environment]['alias']
        head = self._environments[self._environment].get('url', None)
        # TODO need to deprecate somehow (the following)
        if head is None:
            if env == 'prod':
                head = 'https://console.dataloop.ai/'
            elif env == 'dev':
                head = 'https://dev-con.dataloop.ai/'
            elif env == 'rc':
                head = 'https://rc-con.dataloop.ai/'
            elif env in ['local', 'minikube_local_mac']:
                head = 'https://localhost:8443/'
            elif env == 'new-dev':
                head = 'https://custom1-gate.dataloop.ai/'
            else:
                raise exceptions.PlatformException(error='400', message='Unknown environment: {}'.format(env))

        return head + url

    def _open_in_web(self, url):
        import webbrowser
        webbrowser.open(url=url, new=2, autoraise=True)


client = ApiClient()