dtlpy 1.115.44-py3-none-any.whl → 1.116.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +491 -491
- dtlpy/__version__.py +1 -1
- dtlpy/assets/__init__.py +26 -26
- dtlpy/assets/code_server/config.yaml +2 -2
- dtlpy/assets/code_server/installation.sh +24 -24
- dtlpy/assets/code_server/launch.json +13 -13
- dtlpy/assets/code_server/settings.json +2 -2
- dtlpy/assets/main.py +53 -53
- dtlpy/assets/main_partial.py +18 -18
- dtlpy/assets/mock.json +11 -11
- dtlpy/assets/model_adapter.py +83 -83
- dtlpy/assets/package.json +61 -61
- dtlpy/assets/package_catalog.json +29 -29
- dtlpy/assets/package_gitignore +307 -307
- dtlpy/assets/service_runners/__init__.py +33 -33
- dtlpy/assets/service_runners/converter.py +96 -96
- dtlpy/assets/service_runners/multi_method.py +49 -49
- dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
- dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
- dtlpy/assets/service_runners/multi_method_item.py +52 -52
- dtlpy/assets/service_runners/multi_method_json.py +52 -52
- dtlpy/assets/service_runners/single_method.py +37 -37
- dtlpy/assets/service_runners/single_method_annotation.py +43 -43
- dtlpy/assets/service_runners/single_method_dataset.py +43 -43
- dtlpy/assets/service_runners/single_method_item.py +41 -41
- dtlpy/assets/service_runners/single_method_json.py +42 -42
- dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
- dtlpy/assets/voc_annotation_template.xml +23 -23
- dtlpy/caches/base_cache.py +32 -32
- dtlpy/caches/cache.py +473 -473
- dtlpy/caches/dl_cache.py +201 -201
- dtlpy/caches/filesystem_cache.py +89 -89
- dtlpy/caches/redis_cache.py +84 -84
- dtlpy/dlp/__init__.py +20 -20
- dtlpy/dlp/cli_utilities.py +367 -367
- dtlpy/dlp/command_executor.py +764 -764
- dtlpy/dlp/dlp +1 -1
- dtlpy/dlp/dlp.bat +1 -1
- dtlpy/dlp/dlp.py +128 -128
- dtlpy/dlp/parser.py +651 -651
- dtlpy/entities/__init__.py +83 -83
- dtlpy/entities/analytic.py +347 -347
- dtlpy/entities/annotation.py +1879 -1879
- dtlpy/entities/annotation_collection.py +699 -699
- dtlpy/entities/annotation_definitions/__init__.py +20 -20
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
- dtlpy/entities/annotation_definitions/box.py +195 -195
- dtlpy/entities/annotation_definitions/classification.py +67 -67
- dtlpy/entities/annotation_definitions/comparison.py +72 -72
- dtlpy/entities/annotation_definitions/cube.py +204 -204
- dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
- dtlpy/entities/annotation_definitions/description.py +32 -32
- dtlpy/entities/annotation_definitions/ellipse.py +124 -124
- dtlpy/entities/annotation_definitions/free_text.py +62 -62
- dtlpy/entities/annotation_definitions/gis.py +69 -69
- dtlpy/entities/annotation_definitions/note.py +139 -139
- dtlpy/entities/annotation_definitions/point.py +117 -117
- dtlpy/entities/annotation_definitions/polygon.py +182 -182
- dtlpy/entities/annotation_definitions/polyline.py +111 -111
- dtlpy/entities/annotation_definitions/pose.py +92 -92
- dtlpy/entities/annotation_definitions/ref_image.py +86 -86
- dtlpy/entities/annotation_definitions/segmentation.py +240 -240
- dtlpy/entities/annotation_definitions/subtitle.py +34 -34
- dtlpy/entities/annotation_definitions/text.py +85 -85
- dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
- dtlpy/entities/app.py +220 -220
- dtlpy/entities/app_module.py +107 -107
- dtlpy/entities/artifact.py +174 -174
- dtlpy/entities/assignment.py +399 -399
- dtlpy/entities/base_entity.py +214 -214
- dtlpy/entities/bot.py +113 -113
- dtlpy/entities/codebase.py +292 -292
- dtlpy/entities/collection.py +38 -38
- dtlpy/entities/command.py +169 -169
- dtlpy/entities/compute.py +449 -449
- dtlpy/entities/dataset.py +1299 -1299
- dtlpy/entities/directory_tree.py +44 -44
- dtlpy/entities/dpk.py +470 -470
- dtlpy/entities/driver.py +235 -235
- dtlpy/entities/execution.py +397 -397
- dtlpy/entities/feature.py +124 -124
- dtlpy/entities/feature_set.py +145 -145
- dtlpy/entities/filters.py +798 -798
- dtlpy/entities/gis_item.py +107 -107
- dtlpy/entities/integration.py +184 -184
- dtlpy/entities/item.py +959 -959
- dtlpy/entities/label.py +123 -123
- dtlpy/entities/links.py +85 -85
- dtlpy/entities/message.py +175 -175
- dtlpy/entities/model.py +684 -684
- dtlpy/entities/node.py +1005 -1005
- dtlpy/entities/ontology.py +810 -803
- dtlpy/entities/organization.py +287 -287
- dtlpy/entities/package.py +657 -657
- dtlpy/entities/package_defaults.py +5 -5
- dtlpy/entities/package_function.py +185 -185
- dtlpy/entities/package_module.py +113 -113
- dtlpy/entities/package_slot.py +118 -118
- dtlpy/entities/paged_entities.py +299 -299
- dtlpy/entities/pipeline.py +624 -624
- dtlpy/entities/pipeline_execution.py +279 -279
- dtlpy/entities/project.py +394 -394
- dtlpy/entities/prompt_item.py +505 -505
- dtlpy/entities/recipe.py +301 -301
- dtlpy/entities/reflect_dict.py +102 -102
- dtlpy/entities/resource_execution.py +138 -138
- dtlpy/entities/service.py +963 -963
- dtlpy/entities/service_driver.py +117 -117
- dtlpy/entities/setting.py +294 -294
- dtlpy/entities/task.py +495 -495
- dtlpy/entities/time_series.py +143 -143
- dtlpy/entities/trigger.py +426 -426
- dtlpy/entities/user.py +118 -118
- dtlpy/entities/webhook.py +124 -124
- dtlpy/examples/__init__.py +19 -19
- dtlpy/examples/add_labels.py +135 -135
- dtlpy/examples/add_metadata_to_item.py +21 -21
- dtlpy/examples/annotate_items_using_model.py +65 -65
- dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
- dtlpy/examples/annotations_convert_to_voc.py +9 -9
- dtlpy/examples/annotations_convert_to_yolo.py +9 -9
- dtlpy/examples/convert_annotation_types.py +51 -51
- dtlpy/examples/converter.py +143 -143
- dtlpy/examples/copy_annotations.py +22 -22
- dtlpy/examples/copy_folder.py +31 -31
- dtlpy/examples/create_annotations.py +51 -51
- dtlpy/examples/create_video_annotations.py +83 -83
- dtlpy/examples/delete_annotations.py +26 -26
- dtlpy/examples/filters.py +113 -113
- dtlpy/examples/move_item.py +23 -23
- dtlpy/examples/play_video_annotation.py +13 -13
- dtlpy/examples/show_item_and_mask.py +53 -53
- dtlpy/examples/triggers.py +49 -49
- dtlpy/examples/upload_batch_of_items.py +20 -20
- dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
- dtlpy/examples/upload_items_with_modalities.py +43 -43
- dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
- dtlpy/examples/upload_yolo_format_annotations.py +70 -70
- dtlpy/exceptions.py +125 -125
- dtlpy/miscellaneous/__init__.py +20 -20
- dtlpy/miscellaneous/dict_differ.py +95 -95
- dtlpy/miscellaneous/git_utils.py +217 -217
- dtlpy/miscellaneous/json_utils.py +14 -14
- dtlpy/miscellaneous/list_print.py +105 -105
- dtlpy/miscellaneous/zipping.py +130 -130
- dtlpy/ml/__init__.py +20 -20
- dtlpy/ml/base_feature_extractor_adapter.py +27 -27
- dtlpy/ml/base_model_adapter.py +1257 -1230
- dtlpy/ml/metrics.py +461 -461
- dtlpy/ml/predictions_utils.py +274 -274
- dtlpy/ml/summary_writer.py +57 -57
- dtlpy/ml/train_utils.py +60 -60
- dtlpy/new_instance.py +252 -252
- dtlpy/repositories/__init__.py +56 -56
- dtlpy/repositories/analytics.py +85 -85
- dtlpy/repositories/annotations.py +916 -916
- dtlpy/repositories/apps.py +383 -383
- dtlpy/repositories/artifacts.py +452 -452
- dtlpy/repositories/assignments.py +599 -599
- dtlpy/repositories/bots.py +213 -213
- dtlpy/repositories/codebases.py +559 -559
- dtlpy/repositories/collections.py +332 -332
- dtlpy/repositories/commands.py +152 -152
- dtlpy/repositories/compositions.py +61 -61
- dtlpy/repositories/computes.py +439 -439
- dtlpy/repositories/datasets.py +1504 -1504
- dtlpy/repositories/downloader.py +976 -923
- dtlpy/repositories/dpks.py +433 -433
- dtlpy/repositories/drivers.py +482 -482
- dtlpy/repositories/executions.py +815 -815
- dtlpy/repositories/feature_sets.py +226 -226
- dtlpy/repositories/features.py +255 -255
- dtlpy/repositories/integrations.py +484 -484
- dtlpy/repositories/items.py +912 -912
- dtlpy/repositories/messages.py +94 -94
- dtlpy/repositories/models.py +1000 -1000
- dtlpy/repositories/nodes.py +80 -80
- dtlpy/repositories/ontologies.py +511 -511
- dtlpy/repositories/organizations.py +525 -525
- dtlpy/repositories/packages.py +1941 -1941
- dtlpy/repositories/pipeline_executions.py +451 -451
- dtlpy/repositories/pipelines.py +640 -640
- dtlpy/repositories/projects.py +539 -539
- dtlpy/repositories/recipes.py +419 -399
- dtlpy/repositories/resource_executions.py +137 -137
- dtlpy/repositories/schema.py +120 -120
- dtlpy/repositories/service_drivers.py +213 -213
- dtlpy/repositories/services.py +1704 -1704
- dtlpy/repositories/settings.py +339 -339
- dtlpy/repositories/tasks.py +1477 -1477
- dtlpy/repositories/times_series.py +278 -278
- dtlpy/repositories/triggers.py +536 -536
- dtlpy/repositories/upload_element.py +257 -257
- dtlpy/repositories/uploader.py +661 -661
- dtlpy/repositories/webhooks.py +249 -249
- dtlpy/services/__init__.py +22 -22
- dtlpy/services/aihttp_retry.py +131 -131
- dtlpy/services/api_client.py +1785 -1785
- dtlpy/services/api_reference.py +40 -40
- dtlpy/services/async_utils.py +133 -133
- dtlpy/services/calls_counter.py +44 -44
- dtlpy/services/check_sdk.py +68 -68
- dtlpy/services/cookie.py +115 -115
- dtlpy/services/create_logger.py +156 -156
- dtlpy/services/events.py +84 -84
- dtlpy/services/logins.py +235 -235
- dtlpy/services/reporter.py +256 -256
- dtlpy/services/service_defaults.py +91 -91
- dtlpy/utilities/__init__.py +20 -20
- dtlpy/utilities/annotations/__init__.py +16 -16
- dtlpy/utilities/annotations/annotation_converters.py +269 -269
- dtlpy/utilities/base_package_runner.py +285 -264
- dtlpy/utilities/converter.py +1650 -1650
- dtlpy/utilities/dataset_generators/__init__.py +1 -1
- dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
- dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
- dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
- dtlpy/utilities/local_development/__init__.py +1 -1
- dtlpy/utilities/local_development/local_session.py +179 -179
- dtlpy/utilities/reports/__init__.py +2 -2
- dtlpy/utilities/reports/figures.py +343 -343
- dtlpy/utilities/reports/report.py +71 -71
- dtlpy/utilities/videos/__init__.py +17 -17
- dtlpy/utilities/videos/video_player.py +598 -598
- dtlpy/utilities/videos/videos.py +470 -470
- {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp +1 -1
- dtlpy-1.116.6.data/scripts/dlp.bat +2 -0
- {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp.py +128 -128
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/METADATA +186 -186
- dtlpy-1.116.6.dist-info/RECORD +239 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/WHEEL +1 -1
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/licenses/LICENSE +200 -200
- tests/features/environment.py +551 -551
- dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
- dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
- dtlpy-1.115.44.dist-info/RECORD +0 -240
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/top_level.txt +0 -0
dtlpy/services/cookie.py
CHANGED
Single hunk `@@ -1,115 +1,115 @@`: all 115 lines are removed and re-added with identical text, so no change is visible at the line level (consistent with a whitespace or line-ending normalization). The file reads:

```python
"""
Dataloop cookie state
"""

import os
import time
import json
import logging
import random
from .service_defaults import DATALOOP_PATH
from filelock import FileLock

logger = logging.getLogger(name='dtlpy')

NUM_TRIES = 3


class CookieIO:
    """
    Cookie interface for Dataloop parameters
    """

    def __init__(self, path, create=True, local=False):
        self.COOKIE = path
        self.local = local
        if create:
            self.create()

    @staticmethod
    def init():
        global_cookie_file = os.path.join(DATALOOP_PATH, 'cookie.json')
        return CookieIO(global_cookie_file)

    @staticmethod
    def init_local_cookie(create=False):
        local_cookie_file = os.path.join(os.getcwd(), '.dataloop', 'state.json')
        return CookieIO(local_cookie_file, create=create, local=True)

    @staticmethod
    def init_package_json_cookie(create=False):
        package_json_file = os.path.join(os.getcwd(), 'package.json')
        return CookieIO(package_json_file, create=create, local=True)

    def create(self):
        # create directory '.dataloop' if not exists
        if not os.path.isdir(os.path.dirname(self.COOKIE)):
            os.makedirs(os.path.dirname(self.COOKIE))

        if not os.path.isfile(self.COOKIE) or os.path.getsize(self.COOKIE) == 0:
            logger.debug('COOKIE.create: File: {}'.format(self.COOKIE))
            self.reset()
        try:
            with FileLock(self.COOKIE + ".lock"):
                with open(self.COOKIE, 'r') as f:
                    json.load(f)
        except ValueError:
            print('FATAL ERROR: COOKIE {!r} is corrupted. please fix or delete the file.'.format(self.COOKIE))
            raise SystemExit

    def read_json(self, create=False):
        # which cookie
        if self.local:
            self.COOKIE = os.path.join(os.getcwd(), '.dataloop', 'state.json')

        # check if file exists - and create
        if not os.path.isfile(self.COOKIE) and create:
            self.create()

        # check if file exists
        if not os.path.isfile(self.COOKIE):
            logger.debug('COOKIE.read: File does not exist: {}. Return None'.format(self.COOKIE))
            cfg = {}
        else:
            # read cookie
            cfg = {}
            for i in range(NUM_TRIES):
                try:
                    with FileLock(self.COOKIE + ".lock"):
                        with open(self.COOKIE, 'r') as fp:
                            cfg = json.load(fp)
                    break
                except Exception:
                    if i == (NUM_TRIES - 1):
                        raise
                    time.sleep(random.random())
                    continue
        return cfg

    def get(self, key):
        if key not in ['calls_counter']:
            # ignore logging for some keys
            logger.debug('COOKIE.read: key: {}'.format(key))
        cfg = self.read_json()
        if key in cfg.keys():
            value = cfg[key]
        else:
            logger.debug(msg='Key not in platform cookie file: {}. Return None'.format(key))
            value = None
        return value

    def put(self, key, value):
        if key not in ['calls_counter']:
            # ignore logging for some keys
            logger.debug('COOKIE.write: key: {}'.format(key))
        # read and write
        cfg = self.read_json(create=True)
        cfg[key] = value
        with FileLock(self.COOKIE + ".lock"):
            with open(self.COOKIE, 'w') as fp:
                json.dump(cfg, fp, indent=2)

    def reset(self):
        with FileLock(self.COOKIE + ".lock"):
            with open(self.COOKIE, 'w') as fp:
                json.dump({}, fp, indent=2)
```
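For orientation, a minimal sketch of how `CookieIO` is exercised; the `'token'` key and the stored value are illustrative, not taken from the diff:

```python
from dtlpy.services.cookie import CookieIO

# The global cookie lives at <DATALOOP_PATH>/cookie.json;
# CookieIO.init() creates it on first use (create=True by default).
io = CookieIO.init()

# put() re-reads the JSON under a file lock, sets the key, and writes it back.
io.put('token', 'abc123')  # 'token' is an illustrative key

# get() returns None (and logs a debug message) for missing keys.
assert io.get('token') == 'abc123'
assert io.get('no-such-key') is None

# reset() truncates the cookie back to an empty JSON object.
io.reset()
```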
dtlpy/services/create_logger.py
CHANGED
Single hunk `@@ -1,156 +1,156 @@`: again, all 156 lines are removed and re-added unchanged. The file reads:

```python
import datetime
import threading
import logging.handlers
import os

from .service_defaults import DATALOOP_PATH

logger = logging.getLogger(name='dtlpy')


class DataloopLogger(logging.handlers.BaseRotatingHandler):
    """
    Based on logging.handlers.RotatingFileHandler
    Create a new log file after reaching maxBytes
    Delete logs older than a threshold (default is a week)
    """

    def __init__(self, filename, mode='a', maxBytes=0, encoding='utf-8', delay=False):
        if maxBytes > 0:
            mode = 'a'
        super().__init__(filename=filename, mode=mode, encoding=encoding, delay=delay)
        self.maxBytes = maxBytes
        DataloopLogger.clean_dataloop_cache()

    @staticmethod
    def clean_dataloop_cache(cache_path=DATALOOP_PATH, max_param=None):
        try:
            async_clean = True
            dir_list = [os.path.join(cache_path, d) for d in os.listdir(cache_path)
                        if os.path.isdir(os.path.join(cache_path, d))]
            for path in dir_list:
                if 'cache' not in path:
                    if async_clean:
                        worker = threading.Thread(target=DataloopLogger.clean_dataloop_cache_thread,
                                                  kwargs={'path': path,
                                                          'max_param': max_param})
                        worker.daemon = True
                        worker.start()
                    else:
                        DataloopLogger.clean_dataloop_cache_thread(path=path, max_param=max_param)
        except Exception as err:
            logger.exception(err)

    @staticmethod
    def get_clean_parameter_per(path):
        # (60 * 60 * 24 * 7): # sec * min * hour * days - delete if older than a week
        # 1e6 100MB
        path_param = [{'type': 'datasets', 'max_time': 60 * 60 * 24 * 30},
                      {'type': 'items', 'max_time': 60 * 60 * 24 * 30},
                      {'type': 'logs', 'max_time': 60 * 60 * 24 * 7, 'max_size': 200 * 1e6},
                      {'type': 'projects', 'max_time': 60 * 60 * 24 * 30}]
        for param in path_param:
            if param['type'] in path:
                return param
        return {'type': 'default', 'max_time': 60 * 60 * 24 * 30}

    @staticmethod
    def clean_dataloop_cache_thread(path, total_cache_size=0, max_param=None):
        try:
            is_root = False
            if max_param is None:
                max_param = DataloopLogger.get_clean_parameter_per(path)
                is_root = True

            now = datetime.datetime.timestamp(datetime.datetime.now())
            files = [os.path.join(path, f) for f in os.listdir(path)]
            files.sort(key=lambda x: -os.path.getmtime(x))  # newer first
            for filepath in files:
                if os.path.isdir(filepath):
                    total_cache_size = DataloopLogger. \
                        clean_dataloop_cache_thread(filepath, total_cache_size=total_cache_size, max_param=max_param)
                    # Remove the dir if empty
                    if len(os.listdir(filepath)) == 0:
                        os.rmdir(filepath)
                    continue
                if 'max_time' in max_param:
                    file_time = os.path.getmtime(filepath)
                    if (now - file_time) > max_param['max_time']:
                        try:
                            os.remove(filepath)
                        except Exception as e:
                            logger.warning("Old log file can not be removed: {}".format(e))
                        continue
                if 'max_size' in max_param:
                    file_size = os.path.getsize(filepath)
                    if (total_cache_size + file_size) > max_param['max_size']:
                        try:
                            os.remove(filepath)
                        except Exception as e:
                            logger.warning("Old log file can not be removed: {}".format(e))
                        continue
                    total_cache_size += file_size
            if is_root:
                logger.debug("clean_dataloop_cache_thread for {} directory has been ended".format(path))
            return total_cache_size
        except Exception as err:
            logger.exception(err)

    @staticmethod
    def get_log_path():
        log_path = os.path.join(DATALOOP_PATH, 'logs')
        if not os.path.isdir(log_path):
            os.makedirs(log_path, exist_ok=True)
        return log_path

    @staticmethod
    def get_log_filepath():
        log_path = DataloopLogger.get_log_path()
        log_filepath = os.path.join(log_path, '{}.log'.format(datetime.datetime.now(datetime.timezone.utc).strftime('%Y-%m-%d_%H-%M-%S')))
        return log_filepath

    def doRollover(self):
        """
        Do a rollover, as described in __init__().
        """
        if self.stream:
            self.stream.close()
            self.stream = None
        # clean older logs (week old)
        DataloopLogger.clean_dataloop_cache()
        # create new log
        self.baseFilename = DataloopLogger.get_log_filepath()
        if not self.delay:
            self.stream = self._open()

    def shouldRollover(self, record):
        """
        Determine if rollover should occur.

        Basically, see if the supplied record would cause the file to exceed
        the size limit we have.
        """
        if self.stream is None:  # delay was set...
            self.stream = self._open()
        if self.maxBytes > 0:  # are we rolling over?
            msg = "%s\n" % self.format(record)
            self.stream.seek(0, 2)  # due to non-posix-compliant Windows feature
            if self.stream.tell() + len(msg) >= self.maxBytes:
                return 1
        return 0


class DtlpyFilter(logging.Filter):
    def __init__(self, package_path):
        super(DtlpyFilter, self).__init__(name='dtlpy')
        self._package_path = package_path

    def filter(self, record):
        pathname = record.pathname
        try:
            relativepath = os.path.splitext(os.path.relpath(pathname, self._package_path))[0]
            relativepath = relativepath.replace(os.sep, '.')
        except Exception:
            relativepath = ''
        record.relativepath = relativepath
        return True
```
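As a sanity check of the handler's interface, a minimal attach-and-log sketch; the `maxBytes` value and format string are illustrative, everything else is the class's own API as shown above:

```python
import logging
from dtlpy.services.create_logger import DataloopLogger

# DataloopLogger rolls over to a fresh timestamped file under
# <DATALOOP_PATH>/logs once maxBytes is reached, and prunes old
# logs (per get_clean_parameter_per) on each rollover.
handler = DataloopLogger(filename=DataloopLogger.get_log_filepath(),
                         maxBytes=5 * 1024 * 1024)  # 5 MB per file (illustrative)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))

log = logging.getLogger('dtlpy')
log.addHandler(handler)
log.warning('hello from the rotating handler')
```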