dtlpy 1.115.44__py3-none-any.whl → 1.116.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (238)
  1. dtlpy/__init__.py +491 -491
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/__init__.py +26 -26
  4. dtlpy/assets/code_server/config.yaml +2 -2
  5. dtlpy/assets/code_server/installation.sh +24 -24
  6. dtlpy/assets/code_server/launch.json +13 -13
  7. dtlpy/assets/code_server/settings.json +2 -2
  8. dtlpy/assets/main.py +53 -53
  9. dtlpy/assets/main_partial.py +18 -18
  10. dtlpy/assets/mock.json +11 -11
  11. dtlpy/assets/model_adapter.py +83 -83
  12. dtlpy/assets/package.json +61 -61
  13. dtlpy/assets/package_catalog.json +29 -29
  14. dtlpy/assets/package_gitignore +307 -307
  15. dtlpy/assets/service_runners/__init__.py +33 -33
  16. dtlpy/assets/service_runners/converter.py +96 -96
  17. dtlpy/assets/service_runners/multi_method.py +49 -49
  18. dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
  19. dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
  20. dtlpy/assets/service_runners/multi_method_item.py +52 -52
  21. dtlpy/assets/service_runners/multi_method_json.py +52 -52
  22. dtlpy/assets/service_runners/single_method.py +37 -37
  23. dtlpy/assets/service_runners/single_method_annotation.py +43 -43
  24. dtlpy/assets/service_runners/single_method_dataset.py +43 -43
  25. dtlpy/assets/service_runners/single_method_item.py +41 -41
  26. dtlpy/assets/service_runners/single_method_json.py +42 -42
  27. dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
  28. dtlpy/assets/voc_annotation_template.xml +23 -23
  29. dtlpy/caches/base_cache.py +32 -32
  30. dtlpy/caches/cache.py +473 -473
  31. dtlpy/caches/dl_cache.py +201 -201
  32. dtlpy/caches/filesystem_cache.py +89 -89
  33. dtlpy/caches/redis_cache.py +84 -84
  34. dtlpy/dlp/__init__.py +20 -20
  35. dtlpy/dlp/cli_utilities.py +367 -367
  36. dtlpy/dlp/command_executor.py +764 -764
  37. dtlpy/dlp/dlp +1 -1
  38. dtlpy/dlp/dlp.bat +1 -1
  39. dtlpy/dlp/dlp.py +128 -128
  40. dtlpy/dlp/parser.py +651 -651
  41. dtlpy/entities/__init__.py +83 -83
  42. dtlpy/entities/analytic.py +347 -347
  43. dtlpy/entities/annotation.py +1879 -1879
  44. dtlpy/entities/annotation_collection.py +699 -699
  45. dtlpy/entities/annotation_definitions/__init__.py +20 -20
  46. dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
  47. dtlpy/entities/annotation_definitions/box.py +195 -195
  48. dtlpy/entities/annotation_definitions/classification.py +67 -67
  49. dtlpy/entities/annotation_definitions/comparison.py +72 -72
  50. dtlpy/entities/annotation_definitions/cube.py +204 -204
  51. dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
  52. dtlpy/entities/annotation_definitions/description.py +32 -32
  53. dtlpy/entities/annotation_definitions/ellipse.py +124 -124
  54. dtlpy/entities/annotation_definitions/free_text.py +62 -62
  55. dtlpy/entities/annotation_definitions/gis.py +69 -69
  56. dtlpy/entities/annotation_definitions/note.py +139 -139
  57. dtlpy/entities/annotation_definitions/point.py +117 -117
  58. dtlpy/entities/annotation_definitions/polygon.py +182 -182
  59. dtlpy/entities/annotation_definitions/polyline.py +111 -111
  60. dtlpy/entities/annotation_definitions/pose.py +92 -92
  61. dtlpy/entities/annotation_definitions/ref_image.py +86 -86
  62. dtlpy/entities/annotation_definitions/segmentation.py +240 -240
  63. dtlpy/entities/annotation_definitions/subtitle.py +34 -34
  64. dtlpy/entities/annotation_definitions/text.py +85 -85
  65. dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
  66. dtlpy/entities/app.py +220 -220
  67. dtlpy/entities/app_module.py +107 -107
  68. dtlpy/entities/artifact.py +174 -174
  69. dtlpy/entities/assignment.py +399 -399
  70. dtlpy/entities/base_entity.py +214 -214
  71. dtlpy/entities/bot.py +113 -113
  72. dtlpy/entities/codebase.py +292 -292
  73. dtlpy/entities/collection.py +38 -38
  74. dtlpy/entities/command.py +169 -169
  75. dtlpy/entities/compute.py +449 -449
  76. dtlpy/entities/dataset.py +1299 -1299
  77. dtlpy/entities/directory_tree.py +44 -44
  78. dtlpy/entities/dpk.py +470 -470
  79. dtlpy/entities/driver.py +235 -235
  80. dtlpy/entities/execution.py +397 -397
  81. dtlpy/entities/feature.py +124 -124
  82. dtlpy/entities/feature_set.py +145 -145
  83. dtlpy/entities/filters.py +798 -798
  84. dtlpy/entities/gis_item.py +107 -107
  85. dtlpy/entities/integration.py +184 -184
  86. dtlpy/entities/item.py +959 -959
  87. dtlpy/entities/label.py +123 -123
  88. dtlpy/entities/links.py +85 -85
  89. dtlpy/entities/message.py +175 -175
  90. dtlpy/entities/model.py +684 -684
  91. dtlpy/entities/node.py +1005 -1005
  92. dtlpy/entities/ontology.py +810 -803
  93. dtlpy/entities/organization.py +287 -287
  94. dtlpy/entities/package.py +657 -657
  95. dtlpy/entities/package_defaults.py +5 -5
  96. dtlpy/entities/package_function.py +185 -185
  97. dtlpy/entities/package_module.py +113 -113
  98. dtlpy/entities/package_slot.py +118 -118
  99. dtlpy/entities/paged_entities.py +299 -299
  100. dtlpy/entities/pipeline.py +624 -624
  101. dtlpy/entities/pipeline_execution.py +279 -279
  102. dtlpy/entities/project.py +394 -394
  103. dtlpy/entities/prompt_item.py +505 -505
  104. dtlpy/entities/recipe.py +301 -301
  105. dtlpy/entities/reflect_dict.py +102 -102
  106. dtlpy/entities/resource_execution.py +138 -138
  107. dtlpy/entities/service.py +963 -963
  108. dtlpy/entities/service_driver.py +117 -117
  109. dtlpy/entities/setting.py +294 -294
  110. dtlpy/entities/task.py +495 -495
  111. dtlpy/entities/time_series.py +143 -143
  112. dtlpy/entities/trigger.py +426 -426
  113. dtlpy/entities/user.py +118 -118
  114. dtlpy/entities/webhook.py +124 -124
  115. dtlpy/examples/__init__.py +19 -19
  116. dtlpy/examples/add_labels.py +135 -135
  117. dtlpy/examples/add_metadata_to_item.py +21 -21
  118. dtlpy/examples/annotate_items_using_model.py +65 -65
  119. dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
  120. dtlpy/examples/annotations_convert_to_voc.py +9 -9
  121. dtlpy/examples/annotations_convert_to_yolo.py +9 -9
  122. dtlpy/examples/convert_annotation_types.py +51 -51
  123. dtlpy/examples/converter.py +143 -143
  124. dtlpy/examples/copy_annotations.py +22 -22
  125. dtlpy/examples/copy_folder.py +31 -31
  126. dtlpy/examples/create_annotations.py +51 -51
  127. dtlpy/examples/create_video_annotations.py +83 -83
  128. dtlpy/examples/delete_annotations.py +26 -26
  129. dtlpy/examples/filters.py +113 -113
  130. dtlpy/examples/move_item.py +23 -23
  131. dtlpy/examples/play_video_annotation.py +13 -13
  132. dtlpy/examples/show_item_and_mask.py +53 -53
  133. dtlpy/examples/triggers.py +49 -49
  134. dtlpy/examples/upload_batch_of_items.py +20 -20
  135. dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
  136. dtlpy/examples/upload_items_with_modalities.py +43 -43
  137. dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
  138. dtlpy/examples/upload_yolo_format_annotations.py +70 -70
  139. dtlpy/exceptions.py +125 -125
  140. dtlpy/miscellaneous/__init__.py +20 -20
  141. dtlpy/miscellaneous/dict_differ.py +95 -95
  142. dtlpy/miscellaneous/git_utils.py +217 -217
  143. dtlpy/miscellaneous/json_utils.py +14 -14
  144. dtlpy/miscellaneous/list_print.py +105 -105
  145. dtlpy/miscellaneous/zipping.py +130 -130
  146. dtlpy/ml/__init__.py +20 -20
  147. dtlpy/ml/base_feature_extractor_adapter.py +27 -27
  148. dtlpy/ml/base_model_adapter.py +1257 -1230
  149. dtlpy/ml/metrics.py +461 -461
  150. dtlpy/ml/predictions_utils.py +274 -274
  151. dtlpy/ml/summary_writer.py +57 -57
  152. dtlpy/ml/train_utils.py +60 -60
  153. dtlpy/new_instance.py +252 -252
  154. dtlpy/repositories/__init__.py +56 -56
  155. dtlpy/repositories/analytics.py +85 -85
  156. dtlpy/repositories/annotations.py +916 -916
  157. dtlpy/repositories/apps.py +383 -383
  158. dtlpy/repositories/artifacts.py +452 -452
  159. dtlpy/repositories/assignments.py +599 -599
  160. dtlpy/repositories/bots.py +213 -213
  161. dtlpy/repositories/codebases.py +559 -559
  162. dtlpy/repositories/collections.py +332 -332
  163. dtlpy/repositories/commands.py +152 -152
  164. dtlpy/repositories/compositions.py +61 -61
  165. dtlpy/repositories/computes.py +439 -439
  166. dtlpy/repositories/datasets.py +1504 -1504
  167. dtlpy/repositories/downloader.py +976 -923
  168. dtlpy/repositories/dpks.py +433 -433
  169. dtlpy/repositories/drivers.py +482 -482
  170. dtlpy/repositories/executions.py +815 -815
  171. dtlpy/repositories/feature_sets.py +226 -226
  172. dtlpy/repositories/features.py +255 -255
  173. dtlpy/repositories/integrations.py +484 -484
  174. dtlpy/repositories/items.py +912 -912
  175. dtlpy/repositories/messages.py +94 -94
  176. dtlpy/repositories/models.py +1000 -1000
  177. dtlpy/repositories/nodes.py +80 -80
  178. dtlpy/repositories/ontologies.py +511 -511
  179. dtlpy/repositories/organizations.py +525 -525
  180. dtlpy/repositories/packages.py +1941 -1941
  181. dtlpy/repositories/pipeline_executions.py +451 -451
  182. dtlpy/repositories/pipelines.py +640 -640
  183. dtlpy/repositories/projects.py +539 -539
  184. dtlpy/repositories/recipes.py +419 -399
  185. dtlpy/repositories/resource_executions.py +137 -137
  186. dtlpy/repositories/schema.py +120 -120
  187. dtlpy/repositories/service_drivers.py +213 -213
  188. dtlpy/repositories/services.py +1704 -1704
  189. dtlpy/repositories/settings.py +339 -339
  190. dtlpy/repositories/tasks.py +1477 -1477
  191. dtlpy/repositories/times_series.py +278 -278
  192. dtlpy/repositories/triggers.py +536 -536
  193. dtlpy/repositories/upload_element.py +257 -257
  194. dtlpy/repositories/uploader.py +661 -661
  195. dtlpy/repositories/webhooks.py +249 -249
  196. dtlpy/services/__init__.py +22 -22
  197. dtlpy/services/aihttp_retry.py +131 -131
  198. dtlpy/services/api_client.py +1785 -1785
  199. dtlpy/services/api_reference.py +40 -40
  200. dtlpy/services/async_utils.py +133 -133
  201. dtlpy/services/calls_counter.py +44 -44
  202. dtlpy/services/check_sdk.py +68 -68
  203. dtlpy/services/cookie.py +115 -115
  204. dtlpy/services/create_logger.py +156 -156
  205. dtlpy/services/events.py +84 -84
  206. dtlpy/services/logins.py +235 -235
  207. dtlpy/services/reporter.py +256 -256
  208. dtlpy/services/service_defaults.py +91 -91
  209. dtlpy/utilities/__init__.py +20 -20
  210. dtlpy/utilities/annotations/__init__.py +16 -16
  211. dtlpy/utilities/annotations/annotation_converters.py +269 -269
  212. dtlpy/utilities/base_package_runner.py +285 -264
  213. dtlpy/utilities/converter.py +1650 -1650
  214. dtlpy/utilities/dataset_generators/__init__.py +1 -1
  215. dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
  216. dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
  217. dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
  218. dtlpy/utilities/local_development/__init__.py +1 -1
  219. dtlpy/utilities/local_development/local_session.py +179 -179
  220. dtlpy/utilities/reports/__init__.py +2 -2
  221. dtlpy/utilities/reports/figures.py +343 -343
  222. dtlpy/utilities/reports/report.py +71 -71
  223. dtlpy/utilities/videos/__init__.py +17 -17
  224. dtlpy/utilities/videos/video_player.py +598 -598
  225. dtlpy/utilities/videos/videos.py +470 -470
  226. {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp +1 -1
  227. dtlpy-1.116.6.data/scripts/dlp.bat +2 -0
  228. {dtlpy-1.115.44.data → dtlpy-1.116.6.data}/scripts/dlp.py +128 -128
  229. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/METADATA +186 -186
  230. dtlpy-1.116.6.dist-info/RECORD +239 -0
  231. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/WHEEL +1 -1
  232. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/licenses/LICENSE +200 -200
  233. tests/features/environment.py +551 -551
  234. dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
  235. dtlpy-1.115.44.data/scripts/dlp.bat +0 -2
  236. dtlpy-1.115.44.dist-info/RECORD +0 -240
  237. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/entry_points.txt +0 -0
  238. {dtlpy-1.115.44.dist-info → dtlpy-1.116.6.dist-info}/top_level.txt +0 -0
dtlpy/services/cookie.py CHANGED
@@ -1,115 +1,115 @@
1
- """
2
- Dataloop cookie state
3
- """
4
-
5
- import os
6
- import time
7
- import json
8
- import logging
9
- import random
10
- from .service_defaults import DATALOOP_PATH
11
- from filelock import FileLock
12
-
13
- logger = logging.getLogger(name='dtlpy')
14
-
15
- NUM_TRIES = 3
16
-
17
-
18
- class CookieIO:
19
- """
20
- Cookie interface for Dataloop parameters
21
- """
22
-
23
- def __init__(self, path, create=True, local=False):
24
- self.COOKIE = path
25
- self.local = local
26
- if create:
27
- self.create()
28
-
29
- @staticmethod
30
- def init():
31
- global_cookie_file = os.path.join(DATALOOP_PATH, 'cookie.json')
32
- return CookieIO(global_cookie_file)
33
-
34
- @staticmethod
35
- def init_local_cookie(create=False):
36
- local_cookie_file = os.path.join(os.getcwd(), '.dataloop', 'state.json')
37
- return CookieIO(local_cookie_file, create=create, local=True)
38
-
39
- @staticmethod
40
- def init_package_json_cookie(create=False):
41
- package_json_file = os.path.join(os.getcwd(), 'package.json')
42
- return CookieIO(package_json_file, create=create, local=True)
43
-
44
- def create(self):
45
- # create directory '.dataloop' if not exists
46
- if not os.path.isdir(os.path.dirname(self.COOKIE)):
47
- os.makedirs(os.path.dirname(self.COOKIE))
48
-
49
- if not os.path.isfile(self.COOKIE) or os.path.getsize(self.COOKIE) == 0:
50
- logger.debug('COOKIE.create: File: {}'.format(self.COOKIE))
51
- self.reset()
52
- try:
53
- with FileLock(self.COOKIE + ".lock"):
54
- with open(self.COOKIE, 'r') as f:
55
- json.load(f)
56
- except ValueError:
57
- print('FATAL ERROR: COOKIE {!r} is corrupted. please fix or delete the file.'.format(self.COOKIE))
58
- raise SystemExit
59
-
60
- def read_json(self, create=False):
61
- # which cookie
62
- if self.local:
63
- self.COOKIE = os.path.join(os.getcwd(), '.dataloop', 'state.json')
64
-
65
- # check if file exists - and create
66
- if not os.path.isfile(self.COOKIE) and create:
67
- self.create()
68
-
69
- # check if file exists
70
- if not os.path.isfile(self.COOKIE):
71
- logger.debug('COOKIE.read: File does not exist: {}. Return None'.format(self.COOKIE))
72
- cfg = {}
73
- else:
74
- # read cookie
75
- cfg = {}
76
- for i in range(NUM_TRIES):
77
- try:
78
- with FileLock(self.COOKIE + ".lock"):
79
- with open(self.COOKIE, 'r') as fp:
80
- cfg = json.load(fp)
81
- break
82
- except Exception:
83
- if i == (NUM_TRIES - 1):
84
- raise
85
- time.sleep(random.random())
86
- continue
87
- return cfg
88
-
89
- def get(self, key):
90
- if key not in ['calls_counter']:
91
- # ignore logging for some keys
92
- logger.debug('COOKIE.read: key: {}'.format(key))
93
- cfg = self.read_json()
94
- if key in cfg.keys():
95
- value = cfg[key]
96
- else:
97
- logger.debug(msg='Key not in platform cookie file: {}. Return None'.format(key))
98
- value = None
99
- return value
100
-
101
- def put(self, key, value):
102
- if key not in ['calls_counter']:
103
- # ignore logging for some keys
104
- logger.debug('COOKIE.write: key: {}'.format(key))
105
- # read and write
106
- cfg = self.read_json(create=True)
107
- cfg[key] = value
108
- with FileLock(self.COOKIE + ".lock"):
109
- with open(self.COOKIE, 'w') as fp:
110
- json.dump(cfg, fp, indent=2)
111
-
112
- def reset(self):
113
- with FileLock(self.COOKIE + ".lock"):
114
- with open(self.COOKIE, 'w') as fp:
115
- json.dump({}, fp, indent=2)
1
+ """
2
+ Dataloop cookie state
3
+ """
4
+
5
+ import os
6
+ import time
7
+ import json
8
+ import logging
9
+ import random
10
+ from .service_defaults import DATALOOP_PATH
11
+ from filelock import FileLock
12
+
13
+ logger = logging.getLogger(name='dtlpy')
14
+
15
+ NUM_TRIES = 3
16
+
17
+
18
+ class CookieIO:
19
+ """
20
+ Cookie interface for Dataloop parameters
21
+ """
22
+
23
+ def __init__(self, path, create=True, local=False):
24
+ self.COOKIE = path
25
+ self.local = local
26
+ if create:
27
+ self.create()
28
+
29
+ @staticmethod
30
+ def init():
31
+ global_cookie_file = os.path.join(DATALOOP_PATH, 'cookie.json')
32
+ return CookieIO(global_cookie_file)
33
+
34
+ @staticmethod
35
+ def init_local_cookie(create=False):
36
+ local_cookie_file = os.path.join(os.getcwd(), '.dataloop', 'state.json')
37
+ return CookieIO(local_cookie_file, create=create, local=True)
38
+
39
+ @staticmethod
40
+ def init_package_json_cookie(create=False):
41
+ package_json_file = os.path.join(os.getcwd(), 'package.json')
42
+ return CookieIO(package_json_file, create=create, local=True)
43
+
44
+ def create(self):
45
+ # create directory '.dataloop' if not exists
46
+ if not os.path.isdir(os.path.dirname(self.COOKIE)):
47
+ os.makedirs(os.path.dirname(self.COOKIE))
48
+
49
+ if not os.path.isfile(self.COOKIE) or os.path.getsize(self.COOKIE) == 0:
50
+ logger.debug('COOKIE.create: File: {}'.format(self.COOKIE))
51
+ self.reset()
52
+ try:
53
+ with FileLock(self.COOKIE + ".lock"):
54
+ with open(self.COOKIE, 'r') as f:
55
+ json.load(f)
56
+ except ValueError:
57
+ print('FATAL ERROR: COOKIE {!r} is corrupted. please fix or delete the file.'.format(self.COOKIE))
58
+ raise SystemExit
59
+
60
+ def read_json(self, create=False):
61
+ # which cookie
62
+ if self.local:
63
+ self.COOKIE = os.path.join(os.getcwd(), '.dataloop', 'state.json')
64
+
65
+ # check if file exists - and create
66
+ if not os.path.isfile(self.COOKIE) and create:
67
+ self.create()
68
+
69
+ # check if file exists
70
+ if not os.path.isfile(self.COOKIE):
71
+ logger.debug('COOKIE.read: File does not exist: {}. Return None'.format(self.COOKIE))
72
+ cfg = {}
73
+ else:
74
+ # read cookie
75
+ cfg = {}
76
+ for i in range(NUM_TRIES):
77
+ try:
78
+ with FileLock(self.COOKIE + ".lock"):
79
+ with open(self.COOKIE, 'r') as fp:
80
+ cfg = json.load(fp)
81
+ break
82
+ except Exception:
83
+ if i == (NUM_TRIES - 1):
84
+ raise
85
+ time.sleep(random.random())
86
+ continue
87
+ return cfg
88
+
89
+ def get(self, key):
90
+ if key not in ['calls_counter']:
91
+ # ignore logging for some keys
92
+ logger.debug('COOKIE.read: key: {}'.format(key))
93
+ cfg = self.read_json()
94
+ if key in cfg.keys():
95
+ value = cfg[key]
96
+ else:
97
+ logger.debug(msg='Key not in platform cookie file: {}. Return None'.format(key))
98
+ value = None
99
+ return value
100
+
101
+ def put(self, key, value):
102
+ if key not in ['calls_counter']:
103
+ # ignore logging for some keys
104
+ logger.debug('COOKIE.write: key: {}'.format(key))
105
+ # read and write
106
+ cfg = self.read_json(create=True)
107
+ cfg[key] = value
108
+ with FileLock(self.COOKIE + ".lock"):
109
+ with open(self.COOKIE, 'w') as fp:
110
+ json.dump(cfg, fp, indent=2)
111
+
112
+ def reset(self):
113
+ with FileLock(self.COOKIE + ".lock"):
114
+ with open(self.COOKIE, 'w') as fp:
115
+ json.dump({}, fp, indent=2)
@@ -1,156 +1,156 @@
1
- import datetime
2
- import threading
3
- import logging.handlers
4
- import os
5
-
6
- from .service_defaults import DATALOOP_PATH
7
-
8
- logger = logging.getLogger(name='dtlpy')
9
-
10
-
11
- class DataloopLogger(logging.handlers.BaseRotatingHandler):
12
- """
13
- Based on logging.handlers.RotatingFileHandler
14
- Create a new log file after reached maxBytes
15
- Delete logs older than a threshold default is week)
16
- """
17
-
18
- def __init__(self, filename, mode='a', maxBytes=0, encoding='utf-8', delay=False):
19
- if maxBytes > 0:
20
- mode = 'a'
21
- super().__init__(filename=filename, mode=mode, encoding=encoding, delay=delay)
22
- self.maxBytes = maxBytes
23
- DataloopLogger.clean_dataloop_cache()
24
-
25
- @staticmethod
26
- def clean_dataloop_cache(cache_path=DATALOOP_PATH, max_param=None):
27
- try:
28
- async_clean = True
29
- dir_list = [os.path.join(cache_path, d) for d in os.listdir(cache_path)
30
- if os.path.isdir(os.path.join(cache_path, d))]
31
- for path in dir_list:
32
- if 'cache' not in path:
33
- if async_clean:
34
- worker = threading.Thread(target=DataloopLogger.clean_dataloop_cache_thread,
35
- kwargs={'path': path,
36
- 'max_param': max_param})
37
- worker.daemon = True
38
- worker.start()
39
- else:
40
- DataloopLogger.clean_dataloop_cache_thread(path=path, max_param=max_param)
41
- except Exception as err:
42
- logger.exception(err)
43
-
44
- @staticmethod
45
- def get_clean_parameter_per(path):
46
- # (60 * 60 * 24 * 7): # sec * min * hour * days - delete if older than a week
47
- # 1e6 100MB
48
- path_param = [{'type': 'datasets', 'max_time': 60 * 60 * 24 * 30},
49
- {'type': 'items', 'max_time': 60 * 60 * 24 * 30},
50
- {'type': 'logs', 'max_time': 60 * 60 * 24 * 7, 'max_size': 200 * 1e6},
51
- {'type': 'projects', 'max_time': 60 * 60 * 24 * 30}]
52
- for param in path_param:
53
- if param['type'] in path:
54
- return param
55
- return {'type': 'default', 'max_time': 60 * 60 * 24 * 30}
56
-
57
- @staticmethod
58
- def clean_dataloop_cache_thread(path, total_cache_size=0, max_param=None):
59
- try:
60
- is_root = False
61
- if max_param is None:
62
- max_param = DataloopLogger.get_clean_parameter_per(path)
63
- is_root = True
64
-
65
- now = datetime.datetime.timestamp(datetime.datetime.now())
66
- files = [os.path.join(path, f) for f in os.listdir(path)]
67
- files.sort(key=lambda x: -os.path.getmtime(x)) # newer first
68
- for filepath in files:
69
- if os.path.isdir(filepath):
70
- total_cache_size = DataloopLogger. \
71
- clean_dataloop_cache_thread(filepath, total_cache_size=total_cache_size, max_param=max_param)
72
- # Remove the dir if empty
73
- if len(os.listdir(filepath)) == 0:
74
- os.rmdir(filepath)
75
- continue
76
- if 'max_time' in max_param:
77
- file_time = os.path.getmtime(filepath)
78
- if (now - file_time) > max_param['max_time']:
79
- try:
80
- os.remove(filepath)
81
- except Exception as e:
82
- logger.warning("Old log file can not be removed: {}".format(e))
83
- continue
84
- if 'max_size' in max_param:
85
- file_size = os.path.getsize(filepath)
86
- if (total_cache_size + file_size) > max_param['max_size']:
87
- try:
88
- os.remove(filepath)
89
- except Exception as e:
90
- logger.warning("Old log file can not be removed: {}".format(e))
91
- continue
92
- total_cache_size += file_size
93
- if is_root:
94
- logger.debug("clean_dataloop_cache_thread for {} directory has been ended".format(path))
95
- return total_cache_size
96
- except Exception as err:
97
- logger.exception(err)
98
-
99
- @staticmethod
100
- def get_log_path():
101
- log_path = os.path.join(DATALOOP_PATH, 'logs')
102
- if not os.path.isdir(log_path):
103
- os.makedirs(log_path, exist_ok=True)
104
- return log_path
105
-
106
- @staticmethod
107
- def get_log_filepath():
108
- log_path = DataloopLogger.get_log_path()
109
- log_filepath = os.path.join(log_path, '{}.log'.format(datetime.datetime.now(datetime.timezone.utc).strftime('%Y-%m-%d_%H-%M-%S')))
110
- return log_filepath
111
-
112
- def doRollover(self):
113
- """
114
- Do a rollover, as described in __init__().
115
- """
116
- if self.stream:
117
- self.stream.close()
118
- self.stream = None
119
- # clean older logs (week old)
120
- DataloopLogger.clean_dataloop_cache()
121
- # create new log
122
- self.baseFilename = DataloopLogger.get_log_filepath()
123
- if not self.delay:
124
- self.stream = self._open()
125
-
126
- def shouldRollover(self, record):
127
- """
128
- Determine if rollover should occur.
129
-
130
- Basically, see if the supplied record would cause the file to exceed
131
- the size limit we have.
132
- """
133
- if self.stream is None: # delay was set...
134
- self.stream = self._open()
135
- if self.maxBytes > 0: # are we rolling over?
136
- msg = "%s\n" % self.format(record)
137
- self.stream.seek(0, 2) # due to non-posix-compliant Windows feature
138
- if self.stream.tell() + len(msg) >= self.maxBytes:
139
- return 1
140
- return 0
141
-
142
-
143
- class DtlpyFilter(logging.Filter):
144
- def __init__(self, package_path):
145
- super(DtlpyFilter, self).__init__(name='dtlpy')
146
- self._package_path = package_path
147
-
148
- def filter(self, record):
149
- pathname = record.pathname
150
- try:
151
- relativepath = os.path.splitext(os.path.relpath(pathname, self._package_path))[0]
152
- relativepath = relativepath.replace(os.sep, '.')
153
- except Exception:
154
- relativepath = ''
155
- record.relativepath = relativepath
156
- return True
1
+ import datetime
2
+ import threading
3
+ import logging.handlers
4
+ import os
5
+
6
+ from .service_defaults import DATALOOP_PATH
7
+
8
+ logger = logging.getLogger(name='dtlpy')
9
+
10
+
11
class DataloopLogger(logging.handlers.BaseRotatingHandler):
    """
    Based on logging.handlers.RotatingFileHandler.

    Creates a new log file after maxBytes is reached and deletes cached
    files older than a per-directory threshold (default is a week for logs,
    a month for the rest).
    """

    def __init__(self, filename, mode='a', maxBytes=0, encoding='utf-8', delay=False):
        # Rotation only makes sense in append mode.
        if maxBytes > 0:
            mode = 'a'
        super().__init__(filename=filename, mode=mode, encoding=encoding, delay=delay)
        self.maxBytes = maxBytes
        # Opportunistically clean old cached files on handler creation.
        DataloopLogger.clean_dataloop_cache()

    @staticmethod
    def clean_dataloop_cache(cache_path=DATALOOP_PATH, max_param=None):
        """
        Clean stale files under every sub-directory of ``cache_path``
        (except directories whose path contains 'cache'), each in its own
        daemon thread so startup is not blocked.

        :param cache_path: root directory to scan.
        :param max_param: optional dict overriding the per-type limits
            returned by :meth:`get_clean_parameter_per`.
        """
        try:
            async_clean = True
            dir_list = [os.path.join(cache_path, d) for d in os.listdir(cache_path)
                        if os.path.isdir(os.path.join(cache_path, d))]
            for path in dir_list:
                if 'cache' not in path:
                    if async_clean:
                        worker = threading.Thread(target=DataloopLogger.clean_dataloop_cache_thread,
                                                  kwargs={'path': path,
                                                          'max_param': max_param})
                        worker.daemon = True
                        worker.start()
                    else:
                        DataloopLogger.clean_dataloop_cache_thread(path=path, max_param=max_param)
        except Exception as err:
            # Cleanup is best-effort; never let it break logging setup.
            logger.exception(err)

    @staticmethod
    def get_clean_parameter_per(path):
        """
        Return the retention limits for ``path``.

        ``max_time`` is in seconds (sec * min * hour * days); ``max_size``
        is in bytes (logs are capped at 200 * 1e6 = 200 MB total).
        """
        path_param = [{'type': 'datasets', 'max_time': 60 * 60 * 24 * 30},
                      {'type': 'items', 'max_time': 60 * 60 * 24 * 30},
                      {'type': 'logs', 'max_time': 60 * 60 * 24 * 7, 'max_size': 200 * 1e6},
                      {'type': 'projects', 'max_time': 60 * 60 * 24 * 30}]
        for param in path_param:
            if param['type'] in path:
                return param
        return {'type': 'default', 'max_time': 60 * 60 * 24 * 30}

    @staticmethod
    def clean_dataloop_cache_thread(path, total_cache_size=0, max_param=None):
        """
        Recursively delete files under ``path`` that exceed the age or the
        cumulative-size limits in ``max_param``; prune emptied directories.

        :param path: directory to clean.
        :param total_cache_size: bytes already counted by the caller
            (used across the recursion to enforce ``max_size``).
        :param max_param: limits dict; resolved from the path on the root call.
        :return: updated cumulative size in bytes.
        """
        try:
            is_root = False
            if max_param is None:
                max_param = DataloopLogger.get_clean_parameter_per(path)
                is_root = True

            now = datetime.datetime.timestamp(datetime.datetime.now())
            files = [os.path.join(path, f) for f in os.listdir(path)]
            files.sort(key=lambda x: -os.path.getmtime(x))  # newer first
            for filepath in files:
                if os.path.isdir(filepath):
                    total_cache_size = DataloopLogger. \
                        clean_dataloop_cache_thread(filepath, total_cache_size=total_cache_size, max_param=max_param)
                    # Remove the dir if empty
                    if len(os.listdir(filepath)) == 0:
                        os.rmdir(filepath)
                    continue
                if 'max_time' in max_param:
                    file_time = os.path.getmtime(filepath)
                    if (now - file_time) > max_param['max_time']:
                        try:
                            os.remove(filepath)
                        except Exception as e:
                            logger.warning("Old log file can not be removed: {}".format(e))
                        continue
                if 'max_size' in max_param:
                    file_size = os.path.getsize(filepath)
                    if (total_cache_size + file_size) > max_param['max_size']:
                        try:
                            os.remove(filepath)
                        except Exception as e:
                            logger.warning("Old log file can not be removed: {}".format(e))
                        continue
                    total_cache_size += file_size
            if is_root:
                logger.debug("clean_dataloop_cache_thread for {} directory has been ended".format(path))
            return total_cache_size
        except Exception as err:
            logger.exception(err)
            # BUGFIX: must return the accumulator here. Returning None poisoned
            # the recursive callers (None + file_size -> TypeError), silently
            # aborting the rest of the cleanup pass.
            return total_cache_size

    @staticmethod
    def get_log_path():
        """Return the logs directory under the Dataloop home, creating it if needed."""
        log_path = os.path.join(DATALOOP_PATH, 'logs')
        if not os.path.isdir(log_path):
            os.makedirs(log_path, exist_ok=True)
        return log_path

    @staticmethod
    def get_log_filepath():
        """Return a fresh, UTC-timestamped log file path inside the logs directory."""
        log_path = DataloopLogger.get_log_path()
        log_filepath = os.path.join(log_path, '{}.log'.format(datetime.datetime.now(datetime.timezone.utc).strftime('%Y-%m-%d_%H-%M-%S')))
        return log_filepath

    def doRollover(self):
        """
        Do a rollover, as described in __init__().
        """
        if self.stream:
            self.stream.close()
            self.stream = None
        # clean older logs (week old)
        DataloopLogger.clean_dataloop_cache()
        # Roll over to a brand-new timestamped file instead of renaming.
        self.baseFilename = DataloopLogger.get_log_filepath()
        if not self.delay:
            self.stream = self._open()

    def shouldRollover(self, record):
        """
        Determine if rollover should occur.

        Basically, see if the supplied record would cause the file to exceed
        the size limit we have.
        """
        if self.stream is None:  # delay was set...
            self.stream = self._open()
        if self.maxBytes > 0:  # are we rolling over?
            msg = "%s\n" % self.format(record)
            self.stream.seek(0, 2)  # due to non-posix-compliant Windows feature
            if self.stream.tell() + len(msg) >= self.maxBytes:
                return 1
        return 0
141
+
142
+
143
class DtlpyFilter(logging.Filter):
    """
    Logging filter that decorates each record with ``relativepath``: the
    record's source file expressed as a dotted module path relative to the
    SDK package root (empty string when it cannot be computed).
    """

    def __init__(self, package_path):
        super(DtlpyFilter, self).__init__(name='dtlpy')
        self._package_path = package_path

    def filter(self, record):
        """Attach ``record.relativepath`` and always let the record through."""
        try:
            stem, _ext = os.path.splitext(os.path.relpath(record.pathname, self._package_path))
            dotted = stem.replace(os.sep, '.')
        except Exception:
            # Path on a different drive / unrelated root - fall back to empty.
            dotted = ''
        record.relativepath = dotted
        return True