clarifai 11.3.0rc2__py3-none-any.whl → 11.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (300)
  1. clarifai/__init__.py +1 -1
  2. clarifai/cli/__main__.py +1 -1
  3. clarifai/cli/base.py +144 -136
  4. clarifai/cli/compute_cluster.py +45 -31
  5. clarifai/cli/deployment.py +93 -76
  6. clarifai/cli/model.py +578 -180
  7. clarifai/cli/nodepool.py +100 -82
  8. clarifai/client/__init__.py +12 -2
  9. clarifai/client/app.py +973 -911
  10. clarifai/client/auth/helper.py +345 -342
  11. clarifai/client/auth/register.py +7 -7
  12. clarifai/client/auth/stub.py +107 -106
  13. clarifai/client/base.py +185 -178
  14. clarifai/client/compute_cluster.py +214 -180
  15. clarifai/client/dataset.py +793 -698
  16. clarifai/client/deployment.py +55 -50
  17. clarifai/client/input.py +1223 -1088
  18. clarifai/client/lister.py +47 -45
  19. clarifai/client/model.py +1939 -1717
  20. clarifai/client/model_client.py +525 -502
  21. clarifai/client/module.py +82 -73
  22. clarifai/client/nodepool.py +358 -213
  23. clarifai/client/runner.py +58 -0
  24. clarifai/client/search.py +342 -309
  25. clarifai/client/user.py +419 -414
  26. clarifai/client/workflow.py +294 -274
  27. clarifai/constants/dataset.py +11 -17
  28. clarifai/constants/model.py +8 -2
  29. clarifai/datasets/export/inputs_annotations.py +233 -217
  30. clarifai/datasets/upload/base.py +63 -51
  31. clarifai/datasets/upload/features.py +43 -38
  32. clarifai/datasets/upload/image.py +237 -207
  33. clarifai/datasets/upload/loaders/coco_captions.py +34 -32
  34. clarifai/datasets/upload/loaders/coco_detection.py +72 -65
  35. clarifai/datasets/upload/loaders/imagenet_classification.py +57 -53
  36. clarifai/datasets/upload/loaders/xview_detection.py +274 -132
  37. clarifai/datasets/upload/multimodal.py +55 -46
  38. clarifai/datasets/upload/text.py +55 -47
  39. clarifai/datasets/upload/utils.py +250 -234
  40. clarifai/errors.py +51 -50
  41. clarifai/models/api.py +260 -238
  42. clarifai/modules/css.py +50 -50
  43. clarifai/modules/pages.py +33 -33
  44. clarifai/rag/rag.py +312 -288
  45. clarifai/rag/utils.py +91 -84
  46. clarifai/runners/models/model_builder.py +906 -802
  47. clarifai/runners/models/model_class.py +370 -331
  48. clarifai/runners/models/model_run_locally.py +459 -419
  49. clarifai/runners/models/model_runner.py +170 -162
  50. clarifai/runners/models/model_servicer.py +78 -70
  51. clarifai/runners/server.py +111 -101
  52. clarifai/runners/utils/code_script.py +225 -187
  53. clarifai/runners/utils/const.py +4 -1
  54. clarifai/runners/utils/data_types/__init__.py +12 -0
  55. clarifai/runners/utils/data_types/data_types.py +598 -0
  56. clarifai/runners/utils/data_utils.py +387 -440
  57. clarifai/runners/utils/loader.py +247 -227
  58. clarifai/runners/utils/method_signatures.py +411 -386
  59. clarifai/runners/utils/openai_convertor.py +108 -109
  60. clarifai/runners/utils/serializers.py +175 -179
  61. clarifai/runners/utils/url_fetcher.py +35 -35
  62. clarifai/schema/search.py +56 -63
  63. clarifai/urls/helper.py +125 -102
  64. clarifai/utils/cli.py +129 -123
  65. clarifai/utils/config.py +127 -87
  66. clarifai/utils/constants.py +49 -0
  67. clarifai/utils/evaluation/helpers.py +503 -466
  68. clarifai/utils/evaluation/main.py +431 -393
  69. clarifai/utils/evaluation/testset_annotation_parser.py +154 -144
  70. clarifai/utils/logging.py +324 -306
  71. clarifai/utils/misc.py +60 -56
  72. clarifai/utils/model_train.py +165 -146
  73. clarifai/utils/protobuf.py +126 -103
  74. clarifai/versions.py +3 -1
  75. clarifai/workflows/export.py +48 -50
  76. clarifai/workflows/utils.py +39 -36
  77. clarifai/workflows/validate.py +55 -43
  78. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/METADATA +16 -6
  79. clarifai-11.4.0.dist-info/RECORD +109 -0
  80. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/WHEEL +1 -1
  81. clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
  82. clarifai/__pycache__/__init__.cpython-311.pyc +0 -0
  83. clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
  84. clarifai/__pycache__/errors.cpython-310.pyc +0 -0
  85. clarifai/__pycache__/errors.cpython-311.pyc +0 -0
  86. clarifai/__pycache__/versions.cpython-310.pyc +0 -0
  87. clarifai/__pycache__/versions.cpython-311.pyc +0 -0
  88. clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  89. clarifai/cli/__pycache__/__init__.cpython-311.pyc +0 -0
  90. clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
  91. clarifai/cli/__pycache__/base.cpython-311.pyc +0 -0
  92. clarifai/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  93. clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  94. clarifai/cli/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  95. clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
  96. clarifai/cli/__pycache__/deployment.cpython-311.pyc +0 -0
  97. clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
  98. clarifai/cli/__pycache__/model.cpython-311.pyc +0 -0
  99. clarifai/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  100. clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
  101. clarifai/cli/__pycache__/nodepool.cpython-311.pyc +0 -0
  102. clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
  103. clarifai/client/__pycache__/__init__.cpython-311.pyc +0 -0
  104. clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
  105. clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
  106. clarifai/client/__pycache__/app.cpython-311.pyc +0 -0
  107. clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
  108. clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
  109. clarifai/client/__pycache__/base.cpython-311.pyc +0 -0
  110. clarifai/client/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  111. clarifai/client/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  112. clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
  113. clarifai/client/__pycache__/dataset.cpython-311.pyc +0 -0
  114. clarifai/client/__pycache__/deployment.cpython-310.pyc +0 -0
  115. clarifai/client/__pycache__/deployment.cpython-311.pyc +0 -0
  116. clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
  117. clarifai/client/__pycache__/input.cpython-311.pyc +0 -0
  118. clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
  119. clarifai/client/__pycache__/lister.cpython-311.pyc +0 -0
  120. clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
  121. clarifai/client/__pycache__/model.cpython-311.pyc +0 -0
  122. clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
  123. clarifai/client/__pycache__/module.cpython-311.pyc +0 -0
  124. clarifai/client/__pycache__/nodepool.cpython-310.pyc +0 -0
  125. clarifai/client/__pycache__/nodepool.cpython-311.pyc +0 -0
  126. clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
  127. clarifai/client/__pycache__/search.cpython-311.pyc +0 -0
  128. clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
  129. clarifai/client/__pycache__/user.cpython-311.pyc +0 -0
  130. clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
  131. clarifai/client/__pycache__/workflow.cpython-311.pyc +0 -0
  132. clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
  133. clarifai/client/auth/__pycache__/__init__.cpython-311.pyc +0 -0
  134. clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
  135. clarifai/client/auth/__pycache__/helper.cpython-311.pyc +0 -0
  136. clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
  137. clarifai/client/auth/__pycache__/register.cpython-311.pyc +0 -0
  138. clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
  139. clarifai/client/auth/__pycache__/stub.cpython-311.pyc +0 -0
  140. clarifai/client/cli/__init__.py +0 -0
  141. clarifai/client/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  142. clarifai/client/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  143. clarifai/client/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  144. clarifai/client/cli/base_cli.py +0 -88
  145. clarifai/client/cli/model_cli.py +0 -29
  146. clarifai/constants/__pycache__/base.cpython-310.pyc +0 -0
  147. clarifai/constants/__pycache__/base.cpython-311.pyc +0 -0
  148. clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
  149. clarifai/constants/__pycache__/dataset.cpython-311.pyc +0 -0
  150. clarifai/constants/__pycache__/input.cpython-310.pyc +0 -0
  151. clarifai/constants/__pycache__/input.cpython-311.pyc +0 -0
  152. clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
  153. clarifai/constants/__pycache__/model.cpython-311.pyc +0 -0
  154. clarifai/constants/__pycache__/rag.cpython-310.pyc +0 -0
  155. clarifai/constants/__pycache__/rag.cpython-311.pyc +0 -0
  156. clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
  157. clarifai/constants/__pycache__/search.cpython-311.pyc +0 -0
  158. clarifai/constants/__pycache__/workflow.cpython-310.pyc +0 -0
  159. clarifai/constants/__pycache__/workflow.cpython-311.pyc +0 -0
  160. clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  161. clarifai/datasets/__pycache__/__init__.cpython-311.pyc +0 -0
  162. clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
  163. clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
  164. clarifai/datasets/export/__pycache__/__init__.cpython-311.pyc +0 -0
  165. clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
  166. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
  167. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-311.pyc +0 -0
  168. clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
  169. clarifai/datasets/upload/__pycache__/__init__.cpython-311.pyc +0 -0
  170. clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
  171. clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
  172. clarifai/datasets/upload/__pycache__/base.cpython-311.pyc +0 -0
  173. clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
  174. clarifai/datasets/upload/__pycache__/features.cpython-311.pyc +0 -0
  175. clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
  176. clarifai/datasets/upload/__pycache__/image.cpython-311.pyc +0 -0
  177. clarifai/datasets/upload/__pycache__/multimodal.cpython-310.pyc +0 -0
  178. clarifai/datasets/upload/__pycache__/multimodal.cpython-311.pyc +0 -0
  179. clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
  180. clarifai/datasets/upload/__pycache__/text.cpython-311.pyc +0 -0
  181. clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
  182. clarifai/datasets/upload/__pycache__/utils.cpython-311.pyc +0 -0
  183. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-311.pyc +0 -0
  184. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-39.pyc +0 -0
  185. clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-311.pyc +0 -0
  186. clarifai/datasets/upload/loaders/__pycache__/imagenet_classification.cpython-311.pyc +0 -0
  187. clarifai/models/__pycache__/__init__.cpython-39.pyc +0 -0
  188. clarifai/modules/__pycache__/__init__.cpython-39.pyc +0 -0
  189. clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
  190. clarifai/rag/__pycache__/__init__.cpython-311.pyc +0 -0
  191. clarifai/rag/__pycache__/__init__.cpython-39.pyc +0 -0
  192. clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
  193. clarifai/rag/__pycache__/rag.cpython-311.pyc +0 -0
  194. clarifai/rag/__pycache__/rag.cpython-39.pyc +0 -0
  195. clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
  196. clarifai/rag/__pycache__/utils.cpython-311.pyc +0 -0
  197. clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
  198. clarifai/runners/__pycache__/__init__.cpython-311.pyc +0 -0
  199. clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
  200. clarifai/runners/dockerfile_template/Dockerfile.cpu.template +0 -31
  201. clarifai/runners/dockerfile_template/Dockerfile.cuda.template +0 -42
  202. clarifai/runners/dockerfile_template/Dockerfile.nim +0 -71
  203. clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
  204. clarifai/runners/models/__pycache__/__init__.cpython-311.pyc +0 -0
  205. clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
  206. clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
  207. clarifai/runners/models/__pycache__/base_typed_model.cpython-311.pyc +0 -0
  208. clarifai/runners/models/__pycache__/base_typed_model.cpython-39.pyc +0 -0
  209. clarifai/runners/models/__pycache__/model_builder.cpython-311.pyc +0 -0
  210. clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
  211. clarifai/runners/models/__pycache__/model_class.cpython-311.pyc +0 -0
  212. clarifai/runners/models/__pycache__/model_run_locally.cpython-310-pytest-7.1.2.pyc +0 -0
  213. clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
  214. clarifai/runners/models/__pycache__/model_run_locally.cpython-311.pyc +0 -0
  215. clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
  216. clarifai/runners/models/__pycache__/model_runner.cpython-311.pyc +0 -0
  217. clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
  218. clarifai/runners/models/base_typed_model.py +0 -238
  219. clarifai/runners/models/model_class_refract.py +0 -80
  220. clarifai/runners/models/model_upload.py +0 -607
  221. clarifai/runners/models/temp.py +0 -25
  222. clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  223. clarifai/runners/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  224. clarifai/runners/utils/__pycache__/__init__.cpython-38.pyc +0 -0
  225. clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  226. clarifai/runners/utils/__pycache__/buffered_stream.cpython-310.pyc +0 -0
  227. clarifai/runners/utils/__pycache__/buffered_stream.cpython-38.pyc +0 -0
  228. clarifai/runners/utils/__pycache__/buffered_stream.cpython-39.pyc +0 -0
  229. clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
  230. clarifai/runners/utils/__pycache__/const.cpython-311.pyc +0 -0
  231. clarifai/runners/utils/__pycache__/constants.cpython-310.pyc +0 -0
  232. clarifai/runners/utils/__pycache__/constants.cpython-38.pyc +0 -0
  233. clarifai/runners/utils/__pycache__/constants.cpython-39.pyc +0 -0
  234. clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
  235. clarifai/runners/utils/__pycache__/data_handler.cpython-311.pyc +0 -0
  236. clarifai/runners/utils/__pycache__/data_handler.cpython-38.pyc +0 -0
  237. clarifai/runners/utils/__pycache__/data_handler.cpython-39.pyc +0 -0
  238. clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
  239. clarifai/runners/utils/__pycache__/data_utils.cpython-311.pyc +0 -0
  240. clarifai/runners/utils/__pycache__/data_utils.cpython-38.pyc +0 -0
  241. clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
  242. clarifai/runners/utils/__pycache__/grpc_server.cpython-310.pyc +0 -0
  243. clarifai/runners/utils/__pycache__/grpc_server.cpython-38.pyc +0 -0
  244. clarifai/runners/utils/__pycache__/grpc_server.cpython-39.pyc +0 -0
  245. clarifai/runners/utils/__pycache__/health.cpython-310.pyc +0 -0
  246. clarifai/runners/utils/__pycache__/health.cpython-38.pyc +0 -0
  247. clarifai/runners/utils/__pycache__/health.cpython-39.pyc +0 -0
  248. clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
  249. clarifai/runners/utils/__pycache__/loader.cpython-311.pyc +0 -0
  250. clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
  251. clarifai/runners/utils/__pycache__/logging.cpython-38.pyc +0 -0
  252. clarifai/runners/utils/__pycache__/logging.cpython-39.pyc +0 -0
  253. clarifai/runners/utils/__pycache__/stream_source.cpython-310.pyc +0 -0
  254. clarifai/runners/utils/__pycache__/stream_source.cpython-39.pyc +0 -0
  255. clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
  256. clarifai/runners/utils/__pycache__/url_fetcher.cpython-311.pyc +0 -0
  257. clarifai/runners/utils/__pycache__/url_fetcher.cpython-38.pyc +0 -0
  258. clarifai/runners/utils/__pycache__/url_fetcher.cpython-39.pyc +0 -0
  259. clarifai/runners/utils/data_handler.py +0 -231
  260. clarifai/runners/utils/data_handler_refract.py +0 -213
  261. clarifai/runners/utils/data_types.py +0 -469
  262. clarifai/runners/utils/logger.py +0 -0
  263. clarifai/runners/utils/openai_format.py +0 -87
  264. clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
  265. clarifai/schema/__pycache__/search.cpython-311.pyc +0 -0
  266. clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
  267. clarifai/urls/__pycache__/helper.cpython-311.pyc +0 -0
  268. clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  269. clarifai/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  270. clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  271. clarifai/utils/__pycache__/cli.cpython-310.pyc +0 -0
  272. clarifai/utils/__pycache__/cli.cpython-311.pyc +0 -0
  273. clarifai/utils/__pycache__/config.cpython-311.pyc +0 -0
  274. clarifai/utils/__pycache__/constants.cpython-310.pyc +0 -0
  275. clarifai/utils/__pycache__/constants.cpython-311.pyc +0 -0
  276. clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
  277. clarifai/utils/__pycache__/logging.cpython-311.pyc +0 -0
  278. clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
  279. clarifai/utils/__pycache__/misc.cpython-311.pyc +0 -0
  280. clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
  281. clarifai/utils/__pycache__/model_train.cpython-311.pyc +0 -0
  282. clarifai/utils/__pycache__/protobuf.cpython-311.pyc +0 -0
  283. clarifai/utils/evaluation/__pycache__/__init__.cpython-311.pyc +0 -0
  284. clarifai/utils/evaluation/__pycache__/__init__.cpython-39.pyc +0 -0
  285. clarifai/utils/evaluation/__pycache__/helpers.cpython-311.pyc +0 -0
  286. clarifai/utils/evaluation/__pycache__/main.cpython-311.pyc +0 -0
  287. clarifai/utils/evaluation/__pycache__/main.cpython-39.pyc +0 -0
  288. clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
  289. clarifai/workflows/__pycache__/__init__.cpython-311.pyc +0 -0
  290. clarifai/workflows/__pycache__/__init__.cpython-39.pyc +0 -0
  291. clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
  292. clarifai/workflows/__pycache__/export.cpython-311.pyc +0 -0
  293. clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
  294. clarifai/workflows/__pycache__/utils.cpython-311.pyc +0 -0
  295. clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
  296. clarifai/workflows/__pycache__/validate.cpython-311.pyc +0 -0
  297. clarifai-11.3.0rc2.dist-info/RECORD +0 -322
  298. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/entry_points.txt +0 -0
  299. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info/licenses}/LICENSE +0 -0
  300. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/top_level.txt +0 -0
clarifai/utils/logging.py CHANGED
@@ -17,8 +17,22 @@ JSON_LOGGER_NAME = "clarifai-json"
17
17
  JSON_LOG_KEY = 'msg'
18
18
  JSON_DEFAULT_CHAR_LENGTH = 400
19
19
  FIELD_BLACKLIST = [
20
- 'msg', 'message', 'account', 'levelno', 'created', 'threadName', 'name', 'processName',
21
- 'module', 'funcName', 'msecs', 'relativeCreated', 'pathname', 'args', 'thread', 'process'
20
+ 'msg',
21
+ 'message',
22
+ 'account',
23
+ 'levelno',
24
+ 'created',
25
+ 'threadName',
26
+ 'name',
27
+ 'processName',
28
+ 'module',
29
+ 'funcName',
30
+ 'msecs',
31
+ 'relativeCreated',
32
+ 'pathname',
33
+ 'args',
34
+ 'thread',
35
+ 'process',
22
36
  ]
23
37
  COLORS = {
24
38
  'ARGUMENTS': '\033[90m', # Gray
@@ -28,12 +42,14 @@ COLORS = {
28
42
  'ERROR': '\033[31m', # Red
29
43
  'CRITICAL': '\033[31m', # Red
30
44
  'TIME': '\033[34m',
31
- 'RESET': '\033[0m'
45
+ 'RESET': '\033[0m',
32
46
  }
33
- LOG_FORMAT = f"[%(levelname)s] {COLORS.get('TIME')}%(asctime)s{COLORS.get('RESET')} %(message)s |" \
34
- f"{COLORS.get('ARGUMENTS')} " \
35
- f"%(optional_args)s " \
36
- f"thread=%(thread)d {COLORS.get('RESET')}"
47
+ LOG_FORMAT = (
48
+ f"[%(levelname)s] {COLORS.get('TIME')}%(asctime)s{COLORS.get('RESET')} %(message)s |"
49
+ f"{COLORS.get('ARGUMENTS')} "
50
+ f"%(optional_args)s "
51
+ f"thread=%(thread)d {COLORS.get('RESET')}"
52
+ )
37
53
 
38
54
  # Create thread local storage that the format() call below uses.
39
55
  # This is only used by the json_logger in the appropriate CLARIFAI_DEPLOY levels.
@@ -41,378 +57,380 @@ thread_log_info = threading.local()
41
57
 
42
58
 
43
59
  def get_logger_context():
44
- return thread_log_info.__dict__
60
+ return thread_log_info.__dict__
45
61
 
46
62
 
47
63
  def set_logger_context(**kwargs):
48
- thread_log_info.__dict__.update(kwargs)
64
+ thread_log_info.__dict__.update(kwargs)
49
65
 
50
66
 
51
67
  def clear_logger_context():
52
- thread_log_info.__dict__.clear()
68
+ thread_log_info.__dict__.clear()
53
69
 
54
70
 
55
71
  def restore_logger_context(context):
56
- thread_log_info.__dict__.clear()
57
- thread_log_info.__dict__.update(context)
72
+ thread_log_info.__dict__.clear()
73
+ thread_log_info.__dict__.update(context)
58
74
 
59
75
 
60
76
  def get_req_id_from_context():
61
- ctx = get_logger_context()
62
- return ctx.get('req_id', '')
77
+ ctx = get_logger_context()
78
+ return ctx.get('req_id', '')
63
79
 
64
80
 
65
81
  def display_workflow_tree(nodes_data: List[Dict]) -> None:
66
- """Displays a tree of the workflow nodes."""
67
- from rich import print as rprint
68
- from rich.tree import Tree
69
-
70
- # Create a mapping of node_id to the list of node_ids that are connected to it.
71
- node_adj_mapping = defaultdict(list)
72
- # Create a mapping of node_id to the node data info.
73
- nodes_data_dict = {}
74
- for node in nodes_data:
75
- nodes_data_dict[node["id"]] = node
76
- if node.get("node_inputs", "") == "":
77
- node_adj_mapping["Input"].append(node["id"])
78
- else:
79
- for node_input in node["node_inputs"]:
80
- node_adj_mapping[node_input["node_id"]].append(node["id"])
81
-
82
- # Get all leaf nodes.
83
- leaf_node_ids = set()
84
- for node_id in list(nodes_data_dict.keys()):
85
- if node_adj_mapping.get(node_id, "") == "":
86
- leaf_node_ids.add(node_id)
87
-
88
- def build_node_tree(node_id="Input"):
89
- """Recursively builds a rich tree of the workflow nodes."""
90
- # Set the style of the current node.
91
- style_str = "green" if node_id in leaf_node_ids else "white"
92
-
93
- # Create a Tree object for the current node.
94
- if node_id != "Input":
95
- node_table = table_from_dict(
96
- [nodes_data_dict[node_id]["model"]],
97
- column_names=["id", "model_type_id", "app_id", "user_id"],
98
- title="Node: " + node_id)
99
-
100
- tree = Tree(node_table, style=style_str, guide_style="underline2 white")
101
- else:
102
- tree = Tree(f"[green] {node_id}", style=style_str, guide_style="underline2 white")
82
+ """Displays a tree of the workflow nodes."""
83
+ from rich import print as rprint
84
+ from rich.tree import Tree
85
+
86
+ # Create a mapping of node_id to the list of node_ids that are connected to it.
87
+ node_adj_mapping = defaultdict(list)
88
+ # Create a mapping of node_id to the node data info.
89
+ nodes_data_dict = {}
90
+ for node in nodes_data:
91
+ nodes_data_dict[node["id"]] = node
92
+ if node.get("node_inputs", "") == "":
93
+ node_adj_mapping["Input"].append(node["id"])
94
+ else:
95
+ for node_input in node["node_inputs"]:
96
+ node_adj_mapping[node_input["node_id"]].append(node["id"])
97
+
98
+ # Get all leaf nodes.
99
+ leaf_node_ids = set()
100
+ for node_id in list(nodes_data_dict.keys()):
101
+ if node_adj_mapping.get(node_id, "") == "":
102
+ leaf_node_ids.add(node_id)
103
+
104
+ def build_node_tree(node_id="Input"):
105
+ """Recursively builds a rich tree of the workflow nodes."""
106
+ # Set the style of the current node.
107
+ style_str = "green" if node_id in leaf_node_ids else "white"
108
+
109
+ # Create a Tree object for the current node.
110
+ if node_id != "Input":
111
+ node_table = table_from_dict(
112
+ [nodes_data_dict[node_id]["model"]],
113
+ column_names=["id", "model_type_id", "app_id", "user_id"],
114
+ title="Node: " + node_id,
115
+ )
116
+
117
+ tree = Tree(node_table, style=style_str, guide_style="underline2 white")
118
+ else:
119
+ tree = Tree(f"[green] {node_id}", style=style_str, guide_style="underline2 white")
103
120
 
104
- # Recursively add the child nodes of the current node to the tree.
105
- for child in node_adj_mapping.get(node_id, []):
106
- tree.add(build_node_tree(child))
121
+ # Recursively add the child nodes of the current node to the tree.
122
+ for child in node_adj_mapping.get(node_id, []):
123
+ tree.add(build_node_tree(child))
107
124
 
108
- # Return the tree.
109
- return tree
125
+ # Return the tree.
126
+ return tree
110
127
 
111
- tree = build_node_tree("Input")
112
- rprint(tree)
128
+ tree = build_node_tree("Input")
129
+ rprint(tree)
113
130
 
114
131
 
115
- def table_from_dict(data: List[Dict], column_names: List[str],
116
- title: str = "") -> 'rich.Table': #noqa F821
117
- """Use this function for printing tables from a list of dicts."""
118
- from rich.table import Table
119
- table = Table(title=title, show_lines=False, show_header=True, header_style="blue")
120
- for column_name in column_names:
121
- table.add_column(column_name)
122
- for row in data:
123
- req_row = [row.get(column_name, "") for column_name in column_names]
124
- table.add_row(*req_row)
125
- return table
132
+ def table_from_dict(data: List[Dict], column_names: List[str], title: str = "") -> 'rich.Table': # noqa F821
133
+ """Use this function for printing tables from a list of dicts."""
134
+ from rich.table import Table
135
+
136
+ table = Table(title=title, show_lines=False, show_header=True, header_style="blue")
137
+ for column_name in column_names:
138
+ table.add_column(column_name)
139
+ for row in data:
140
+ req_row = [row.get(column_name, "") for column_name in column_names]
141
+ table.add_row(*req_row)
142
+ return table
126
143
 
127
144
 
128
145
  def _get_library_name() -> str:
129
- return __name__.split(".")[0]
146
+ return __name__.split(".")[0]
130
147
 
131
148
 
132
149
  def _configure_logger(name: str, logger_level: Union[int, str] = logging.NOTSET) -> None:
133
- """Configure the logger with the specified name."""
134
-
135
- logger = logging.getLogger(name)
136
- logger.setLevel(logger_level)
137
-
138
- # Remove existing handlers
139
- for handler in logger.handlers[:]:
140
- logger.removeHandler(handler)
141
-
142
- # If ENABLE_JSON_LOGGER is 'true' then definitely use json logger.
143
- # If ENABLE_JSON_LOGGER is 'false' then definitely don't use json logger.
144
- # If ENABLE_JSON_LOGGER is not set, then use json logger if in k8s.
145
- enabled_json = os.getenv('ENABLE_JSON_LOGGER', None)
146
- in_k8s = 'KUBERNETES_SERVICE_HOST' in os.environ
147
- handler = logging.StreamHandler()
148
- handler.setLevel(logger_level)
149
- if enabled_json == 'true' or (in_k8s and enabled_json != 'false'):
150
- # Add the json handler and formatter
151
- formatter = JsonFormatter()
152
- handler.setFormatter(formatter)
153
- else:
154
- # create formatter and add it to the handlers
155
- formatter = TerminalFormatter(LOG_FORMAT)
156
- handler.setFormatter(formatter)
157
- # add the handlers to the logger
158
- logger.addHandler(handler)
150
+ """Configure the logger with the specified name."""
151
+
152
+ logger = logging.getLogger(name)
153
+ logger.setLevel(logger_level)
154
+
155
+ # Remove existing handlers
156
+ for handler in logger.handlers[:]:
157
+ logger.removeHandler(handler)
158
+
159
+ # If ENABLE_JSON_LOGGER is 'true' then definitely use json logger.
160
+ # If ENABLE_JSON_LOGGER is 'false' then definitely don't use json logger.
161
+ # If ENABLE_JSON_LOGGER is not set, then use json logger if in k8s.
162
+ enabled_json = os.getenv('ENABLE_JSON_LOGGER', None)
163
+ in_k8s = 'KUBERNETES_SERVICE_HOST' in os.environ
164
+ handler = logging.StreamHandler()
165
+ handler.setLevel(logger_level)
166
+ if enabled_json == 'true' or (in_k8s and enabled_json != 'false'):
167
+ # Add the json handler and formatter
168
+ formatter = JsonFormatter()
169
+ handler.setFormatter(formatter)
170
+ else:
171
+ # create formatter and add it to the handlers
172
+ formatter = TerminalFormatter(LOG_FORMAT)
173
+ handler.setFormatter(formatter)
174
+ # add the handlers to the logger
175
+ logger.addHandler(handler)
159
176
 
160
177
 
161
- def get_logger(logger_level: Union[int, str] = logging.NOTSET,
162
- name: Optional[str] = None) -> logging.Logger:
163
- """Return a logger with the specified name."""
178
+ def get_logger(
179
+ logger_level: Union[int, str] = logging.NOTSET, name: Optional[str] = None
180
+ ) -> logging.Logger:
181
+ """Return a logger with the specified name."""
164
182
 
165
- if name is None:
166
- name = _get_library_name()
183
+ if name is None:
184
+ name = _get_library_name()
167
185
 
168
- _configure_logger(name, logger_level)
169
- return logging.getLogger(name)
186
+ _configure_logger(name, logger_level)
187
+ return logging.getLogger(name)
170
188
 
171
189
 
172
190
  def add_file_handler(logger: logging.Logger, file_path: str, log_level: str = 'WARNING') -> None:
173
- """Add a file handler to the logger."""
174
- file_handler = logging.FileHandler(file_path)
175
- file_handler.setLevel(log_level)
176
- logger.addHandler(file_handler)
191
+ """Add a file handler to the logger."""
192
+ file_handler = logging.FileHandler(file_path)
193
+ file_handler.setLevel(log_level)
194
+ logger.addHandler(file_handler)
177
195
 
178
196
 
179
- def process_log_files(log_file_path: str,) -> tuple:
180
- """Processes log files to get failed inputs and annotations.
197
+ def process_log_files(
198
+ log_file_path: str,
199
+ ) -> tuple:
200
+ """Processes log files to get failed inputs and annotations.
181
201
 
182
202
  Args:
183
203
  log_file_path (str): path to the log file
184
204
  """
185
- import re
186
- duplicate_input_ids = []
187
- failed_input_ids = []
188
- pattern = re.compile(r'\| +(\d+) +\| +(\S+) +\| +(.+?) +\| +(.+?) +\| +(.+?) +\| +(.+?) \|')
189
- try:
190
- with open(log_file_path, 'r') as file:
191
- log_content = file.read()
192
- matches = pattern.findall(log_content)
193
- for match in matches:
194
- index = int(match[0])
195
- input_id = match[1]
196
- status = match[2]
197
- if status == "Input has a duplicate ID.":
198
- duplicate_input_ids.append({"Index": index, "Input_ID": input_id})
199
- else:
200
- failed_input_ids.append({"Index": index, "Input_ID": input_id})
205
+ import re
201
206
 
202
- except Exception as e:
203
- print(f"Error Processing log file {log_file_path}:{e}")
204
- return [], []
207
+ duplicate_input_ids = []
208
+ failed_input_ids = []
209
+ pattern = re.compile(r'\| +(\d+) +\| +(\S+) +\| +(.+?) +\| +(.+?) +\| +(.+?) +\| +(.+?) \|')
210
+ try:
211
+ with open(log_file_path, 'r') as file:
212
+ log_content = file.read()
213
+ matches = pattern.findall(log_content)
214
+ for match in matches:
215
+ index = int(match[0])
216
+ input_id = match[1]
217
+ status = match[2]
218
+ if status == "Input has a duplicate ID.":
219
+ duplicate_input_ids.append({"Index": index, "Input_ID": input_id})
220
+ else:
221
+ failed_input_ids.append({"Index": index, "Input_ID": input_id})
205
222
 
206
- return duplicate_input_ids, failed_input_ids
223
+ except Exception as e:
224
+ print(f"Error Processing log file {log_file_path}:{e}")
225
+ return [], []
226
+
227
+ return duplicate_input_ids, failed_input_ids
207
228
 
208
229
 
209
230
  def display_concept_relations_tree(relations_dict: Dict[str, Any]) -> None:
210
- """Print all the concept relations of the app in rich tree format.
231
+ """Print all the concept relations of the app in rich tree format.
211
232
 
212
233
  Args:
213
234
  relations_dict (dict): A dict of concept relations info.
214
235
  """
215
- from rich import print as rprint
216
- from rich.tree import Tree
217
- for parent, children in relations_dict.items():
218
- tree = Tree(parent)
219
- for child in children:
220
- tree.add(child)
221
- rprint(tree)
236
+ from rich import print as rprint
237
+ from rich.tree import Tree
238
+
239
+ for parent, children in relations_dict.items():
240
+ tree = Tree(parent)
241
+ for child in children:
242
+ tree.add(child)
243
+ rprint(tree)
222
244
 
223
245
 
224
246
  def _default_json_default(obj):
225
- """
226
- Handle objects that could not be serialized to JSON automatically.
247
+ """
248
+ Handle objects that could not be serialized to JSON automatically.
227
249
 
228
- Coerce everything to strings.
229
- All objects representing time get output as ISO8601.
230
- """
231
- if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
232
- return obj.isoformat()
233
- else:
234
- return _object_to_string_with_truncation(obj)
250
+ Coerce everything to strings.
251
+ All objects representing time get output as ISO8601.
252
+ """
253
+ if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
254
+ return obj.isoformat()
255
+ else:
256
+ return _object_to_string_with_truncation(obj)
235
257
 
236
258
 
237
259
  def _object_to_string_with_truncation(obj) -> str:
238
- """
239
- Truncate object string.
240
-
241
- It's preferred to not log objects that could cause triggering this function,
242
- It's better to extract important parts form them and log them as regular Python types,
243
- like str or int, which won't be passed to this functon.
244
-
245
- This message brings additional information to the logs
246
- that could help to find and fix truncation cases.
247
- - hardcoded part of the message could be used for the looking all entries in logs
248
- - obj class could help with detail investigation
249
- """
250
-
251
- objstr = str(obj)
252
- if len(objstr) > JSON_DEFAULT_CHAR_LENGTH:
253
- type_name = type(obj).__name__
254
- truncated = objstr[:JSON_DEFAULT_CHAR_LENGTH]
255
- objstr = f"{truncated}...[{type_name} was truncated, len={len(objstr)} chars]"
256
- return objstr
260
+ """
261
+ Truncate object string.
257
262
 
263
+ It's preferred to not log objects that could cause triggering this function,
264
+ It's better to extract important parts form them and log them as regular Python types,
265
+ like str or int, which won't be passed to this functon.
258
266
 
259
- class JsonFormatter(logging.Formatter):
260
-
261
- def __init__(self,
262
- fmt=None,
263
- datefmt=None,
264
- style='%',
265
- json_cls=None,
266
- json_default=_default_json_default):
267
- """
268
- :param fmt: Config as a JSON string, allowed fields;
269
- extra: provide extra fields always present in logs
270
- source_host: override source host name
271
- :param datefmt: Date format to use (required by logging.Formatter
272
- interface but not used)
273
- :param json_cls: JSON encoder to forward to json.dumps
274
- :param json_default: Default JSON representation for unknown types,
275
- by default coerce everything to a string
267
+ This message brings additional information to the logs
268
+ that could help to find and fix truncation cases.
269
+ - hardcoded part of the message could be used for the looking all entries in logs
270
+ - obj class could help with detail investigation
276
271
  """
277
272
 
278
- if fmt is not None:
279
- self._fmt = json.loads(fmt)
280
- else:
281
- self._fmt = {}
282
- self.json_default = json_default
283
- self.json_cls = json_cls
284
- if 'extra' not in self._fmt:
285
- self.defaults = {}
286
- else:
287
- self.defaults = self._fmt['extra']
288
- if 'source_host' in self._fmt:
289
- self.source_host = self._fmt['source_host']
290
- else:
291
- try:
292
- self.source_host = socket.gethostname()
293
- except Exception:
294
- self.source_host = ""
295
-
296
- self.extra_blacklist_fields = []
297
- extra_blacklist_fields = os.getenv('EXTRA_JSON_LOGGER_BLACKLIST_FIELDS', None)
298
- if extra_blacklist_fields:
299
- self.extra_blacklist_fields = extra_blacklist_fields.split(",")
300
-
301
- def _build_fields(self, defaults, fields):
302
- """Return provided fields including any in defaults
303
- """
304
- return dict(list(defaults.get('@fields', {}).items()) + list(fields.items()))
305
-
306
- # Override the format function to fit Clarifai
307
- def format(self, record):
308
- fields = record.__dict__.copy()
309
-
310
- # logger.info({...}) directly.
311
- if isinstance(record.msg, dict):
312
- fields.update(record.msg)
313
- fields.pop('msg')
314
- msg = ""
315
- else: # logger.info("message", {...})
316
- if isinstance(record.args, dict):
317
- fields.update(record.args)
318
- msg = record.getMessage()
319
- for k in FIELD_BLACKLIST:
320
- fields.pop(k, None)
321
- for k in self.extra_blacklist_fields:
322
- fields.pop(k, None)
323
- # Rename 'levelname' to 'level' and make the value lowercase to match Go logs
324
- level = fields.pop('levelname', None)
325
- if level:
326
- fields['level'] = level.lower()
327
-
328
- # Get the thread local data
329
- req_id = getattr(thread_log_info, 'req_id', None)
330
- if req_id:
331
- fields['req_id'] = req_id
332
- orig_req_id = getattr(thread_log_info, 'orig_req_id', None)
333
- if orig_req_id:
334
- fields['orig_req_id'] = orig_req_id
335
- # Get the thread local data
336
- requester = getattr(thread_log_info, 'requester', None)
337
- if requester:
338
- fields['requester'] = requester
339
-
340
- user_id = getattr(thread_log_info, 'user_id', None)
341
- if requester:
342
- fields['user_id'] = user_id
343
-
344
- if hasattr(thread_log_info, 'start_time'):
345
- #pylint: disable=no-member
346
- fields['duration_ms'] = (time.time() - thread_log_info.start_time) * 1000
347
-
348
- if 'exc_info' in fields:
349
- if fields['exc_info']:
350
- formatted = traceback.format_exception(*fields['exc_info'])
351
- fields['exception'] = formatted
352
-
353
- fields.pop('exc_info')
354
-
355
- if 'exc_text' in fields and not fields['exc_text']:
356
- fields.pop('exc_text')
357
-
358
- logr = self.defaults.copy()
359
-
360
- logr.update({
361
- JSON_LOG_KEY: msg,
362
- '@timestamp': datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
363
- })
364
-
365
- logr.update(fields)
273
+ objstr = str(obj)
274
+ if len(objstr) > JSON_DEFAULT_CHAR_LENGTH:
275
+ type_name = type(obj).__name__
276
+ truncated = objstr[:JSON_DEFAULT_CHAR_LENGTH]
277
+ objstr = f"{truncated}...[{type_name} was truncated, len={len(objstr)} chars]"
278
+ return objstr
366
279
 
367
- try:
368
- return json.dumps(logr, default=self.json_default, cls=self.json_cls)
369
- except Exception:
370
280
 
371
- type, value, tb = sys.exc_info()
372
- return json.dumps(
373
- {
374
- "msg": f"Fail to format log {type.__name__}({value}), {logr}",
375
- "formatting_traceback": "\n".join(traceback.format_tb(tb)),
376
- },
377
- default=self.json_default,
378
- cls=self.json_cls,
379
- )
281
+ class JsonFormatter(logging.Formatter):
282
+ def __init__(
283
+ self, fmt=None, datefmt=None, style='%', json_cls=None, json_default=_default_json_default
284
+ ):
285
+ """
286
+ :param fmt: Config as a JSON string, allowed fields;
287
+ extra: provide extra fields always present in logs
288
+ source_host: override source host name
289
+ :param datefmt: Date format to use (required by logging.Formatter
290
+ interface but not used)
291
+ :param json_cls: JSON encoder to forward to json.dumps
292
+ :param json_default: Default JSON representation for unknown types,
293
+ by default coerce everything to a string
294
+ """
295
+
296
+ if fmt is not None:
297
+ self._fmt = json.loads(fmt)
298
+ else:
299
+ self._fmt = {}
300
+ self.json_default = json_default
301
+ self.json_cls = json_cls
302
+ if 'extra' not in self._fmt:
303
+ self.defaults = {}
304
+ else:
305
+ self.defaults = self._fmt['extra']
306
+ if 'source_host' in self._fmt:
307
+ self.source_host = self._fmt['source_host']
308
+ else:
309
+ try:
310
+ self.source_host = socket.gethostname()
311
+ except Exception:
312
+ self.source_host = ""
313
+
314
+ self.extra_blacklist_fields = []
315
+ extra_blacklist_fields = os.getenv('EXTRA_JSON_LOGGER_BLACKLIST_FIELDS', None)
316
+ if extra_blacklist_fields:
317
+ self.extra_blacklist_fields = extra_blacklist_fields.split(",")
318
+
319
+ def _build_fields(self, defaults, fields):
320
+ """Return provided fields including any in defaults"""
321
+ return dict(list(defaults.get('@fields', {}).items()) + list(fields.items()))
322
+
323
+ # Override the format function to fit Clarifai
324
+ def format(self, record):
325
+ fields = record.__dict__.copy()
326
+
327
+ # logger.info({...}) directly.
328
+ if isinstance(record.msg, dict):
329
+ fields.update(record.msg)
330
+ fields.pop('msg')
331
+ msg = ""
332
+ else: # logger.info("message", {...})
333
+ if isinstance(record.args, dict):
334
+ fields.update(record.args)
335
+ msg = record.getMessage()
336
+ for k in FIELD_BLACKLIST:
337
+ fields.pop(k, None)
338
+ for k in self.extra_blacklist_fields:
339
+ fields.pop(k, None)
340
+ # Rename 'levelname' to 'level' and make the value lowercase to match Go logs
341
+ level = fields.pop('levelname', None)
342
+ if level:
343
+ fields['level'] = level.lower()
344
+
345
+ # Get the thread local data
346
+ req_id = getattr(thread_log_info, 'req_id', None)
347
+ if req_id:
348
+ fields['req_id'] = req_id
349
+ orig_req_id = getattr(thread_log_info, 'orig_req_id', None)
350
+ if orig_req_id:
351
+ fields['orig_req_id'] = orig_req_id
352
+ # Get the thread local data
353
+ requester = getattr(thread_log_info, 'requester', None)
354
+ if requester:
355
+ fields['requester'] = requester
356
+
357
+ user_id = getattr(thread_log_info, 'user_id', None)
358
+ if requester:
359
+ fields['user_id'] = user_id
360
+
361
+ if hasattr(thread_log_info, 'start_time'):
362
+ # pylint: disable=no-member
363
+ fields['duration_ms'] = (time.time() - thread_log_info.start_time) * 1000
364
+
365
+ if 'exc_info' in fields:
366
+ if fields['exc_info']:
367
+ formatted = traceback.format_exception(*fields['exc_info'])
368
+ fields['exception'] = formatted
369
+
370
+ fields.pop('exc_info')
371
+
372
+ if 'exc_text' in fields and not fields['exc_text']:
373
+ fields.pop('exc_text')
374
+
375
+ logr = self.defaults.copy()
376
+
377
+ logr.update(
378
+ {
379
+ JSON_LOG_KEY: msg,
380
+ '@timestamp': datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
381
+ }
382
+ )
383
+
384
+ logr.update(fields)
385
+
386
+ try:
387
+ return json.dumps(logr, default=self.json_default, cls=self.json_cls)
388
+ except Exception:
389
+ type, value, tb = sys.exc_info()
390
+ return json.dumps(
391
+ {
392
+ "msg": f"Fail to format log {type.__name__}({value}), {logr}",
393
+ "formatting_traceback": "\n".join(traceback.format_tb(tb)),
394
+ },
395
+ default=self.json_default,
396
+ cls=self.json_cls,
397
+ )
380
398
 
381
399
 
382
400
  class TerminalFormatter(logging.Formatter):
383
- """ If you have fields in your Formatter (see setup_logger where we setup the format strings) then
384
- you can set them on the record using a filter. We do that for req_id here which is a request
385
- specific field. This allows us to find requests easily between services.
386
- """
401
+ """If you have fields in your Formatter (see setup_logger where we setup the format strings) then
402
+ you can set them on the record using a filter. We do that for req_id here which is a request
403
+ specific field. This allows us to find requests easily between services.
404
+ """
387
405
 
388
- def format(self, record):
389
- record.optional_args = []
406
+ def format(self, record):
407
+ record.optional_args = []
390
408
 
391
- user_id = getattr(thread_log_info, 'user_id', None)
392
- if user_id is not None:
393
- record.optional_args.append("user_id=" + user_id)
409
+ user_id = getattr(thread_log_info, 'user_id', None)
410
+ if user_id is not None:
411
+ record.optional_args.append("user_id=" + user_id)
394
412
 
395
- app_id = getattr(thread_log_info, 'app_id', None)
396
- if app_id is not None:
397
- record.optional_args.append("app_id=" + app_id)
413
+ app_id = getattr(thread_log_info, 'app_id', None)
414
+ if app_id is not None:
415
+ record.optional_args.append("app_id=" + app_id)
398
416
 
399
- req_id = getattr(thread_log_info, 'req_id', None)
400
- if req_id is not None:
401
- record.optional_args.append("req_id=" + req_id)
417
+ req_id = getattr(thread_log_info, 'req_id', None)
418
+ if req_id is not None:
419
+ record.optional_args.append("req_id=" + req_id)
402
420
 
403
- record.optional_args = " ".join(record.optional_args)
421
+ record.optional_args = " ".join(record.optional_args)
404
422
 
405
- color_code = COLORS.get(record.levelname, '')
423
+ color_code = COLORS.get(record.levelname, '')
406
424
 
407
- record.levelname = f"{color_code}{record.levelname}{COLORS.get('RESET')}"
408
- record.msg = f"{color_code}{str(record.msg)}{COLORS.get('RESET')}"
425
+ record.levelname = f"{color_code}{record.levelname}{COLORS.get('RESET')}"
426
+ record.msg = f"{color_code}{str(record.msg)}{COLORS.get('RESET')}"
409
427
 
410
- return super(TerminalFormatter, self).format(record)
428
+ return super(TerminalFormatter, self).format(record)
411
429
 
412
- def formatTime(self, record, datefmt=None):
413
- # Note we didn't go with UTC here as it's easier to understand time in your time zone.
414
- # The json logger leverages UTC though.
415
- return datetime.datetime.fromtimestamp(record.created).strftime('%H:%M:%S.%f')
430
+ def formatTime(self, record, datefmt=None):
431
+ # Note we didn't go with UTC here as it's easier to understand time in your time zone.
432
+ # The json logger leverages UTC though.
433
+ return datetime.datetime.fromtimestamp(record.created).strftime('%H:%M:%S.%f')
416
434
 
417
435
 
418
436
  # the default logger for the SDK.