clarifai 11.3.0rc2__py3-none-any.whl → 11.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/__init__.py +1 -1
- clarifai/cli/__main__.py +1 -1
- clarifai/cli/base.py +144 -136
- clarifai/cli/compute_cluster.py +45 -31
- clarifai/cli/deployment.py +93 -76
- clarifai/cli/model.py +578 -180
- clarifai/cli/nodepool.py +100 -82
- clarifai/client/__init__.py +12 -2
- clarifai/client/app.py +973 -911
- clarifai/client/auth/helper.py +345 -342
- clarifai/client/auth/register.py +7 -7
- clarifai/client/auth/stub.py +107 -106
- clarifai/client/base.py +185 -178
- clarifai/client/compute_cluster.py +214 -180
- clarifai/client/dataset.py +793 -698
- clarifai/client/deployment.py +55 -50
- clarifai/client/input.py +1223 -1088
- clarifai/client/lister.py +47 -45
- clarifai/client/model.py +1939 -1717
- clarifai/client/model_client.py +525 -502
- clarifai/client/module.py +82 -73
- clarifai/client/nodepool.py +358 -213
- clarifai/client/runner.py +58 -0
- clarifai/client/search.py +342 -309
- clarifai/client/user.py +419 -414
- clarifai/client/workflow.py +294 -274
- clarifai/constants/dataset.py +11 -17
- clarifai/constants/model.py +8 -2
- clarifai/datasets/export/inputs_annotations.py +233 -217
- clarifai/datasets/upload/base.py +63 -51
- clarifai/datasets/upload/features.py +43 -38
- clarifai/datasets/upload/image.py +237 -207
- clarifai/datasets/upload/loaders/coco_captions.py +34 -32
- clarifai/datasets/upload/loaders/coco_detection.py +72 -65
- clarifai/datasets/upload/loaders/imagenet_classification.py +57 -53
- clarifai/datasets/upload/loaders/xview_detection.py +274 -132
- clarifai/datasets/upload/multimodal.py +55 -46
- clarifai/datasets/upload/text.py +55 -47
- clarifai/datasets/upload/utils.py +250 -234
- clarifai/errors.py +51 -50
- clarifai/models/api.py +260 -238
- clarifai/modules/css.py +50 -50
- clarifai/modules/pages.py +33 -33
- clarifai/rag/rag.py +312 -288
- clarifai/rag/utils.py +91 -84
- clarifai/runners/models/model_builder.py +906 -802
- clarifai/runners/models/model_class.py +370 -331
- clarifai/runners/models/model_run_locally.py +459 -419
- clarifai/runners/models/model_runner.py +170 -162
- clarifai/runners/models/model_servicer.py +78 -70
- clarifai/runners/server.py +111 -101
- clarifai/runners/utils/code_script.py +225 -187
- clarifai/runners/utils/const.py +4 -1
- clarifai/runners/utils/data_types/__init__.py +12 -0
- clarifai/runners/utils/data_types/data_types.py +598 -0
- clarifai/runners/utils/data_utils.py +387 -440
- clarifai/runners/utils/loader.py +247 -227
- clarifai/runners/utils/method_signatures.py +411 -386
- clarifai/runners/utils/openai_convertor.py +108 -109
- clarifai/runners/utils/serializers.py +175 -179
- clarifai/runners/utils/url_fetcher.py +35 -35
- clarifai/schema/search.py +56 -63
- clarifai/urls/helper.py +125 -102
- clarifai/utils/cli.py +129 -123
- clarifai/utils/config.py +127 -87
- clarifai/utils/constants.py +49 -0
- clarifai/utils/evaluation/helpers.py +503 -466
- clarifai/utils/evaluation/main.py +431 -393
- clarifai/utils/evaluation/testset_annotation_parser.py +154 -144
- clarifai/utils/logging.py +324 -306
- clarifai/utils/misc.py +60 -56
- clarifai/utils/model_train.py +165 -146
- clarifai/utils/protobuf.py +126 -103
- clarifai/versions.py +3 -1
- clarifai/workflows/export.py +48 -50
- clarifai/workflows/utils.py +39 -36
- clarifai/workflows/validate.py +55 -43
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/METADATA +16 -6
- clarifai-11.4.0.dist-info/RECORD +109 -0
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/WHEEL +1 -1
- clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/__pycache__/errors.cpython-310.pyc +0 -0
- clarifai/__pycache__/errors.cpython-311.pyc +0 -0
- clarifai/__pycache__/versions.cpython-310.pyc +0 -0
- clarifai/__pycache__/versions.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/compute_cluster.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/compute_cluster.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/nodepool.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/nodepool.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-311.pyc +0 -0
- clarifai/client/cli/__init__.py +0 -0
- clarifai/client/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
- clarifai/client/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
- clarifai/client/cli/base_cli.py +0 -88
- clarifai/client/cli/model_cli.py +0 -29
- clarifai/constants/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/rag.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/workflow.cpython-311.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-311.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/imagenet_classification.cpython-311.pyc +0 -0
- clarifai/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/modules/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-311.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/dockerfile_template/Dockerfile.cpu.template +0 -31
- clarifai/runners/dockerfile_template/Dockerfile.cuda.template +0 -42
- clarifai/runners/dockerfile_template/Dockerfile.nim +0 -71
- clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/model_builder.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-310-pytest-7.1.2.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
- clarifai/runners/models/base_typed_model.py +0 -238
- clarifai/runners/models/model_class_refract.py +0 -80
- clarifai/runners/models/model_upload.py +0 -607
- clarifai/runners/models/temp.py +0 -25
- clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/stream_source.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/stream_source.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-39.pyc +0 -0
- clarifai/runners/utils/data_handler.py +0 -231
- clarifai/runners/utils/data_handler_refract.py +0 -213
- clarifai/runners/utils/data_types.py +0 -469
- clarifai/runners/utils/logger.py +0 -0
- clarifai/runners/utils/openai_format.py +0 -87
- clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/schema/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/cli.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/cli.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/config.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/protobuf.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/helpers.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/main.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/main.cpython-39.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-311.pyc +0 -0
- clarifai-11.3.0rc2.dist-info/RECORD +0 -322
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/entry_points.txt +0 -0
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info/licenses}/LICENSE +0 -0
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/top_level.txt +0 -0
clarifai/utils/logging.py
CHANGED
@@ -17,8 +17,22 @@ JSON_LOGGER_NAME = "clarifai-json"
 JSON_LOG_KEY = 'msg'
 JSON_DEFAULT_CHAR_LENGTH = 400
 FIELD_BLACKLIST = [
-    'msg',
-    '…
+    'msg',
+    'message',
+    'account',
+    'levelno',
+    'created',
+    'threadName',
+    'name',
+    'processName',
+    'module',
+    'funcName',
+    'msecs',
+    'relativeCreated',
+    'pathname',
+    'args',
+    'thread',
+    'process',
 ]
 COLORS = {
     'ARGUMENTS': '\033[90m',  # Gray
@@ -28,12 +42,14 @@ COLORS = {
     'ERROR': '\033[31m',  # Red
     'CRITICAL': '\033[31m',  # Red
     'TIME': '\033[34m',
-    'RESET': '\033[0m'
+    'RESET': '\033[0m',
 }
-LOG_FORMAT = …
-  …
+LOG_FORMAT = (
+    f"[%(levelname)s] {COLORS.get('TIME')}%(asctime)s{COLORS.get('RESET')} %(message)s |"
+    f"{COLORS.get('ARGUMENTS')} "
+    f"%(optional_args)s "
+    f"thread=%(thread)d {COLORS.get('RESET')}"
+)
 
 # Create thread local storage that the format() call below uses.
 # This is only used by the json_logger in the appropriate CLARIFAI_DEPLOY levels.
@@ -41,378 +57,380 @@ thread_log_info = threading.local()
 
 
 def get_logger_context():
-  …
+    return thread_log_info.__dict__
 
 
 def set_logger_context(**kwargs):
-  …
+    thread_log_info.__dict__.update(kwargs)
 
 
 def clear_logger_context():
-  …
+    thread_log_info.__dict__.clear()
 
 
 def restore_logger_context(context):
-  …
+    thread_log_info.__dict__.clear()
+    thread_log_info.__dict__.update(context)
 
 
 def get_req_id_from_context():
-  …
+    ctx = get_logger_context()
+    return ctx.get('req_id', '')
 
 
 def display_workflow_tree(nodes_data: List[Dict]) -> None:
-  …
+    """Displays a tree of the workflow nodes."""
+    from rich import print as rprint
+    from rich.tree import Tree
+
+    # Create a mapping of node_id to the list of node_ids that are connected to it.
+    node_adj_mapping = defaultdict(list)
+    # Create a mapping of node_id to the node data info.
+    nodes_data_dict = {}
+    for node in nodes_data:
+        nodes_data_dict[node["id"]] = node
+        if node.get("node_inputs", "") == "":
+            node_adj_mapping["Input"].append(node["id"])
+        else:
+            for node_input in node["node_inputs"]:
+                node_adj_mapping[node_input["node_id"]].append(node["id"])
+
+    # Get all leaf nodes.
+    leaf_node_ids = set()
+    for node_id in list(nodes_data_dict.keys()):
+        if node_adj_mapping.get(node_id, "") == "":
+            leaf_node_ids.add(node_id)
+
+    def build_node_tree(node_id="Input"):
+        """Recursively builds a rich tree of the workflow nodes."""
+        # Set the style of the current node.
+        style_str = "green" if node_id in leaf_node_ids else "white"
+
+        # Create a Tree object for the current node.
+        if node_id != "Input":
+            node_table = table_from_dict(
+                [nodes_data_dict[node_id]["model"]],
+                column_names=["id", "model_type_id", "app_id", "user_id"],
+                title="Node: " + node_id,
+            )
+
+            tree = Tree(node_table, style=style_str, guide_style="underline2 white")
+        else:
+            tree = Tree(f"[green] {node_id}", style=style_str, guide_style="underline2 white")
 
-  …
+        # Recursively add the child nodes of the current node to the tree.
+        for child in node_adj_mapping.get(node_id, []):
+            tree.add(build_node_tree(child))
 
-  …
+        # Return the tree.
+        return tree
 
-  …
+    tree = build_node_tree("Input")
+    rprint(tree)
 
 
-def table_from_dict(data: List[Dict], column_names: List[str],
-  …
+def table_from_dict(data: List[Dict], column_names: List[str], title: str = "") -> 'rich.Table':  # noqa F821
+    """Use this function for printing tables from a list of dicts."""
+    from rich.table import Table
+
+    table = Table(title=title, show_lines=False, show_header=True, header_style="blue")
+    for column_name in column_names:
+        table.add_column(column_name)
+    for row in data:
+        req_row = [row.get(column_name, "") for column_name in column_names]
+        table.add_row(*req_row)
+    return table
 
 
 def _get_library_name() -> str:
-  …
+    return __name__.split(".")[0]
 
 
 def _configure_logger(name: str, logger_level: Union[int, str] = logging.NOTSET) -> None:
-  …
+    """Configure the logger with the specified name."""
+
+    logger = logging.getLogger(name)
+    logger.setLevel(logger_level)
+
+    # Remove existing handlers
+    for handler in logger.handlers[:]:
+        logger.removeHandler(handler)
+
+    # If ENABLE_JSON_LOGGER is 'true' then definitely use json logger.
+    # If ENABLE_JSON_LOGGER is 'false' then definitely don't use json logger.
+    # If ENABLE_JSON_LOGGER is not set, then use json logger if in k8s.
+    enabled_json = os.getenv('ENABLE_JSON_LOGGER', None)
+    in_k8s = 'KUBERNETES_SERVICE_HOST' in os.environ
+    handler = logging.StreamHandler()
+    handler.setLevel(logger_level)
+    if enabled_json == 'true' or (in_k8s and enabled_json != 'false'):
+        # Add the json handler and formatter
+        formatter = JsonFormatter()
+        handler.setFormatter(formatter)
+    else:
+        # create formatter and add it to the handlers
+        formatter = TerminalFormatter(LOG_FORMAT)
+        handler.setFormatter(formatter)
+    # add the handlers to the logger
+    logger.addHandler(handler)
 
 
-def get_logger(
-  …
+def get_logger(
+    logger_level: Union[int, str] = logging.NOTSET, name: Optional[str] = None
+) -> logging.Logger:
+    """Return a logger with the specified name."""
 
-  …
+    if name is None:
+        name = _get_library_name()
 
-  …
+    _configure_logger(name, logger_level)
+    return logging.getLogger(name)
 
 
 def add_file_handler(logger: logging.Logger, file_path: str, log_level: str = 'WARNING') -> None:
-  …
+    """Add a file handler to the logger."""
+    file_handler = logging.FileHandler(file_path)
+    file_handler.setLevel(log_level)
+    logger.addHandler(file_handler)
 
 
-def process_log_files(
-  …
+def process_log_files(
+    log_file_path: str,
+) -> tuple:
+    """Processes log files to get failed inputs and annotations.
 
     Args:
         log_file_path (str): path to the log file
     """
-  …
-  duplicate_input_ids = []
-  failed_input_ids = []
-  pattern = re.compile(r'\| +(\d+) +\| +(\S+) +\| +(.+?) +\| +(.+?) +\| +(.+?) +\| +(.+?) \|')
-  try:
-    with open(log_file_path, 'r') as file:
-      log_content = file.read()
-      matches = pattern.findall(log_content)
-      for match in matches:
-        index = int(match[0])
-        input_id = match[1]
-        status = match[2]
-        if status == "Input has a duplicate ID.":
-          duplicate_input_ids.append({"Index": index, "Input_ID": input_id})
-        else:
-          failed_input_ids.append({"Index": index, "Input_ID": input_id})
+    import re
 
-  …
+    duplicate_input_ids = []
+    failed_input_ids = []
+    pattern = re.compile(r'\| +(\d+) +\| +(\S+) +\| +(.+?) +\| +(.+?) +\| +(.+?) +\| +(.+?) \|')
+    try:
+        with open(log_file_path, 'r') as file:
+            log_content = file.read()
+            matches = pattern.findall(log_content)
+            for match in matches:
+                index = int(match[0])
+                input_id = match[1]
+                status = match[2]
+                if status == "Input has a duplicate ID.":
+                    duplicate_input_ids.append({"Index": index, "Input_ID": input_id})
+                else:
+                    failed_input_ids.append({"Index": index, "Input_ID": input_id})
 
-  …
+    except Exception as e:
+        print(f"Error Processing log file {log_file_path}:{e}")
+        return [], []
+
+    return duplicate_input_ids, failed_input_ids
 
 
 def display_concept_relations_tree(relations_dict: Dict[str, Any]) -> None:
-  …
+    """Print all the concept relations of the app in rich tree format.
 
     Args:
         relations_dict (dict): A dict of concept relations info.
     """
-  …
+    from rich import print as rprint
+    from rich.tree import Tree
+
+    for parent, children in relations_dict.items():
+        tree = Tree(parent)
+        for child in children:
+            tree.add(child)
+        rprint(tree)
 
 
 def _default_json_default(obj):
-  …
+    """
+    Handle objects that could not be serialized to JSON automatically.
 
-  …
+    Coerce everything to strings.
+    All objects representing time get output as ISO8601.
+    """
+    if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
+        return obj.isoformat()
+    else:
+        return _object_to_string_with_truncation(obj)
 
 
 def _object_to_string_with_truncation(obj) -> str:
-  …
-  It's preferred to not log objects that could cause triggering this function,
-  It's better to extract important parts form them and log them as regular Python types,
-  like str or int, which won't be passed to this functon.
-
-  This message brings additional information to the logs
-  that could help to find and fix truncation cases.
-  - hardcoded part of the message could be used for the looking all entries in logs
-  - obj class could help with detail investigation
-  """
-
-  objstr = str(obj)
-  if len(objstr) > JSON_DEFAULT_CHAR_LENGTH:
-    type_name = type(obj).__name__
-    truncated = objstr[:JSON_DEFAULT_CHAR_LENGTH]
-    objstr = f"{truncated}...[{type_name} was truncated, len={len(objstr)} chars]"
-  return objstr
+    """
+    Truncate object string.
 
+    It's preferred to not log objects that could cause triggering this function,
+    It's better to extract important parts form them and log them as regular Python types,
+    like str or int, which won't be passed to this functon.
 
-  …
-               datefmt=None,
-               style='%',
-               json_cls=None,
-               json_default=_default_json_default):
-    """
-    :param fmt: Config as a JSON string, allowed fields;
-           extra: provide extra fields always present in logs
-           source_host: override source host name
-    :param datefmt: Date format to use (required by logging.Formatter
-        interface but not used)
-    :param json_cls: JSON encoder to forward to json.dumps
-    :param json_default: Default JSON representation for unknown types,
-        by default coerce everything to a string
+    This message brings additional information to the logs
+    that could help to find and fix truncation cases.
+    - hardcoded part of the message could be used for the looking all entries in logs
+    - obj class could help with detail investigation
     """
 
-  …
-    if 'extra' not in self._fmt:
-      self.defaults = {}
-    else:
-      self.defaults = self._fmt['extra']
-    if 'source_host' in self._fmt:
-      self.source_host = self._fmt['source_host']
-    else:
-      try:
-        self.source_host = socket.gethostname()
-      except Exception:
-        self.source_host = ""
-
-    self.extra_blacklist_fields = []
-    extra_blacklist_fields = os.getenv('EXTRA_JSON_LOGGER_BLACKLIST_FIELDS', None)
-    if extra_blacklist_fields:
-      self.extra_blacklist_fields = extra_blacklist_fields.split(",")
-
-  def _build_fields(self, defaults, fields):
-    """Return provided fields including any in defaults
-    """
-    return dict(list(defaults.get('@fields', {}).items()) + list(fields.items()))
-
-  # Override the format function to fit Clarifai
-  def format(self, record):
-    fields = record.__dict__.copy()
-
-    # logger.info({...}) directly.
-    if isinstance(record.msg, dict):
-      fields.update(record.msg)
-      fields.pop('msg')
-      msg = ""
-    else:  # logger.info("message", {...})
-      if isinstance(record.args, dict):
-        fields.update(record.args)
-      msg = record.getMessage()
-    for k in FIELD_BLACKLIST:
-      fields.pop(k, None)
-    for k in self.extra_blacklist_fields:
-      fields.pop(k, None)
-    # Rename 'levelname' to 'level' and make the value lowercase to match Go logs
-    level = fields.pop('levelname', None)
-    if level:
-      fields['level'] = level.lower()
-
-    # Get the thread local data
-    req_id = getattr(thread_log_info, 'req_id', None)
-    if req_id:
-      fields['req_id'] = req_id
-    orig_req_id = getattr(thread_log_info, 'orig_req_id', None)
-    if orig_req_id:
-      fields['orig_req_id'] = orig_req_id
-    # Get the thread local data
-    requester = getattr(thread_log_info, 'requester', None)
-    if requester:
-      fields['requester'] = requester
-
-    user_id = getattr(thread_log_info, 'user_id', None)
-    if requester:
-      fields['user_id'] = user_id
-
-    if hasattr(thread_log_info, 'start_time'):
-      #pylint: disable=no-member
-      fields['duration_ms'] = (time.time() - thread_log_info.start_time) * 1000
-
-    if 'exc_info' in fields:
-      if fields['exc_info']:
-        formatted = traceback.format_exception(*fields['exc_info'])
-        fields['exception'] = formatted
-
-      fields.pop('exc_info')
-
-    if 'exc_text' in fields and not fields['exc_text']:
-      fields.pop('exc_text')
-
-    logr = self.defaults.copy()
-
-    logr.update({
-        JSON_LOG_KEY: msg,
-        '@timestamp': datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
-    })
-
-    logr.update(fields)
+    objstr = str(obj)
+    if len(objstr) > JSON_DEFAULT_CHAR_LENGTH:
+        type_name = type(obj).__name__
+        truncated = objstr[:JSON_DEFAULT_CHAR_LENGTH]
+        objstr = f"{truncated}...[{type_name} was truncated, len={len(objstr)} chars]"
+    return objstr
 
-    try:
-      return json.dumps(logr, default=self.json_default, cls=self.json_cls)
-    except Exception:
 
-  …
+class JsonFormatter(logging.Formatter):
+    def __init__(
+        self, fmt=None, datefmt=None, style='%', json_cls=None, json_default=_default_json_default
+    ):
+        """
+        :param fmt: Config as a JSON string, allowed fields;
+               extra: provide extra fields always present in logs
+               source_host: override source host name
+        :param datefmt: Date format to use (required by logging.Formatter
+            interface but not used)
+        :param json_cls: JSON encoder to forward to json.dumps
+        :param json_default: Default JSON representation for unknown types,
+            by default coerce everything to a string
+        """
+
+        if fmt is not None:
+            self._fmt = json.loads(fmt)
+        else:
+            self._fmt = {}
+        self.json_default = json_default
+        self.json_cls = json_cls
+        if 'extra' not in self._fmt:
+            self.defaults = {}
+        else:
+            self.defaults = self._fmt['extra']
+        if 'source_host' in self._fmt:
+            self.source_host = self._fmt['source_host']
+        else:
+            try:
+                self.source_host = socket.gethostname()
+            except Exception:
+                self.source_host = ""
+
+        self.extra_blacklist_fields = []
+        extra_blacklist_fields = os.getenv('EXTRA_JSON_LOGGER_BLACKLIST_FIELDS', None)
+        if extra_blacklist_fields:
+            self.extra_blacklist_fields = extra_blacklist_fields.split(",")
+
+    def _build_fields(self, defaults, fields):
+        """Return provided fields including any in defaults"""
+        return dict(list(defaults.get('@fields', {}).items()) + list(fields.items()))
+
+    # Override the format function to fit Clarifai
+    def format(self, record):
+        fields = record.__dict__.copy()
+
+        # logger.info({...}) directly.
+        if isinstance(record.msg, dict):
+            fields.update(record.msg)
+            fields.pop('msg')
+            msg = ""
+        else:  # logger.info("message", {...})
+            if isinstance(record.args, dict):
+                fields.update(record.args)
+            msg = record.getMessage()
+        for k in FIELD_BLACKLIST:
+            fields.pop(k, None)
+        for k in self.extra_blacklist_fields:
+            fields.pop(k, None)
+        # Rename 'levelname' to 'level' and make the value lowercase to match Go logs
+        level = fields.pop('levelname', None)
+        if level:
+            fields['level'] = level.lower()
+
+        # Get the thread local data
+        req_id = getattr(thread_log_info, 'req_id', None)
+        if req_id:
+            fields['req_id'] = req_id
+        orig_req_id = getattr(thread_log_info, 'orig_req_id', None)
+        if orig_req_id:
+            fields['orig_req_id'] = orig_req_id
+        # Get the thread local data
+        requester = getattr(thread_log_info, 'requester', None)
+        if requester:
+            fields['requester'] = requester
+
+        user_id = getattr(thread_log_info, 'user_id', None)
+        if requester:
+            fields['user_id'] = user_id
+
+        if hasattr(thread_log_info, 'start_time'):
+            # pylint: disable=no-member
+            fields['duration_ms'] = (time.time() - thread_log_info.start_time) * 1000
+
+        if 'exc_info' in fields:
+            if fields['exc_info']:
+                formatted = traceback.format_exception(*fields['exc_info'])
+                fields['exception'] = formatted
+
+            fields.pop('exc_info')
+
+        if 'exc_text' in fields and not fields['exc_text']:
+            fields.pop('exc_text')
+
+        logr = self.defaults.copy()
+
+        logr.update(
+            {
+                JSON_LOG_KEY: msg,
+                '@timestamp': datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
+            }
+        )
+
+        logr.update(fields)
+
+        try:
+            return json.dumps(logr, default=self.json_default, cls=self.json_cls)
+        except Exception:
+            type, value, tb = sys.exc_info()
+            return json.dumps(
+                {
+                    "msg": f"Fail to format log {type.__name__}({value}), {logr}",
+                    "formatting_traceback": "\n".join(traceback.format_tb(tb)),
+                },
+                default=self.json_default,
+                cls=self.json_cls,
+            )
 
 
 class TerminalFormatter(logging.Formatter):
-  …
+    """If you have fields in your Formatter (see setup_logger where we setup the format strings) then
+    you can set them on the record using a filter. We do that for req_id here which is a request
+    specific field. This allows us to find requests easily between services.
+    """
 
-  …
+    def format(self, record):
+        record.optional_args = []
 
-  …
+        user_id = getattr(thread_log_info, 'user_id', None)
+        if user_id is not None:
+            record.optional_args.append("user_id=" + user_id)
 
-  …
+        app_id = getattr(thread_log_info, 'app_id', None)
+        if app_id is not None:
+            record.optional_args.append("app_id=" + app_id)
 
-  …
+        req_id = getattr(thread_log_info, 'req_id', None)
+        if req_id is not None:
+            record.optional_args.append("req_id=" + req_id)
 
-  …
+        record.optional_args = " ".join(record.optional_args)
 
-  …
+        color_code = COLORS.get(record.levelname, '')
 
-  …
+        record.levelname = f"{color_code}{record.levelname}{COLORS.get('RESET')}"
+        record.msg = f"{color_code}{str(record.msg)}{COLORS.get('RESET')}"
 
-  …
+        return super(TerminalFormatter, self).format(record)
 
-  …
+    def formatTime(self, record, datefmt=None):
+        # Note we didn't go with UTC here as it's easier to understand time in your time zone.
+        # The json logger leverages UTC though.
+        return datetime.datetime.fromtimestamp(record.created).strftime('%H:%M:%S.%f')
 
 
 # the default logger for the SDK.
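
For orientation, the pieces in the reformatted module fit together roughly as below. This is a minimal sketch based only on the functions visible in the diff above; the logger name and the req_id/user_id values are made-up placeholders.

import os

# ENABLE_JSON_LOGGER is read inside _configure_logger(), so it must be set
# before get_logger() runs. Unset (or 'false'), the module falls back to the
# colorized TerminalFormatter unless KUBERNETES_SERVICE_HOST is present.
os.environ['ENABLE_JSON_LOGGER'] = 'true'

from clarifai.utils.logging import clear_logger_context, get_logger, set_logger_context

logger = get_logger(logger_level='INFO', name='clarifai-demo')  # placeholder name

# Thread-local context fields: JsonFormatter.format() copies req_id (and related
# fields) into each JSON record, while TerminalFormatter renders user_id/app_id/
# req_id through the %(optional_args)s slot of LOG_FORMAT.
set_logger_context(req_id='req-123', user_id='user-abc')  # placeholder values
logger.info("model upload started")
clear_logger_context()

Because _configure_logger() removes any existing handlers and re-reads the environment on every get_logger() call, the formatter choice can be switched at runtime by changing the variable and calling get_logger() again.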
|