monocle-apptrace 0.3.0b7__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of monocle-apptrace might be problematic.
- monocle_apptrace/exporters/monocle_exporters.py +5 -2
- monocle_apptrace/instrumentation/common/__init__.py +1 -1
- monocle_apptrace/instrumentation/common/constants.py +7 -1
- monocle_apptrace/instrumentation/common/instrumentor.py +20 -9
- monocle_apptrace/instrumentation/common/span_handler.py +36 -25
- monocle_apptrace/instrumentation/common/tracing.md +68 -0
- monocle_apptrace/instrumentation/common/utils.py +25 -10
- monocle_apptrace/instrumentation/common/wrapper.py +24 -22
- monocle_apptrace/instrumentation/common/wrapper_method.py +5 -2
- monocle_apptrace/instrumentation/metamodel/anthropic/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/anthropic/_helper.py +64 -0
- monocle_apptrace/instrumentation/metamodel/anthropic/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/anthropic/entities/inference.py +72 -0
- monocle_apptrace/instrumentation/metamodel/anthropic/methods.py +24 -0
- monocle_apptrace/instrumentation/metamodel/botocore/entities/inference.py +2 -2
- monocle_apptrace/instrumentation/metamodel/botocore/handlers/botocore_span_handler.py +1 -1
- monocle_apptrace/instrumentation/metamodel/flask/_helper.py +45 -3
- monocle_apptrace/instrumentation/metamodel/flask/entities/http.py +49 -0
- monocle_apptrace/instrumentation/metamodel/flask/methods.py +10 -1
- monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py +4 -1
- monocle_apptrace/instrumentation/metamodel/haystack/methods.py +1 -4
- monocle_apptrace/instrumentation/metamodel/langchain/_helper.py +12 -4
- monocle_apptrace/instrumentation/metamodel/langchain/methods.py +6 -14
- monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py +2 -15
- monocle_apptrace/instrumentation/metamodel/openai/_helper.py +9 -4
- monocle_apptrace/instrumentation/metamodel/openai/methods.py +16 -2
- monocle_apptrace/instrumentation/metamodel/requests/_helper.py +31 -0
- monocle_apptrace/instrumentation/metamodel/requests/entities/http.py +51 -0
- monocle_apptrace/instrumentation/metamodel/requests/methods.py +2 -1
- monocle_apptrace/instrumentation/metamodel/teamsai/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py +58 -0
- monocle_apptrace/instrumentation/metamodel/teamsai/entities/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/__init__.py +0 -0
- monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py +80 -0
- monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py +70 -0
- monocle_apptrace/instrumentation/metamodel/teamsai/methods.py +26 -0
- {monocle_apptrace-0.3.0b7.dist-info → monocle_apptrace-0.3.1.dist-info}/METADATA +2 -1
- {monocle_apptrace-0.3.0b7.dist-info → monocle_apptrace-0.3.1.dist-info}/RECORD +41 -26
- {monocle_apptrace-0.3.0b7.dist-info → monocle_apptrace-0.3.1.dist-info}/WHEEL +0 -0
- {monocle_apptrace-0.3.0b7.dist-info → monocle_apptrace-0.3.1.dist-info}/licenses/LICENSE +0 -0
- {monocle_apptrace-0.3.0b7.dist-info → monocle_apptrace-0.3.1.dist-info}/licenses/NOTICE +0 -0
monocle_apptrace/exporters/monocle_exporters.py

@@ -18,9 +18,12 @@ monocle_exporters: Dict[str, Any] = {
 }
 
 
-def get_monocle_exporter() -> List[SpanExporter]:
+def get_monocle_exporter(exporters_list:str=None) -> List[SpanExporter]:
     # Retrieve the MONOCLE_EXPORTER environment variable and split it into a list
-
+    if exporters_list:
+        exporter_names = exporters_list.split(",")
+    else:
+        exporter_names = os.environ.get("MONOCLE_EXPORTER", "file").split(",")
     exporters = []
 
     # Create task processor for AWS Lambda environment
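A minimal sketch of how the new exporters_list override could be called (the import path follows the file listed above; the exporter names are illustrative values from the supported set):

```python
# Sketch only: exercising the exporters_list parameter added in 0.3.1.
from monocle_apptrace.exporters.monocle_exporters import get_monocle_exporter

# An explicit comma-separated list takes precedence over MONOCLE_EXPORTER.
exporters = get_monocle_exporter(exporters_list="console,file")

# Without the argument, behaviour is unchanged: MONOCLE_EXPORTER or the "file" default.
default_exporters = get_monocle_exporter()
```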

monocle_apptrace/instrumentation/common/__init__.py

@@ -1,2 +1,2 @@
 from .instrumentor import setup_monocle_telemetry, start_trace, stop_trace, start_scope, stop_scope, http_route_handler, monocle_trace_scope, monocle_trace_scope_method, monocle_trace
-from .utils import MonocleSpanException
+from .utils import MonocleSpanException

monocle_apptrace/instrumentation/common/constants.py

@@ -13,6 +13,7 @@ GITHUB_CODESPACE_IDENTIFIER_ENV_NAME = "GITHUB_REPOSITORY"
 
 # Azure naming reference can be found here
 # https://learn.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/resource-abbreviations
+# https://docs.aws.amazon.com/resource-explorer/latest/userguide/supported-resource-types.html#services-lookoutmetrics
 AZURE_FUNCTION_NAME = "azure.func"
 AZURE_APP_SERVICE_NAME = "azure.asp"
 AZURE_ML_SERVICE_NAME = "azure.mlw"
@@ -48,6 +49,9 @@ llm_type_map = {
     "sagemakerllm": "aws_sagemaker",
     "chatbedrock": "aws_bedrock",
     "openaigenerator": "openai",
+    "bedrockruntime":"aws_bedrock",
+    "sagemakerruntime":"aws_sagemaker",
+    "chatanthropic":"anthropic",
 }
 
 MONOCLE_INSTRUMENTOR = "monocle_apptrace"
@@ -67,5 +71,7 @@ SCOPE_METHOD_FILE = 'monocle_scopes.json'
 SCOPE_CONFIG_PATH = 'MONOCLE_SCOPE_CONFIG_PATH'
 TRACE_PROPOGATION_URLS = "MONOCLE_TRACE_PROPAGATATION_URLS"
 WORKFLOW_TYPE_KEY = "monocle.workflow_type"
+ADD_NEW_WORKFLOW = "monocle.add_new_workflow"
 WORKFLOW_TYPE_GENERIC = "workflow.generic"
-MONOCLE_SDK_VERSION = "monocle_apptrace.version"
+MONOCLE_SDK_VERSION = "monocle_apptrace.version"
+MONOCLE_SDK_LANGUAGE = "monocle_apptrace.language"
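The three new llm_type_map entries are keyed by the lower-cased class name of the provider client, which is how get_llm_type in utils.py (later in this diff) resolves them; a rough illustration with a stand-in class:

```python
# Illustration only: how the new map entries are looked up by class name.
llm_type_map = {
    "chatanthropic": "anthropic",        # e.g. a client class named ChatAnthropic
    "bedrockruntime": "aws_bedrock",
    "sagemakerruntime": "aws_sagemaker",
}

class ChatAnthropic:   # stand-in for the real provider client class
    pass

client = ChatAnthropic()
print(llm_type_map.get(type(client).__name__.lower()))  # -> "anthropic"
```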

monocle_apptrace/instrumentation/common/instrumentor.py

@@ -3,6 +3,7 @@ import inspect
 from typing import Collection, Dict, List, Union
 import random
 import uuid
+import inspect
 from opentelemetry import trace
 from contextlib import contextmanager
 from opentelemetry.context import attach, get_value, set_value, get_current, detach
@@ -13,6 +14,7 @@ from opentelemetry.sdk.trace import TracerProvider, Span, id_generator
 from opentelemetry.sdk.resources import SERVICE_NAME, Resource
 from opentelemetry.sdk.trace import Span, TracerProvider
 from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanProcessor
+from opentelemetry.sdk.trace.export import SpanExporter
 from opentelemetry.trace import get_tracer
 from wrapt import wrap_function_wrapper
 from opentelemetry.trace.propagation import set_span_in_context, _SPAN_KEY
@@ -27,7 +29,7 @@ from monocle_apptrace.instrumentation.common.wrapper import scope_wrapper, ascop
 from monocle_apptrace.instrumentation.common.utils import (
     set_scope, remove_scope, http_route_handler, load_scopes, async_wrapper, http_async_route_handler
 )
-from monocle_apptrace.instrumentation.common.constants import MONOCLE_INSTRUMENTOR,
+from monocle_apptrace.instrumentation.common.constants import MONOCLE_INSTRUMENTOR, WORKFLOW_TYPE_GENERIC
 from functools import wraps
 logger = logging.getLogger(__name__)
 
@@ -39,19 +41,22 @@ monocle_tracer_provider: TracerProvider = None
 
 class MonocleInstrumentor(BaseInstrumentor):
     workflow_name: str = ""
-    user_wrapper_methods: list[Union[dict,WrapperMethod]] = []
+    user_wrapper_methods: list[Union[dict,WrapperMethod]] = [],
+    exporters: list[SpanExporter] = [],
     instrumented_method_list: list[object] = []
-    handlers:Dict[str,SpanHandler] =
+    handlers:Dict[str,SpanHandler] = None # dict of handlers
     union_with_default_methods: bool = False
 
     def __init__(
             self,
             handlers,
             user_wrapper_methods: list[Union[dict,WrapperMethod]] = None,
+            exporters: list[SpanExporter] = None,
             union_with_default_methods: bool = True
             ) -> None:
         self.user_wrapper_methods = user_wrapper_methods or []
         self.handlers = handlers
+        self.exporters = exporters
         if self.handlers is not None:
             for key, val in MONOCLE_SPAN_HANDLERS.items():
                 if key not in self.handlers:
@@ -155,7 +160,8 @@ def setup_monocle_telemetry(
     span_processors: List[SpanProcessor] = None,
     span_handlers: Dict[str,SpanHandler] = None,
     wrapper_methods: List[Union[dict,WrapperMethod]] = None,
-    union_with_default_methods: bool = True
+    union_with_default_methods: bool = True,
+    monocle_exporters_list:str = None) -> None:
     """
     Set up Monocle telemetry for the application.
 
@@ -165,7 +171,7 @@ def setup_monocle_telemetry(
         The name of the workflow to be used as the service name in telemetry.
     span_processors : List[SpanProcessor], optional
         Custom span processors to use instead of the default ones. If None,
-        BatchSpanProcessors with Monocle exporters will be used.
+        BatchSpanProcessors with Monocle exporters will be used. This can't be combined with `monocle_exporters_list`.
     span_handlers : Dict[str, SpanHandler], optional
         Dictionary of span handlers to be used by the instrumentor, mapping handler names to handler objects.
     wrapper_methods : List[Union[dict, WrapperMethod]], optional
@@ -173,11 +179,16 @@ def setup_monocle_telemetry(
     union_with_default_methods : bool, default=True
         If True, combine the provided wrapper_methods with the default methods.
         If False, only use the provided wrapper_methods.
+    monocle_exporters_list : str, optional
+        Comma-separated list of exporters to use. This will override the env setting MONOCLE_EXPORTERS.
+        Supported exporters are: s3, blob, okahu, file, memory, console. This can't be combined with `span_processors`.
     """
     resource = Resource(attributes={
         SERVICE_NAME: workflow_name
     })
-
+    if span_processors and monocle_exporters_list:
+        raise ValueError("span_processors and monocle_exporters_list can't be used together")
+    exporters:List[SpanExporter] = get_monocle_exporter(monocle_exporters_list)
     span_processors = span_processors or [BatchSpanProcessor(exporter) for exporter in exporters]
     set_tracer_provider(TracerProvider(resource=resource))
     attach(set_value("workflow_name", workflow_name))
@@ -192,7 +203,7 @@ def setup_monocle_telemetry(
         get_tracer_provider().add_span_processor(processor)
     if is_proxy_provider:
         trace.set_tracer_provider(get_tracer_provider())
-    instrumentor = MonocleInstrumentor(user_wrapper_methods=wrapper_methods or [],
+    instrumentor = MonocleInstrumentor(user_wrapper_methods=wrapper_methods or [], exporters=exporters,
                                        handlers=span_handlers, union_with_default_methods = union_with_default_methods)
     # instrumentor.app_name = workflow_name
     if not instrumentor.is_instrumented_by_opentelemetry:
@@ -228,7 +239,7 @@ def start_trace():
         updated_span_context = set_span_in_context(span=span)
         SpanHandler.set_default_monocle_attributes(span)
         SpanHandler.set_workflow_properties(span)
-        token =
+        token = attach(updated_span_context)
         return token
     except:
         logger.warning("Failed to start trace")
@@ -250,7 +261,7 @@ def stop_trace(token) -> None:
         if parent_span is not None:
             parent_span.end()
         if token is not None:
-
+            detach(token)
     except:
         logger.warning("Failed to stop trace")
 
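A hedged sketch of the new monocle_exporters_list parameter; setup_monocle_telemetry and the parameter names come from the hunks above, while the workflow name and exporter choices are illustrative:

```python
# Sketch only: selecting exporters at setup time instead of via MONOCLE_EXPORTER.
from monocle_apptrace.instrumentation.common.instrumentor import setup_monocle_telemetry

setup_monocle_telemetry(
    workflow_name="my_rag_app",              # illustrative service name
    monocle_exporters_list="console,file",   # overrides the env setting
)

# Passing span_processors together with monocle_exporters_list raises ValueError.
```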

monocle_apptrace/instrumentation/common/span_handler.py

@@ -1,6 +1,6 @@
 import logging
 import os
-from
+from contextlib import contextmanager
 from opentelemetry.context import get_value, set_value, attach, detach
 from opentelemetry.sdk.trace import Span
 from opentelemetry.trace.status import Status, StatusCode
@@ -8,9 +8,9 @@ from monocle_apptrace.instrumentation.common.constants import (
     QUERY,
     service_name_map,
     service_type_map,
-    MONOCLE_SDK_VERSION
+    MONOCLE_SDK_VERSION, MONOCLE_SDK_LANGUAGE
 )
-from monocle_apptrace.instrumentation.common.utils import set_attribute, get_scopes, MonocleSpanException
+from monocle_apptrace.instrumentation.common.utils import set_attribute, get_scopes, MonocleSpanException, get_monocle_version
 from monocle_apptrace.instrumentation.common.constants import WORKFLOW_TYPE_KEY, WORKFLOW_TYPE_GENERIC
 
 logger = logging.getLogger(__name__)
@@ -39,9 +39,9 @@ class SpanHandler:
         pass
 
     def skip_span(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
-
-
-
+        return False
+
+    def skip_processor(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
         return False
 
     def pre_task_processing(self, to_wrap, wrapped, instance, args,kwargs, span):
@@ -51,11 +51,8 @@ class SpanHandler:
     @staticmethod
     def set_default_monocle_attributes(span: Span):
         """ Set default monocle attributes for all spans """
-
-
-            span.set_attribute(MONOCLE_SDK_VERSION, sdk_version)
-        except Exception as e:
-            logger.warning("Exception finding monocle-apptrace version.")
+        span.set_attribute(MONOCLE_SDK_VERSION, get_monocle_version())
+        span.set_attribute(MONOCLE_SDK_LANGUAGE, "python")
         for scope_key, scope_value in get_scopes().items():
             span.set_attribute(f"scope.{scope_key}", scope_value)
 
@@ -64,8 +61,14 @@ class SpanHandler:
         """ Set attributes of workflow if this is a root span"""
         SpanHandler.set_workflow_attributes(to_wrap, span)
         SpanHandler.set_app_hosting_identifier_attribute(span)
+
         span.set_status(StatusCode.OK)
 
+    @staticmethod
+    def set_non_workflow_properties(span: Span, to_wrap = None):
+        workflow_name = SpanHandler.get_workflow_name(span=span)
+        if workflow_name:
+            span.set_attribute("workflow.name", workflow_name)
 
     def post_task_processing(self, to_wrap, wrapped, instance, args, kwargs, result, span:Span):
         if span.status.status_code == StatusCode.UNSET:
@@ -79,7 +82,8 @@ class SpanHandler:
         span_index = 0
         if SpanHandler.is_root_span(span):
             span_index = 2 # root span will have workflow and hosting entities pre-populated
-        if
+        if not self.skip_processor(to_wrap, wrapped, instance, args, kwargs) and (
+                'output_processor' in to_wrap and to_wrap["output_processor"] is not None):
             output_processor=to_wrap['output_processor']
             if 'type' in output_processor:
                 span.set_attribute("span.type", output_processor['type'])
@@ -107,6 +111,8 @@ class SpanHandler:
                     span_index += 1
             else:
                 logger.debug("attributes not found or incorrect written in entity json")
+        else:
+            span.set_attribute("span.type", "generic")
 
         # set scopes as attributes by calling get_scopes()
         # scopes is a Mapping[str:object], iterate directly with .items()
@@ -118,7 +124,8 @@ class SpanHandler:
 
 
     def hydrate_events(self, to_wrap, wrapped, instance, args, kwargs, result, span):
-        if
+        if not self.skip_processor(to_wrap, wrapped, instance, args, kwargs) and (
+                'output_processor' in to_wrap and to_wrap["output_processor"] is not None):
             output_processor=to_wrap['output_processor']
             arguments = {"instance": instance, "args": args, "kwargs": kwargs, "result": result}
             if 'events' in output_processor:
@@ -147,6 +154,7 @@ class SpanHandler:
         span_index = 1
         workflow_name = SpanHandler.get_workflow_name(span=span)
         if workflow_name:
+            span.update_name("workflow")
             span.set_attribute("span.type", "workflow")
             span.set_attribute(f"entity.{span_index}.name", workflow_name)
         workflow_type = SpanHandler.get_workflow_type(to_wrap)
@@ -186,26 +194,19 @@ class SpanHandler:
     @staticmethod
     def is_root_span(curr_span: Span) -> bool:
         try:
-            if curr_span is not None and hasattr(curr_span, "parent"):
+            if curr_span is not None and hasattr(curr_span, "parent") or curr_span.context.trace_state:
                 return curr_span.parent is None
         except Exception as e:
             logger.warning(f"Error finding root span: {e}")
 
-    def is_non_workflow_root_span(self, curr_span: Span, to_wrap) -> bool:
-        return SpanHandler.is_root_span(curr_span) and to_wrap.get("span_type") != "workflow"
-
-    def is_workflow_span_active(self):
-        return get_value(WORKFLOW_TYPE_KEY) is not None
-
     @staticmethod
     def attach_workflow_type(to_wrap=None, context=None):
         token = None
         if to_wrap:
-
+            workflow_type = SpanHandler.get_workflow_type(to_wrap)
+            if workflow_type != WORKFLOW_TYPE_GENERIC:
                 token = attach(set_value(WORKFLOW_TYPE_KEY,
                                 SpanHandler.get_workflow_type(to_wrap), context))
-        else:
-            token = attach(set_value(WORKFLOW_TYPE_KEY, WORKFLOW_TYPE_GENERIC, context))
         return token
 
     @staticmethod
@@ -213,8 +214,18 @@ class SpanHandler:
         if token:
             return detach(token)
 
+    @staticmethod
+    @contextmanager
+    def workflow_type(to_wrap=None):
+        token = SpanHandler.attach_workflow_type(to_wrap)
+        try:
+            yield
+        finally:
+            SpanHandler.detach_workflow_type(token)
+
+
 class NonFrameworkSpanHandler(SpanHandler):
 
-    # If the language framework is being executed, then skip generating direct openAI
-    def
+    # If the language framework is being executed, then skip generating direct openAI attributes and events
+    def skip_processor(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
         return get_value(WORKFLOW_TYPE_KEY) in WORKFLOW_TYPE_MAP.values()
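A hedged sketch of how the reworked hooks might be used from application code: skip_span suppresses a span entirely, while the new skip_processor keeps the span but skips output-processor attributes and events, so the span falls back to span.type = "generic" as in the hydrate hunks above. The handler name and skip conditions below are illustrative.

```python
# Sketch only, built on the SpanHandler hooks shown in this diff.
from monocle_apptrace.instrumentation.common.span_handler import SpanHandler
from monocle_apptrace.instrumentation.common.instrumentor import setup_monocle_telemetry

class QuietSpanHandler(SpanHandler):
    def skip_span(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
        # Hypothetical condition: drop spans for calls flagged as internal.
        return kwargs.get("internal", False)

    def skip_processor(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
        # Keep the span but skip attribute/event hydration -> span.type "generic".
        return True

setup_monocle_telemetry(
    workflow_name="my_app",
    span_handlers={"quiet_handler": QuietSpanHandler()},
)
```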

monocle_apptrace/instrumentation/common/tracing.md (new file)

@@ -0,0 +1,68 @@
+# Monocle tracing: concepts and principles
+
+## Span
+A span is an observation of a code/method execution. Each span has a unique ID. It records the start time and end time of the code's execution along with additional information relevant to that operation. Before the code execution starts, a span object is created in the memory of the host process executing this code, and it captures the current time as the start time of the span. At this stage the span is considered active. It stays active till the code execution ends. Once the code execution is complete, the span records the current time as the end time and captures any additional relevant information (e.g. arguments, return value, environment settings etc.). Now the span is marked as closed and it is queued to be saved to the configured storage.
+Note that the code that generated this span could in turn call other methods that are also instrumented. Those will generate spans of their own. These are "child" spans, which refer to the span ID of the calling code as their "parent" span. An initial span which has no parent is referred to as the "root" span.
+
+## Trace
+A trace is a collection of spans with a common ID called the traceID. When the first active span is created, a new unique traceID is generated and assigned to that span. All the child spans generated by execution of other instrumented code/methods share the same traceID. Once this top span ends, the trace ends. This way all the code executed as part of the top-level instrumented code has a common traceID to group it together. For example, consider the following sequence where `f1()` is the first instrumented method executed, and it calls other instrumented methods `f2(), f3(), f4() and f5()`:
+```
+f1()--> f2() --> f3()
+    --> f4() --> f5()
+```
+In the above sequence, each method execution generates a span and they all share a common traceID. If a new instrumented method is executed after f1() finishes, it will be the first active span in the process's execution context and will get a new traceID.
+
+### Trace ID propagation
+Each child span inherits the parent's trace ID. When spans run in the same process, the trace ID is picked up from the process memory/context. But consider the above example again, where the `f4()-->f5()` code is not part of the process executing f1(). It's a remote call, say over REST. From the overall application's point of view, the work done in `f4()` and `f5()` is part of `f1()` and you want the same traceID associated with all spans. You want the instrumentation to seamlessly pass the traceID over such remote calls and continue it instead of generating a new one. It's the responsibility of Monocle to provide a mechanism that makes this trace ID propagation transparent to the application logic and architecture.
+
+## Propagation
+When the execution logic spans multiple processes using remote calling mechanisms like REST, you want the trace ID to propagate from the process that originated it to the one that's continuing the remote execution. Monocle supports seamlessly propagating the traceID over REST if both sides of the trace execution are instrumented.
+
+## Types of spans in Monocle
+Monocle extends these generic span types by enriching them with additional attributes/data for genAI-specific operations.
+### GenAI spans
+These are the core spans that capture details of genAI component operations like a call to an LLM or a vector store. The purpose of these spans is to capture the details of the application's interaction with core genAI components. These spans are triggered by pre-instrumented methods that handle such operations.
+- Inference span
+Represents interactions with LLMs; captures details like the model, prompts, response and other metadata (e.g. tokens).
+- Retrieval span
+Represents interactions with vector stores like embedding creation, vector retrieval etc. Captures the model, search query, response, vector embedding etc.
+
+### Anchor spans
+These are spans created by a top-level method that anchors a higher level of abstraction over the underlying core genAI APIs, for example a langchain.invoke() which under the covers calls langchain.llm_invoke() or langchain.vector_retrieval(). Consider the following pseudo code of a langchain RAG pattern API:
+```
+response = rag_chain.invoke(prompt)
+   --> cleaned_prompt = llm1.chat(prompt)
+   --> context = vector_store.retrieve(cleaned_prompt)
+   --> response = llm2.chat(system_prompt+context+cleaned_prompt)
+   --> return response
+```
+If we only instrument the top-level invoke call, then we'll trace the top-level prompt and response interaction between the application and langchain, but we'll miss details like how a system prompt was added and sent to multiple LLMs and what context was extracted from the vector store. On the other hand, if we only instrument the low-level calls to the LLMs and the vector store, then we'll miss the fact that those calls are part of the same RAG flow. Hence we instrument all of them. This example would generate an anchor span for the `invoke()` method, a retrieval span for the `retrieve()` method and two inference spans, one for each `chat()` method. All of these will share a common traceID.
+The anchor spans also provide an observation window on your application's interaction with a high-level SDK or service. They illustrate facts such as how much time is taken by the genAI service invocation compared to other local logic.
+
+### Workflow spans
+Workflow spans are synthetic spans created to frame the full trace. A workflow span captures the summary of the full trace including the time window, the process running this code (set as `workflow_name` in the API that enables Monocle instrumentation) and runtime environment details such as the hosting service (Azure function, Lambda function etc.).
+A workflow span is generated when a new trace starts or when a trace is propagated. Workflow spans provide the baseline observation window for the entire trace, or for the fragment of the trace executed in a process.
+Consider the following example:
+```
+setup_monocle_telemetry(workflow='bot')
+rag_chain.invoke()
+   --> context = retrieval()
+   --> new_prompt = REST --> azure.func.chat(prompt) -->
+                               setup_monocle_telemetry(workflow='moderator')
+                               return llm(moderator_system_prompt+prompt)
+   --> response = llm(new_prompt)
+```
+This will generate the following spans:
+```
+Span{name='workflow.bot', type= workflow, traceID = xx1, spanID = yy0, parentID=None} ==> Workflow for new trace start
+Span{name='chain.invoke', type=anchor, traceID = xx1, spanID = yy1, parentID=yy0} ==> anchor span for chain invoke
+Span{name='chain.retrieval', type=retrieval, traceID = xx1, spanID = yy2, parentID = yy1} ==> Retrieval API span
+Span{name='workflow.moderator', type=workflow, traceID = xx1, spanID = zz1, parentID=yy1} ==> Workflow for propagated trace fragment
+Span{name='az.func.chat', type=anchor, traceID = xx1, spanID = zz2, parentID=zz1} ==> anchor span for az function invoke
+Span{name='chain.infer', type=inference, traceID = xx1, spanID = zz2, parentID=zz2} ==> inference
+Span{name='chain.infer',type=inference, traceID = xx1, spanID = yy3, parentID=yy1} ==> inference
+```
+
+## Scopes
+A scope is a way of grouping spans across traces. It's a tag with a value that can either be specified or auto-generated (a GUID) by Monocle. There can be any number of scopes active in the application code at a given point in time. All the active scopes are recorded in every span that's emitted.
+
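The scope concept described in the new document maps onto the scope APIs exported by monocle_apptrace.instrumentation.common in this release (start_scope, stop_scope, monocle_trace_scope); the exact signatures are assumed here (a scope name plus an optional value and a token-based teardown), so treat this as a sketch:

```python
# Sketch only: attaching a scope so that every span emitted while it is active
# carries a scope.<name> attribute, per the Scopes section above.
from monocle_apptrace.instrumentation.common import start_scope, stop_scope

token = start_scope("conversation_id", "conv-1234")   # assumed (name, value) signature
try:
    pass  # spans emitted here would record scope.conversation_id = "conv-1234"
finally:
    stop_scope(token)                                  # assumed token-based teardown
```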

monocle_apptrace/instrumentation/common/utils.py

@@ -9,7 +9,8 @@ from opentelemetry.trace.propagation import _SPAN_KEY
 from opentelemetry.sdk.trace import id_generator, TracerProvider
 from opentelemetry.propagate import inject, extract
 from opentelemetry import baggage
-from monocle_apptrace.instrumentation.common.constants import MONOCLE_SCOPE_NAME_PREFIX, SCOPE_METHOD_FILE, SCOPE_CONFIG_PATH, llm_type_map
+from monocle_apptrace.instrumentation.common.constants import MONOCLE_SCOPE_NAME_PREFIX, SCOPE_METHOD_FILE, SCOPE_CONFIG_PATH, llm_type_map, MONOCLE_SDK_VERSION, ADD_NEW_WORKFLOW
+from importlib.metadata import version
 
 T = TypeVar('T')
 U = TypeVar('U')
@@ -21,6 +22,12 @@ embedding_model_context = {}
 scope_id_generator = id_generator.RandomIdGenerator()
 http_scopes:dict[str:str] = {}
 
+try:
+    monocle_sdk_version = version("monocle_apptrace")
+except Exception as e:
+    monocle_sdk_version = "unknown"
+    logger.warning("Exception finding monocle-apptrace version.")
+
 class MonocleSpanException(Exception):
     def __init__(self, err_message:str):
         """
@@ -96,8 +103,8 @@ def with_tracer_wrapper(func):
 def resolve_from_alias(my_map, alias):
     """Find a alias that is not none from list of aliases"""
 
-    for i in alias
-        if i in my_map.keys():
+    for i in alias:
+        if i in my_map.keys() and my_map[i] is not None:
             return my_map[i]
     return None
 
@@ -236,6 +243,7 @@ def set_scopes_from_baggage(baggage_context:Context):
 def extract_http_headers(headers) -> object:
     global http_scopes
     trace_context:Context = extract(headers, context=get_current())
+    trace_context = set_value(ADD_NEW_WORKFLOW, True, trace_context)
     imported_scope:dict[str, object] = {}
     for http_header, http_scope in http_scopes.items():
         if http_header in headers:
@@ -311,6 +319,19 @@ def async_wrapper(method, scope_name=None, scope_value=None, headers=None, *args
         if token:
             remove_scope(token)
 
+def get_monocle_version() -> str:
+    global monocle_sdk_version
+    return monocle_sdk_version
+
+def add_monocle_trace_state(headers:dict[str:str]) -> None:
+    if headers is None:
+        return
+    monocle_trace_state = f"{MONOCLE_SDK_VERSION}={get_monocle_version()}"
+    if 'tracestate' in headers:
+        headers['tracestate'] = f"{headers['tracestate']},{monocle_trace_state}"
+    else:
+        headers['tracestate'] = monocle_trace_state
+
 class Option(Generic[T]):
     def __init__(self, value: Optional[T]):
         self.value = value
@@ -343,14 +364,8 @@ def try_option(func: Callable[..., T], *args, **kwargs) -> Option[T]:
 
 def get_llm_type(instance):
     try:
+        t_name = type(instance).__name__.lower()
         llm_type = llm_type_map.get(type(instance).__name__.lower())
         return llm_type
     except:
         pass
-
-def resolve_from_alias(my_map, alias):
-    """Find a alias that is not none from list of aliases"""
-    for i in alias:
-        if i in my_map.keys():
-            return my_map[i]
-    return None
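The new add_monocle_trace_state helper (full body in the hunk above) appends the SDK version to a W3C tracestate header in place, under the monocle_apptrace.version key; the header values below are illustrative:

```python
from monocle_apptrace.instrumentation.common.utils import add_monocle_trace_state

headers = {"tracestate": "vendor=abc"}   # existing tracestate entry (illustrative)
add_monocle_trace_state(headers)
# headers["tracestate"] -> "vendor=abc,monocle_apptrace.version=<installed version>"

headers = {}                             # no tracestate yet
add_monocle_trace_state(headers)
# headers["tracestate"] -> "monocle_apptrace.version=<installed version>"

add_monocle_trace_state(None)            # guarded no-op when headers is None
```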

monocle_apptrace/instrumentation/common/wrapper.py

@@ -11,7 +11,7 @@ from monocle_apptrace.instrumentation.common.utils import (
     remove_scope,
     async_wrapper
 )
-from monocle_apptrace.instrumentation.common.constants import WORKFLOW_TYPE_KEY
+from monocle_apptrace.instrumentation.common.constants import WORKFLOW_TYPE_KEY, ADD_NEW_WORKFLOW
 logger = logging.getLogger(__name__)
 
 def wrapper_processor(async_task: bool, tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
@@ -19,9 +19,7 @@ def wrapper_processor(async_task: bool, tracer: Tracer, handler: SpanHandler, to
     if instance.__class__.__name__ in ("AgentExecutor"):
         return wrapped(*args, **kwargs)
 
-    if
-        name = f"{to_wrap.get('span_name')}.{instance.name.lower()}"
-    elif to_wrap.get("span_name"):
+    if to_wrap.get("span_name"):
         name = to_wrap.get("span_name")
     else:
         name = get_fully_qualified_class_name(instance)
@@ -30,40 +28,44 @@ def wrapper_processor(async_task: bool, tracer: Tracer, handler: SpanHandler, to
     token = None
     try:
         handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
-
-        if not to_wrap.get('skip_span'):
-            token = SpanHandler.attach_workflow_type(to_wrap=to_wrap)
-        if skip_scan:
+        if to_wrap.get('skip_span', False) or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
             if async_task:
                 return_value = async_wrapper(wrapped, None, None, None, *args, **kwargs)
             else:
                 return_value = wrapped(*args, **kwargs)
         else:
-
+            add_workflow_span = get_value(ADD_NEW_WORKFLOW) == True
+            token = attach(set_value(ADD_NEW_WORKFLOW, False))
+            try:
+                return_value = span_processor(name, async_task, tracer, handler, add_workflow_span,
+                                              to_wrap, wrapped, instance, args, kwargs)
+            finally:
+                detach(token)
         return return_value
     finally:
-        handler.detach_workflow_type(token)
         handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
 
-def span_processor(name: str, async_task: bool, tracer: Tracer, handler: SpanHandler,
+def span_processor(name: str, async_task: bool, tracer: Tracer, handler: SpanHandler, add_workflow_span:bool,
+                   to_wrap, wrapped, instance, args, kwargs):
     # For singleton spans, eg OpenAI inference generate a workflow span to format the workflow specific attributes
     return_value = None
     with tracer.start_as_current_span(name) as span:
         # Since Spanhandler can be overridden, ensure we set default monocle attributes.
         SpanHandler.set_default_monocle_attributes(span)
-        if SpanHandler.is_root_span(span):
-            SpanHandler.set_workflow_properties(span, to_wrap)
-        if handler.is_non_workflow_root_span(span, to_wrap):
+        if SpanHandler.is_root_span(span) or add_workflow_span:
             # This is a direct API call of a non-framework type, call the span_processor recursively for the actual span
-
+            SpanHandler.set_workflow_properties(span, to_wrap)
+            return_value = span_processor(name, async_task, tracer, handler, False, to_wrap, wrapped, instance, args, kwargs)
         else:
-
-
-
-
-
-
-
+            with SpanHandler.workflow_type(to_wrap):
+                SpanHandler.set_non_workflow_properties(span)
+                handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
+                if async_task:
+                    return_value = async_wrapper(wrapped, None, None, None, *args, **kwargs)
+                else:
+                    return_value = wrapped(*args, **kwargs)
+                handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span)
+                handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
     return return_value
 
 @with_tracer_wrapper
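The wrapper now decides whether to emit a synthetic workflow span by consulting the ADD_NEW_WORKFLOW context flag (set by extract_http_headers in utils.py) and immediately resets it so nested spans do not repeat the workflow span. A minimal sketch of that read-then-reset context-flag pattern, using the same OpenTelemetry context calls as the diff; the surrounding function is illustrative:

```python
# Sketch of the read-then-reset context flag used by the reworked wrapper.
from opentelemetry.context import attach, detach, get_value, set_value
from monocle_apptrace.instrumentation.common.constants import ADD_NEW_WORKFLOW

def handle_instrumented_call(do_work):
    add_workflow_span = get_value(ADD_NEW_WORKFLOW) == True   # set when a trace was propagated in
    token = attach(set_value(ADD_NEW_WORKFLOW, False))        # consume the flag for nested calls
    try:
        if add_workflow_span:
            pass  # span_processor would first wrap the call in a synthetic "workflow" span here
        return do_work()
    finally:
        detach(token)
```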

monocle_apptrace/instrumentation/common/wrapper_method.py

@@ -12,9 +12,11 @@ from monocle_apptrace.instrumentation.metamodel.haystack.methods import (HAYSTAC
 from monocle_apptrace.instrumentation.metamodel.openai.methods import (OPENAI_METHODS,)
 from monocle_apptrace.instrumentation.metamodel.langgraph.methods import LANGGRAPH_METHODS
 from monocle_apptrace.instrumentation.metamodel.flask.methods import (FLASK_METHODS, )
-from monocle_apptrace.instrumentation.metamodel.flask._helper import FlaskSpanHandler
+from monocle_apptrace.instrumentation.metamodel.flask._helper import FlaskSpanHandler, FlaskResponseSpanHandler
 from monocle_apptrace.instrumentation.metamodel.requests.methods import (REQUESTS_METHODS, )
 from monocle_apptrace.instrumentation.metamodel.requests._helper import RequestSpanHandler
+from monocle_apptrace.instrumentation.metamodel.teamsai.methods import (TEAMAI_METHODS, )
+from monocle_apptrace.instrumentation.metamodel.anthropic.methods import (ANTHROPIC_METHODS, )
 
 class WrapperMethod:
     def __init__(
@@ -61,12 +63,13 @@ class WrapperMethod:
     def get_span_handler(self) -> SpanHandler:
         return self.span_handler()
 
-DEFAULT_METHODS_LIST = LANGCHAIN_METHODS + LLAMAINDEX_METHODS + HAYSTACK_METHODS + BOTOCORE_METHODS + FLASK_METHODS + REQUESTS_METHODS + LANGGRAPH_METHODS + OPENAI_METHODS
+DEFAULT_METHODS_LIST = LANGCHAIN_METHODS + LLAMAINDEX_METHODS + HAYSTACK_METHODS + BOTOCORE_METHODS + FLASK_METHODS + REQUESTS_METHODS + LANGGRAPH_METHODS + OPENAI_METHODS + TEAMAI_METHODS + ANTHROPIC_METHODS
 
 MONOCLE_SPAN_HANDLERS: Dict[str, SpanHandler] = {
     "default": SpanHandler(),
     "botocore_handler": BotoCoreSpanHandler(),
     "flask_handler": FlaskSpanHandler(),
+    "flask_response_handler": FlaskResponseSpanHandler(),
     "request_handler": RequestSpanHandler(),
     "non_framework_handler": NonFrameworkSpanHandler()
 }

monocle_apptrace/instrumentation/metamodel/anthropic/__init__.py (file without changes)

monocle_apptrace/instrumentation/metamodel/anthropic/_helper.py (new file)

@@ -0,0 +1,64 @@
+"""
+This module provides utility functions for extracting system, user,
+and assistant messages from various input formats.
+"""
+
+import logging
+from monocle_apptrace.instrumentation.common.utils import (
+    Option,
+    get_keys_as_tuple,
+    get_nested_value,
+    try_option,
+)
+
+
+logger = logging.getLogger(__name__)
+
+def extract_provider_name(instance):
+    provider_url: Option[str] = try_option(getattr, instance._client.base_url, 'host')
+    return provider_url.unwrap_or(None)
+
+def extract_inference_endpoint(instance):
+    inference_endpoint: Option[str] = try_option(getattr, instance._client, 'base_url').map(str)
+    if inference_endpoint.is_none() and "meta" in instance.client.__dict__:
+        inference_endpoint = try_option(getattr, instance.client.meta, 'endpoint_url').map(str)
+
+    return inference_endpoint.unwrap_or(extract_provider_name(instance))
+
+def extract_messages(kwargs):
+    """Extract system and user messages"""
+    try:
+        messages = []
+        if 'messages' in kwargs and len(kwargs['messages']) >0:
+            for msg in kwargs['messages']:
+                if msg.get('content') and msg.get('role'):
+                    messages.append({msg['role']: msg['content']})
+
+        return [str(message) for message in messages]
+    except Exception as e:
+        logger.warning("Warning: Error occurred in extract_messages: %s", str(e))
+        return []
+
+
+def extract_assistant_message(response):
+    try:
+        if response is not None and hasattr(response,"content") and len(response.content) >0:
+            if hasattr(response.content[0],"text"):
+                return response.content[0].text
+    except (IndexError, AttributeError) as e:
+        logger.warning("Warning: Error occurred in extract_assistant_message: %s", str(e))
+        return None
+
+def update_span_from_llm_response(response):
+    meta_dict = {}
+    if response is not None and hasattr(response, "usage"):
+        if hasattr(response, "usage") and response.usage is not None:
+            token_usage = response.usage
+        else:
+            response_metadata = response.response_metadata
+            token_usage = response_metadata.get("token_usage")
+        if token_usage is not None:
+            meta_dict.update({"completion_tokens": getattr(response.usage, "output_tokens", 0)})
+            meta_dict.update({"prompt_tokens": getattr(response.usage, "input_tokens", 0)})
+            meta_dict.update({"total_tokens": getattr(response.usage, "input_tokens", 0)+getattr(response.usage, "output_tokens", 0)})
+    return meta_dict
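An illustration of the new Anthropic helpers using stand-in objects shaped like an Anthropic Messages API response (the real instrumented objects come from the anthropic SDK; the SimpleNamespace stand-ins and sample values are only for demonstration):

```python
from types import SimpleNamespace

from monocle_apptrace.instrumentation.metamodel.anthropic._helper import (
    extract_assistant_message,
    extract_messages,
    update_span_from_llm_response,
)

kwargs = {"messages": [
    {"role": "system", "content": "You are terse."},
    {"role": "user", "content": "Hello"},
]}
print(extract_messages(kwargs))
# -> ["{'system': 'You are terse.'}", "{'user': 'Hello'}"]

response = SimpleNamespace(
    content=[SimpleNamespace(text="Hi there")],
    usage=SimpleNamespace(input_tokens=12, output_tokens=4),
)
print(extract_assistant_message(response))      # -> "Hi there"
print(update_span_from_llm_response(response))
# -> {'completion_tokens': 4, 'prompt_tokens': 12, 'total_tokens': 16}
```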

monocle_apptrace/instrumentation/metamodel/anthropic/entities/__init__.py (file without changes)