monocle-apptrace 0.4.0b1__py3-none-any.whl → 0.4.0b3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monocle_apptrace/instrumentation/common/constants.py +4 -4
- monocle_apptrace/instrumentation/common/span_handler.py +54 -28
- monocle_apptrace/instrumentation/common/utils.py +28 -0
- monocle_apptrace/instrumentation/common/wrapper.py +89 -73
- monocle_apptrace/instrumentation/common/wrapper_method.py +3 -1
- monocle_apptrace/instrumentation/metamodel/flask/_helper.py +1 -1
- monocle_apptrace/instrumentation/metamodel/haystack/_helper.py +1 -1
- monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py +1 -1
- monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py +1 -1
- monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py +1 -1
- monocle_apptrace/instrumentation/metamodel/openai/_helper.py +26 -5
- monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py +11 -1
- monocle_apptrace/instrumentation/metamodel/openai/methods.py +6 -6
- monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py +37 -5
- monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py +12 -32
- monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py +14 -16
- {monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b3.dist-info}/METADATA +22 -18
- {monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b3.dist-info}/RECORD +21 -21
- {monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b3.dist-info}/WHEEL +0 -0
- {monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b3.dist-info}/licenses/LICENSE +0 -0
- {monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b3.dist-info}/licenses/NOTICE +0 -0
monocle_apptrace/instrumentation/common/constants.py
CHANGED
@@ -14,10 +14,10 @@ GITHUB_CODESPACE_IDENTIFIER_ENV_NAME = "GITHUB_REPOSITORY"
 # Azure naming reference can be found here
 # https://learn.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/resource-abbreviations
 # https://docs.aws.amazon.com/resource-explorer/latest/userguide/supported-resource-types.html#services-lookoutmetrics
-AZURE_FUNCTION_NAME = "
-AZURE_APP_SERVICE_NAME = "
-AZURE_ML_SERVICE_NAME = "
-AWS_LAMBDA_SERVICE_NAME = "
+AZURE_FUNCTION_NAME = "azure_func"
+AZURE_APP_SERVICE_NAME = "azure_webapp"
+AZURE_ML_SERVICE_NAME = "azure_ml"
+AWS_LAMBDA_SERVICE_NAME = "aws_lambda"
 GITHUB_CODESPACE_SERVICE_NAME = "github_codespace"

 # Env variables to identify infra service type
monocle_apptrace/instrumentation/common/span_handler.py
CHANGED
@@ -42,8 +42,17 @@ class SpanHandler:
     def skip_span(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
         return False

-    def skip_processor(self, to_wrap, wrapped, instance, args, kwargs) ->
-        return
+    def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
+        return []
+
+    def set_span_type(self, to_wrap, wrapped, instance, output_processor, span:Span, args, kwargs) -> str:
+        span_type:str = None
+        if 'type' in output_processor:
+            span_type = output_processor['type']
+            span.set_attribute("span.type", span_type)
+        else:
+            logger.warning("type of span not found or incorrect written in entity json")
+        return span_type

     def pre_task_processing(self, to_wrap, wrapped, instance, args,kwargs, span):
         if "pipeline" in to_wrap['package']:
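The skip_processor hook now returns a list of section names ("attributes", "events", or "events.<event name>") instead of a bare flag, so a handler can suppress parts of span hydration selectively, and set_span_type centralizes writing the span.type attribute from the output_processor's type field. A minimal sketch of a custom handler using the new contract (the handler class name here is illustrative, not part of the package):

    from monocle_apptrace.instrumentation.common.span_handler import SpanHandler

    class QuietInputSpanHandler(SpanHandler):
        # Keep attributes and other events, but drop the data.input event payload.
        def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
            return ["events.data.input"]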
@@ -69,30 +78,32 @@ class SpanHandler:
         workflow_name = SpanHandler.get_workflow_name(span=span)
         if workflow_name:
             span.set_attribute("workflow.name", workflow_name)
+            span.set_attribute("span.type", "generic")

     def post_task_processing(self, to_wrap, wrapped, instance, args, kwargs, result, span:Span):
-
-        span.set_status(StatusCode.OK)
+        pass

-    def hydrate_span(self, to_wrap, wrapped, instance, args, kwargs, result, span) -> bool:
-
-
-
-
+    def hydrate_span(self, to_wrap, wrapped, instance, args, kwargs, result, span, parent_span = None, ex:Exception = None) -> bool:
+        try:
+            detected_error_in_attribute = self.hydrate_attributes(to_wrap, wrapped, instance, args, kwargs, result, span)
+            detected_error_in_event = self.hydrate_events(to_wrap, wrapped, instance, args, kwargs, result, span, parent_span, ex)
+            if detected_error_in_attribute or detected_error_in_event:
+                span.set_attribute(MONOCLE_DETECTED_SPAN_ERROR, True)
+        finally:
+            if span.status.status_code == StatusCode.UNSET and ex is None:
+                span.set_status(StatusCode.OK)

     def hydrate_attributes(self, to_wrap, wrapped, instance, args, kwargs, result, span:Span) -> bool:
         detected_error:bool = False
         span_index = 0
         if SpanHandler.is_root_span(span):
             span_index = 2 # root span will have workflow and hosting entities pre-populated
-        if
-            'output_processor' in to_wrap and to_wrap["output_processor"] is not None):
+        if 'output_processor' in to_wrap and to_wrap["output_processor"] is not None:
             output_processor=to_wrap['output_processor']
-
-
-
-
-            if 'attributes' in output_processor:
+            self.set_span_type(to_wrap, wrapped, instance, output_processor, span, args, kwargs)
+            skip_processors:list[str] = self.skip_processor(to_wrap, wrapped, instance, span, args, kwargs) or []
+
+            if 'attributes' in output_processor and 'attributes' not in skip_processors:
                 for processors in output_processor["attributes"]:
                     for processor in processors:
                         attribute = processor.get('attribute')
@@ -113,10 +124,6 @@ class SpanHandler:
                         else:
                             logger.debug(f"{' and '.join([key for key in ['attribute', 'accessor'] if not processor.get(key)])} not found or incorrect in entity JSON")
                         span_index += 1
-            else:
-                logger.debug("attributes not found or incorrect written in entity json")
-        else:
-            span.set_attribute("span.type", "generic")

         # set scopes as attributes by calling get_scopes()
         # scopes is a Mapping[str:object], iterate directly with .items()
@@ -127,16 +134,21 @@ class SpanHandler:
         span.set_attribute("entity.count", span_index)
         return detected_error

-    def hydrate_events(self, to_wrap, wrapped, instance, args, kwargs, ret_result, span) -> bool:
+    def hydrate_events(self, to_wrap, wrapped, instance, args, kwargs, ret_result, span, parent_span=None, ex:Exception=None) -> bool:
         detected_error:bool = False
-        if
-            'output_processor' in to_wrap and to_wrap["output_processor"] is not None):
+        if 'output_processor' in to_wrap and to_wrap["output_processor"] is not None:
             output_processor=to_wrap['output_processor']
-
-
+            skip_processors:list[str] = self.skip_processor(to_wrap, wrapped, instance, span, args, kwargs) or []
+
+            arguments = {"instance": instance, "args": args, "kwargs": kwargs, "result": ret_result, "exception":ex}
+            # Process events if they are defined in the output_processor.
+            # In case of inference.modelapi skip the event processing unless the span has an exception
+            if 'events' in output_processor and ('events' not in skip_processors or ex is not None):
                 events = output_processor['events']
                 for event in events:
                     event_name = event.get("name")
+                    if 'events.'+event_name in skip_processors and ex is None:
+                        continue
                     event_attributes = {}
                     attributes = event.get("attributes", [])
                     for attribute in attributes:
@@ -231,7 +243,7 @@ class SpanHandler:

     @staticmethod
     @contextmanager
-    def workflow_type(to_wrap=None):
+    def workflow_type(to_wrap=None, span:Span=None):
         token = SpanHandler.attach_workflow_type(to_wrap)
         try:
             yield
@@ -241,6 +253,20 @@

 class NonFrameworkSpanHandler(SpanHandler):

+    def get_workflow_name_in_progress(self) -> str:
+        return get_value(WORKFLOW_TYPE_KEY)
+
+    def is_framework_span_in_progess(self) -> bool:
+        return self.get_workflow_name_in_progress() in WORKFLOW_TYPE_MAP.values()
+
     # If the language framework is being executed, then skip generating direct openAI attributes and events
-    def skip_processor(self, to_wrap, wrapped, instance, args, kwargs) ->
-
+    def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
+        if self.is_framework_span_in_progess():
+            return ["attributes", "events"]
+
+    def set_span_type(self, to_wrap, wrapped, instance, output_processor, span:Span, args, kwargs) -> str:
+        span_type = super().set_span_type(to_wrap, wrapped, instance, output_processor, span, args, kwargs)
+        if self.is_framework_span_in_progess() and span_type is not None:
+            span_type = span_type+".modelapi"
+            span.set_attribute("span.type", span_type)
+        return span_type
monocle_apptrace/instrumentation/common/utils.py
CHANGED
@@ -236,6 +236,13 @@ def set_scopes_from_baggage(baggage_context:Context):
         scope_name = scope_key[len(MONOCLE_SCOPE_NAME_PREFIX):]
         set_scope(scope_name, scope_value)

+def get_parent_span() -> Span:
+    parent_span: Span = None
+    _parent_span_context = get_current()
+    if _parent_span_context is not None and _parent_span_context.get(_SPAN_KEY, None):
+        parent_span = _parent_span_context.get(_SPAN_KEY, None)
+    return parent_span
+
 def extract_http_headers(headers) -> object:
     global http_scopes
     trace_context:Context = extract(headers, context=get_current())
@@ -362,6 +369,27 @@ def get_llm_type(instance):
     except:
         pass

+def get_status(arguments):
+    if arguments['exception'] is not None:
+        return 'error'
+    else:
+        return 'success'
+
+def get_exception_status_code(arguments):
+    if arguments['exception'] is not None and hasattr(arguments['exception'], 'code'):
+        return arguments['exception'].code
+    else:
+        return 'error'
+
+def get_exception_message(arguments):
+    if arguments['exception'] is not None:
+        if hasattr(arguments['exception'], 'message'):
+            return arguments['exception'].message
+        else:
+            return arguments['exception'].__str__()
+    else:
+        return ''
+
 def patch_instance_method(obj, method_name, func):
     """
     Patch a special method (like __iter__) for a single instance.
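The three new helpers all read the same arguments mapping that hydrate_events builds ({"instance", "args", "kwargs", "result", "exception"}). A quick illustration of the expected outputs, assuming a plain RuntimeError that carries neither a code nor a message attribute:

    arguments = {"instance": None, "args": (), "kwargs": {}, "result": None,
                 "exception": RuntimeError("rate limited")}

    get_status(arguments)                 # 'error'   (an exception is present)
    get_exception_status_code(arguments)  # 'error'   (no .code attribute to report)
    get_exception_message(arguments)      # 'rate limited' (falls back to str(exception))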
monocle_apptrace/instrumentation/common/wrapper.py
CHANGED
@@ -9,7 +9,8 @@ from monocle_apptrace.instrumentation.common.utils import (
     set_scopes,
     with_tracer_wrapper,
     set_scope,
-    remove_scope
+    remove_scope,
+    get_parent_span
 )
 from monocle_apptrace.instrumentation.common.constants import WORKFLOW_TYPE_KEY, ADD_NEW_WORKFLOW
 logger = logging.getLogger(__name__)
@@ -30,12 +31,22 @@ def pre_process_span(name, tracer, handler, add_workflow_span, to_wrap, wrapped,
         SpanHandler.set_workflow_properties(span, to_wrap)
     else:
         SpanHandler.set_non_workflow_properties(span)
-
+    try:
+        handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
+    except Exception as e:
+        logger.info(f"Warning: Error occurred in pre_task_processing: {e}")

-def post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span):
+def post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span, parent_span=None, ex = None):
     if not (SpanHandler.is_root_span(span) or get_value(ADD_NEW_WORKFLOW) == True):
-
-
+        try:
+            handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span, parent_span, ex)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in hydrate_span: {e}")
+
+        try:
+            handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in post_task_processing: {e}")

 def get_span_name(to_wrap, instance):
     if to_wrap.get("span_name"):
@@ -49,44 +60,45 @@ def monocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap
     name = get_span_name(to_wrap, instance)
     return_value = None
     span_status = None
-
-
-
-
-        if SpanHandler.is_root_span(span) or add_workflow_span:
-            # Recursive call for the actual span
-            return_value, span_status = monocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, False, args, kwargs)
-            span.set_status(span_status)
-        else:
-            with SpanHandler.workflow_type(to_wrap):
-                return_value = wrapped(*args, **kwargs)
-            post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span)
-            span_status = span.status
-    else:
-        span = tracer.start_span(name)
-
+    auto_close_span = get_auto_close_span(to_wrap, kwargs)
+    parent_span = get_parent_span()
+    with tracer.start_as_current_span(name, end_on_exit=auto_close_span) as span:
         pre_process_span(name, tracer, handler, add_workflow_span, to_wrap, wrapped, instance, args, kwargs, span, source_path)

-
-
-
-        span.
-
-
-
-
-
-
-
-
-
+        if SpanHandler.is_root_span(span) or add_workflow_span:
+            # Recursive call for the actual span
+            return_value, span_status = monocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, False, args, kwargs)
+            span.set_status(span_status)
+            if not auto_close_span:
+                span.end()
+        else:
+            ex:Exception = None
+            try:
+                with SpanHandler.workflow_type(to_wrap, span):
+                    return_value = wrapped(*args, **kwargs)
+            except Exception as e:
+                ex = e
+                raise
+            finally:
+                def post_process_span_internal(ret_val):
+                    post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, ret_val, span, parent_span ,ex)
+                    if not auto_close_span:
+                        span.end()
+                if ex is None and not auto_close_span and to_wrap.get("output_processor") and to_wrap.get("output_processor").get("response_processor"):
+                    to_wrap.get("output_processor").get("response_processor")(to_wrap, return_value, post_process_span_internal)
+                else:
+                    post_process_span_internal(return_value)
+                span_status = span.status
     return return_value, span_status

 def monocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
     return_value = None
     token = None
     try:
-
+        try:
+            handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in pre_tracing: {e}")
         if to_wrap.get('skip_span', False) or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
             return_value = wrapped(*args, **kwargs)
         else:
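With get_auto_close_span the wrapper can now leave a span open past the wrapped call: when auto-close is off, post-processing is packaged into a callback and handed to an optional response_processor declared on the output_processor, which is expected to invoke the callback with the final result once it is known (for example after a streamed response has been fully consumed) and which thereby also ends the span. A rough sketch of such a processor, assuming the wrapped call returns a plain iterator of text chunks; the names below are illustrative, not package APIs:

    def buffered_response_processor(to_wrap, return_value, finish_span):
        # Drain the streamed result, then invoke the deferred hydration callback,
        # which also ends the still-open span when auto-close is disabled.
        chunks = list(return_value)
        finish_span("".join(chunks))

    STREAMING_INFERENCE = {
        "type": "inference",
        "response_processor": buffered_response_processor,
        "attributes": [],
        "events": [],
    }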
@@ -98,45 +110,44 @@ def monocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, inst
             detach(token)
         return return_value
     finally:
-
-
+        try:
+            handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in post_tracing: {e}")

 async def amonocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, add_workflow_span, args, kwargs):
     # Main span processing logic
     name = get_span_name(to_wrap, instance)
     return_value = None
     span_status = None
-
-
-
-
-        if SpanHandler.is_root_span(span) or add_workflow_span:
-            # Recursive call for the actual span
-            return_value, span_status = await amonocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, False, args, kwargs)
-            span.set_status(span_status)
-        else:
-            with SpanHandler.workflow_type(to_wrap):
-                return_value = await wrapped(*args, **kwargs)
-            span_status = span.status
-            post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span)
-    else:
-        span = tracer.start_span(name)
-
+    auto_close_span = get_auto_close_span(to_wrap, kwargs)
+    parent_span = get_parent_span()
+    with tracer.start_as_current_span(name, end_on_exit=auto_close_span) as span:
         pre_process_span(name, tracer, handler, add_workflow_span, to_wrap, wrapped, instance, args, kwargs, span, source_path)

-
-
-
-        span.
-
-
-
-
-
-
-
-
-
+        if SpanHandler.is_root_span(span) or add_workflow_span:
+            # Recursive call for the actual span
+            return_value, span_status = await amonocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, False, args, kwargs)
+            span.set_status(span_status)
+            if not auto_close_span:
+                span.end()
+        else:
+            ex:Exception = None
+            try:
+                with SpanHandler.workflow_type(to_wrap, span):
+                    return_value = await wrapped(*args, **kwargs)
+            except Exception as e:
+                ex = e
+                raise
+            finally:
+                def post_process_span_internal(ret_val):
+                    post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, ret_val, span, parent_span,ex)
+                    if not auto_close_span:
+                        span.end()
+                if ex is None and not auto_close_span and to_wrap.get("output_processor") and to_wrap.get("output_processor").get("response_processor"):
+                    to_wrap.get("output_processor").get("response_processor")(to_wrap, return_value, post_process_span_internal)
+                else:
+                    post_process_span_internal(return_value)
         span_status = span.status
         return return_value, span.status
@@ -144,7 +155,10 @@ async def amonocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrappe
     return_value = None
     token = None
     try:
-
+        try:
+            handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in pre_tracing: {e}")
         if to_wrap.get('skip_span', False) or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
             return_value = await wrapped(*args, **kwargs)
         else:
@@ -156,7 +170,10 @@ async def amonocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrappe
             detach(token)
         return return_value
     finally:
-
+        try:
+            handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in post_tracing: {e}")

 @with_tracer_wrapper
 def task_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
@@ -193,7 +210,7 @@ async def ascope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped,
 @with_tracer_wrapper
 def scopes_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
     scope_values = to_wrap.get('scope_values', None)
-    scope_values = evaluate_scope_values(args, kwargs, scope_values)
+    scope_values = evaluate_scope_values(args, kwargs, to_wrap, scope_values)
     token = None
     try:
         if scope_values:
@@ -207,7 +224,7 @@ def scopes_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, insta
 @with_tracer_wrapper
 async def ascopes_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
     scope_values = to_wrap.get('scope_values', None)
-    scope_values = evaluate_scope_values(args, kwargs, scope_values)
+    scope_values = evaluate_scope_values(args, kwargs, to_wrap, scope_values)
     token = None
     try:
         if scope_values:
@@ -218,7 +235,7 @@ async def ascopes_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped
         if token:
             remove_scope(token)

-def evaluate_scope_values(args, kwargs, scope_values):
+def evaluate_scope_values(args, kwargs, to_wrap, scope_values):
     if callable(scope_values):
         try:
             scope_values = scope_values(args, kwargs)
@@ -227,5 +244,4 @@ def evaluate_scope_values(args, kwargs, scope_values):
         scope_values = None
     if isinstance(scope_values, dict):
         return scope_values
-    return None
-
+    return None
monocle_apptrace/instrumentation/common/wrapper_method.py
CHANGED
@@ -10,6 +10,7 @@ from monocle_apptrace.instrumentation.metamodel.langchain.methods import (
 from monocle_apptrace.instrumentation.metamodel.llamaindex.methods import (LLAMAINDEX_METHODS, )
 from monocle_apptrace.instrumentation.metamodel.haystack.methods import (HAYSTACK_METHODS, )
 from monocle_apptrace.instrumentation.metamodel.openai.methods import (OPENAI_METHODS,)
+from monocle_apptrace.instrumentation.metamodel.openai._helper import OpenAISpanHandler
 from monocle_apptrace.instrumentation.metamodel.langgraph.methods import LANGGRAPH_METHODS
 from monocle_apptrace.instrumentation.metamodel.flask.methods import (FLASK_METHODS, )
 from monocle_apptrace.instrumentation.metamodel.flask._helper import FlaskSpanHandler, FlaskResponseSpanHandler
@@ -76,5 +77,6 @@ MONOCLE_SPAN_HANDLERS: Dict[str, SpanHandler] = {
     "flask_handler": FlaskSpanHandler(),
     "flask_response_handler": FlaskResponseSpanHandler(),
     "request_handler": RequestSpanHandler(),
-    "non_framework_handler": NonFrameworkSpanHandler()
+    "non_framework_handler": NonFrameworkSpanHandler(),
+    "openai_handler": OpenAISpanHandler(),
 }
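Handler names registered in MONOCLE_SPAN_HANDLERS are looked up via the span_handler key of each wrapped-method config, which is how the new openai_handler entry gets attached to the OpenAI create methods further down in this diff. A minimal sketch of wiring a custom handler the same way (the vendor package and handler name are hypothetical; import paths follow the files shown in this diff):

    from monocle_apptrace.instrumentation.common.span_handler import NonFrameworkSpanHandler
    from monocle_apptrace.instrumentation.common.wrapper import task_wrapper
    from monocle_apptrace.instrumentation.common.wrapper_method import MONOCLE_SPAN_HANDLERS

    class MyVendorSpanHandler(NonFrameworkSpanHandler):
        """Placeholder; override skip_processor/set_span_type as needed."""

    MONOCLE_SPAN_HANDLERS["my_vendor_handler"] = MyVendorSpanHandler()

    MY_VENDOR_METHOD = {
        "package": "my_vendor.client",          # hypothetical module to patch
        "object": "Completions",
        "method": "create",
        "wrapper_method": task_wrapper,
        "span_handler": "my_vendor_handler",
        "output_processor": {"type": "inference", "attributes": [], "events": []},
    }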
monocle_apptrace/instrumentation/metamodel/flask/_helper.py
CHANGED
@@ -70,7 +70,7 @@ class FlaskResponseSpanHandler(SpanHandler):
             if _parent_span_context is not None:
                 parent_span: Span = _parent_span_context.get(_SPAN_KEY, None)
                 if parent_span is not None:
-                    self.hydrate_events(to_wrap, wrapped, instance, args, kwargs, return_value, parent_span)
+                    self.hydrate_events(to_wrap, wrapped, instance, args, kwargs, return_value, parent_span=parent_span)
         except Exception as e:
             logger.info(f"Failed to propogate flask response: {e}")
         super().post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
monocle_apptrace/instrumentation/metamodel/haystack/_helper.py
CHANGED
@@ -121,7 +121,7 @@ def update_span_from_llm_response(response, instance):
     if response is not None and isinstance(response, dict):
         if "meta" in response:
             token_usage = response["meta"][0]["usage"]
-
+        elif "replies" in response: # and "meta" in response["replies"][0]:
             token_usage = response["replies"][0].meta["usage"]
     if token_usage is not None:
         temperature = instance.__dict__.get("temperature", None)
monocle_apptrace/instrumentation/metamodel/openai/_helper.py
CHANGED
@@ -6,11 +6,11 @@ and assistant messages from various input formats.
 import logging
 from monocle_apptrace.instrumentation.common.utils import (
     Option,
-    get_keys_as_tuple,
-    get_nested_value,
     try_option,
+    get_exception_message,
+    get_parent_span
 )
-
+from monocle_apptrace.instrumentation.common.span_handler import NonFrameworkSpanHandler, WORKFLOW_TYPE_MAP

 logger = logging.getLogger(__name__)
@@ -34,8 +34,11 @@ def extract_messages(kwargs):
     return []


-def extract_assistant_message(
+def extract_assistant_message(arguments):
     try:
+        if arguments["exception"] is not None:
+            return get_exception_message(arguments)
+        response = arguments["result"]
         if hasattr(response,"output_text") and len(response.output_text):
             return response.output_text
         if response is not None and hasattr(response,"choices") and len(response.choices) >0:
@@ -114,4 +117,22 @@ def get_inference_type(instance):
     if inference_type.unwrap_or(None):
         return 'azure_openai'
     else:
-        return 'openai'
+        return 'openai'
+
+class OpenAISpanHandler(NonFrameworkSpanHandler):
+    def is_teams_span_in_progress(self) -> bool:
+        return self.is_framework_span_in_progess() and self.get_workflow_name_in_progress() == WORKFLOW_TYPE_MAP["teams.ai"]
+
+    # If openAI is being called by Teams AI SDK, then retain the metadata part of the span events
+    def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
+        if self.is_teams_span_in_progress():
+            return ["attributes", "events.data.input", "events.data.output"]
+        else:
+            return super().skip_processor(to_wrap, wrapped, instance, span, args, kwargs)
+
+    def hydrate_events(self, to_wrap, wrapped, instance, args, kwargs, ret_result, span, parent_span=None, ex:Exception=None) -> bool:
+        # If openAI is being called by Teams AI SDK, then copy parent
+        if self.is_teams_span_in_progress() and ex is None:
+            return super().hydrate_events(to_wrap, wrapped, instance, args, kwargs, ret_result, span=parent_span, parent_span=None, ex=ex)
+
+        return super().hydrate_events(to_wrap, wrapped, instance, args, kwargs, ret_result, span, parent_span=parent_span, ex=ex)
monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py
CHANGED
@@ -8,6 +8,8 @@ from monocle_apptrace.instrumentation.metamodel.openai import (
 from monocle_apptrace.instrumentation.common.utils import (
     patch_instance_method,
     resolve_from_alias,
+    get_status,
+    get_exception_status_code
 )

 logger = logging.getLogger(__name__)
@@ -199,8 +201,16 @@ INFERENCE = {
                 "_comment": "this is result from LLM",
                 "attribute": "response",
                 "accessor": lambda arguments: _helper.extract_assistant_message(
-                    arguments
+                    arguments,
                 ),
+            },
+            {
+                "attribute": "status",
+                "accessor": lambda arguments: get_status(arguments)
+            },
+            {
+                "attribute": "status_code",
+                "accessor": lambda arguments: get_exception_status_code(arguments)
             }
         ],
     },
monocle_apptrace/instrumentation/metamodel/openai/methods.py
CHANGED
@@ -12,7 +12,7 @@ OPENAI_METHODS = [
         "object": "Completions",
         "method": "create",
         "wrapper_method": task_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     },
     {
@@ -20,7 +20,7 @@ OPENAI_METHODS = [
         "object": "AsyncCompletions",
         "method": "create",
         "wrapper_method": atask_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     },
     {
@@ -28,7 +28,7 @@ OPENAI_METHODS = [
         "object": "Embeddings",
         "method": "create",
         "wrapper_method": task_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": RETRIEVAL
     },
     {
@@ -36,7 +36,7 @@ OPENAI_METHODS = [
         "object": "AsyncEmbeddings",
         "method": "create",
         "wrapper_method": atask_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": RETRIEVAL
     },
     {
@@ -44,7 +44,7 @@ OPENAI_METHODS = [
         "object": "Responses",
         "method": "create",
         "wrapper_method": task_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     },
     {
@@ -52,7 +52,7 @@ OPENAI_METHODS = [
         "object": "AsyncResponses",
         "method": "create",
         "wrapper_method": atask_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     }
monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py
CHANGED
@@ -4,6 +4,8 @@ from monocle_apptrace.instrumentation.common.utils import (
     get_keys_as_tuple,
     get_nested_value,
     try_option,
+    get_exception_message,
+    get_exception_status_code
 )
 def capture_input(arguments):
     """
@@ -57,11 +59,41 @@ def capture_prompt_info(arguments):
     except Exception as e:
         return f"Error capturing prompt: {str(e)}"

-def
-    if
-
-
-
+def get_status_code(arguments):
+    if arguments["exception"] is not None:
+        return get_exception_status_code(arguments)
+    elif hasattr(arguments["result"], "status"):
+        return arguments["result"].status
+    else:
+        return 'success'
+
+def get_status(arguments):
+    if arguments["exception"] is not None:
+        return 'error'
+    elif get_status_code(arguments) == 'success':
+        return 'success'
+    else:
+        return 'error'
+
+def get_response(arguments) -> str:
+    status = get_status_code(arguments)
+    response:str = ""
+    if status == 'success':
+        if hasattr(arguments["result"], "message"):
+            response = arguments["result"].message.content
+        else:
+            response = str(arguments["result"])
+    else:
+        if arguments["exception"] is not None:
+            response = get_exception_message(arguments)
+        elif hasattr(arguments["result"], "error"):
+            response = arguments["result"].error
+    return response
+
+def check_status(arguments):
+    status = get_status_code(arguments)
+    if status != 'success':
+        raise MonocleSpanException(f"{status}")

 def extract_provider_name(instance):
     provider_url: Option[str] = try_option(getattr, instance._client.base_url, 'host')
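get_response and check_status both key off get_status_code, so a result object whose status is not 'success' is routed into the error path. A tiny illustration, using SimpleNamespace stand-ins for the Teams AI PromptResponse object (the field values are made up):

    from types import SimpleNamespace

    ok = {"result": SimpleNamespace(status="success", message=SimpleNamespace(content="hi")),
          "exception": None}
    bad = {"result": SimpleNamespace(status="rate_limited", error="too many requests"),
           "exception": None}

    get_status(ok), get_response(ok)    # ('success', 'hi')
    get_status(bad), get_response(bad)  # ('error', 'too many requests')
    # check_status(bad) would raise MonocleSpanException("rate_limited")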
monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py
CHANGED
@@ -28,42 +28,22 @@ ACTIONPLANNER_OUTPUT_PROCESSOR = {
             {
                 "attribute": "tokenizer",
                 "accessor": lambda arguments: arguments["instance"]._options.tokenizer.__class__.__name__ if hasattr(arguments["instance"], "_options") else "GPTTokenizer"
+            },
+            {
+                "attribute": "prompt_name",
+                "accessor": _helper.capture_prompt_info
+            },
+            {
+                "attribute": "validator",
+                "accessor": lambda arguments: arguments["kwargs"].get("validator").__class__.__name__ if arguments.get("kwargs", {}).get("validator") else "DefaultResponseValidator"
+            },
+            {
+                "attribute": "memory_type",
+                "accessor": lambda arguments: arguments["kwargs"].get("memory").__class__.__name__ if arguments.get("kwargs", {}).get("memory") else "unknown"
             }
         ]
     ],
     "events": [
-        {
-            "name": "data.input",
-            "_comment": "input configuration to ActionPlanner",
-            "attributes": [
-                {
-                    "attribute": "prompt_name",
-                    "accessor": _helper.capture_prompt_info
-                },
-                {
-                    "attribute": "validator",
-                    "accessor": lambda arguments: arguments["kwargs"].get("validator").__class__.__name__ if arguments.get("kwargs", {}).get("validator") else "DefaultResponseValidator"
-                },
-                {
-                    "attribute": "memory_type",
-                    "accessor": lambda arguments: arguments["kwargs"].get("memory").__class__.__name__ if arguments.get("kwargs", {}).get("memory") else "unknown"
-                }
-            ]
-        },
-        {
-            "name": "data.output",
-            "_comment": "output from ActionPlanner",
-            "attributes": [
-                {
-                    "attribute": "status",
-                    "accessor": lambda arguments: _helper.status_check(arguments)
-                },
-                {
-                    "attribute": "response",
-                    "accessor": lambda arguments: arguments["result"].message.content if hasattr(arguments["result"], "message") else str(arguments["result"])
-                }
-            ]
-        },
         {
             "name": "metadata",
             "attributes": [
monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py
CHANGED
@@ -3,7 +3,7 @@ from monocle_apptrace.instrumentation.metamodel.teamsai import (
 )
 from monocle_apptrace.instrumentation.common.utils import get_llm_type
 TEAMAI_OUTPUT_PROCESSOR = {
-    "type": "inference",
+    "type": "inference.framework",
     "attributes": [
         [
             {
@@ -52,25 +52,23 @@ TEAMAI_OUTPUT_PROCESSOR = {
             "name": "data.output",
             "_comment": "output from Teams AI",
             "attributes": [
+                {
+                    "attribute": "status",
+                    "accessor": lambda arguments: _helper.get_status(arguments)
+                },
+                {
+                    "attribute": "status_code",
+                    "accessor": lambda arguments: _helper.get_status_code(arguments)
+                },
                 {
                     "attribute": "response",
-                    "accessor": lambda arguments:
+                    "accessor": lambda arguments: _helper.get_response(arguments)
+                },
+                {
+                    "attribute": "check_status",
+                    "accessor": lambda arguments: _helper.check_status(arguments)
                 }
             ]
         },
-        # {
-        #     "name": "metadata",
-        #     "attributes": [
-        #         {
-        #             "_comment": "metadata from Teams AI response",
-        #             "accessor": lambda arguments: {
-        #                 "prompt_tokens": arguments["result"].get("usage", {}).get("prompt_tokens", 0),
-        #                 "completion_tokens": arguments["result"].get("usage", {}).get("completion_tokens", 0),
-        #                 "total_tokens": arguments["result"].get("usage", {}).get("total_tokens", 0),
-        #                 "latency_ms": arguments.get("latency_ms")
-        #             }
-        #         }
-        #     ]
-        # }
     ]
 }
{monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b3.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: monocle_apptrace
-Version: 0.4.
+Version: 0.4.0b3
 Summary: package with monocle genAI tracing
 Project-URL: Homepage, https://github.com/monocle2ai/monocle
 Project-URL: Issues, https://github.com/monocle2ai/monocle/issues
@@ -16,40 +16,44 @@ Requires-Dist: opentelemetry-sdk>=1.21.0
 Requires-Dist: requests
 Requires-Dist: wrapt>=1.14.0
 Provides-Extra: aws
-Requires-Dist: boto3==1.
+Requires-Dist: boto3==1.37.24; extra == 'aws'
 Provides-Extra: azure
 Requires-Dist: azure-storage-blob==12.22.0; extra == 'azure'
 Provides-Extra: dev
-Requires-Dist: anthropic
+Requires-Dist: anthropic-haystack; extra == 'dev'
+Requires-Dist: anthropic==0.52.0; extra == 'dev'
 Requires-Dist: azure-storage-blob==12.22.0; extra == 'dev'
-Requires-Dist: boto3==1.
-Requires-Dist: chromadb==0.
+Requires-Dist: boto3==1.37.24; extra == 'dev'
+Requires-Dist: chromadb==1.0.10; extra == 'dev'
 Requires-Dist: datasets==2.20.0; extra == 'dev'
 Requires-Dist: faiss-cpu==1.8.0; extra == 'dev'
 Requires-Dist: flask; extra == 'dev'
 Requires-Dist: haystack-ai==2.3.0; extra == 'dev'
 Requires-Dist: instructorembedding==1.0.1; extra == 'dev'
-Requires-Dist: langchain-
-Requires-Dist: langchain-
-Requires-Dist: langchain-
-Requires-Dist: langchain-
-Requires-Dist: langchain-
-Requires-Dist: langchain==0.
+Requires-Dist: langchain-anthropic==0.3.13; extra == 'dev'
+Requires-Dist: langchain-aws==0.2.23; extra == 'dev'
+Requires-Dist: langchain-chroma==0.2.4; extra == 'dev'
+Requires-Dist: langchain-community==0.3.24; extra == 'dev'
+Requires-Dist: langchain-mistralai==0.2.10; extra == 'dev'
+Requires-Dist: langchain-openai==0.3.18; extra == 'dev'
+Requires-Dist: langchain==0.3.25; extra == 'dev'
 Requires-Dist: langchainhub==0.1.21; extra == 'dev'
 Requires-Dist: langgraph==0.2.68; extra == 'dev'
-Requires-Dist: llama-index-embeddings-huggingface==0.
-Requires-Dist: llama-index-llms-
-Requires-Dist: llama-index-llms-
-Requires-Dist: llama-index-
-Requires-Dist: llama-index-vector-stores-
-Requires-Dist: llama-index==0.
+Requires-Dist: llama-index-embeddings-huggingface==0.5.4; extra == 'dev'
+Requires-Dist: llama-index-llms-anthropic==0.6.19; extra == 'dev'
+Requires-Dist: llama-index-llms-azure-openai==0.3.2; extra == 'dev'
+Requires-Dist: llama-index-llms-mistralai==0.4.0; extra == 'dev'
+Requires-Dist: llama-index-vector-stores-chroma==0.4.1; extra == 'dev'
+Requires-Dist: llama-index-vector-stores-opensearch==0.5.4; extra == 'dev'
+Requires-Dist: llama-index==0.12.37; extra == 'dev'
 Requires-Dist: mistral-haystack==0.0.2; extra == 'dev'
 Requires-Dist: numpy==1.26.4; extra == 'dev'
 Requires-Dist: opendal==0.45.14; extra == 'dev'
 Requires-Dist: opensearch-haystack==1.2.0; extra == 'dev'
 Requires-Dist: opentelemetry-instrumentation-flask; extra == 'dev'
 Requires-Dist: parameterized==0.9.0; extra == 'dev'
-Requires-Dist: pytest==
+Requires-Dist: pytest-asyncio==0.26.0; extra == 'dev'
+Requires-Dist: pytest==8.3.5; extra == 'dev'
 Requires-Dist: requests-aws4auth==1.2.3; extra == 'dev'
 Requires-Dist: sentence-transformers==2.6.1; extra == 'dev'
 Requires-Dist: types-requests==2.31.0.20240106; extra == 'dev'
{monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b3.dist-info}/RECORD
CHANGED
@@ -12,13 +12,13 @@ monocle_apptrace/exporters/azure/blob_exporter_opendal.py,sha256=wQUtciyFMD28tpW
 monocle_apptrace/exporters/okahu/okahu_exporter.py,sha256=wFkHd87nOXzFMRejrUiO6N0mNIWkxAyRzob6o82lr_c,4765
 monocle_apptrace/instrumentation/__init__.py,sha256=oa412OuokRm9Vf3XlCJLqpZjz9ZcuxAKxnEBvOK7u2M,21
 monocle_apptrace/instrumentation/common/__init__.py,sha256=oNEcgw4N36_XzPeN1gc7wxhPjVg-Vhh8EjvUIQZ7pDM,224
-monocle_apptrace/instrumentation/common/constants.py,sha256=
+monocle_apptrace/instrumentation/common/constants.py,sha256=XxxPUg0tQGQLM12Z3yzLa-shgU8ZZwlAGsFg9MT7mao,3208
 monocle_apptrace/instrumentation/common/instrumentor.py,sha256=CwQT1oiPyzv_xe8HhdOH7Ucmd18r2Wk3ortOTx6bhpQ,16324
-monocle_apptrace/instrumentation/common/span_handler.py,sha256=
+monocle_apptrace/instrumentation/common/span_handler.py,sha256=Js6RBb6pT-GtlAYSlsVIQC2YBar7IKj6SPNF9cqOQ3U,13228
 monocle_apptrace/instrumentation/common/tracing.md,sha256=6Lr8QGxEFHKhj-mMvLV3xjFnplKSs6HEdwl0McPK47M,7577
-monocle_apptrace/instrumentation/common/utils.py,sha256=
-monocle_apptrace/instrumentation/common/wrapper.py,sha256=
-monocle_apptrace/instrumentation/common/wrapper_method.py,sha256=
+monocle_apptrace/instrumentation/common/utils.py,sha256=oPyUdfBwRCWuxapjtxTd-L4h1Mr6mCDvhSs3do8p0z8,13928
+monocle_apptrace/instrumentation/common/wrapper.py,sha256=NZC0xymn2q6_bFK0d91F0Z-W-YoCmIjOZEm1t1XKSY4,11409
+monocle_apptrace/instrumentation/common/wrapper_method.py,sha256=ig98if45QDU-N76uNAxcb1O7xL8YpwtxJLcb5Dh71bc,4013
 monocle_apptrace/instrumentation/metamodel/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/aiohttp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/aiohttp/_helper.py,sha256=h-zrif2vgPL9JbCf1eKHbKBYHAxMdHxOdY-soIDGti8,2361
@@ -36,20 +36,20 @@ monocle_apptrace/instrumentation/metamodel/botocore/entities/__init__.py,sha256=
 monocle_apptrace/instrumentation/metamodel/botocore/entities/inference.py,sha256=rAsvhRIR9XYGd8NHTFDJQuiQSTzFoZ4oKeA6kEhK0QQ,2363
 monocle_apptrace/instrumentation/metamodel/botocore/handlers/botocore_span_handler.py,sha256=8FSdoQSS6DuowF7KHhCRj5kpxYF-bBNR47W1tB-gVh0,1433
 monocle_apptrace/instrumentation/metamodel/flask/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/flask/_helper.py,sha256=
+monocle_apptrace/instrumentation/metamodel/flask/_helper.py,sha256=seLVsL5gE3GbjY3Yetgg1WnswhDzb0zEQR05fHf5xTM,3094
 monocle_apptrace/instrumentation/metamodel/flask/methods.py,sha256=dWCMEDk-HWHiD0vlMoAVYbIFclstmVkUpRrCtqDWyFE,739
 monocle_apptrace/instrumentation/metamodel/flask/entities/http.py,sha256=wIudpNk6-DY72k0p90XtvjKt8BilvOd-87Q2iqJnWa8,1525
 monocle_apptrace/instrumentation/metamodel/haystack/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/haystack/_helper.py,sha256=
+monocle_apptrace/instrumentation/metamodel/haystack/_helper.py,sha256=HjPZUA9K2dPUcQQNzMN3jX8UV0OHF9N7tQ6kSl1OvxM,5439
 monocle_apptrace/instrumentation/metamodel/haystack/methods.py,sha256=Zd70ycMQ5qWsjPXnQL6qoThNKrQA80P6t11sFyEbQR4,1585
 monocle_apptrace/instrumentation/metamodel/haystack/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py,sha256=
+monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py,sha256=FzUg66WRYAcPbWqdq8iqfv7inkry9L_MrC1o5wd96bg,3217
 monocle_apptrace/instrumentation/metamodel/haystack/entities/retrieval.py,sha256=nq3lsk2qFxXqwrAHsBt8zrh4ZVGAJABkPtylrjUCCqc,2357
 monocle_apptrace/instrumentation/metamodel/langchain/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/langchain/_helper.py,sha256=CziW8KUmRqGCi_y2fcC9YMsP2wP11OMUitqKupSXStg,5315
 monocle_apptrace/instrumentation/metamodel/langchain/methods.py,sha256=sQLrji0NLuG8i8q5UdbgzPUjWh_WmromfvWL3pGZdCk,2941
 monocle_apptrace/instrumentation/metamodel/langchain/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py,sha256=
+monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py,sha256=RrLPoYLfn3AB9-siwo1NFLT2aPDJtJm9Hgoj6ad2S7Y,2774
 monocle_apptrace/instrumentation/metamodel/langchain/entities/retrieval.py,sha256=r4UqTCT5vOfkbz9lwoTRoiMkUUJtPMwqOYbqo53A6K8,2039
 monocle_apptrace/instrumentation/metamodel/langgraph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/langgraph/_helper.py,sha256=-XmAbhkgqwaunFZa-BP0zWZ3e-uD-ihSszbn5Cz75yc,2043
@@ -61,27 +61,27 @@ monocle_apptrace/instrumentation/metamodel/llamaindex/_helper.py,sha256=1Rq2sfoL
 monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py,sha256=p2mDYF_Izgz9M919n3Do5tlc5XRAZt_rnkFRqqIO5yo,3001
 monocle_apptrace/instrumentation/metamodel/llamaindex/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/llamaindex/entities/agent.py,sha256=g7IEwFMLjYvxljX7iHoYSPJW6k-wC7Z3i_y2qlNEZcs,1338
-monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py,sha256=
+monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py,sha256=XJI6CbGdsogQLQC41KEJqkRe1THEN_vu8pA_H5RGNww,2737
 monocle_apptrace/instrumentation/metamodel/llamaindex/entities/retrieval.py,sha256=QBF1nrqog5KHh925jiY2V-kejL6iVLKUowZmqUDoiJ4,1870
 monocle_apptrace/instrumentation/metamodel/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/openai/_helper.py,sha256=
-monocle_apptrace/instrumentation/metamodel/openai/methods.py,sha256=
+monocle_apptrace/instrumentation/metamodel/openai/_helper.py,sha256=KmW1Xbrfw1aWsXrRGtF6bAd2TjblU5i2-X0KVMYHS5M,5682
+monocle_apptrace/instrumentation/metamodel/openai/methods.py,sha256=jpqZyfiJbzMz1r3W3fwMCGiQsbiDSkhqgADJextGxFQ,1796
 monocle_apptrace/instrumentation/metamodel/openai/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py,sha256=
+monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py,sha256=8UdUnKnQsWFPsrKnhNYVRM5ZAMFQl_PZlh67QC_wQ0k,8271
 monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py,sha256=LU7aec302ZqPrs9MzFWU-JTnhK8OpYfgQKMmktlD6-8,1457
 monocle_apptrace/instrumentation/metamodel/requests/__init__.py,sha256=mg04UgoPzzcH-cPOahYUqN9T-TolZyOZipnBwDg5TP8,250
 monocle_apptrace/instrumentation/metamodel/requests/_helper.py,sha256=GS03VbT9LiGwt4Mz2DPHtxuWd3xhQL4liS9-dfpy9SE,1985
 monocle_apptrace/instrumentation/metamodel/requests/methods.py,sha256=O7lkglRvV97zqnCu6r2JwvW8WQqi4uvlpmNkAPpXigE,440
 monocle_apptrace/instrumentation/metamodel/requests/entities/http.py,sha256=TlY4NZtPleewbF5W0qV61L2ByDOf44EOZhKQgwzRIKc,1669
 monocle_apptrace/instrumentation/metamodel/teamsai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py,sha256=
+monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py,sha256=mrN-AQ0Nn59_jKidyvibP76JI17SdVFAdpBw_ouuu2U,3821
 monocle_apptrace/instrumentation/metamodel/teamsai/methods.py,sha256=i0Rz6Gn_Skey9uuCtJftNfMjGBbMMhuMMQcx7TPYQXU,2214
 monocle_apptrace/instrumentation/metamodel/teamsai/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py,sha256=
-monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py,sha256=
-monocle_apptrace-0.4.
-monocle_apptrace-0.4.
-monocle_apptrace-0.4.
-monocle_apptrace-0.4.
-monocle_apptrace-0.4.
+monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py,sha256=6rb75bFKh_hvgD8dJzcIeXeJjLjhm9nXuxHI3F-icqo,2405
+monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py,sha256=M5uPEbP5c57txrd7BDRXhK5rvRJfyNyvqavtkXuPjXU,2738
+monocle_apptrace-0.4.0b3.dist-info/METADATA,sha256=pJPcqamcOsIMTK6Zhv0JCm57OQu0cJ6gxfZ8HxLjhJs,6592
+monocle_apptrace-0.4.0b3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+monocle_apptrace-0.4.0b3.dist-info/licenses/LICENSE,sha256=ay9trLiP5I7ZsFXo6AqtkLYdRqe5S9r-DrPOvsNlZrg,9136
+monocle_apptrace-0.4.0b3.dist-info/licenses/NOTICE,sha256=9jn4xtwM_uUetJMx5WqGnhrR7MIhpoRlpokjSTlyt8c,112
+monocle_apptrace-0.4.0b3.dist-info/RECORD,,
{monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b3.dist-info}/WHEEL: File without changes
{monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b3.dist-info}/licenses/LICENSE: File without changes
{monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b3.dist-info}/licenses/NOTICE: File without changes