monocle-apptrace 0.4.0b1__py3-none-any.whl → 0.4.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of monocle-apptrace might be problematic.
- monocle_apptrace/instrumentation/common/constants.py +4 -4
- monocle_apptrace/instrumentation/common/span_handler.py +52 -28
- monocle_apptrace/instrumentation/common/utils.py +15 -0
- monocle_apptrace/instrumentation/common/wrapper.py +66 -30
- monocle_apptrace/instrumentation/common/wrapper_method.py +3 -1
- monocle_apptrace/instrumentation/metamodel/haystack/_helper.py +1 -1
- monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py +1 -1
- monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py +1 -1
- monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py +1 -1
- monocle_apptrace/instrumentation/metamodel/openai/_helper.py +10 -2
- monocle_apptrace/instrumentation/metamodel/openai/methods.py +6 -6
- monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py +37 -5
- monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py +12 -32
- monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py +14 -16
- {monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b2.dist-info}/METADATA +2 -1
- {monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b2.dist-info}/RECORD +19 -19
- {monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b2.dist-info}/WHEEL +0 -0
- {monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b2.dist-info}/licenses/LICENSE +0 -0
- {monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b2.dist-info}/licenses/NOTICE +0 -0
monocle_apptrace/instrumentation/common/constants.py
CHANGED
@@ -14,10 +14,10 @@ GITHUB_CODESPACE_IDENTIFIER_ENV_NAME = "GITHUB_REPOSITORY"
 # Azure naming reference can be found here
 # https://learn.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/resource-abbreviations
 # https://docs.aws.amazon.com/resource-explorer/latest/userguide/supported-resource-types.html#services-lookoutmetrics
-AZURE_FUNCTION_NAME = "
-AZURE_APP_SERVICE_NAME = "
-AZURE_ML_SERVICE_NAME = "
-AWS_LAMBDA_SERVICE_NAME = "
+AZURE_FUNCTION_NAME = "azure_func"
+AZURE_APP_SERVICE_NAME = "azure_webapp"
+AZURE_ML_SERVICE_NAME = "azure_ml"
+AWS_LAMBDA_SERVICE_NAME = "aws_lambda"
 GITHUB_CODESPACE_SERVICE_NAME = "github_codespace"

 # Env variables to identify infra service type
monocle_apptrace/instrumentation/common/span_handler.py
CHANGED
@@ -42,8 +42,17 @@ class SpanHandler:
     def skip_span(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
         return False

-    def skip_processor(self, to_wrap, wrapped, instance, args, kwargs) ->
-        return
+    def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
+        return []
+
+    def set_span_type(self, to_wrap, wrapped, instance, output_processor, span:Span, args, kwargs) -> str:
+        span_type:str = None
+        if 'type' in output_processor:
+            span_type = output_processor['type']
+            span.set_attribute("span.type", span_type)
+        else:
+            logger.warning("type of span not found or incorrect written in entity json")
+        return span_type

     def pre_task_processing(self, to_wrap, wrapped, instance, args,kwargs, span):
         if "pipeline" in to_wrap['package']:
@@ -69,30 +78,32 @@ class SpanHandler:
         workflow_name = SpanHandler.get_workflow_name(span=span)
         if workflow_name:
             span.set_attribute("workflow.name", workflow_name)
+            span.set_attribute("span.type", "generic")

     def post_task_processing(self, to_wrap, wrapped, instance, args, kwargs, result, span:Span):
-
-        span.set_status(StatusCode.OK)
+        pass

-    def hydrate_span(self, to_wrap, wrapped, instance, args, kwargs, result, span) -> bool:
-
-
-
-
+    def hydrate_span(self, to_wrap, wrapped, instance, args, kwargs, result, span, ex:Exception = None) -> bool:
+        try:
+            detected_error_in_attribute = self.hydrate_attributes(to_wrap, wrapped, instance, args, kwargs, result, span)
+            detected_error_in_event = self.hydrate_events(to_wrap, wrapped, instance, args, kwargs, result, span, ex)
+            if detected_error_in_attribute or detected_error_in_event:
+                span.set_attribute(MONOCLE_DETECTED_SPAN_ERROR, True)
+        finally:
+            if span.status.status_code == StatusCode.UNSET and ex is None:
+                span.set_status(StatusCode.OK)

     def hydrate_attributes(self, to_wrap, wrapped, instance, args, kwargs, result, span:Span) -> bool:
         detected_error:bool = False
         span_index = 0
         if SpanHandler.is_root_span(span):
             span_index = 2 # root span will have workflow and hosting entities pre-populated
-        if
-            'output_processor' in to_wrap and to_wrap["output_processor"] is not None):
+        if 'output_processor' in to_wrap and to_wrap["output_processor"] is not None:
             output_processor=to_wrap['output_processor']
-
-
-
-
-            if 'attributes' in output_processor:
+            self.set_span_type(to_wrap, wrapped, instance, output_processor, span, args, kwargs)
+            skip_processors:list[str] = self.skip_processor(to_wrap, wrapped, instance, span, args, kwargs) or []
+
+            if 'attributes' in output_processor and 'attributes' not in skip_processors:
                 for processors in output_processor["attributes"]:
                     for processor in processors:
                         attribute = processor.get('attribute')
@@ -113,10 +124,6 @@ class SpanHandler:
                         else:
                             logger.debug(f"{' and '.join([key for key in ['attribute', 'accessor'] if not processor.get(key)])} not found or incorrect in entity JSON")
                     span_index += 1
-            else:
-                logger.debug("attributes not found or incorrect written in entity json")
-        else:
-            span.set_attribute("span.type", "generic")

         # set scopes as attributes by calling get_scopes()
         # scopes is a Mapping[str:object], iterate directly with .items()
@@ -127,16 +134,19 @@ class SpanHandler:
         span.set_attribute("entity.count", span_index)
         return detected_error

-    def hydrate_events(self, to_wrap, wrapped, instance, args, kwargs, ret_result, span) -> bool:
+    def hydrate_events(self, to_wrap, wrapped, instance, args, kwargs, ret_result, span, ex:Exception=None) -> bool:
         detected_error:bool = False
-        if
-            'output_processor' in to_wrap and to_wrap["output_processor"] is not None):
+        if 'output_processor' in to_wrap and to_wrap["output_processor"] is not None:
             output_processor=to_wrap['output_processor']
-
-
+            skip_processors:list[str] = self.skip_processor(to_wrap, wrapped, instance, span, args, kwargs) or []
+
+            arguments = {"instance": instance, "args": args, "kwargs": kwargs, "result": ret_result, "exception":ex}
+            if 'events' in output_processor and 'events' not in skip_processors:
                 events = output_processor['events']
                 for event in events:
                     event_name = event.get("name")
+                    if 'events.'+event_name in skip_processors:
+                        continue
                     event_attributes = {}
                     attributes = event.get("attributes", [])
                     for attribute in attributes:
@@ -231,7 +241,7 @@ class SpanHandler:

     @staticmethod
     @contextmanager
-    def workflow_type(to_wrap=None):
+    def workflow_type(to_wrap=None, span:Span=None):
         token = SpanHandler.attach_workflow_type(to_wrap)
         try:
             yield
@@ -241,6 +251,20 @@

 class NonFrameworkSpanHandler(SpanHandler):

+    def get_workflow_name_in_progress(self) -> str:
+        return get_value(WORKFLOW_TYPE_KEY)
+
+    def is_framework_span_in_progess(self) -> bool:
+        return self.get_workflow_name_in_progress() in WORKFLOW_TYPE_MAP.values()
+
     # If the language framework is being executed, then skip generating direct openAI attributes and events
-    def skip_processor(self, to_wrap, wrapped, instance, args, kwargs) ->
-
+    def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
+        if self.is_framework_span_in_progess():
+            return ["attributes", "events"]
+
+    def set_span_type(self, to_wrap, wrapped, instance, output_processor, span:Span, args, kwargs) -> str:
+        span_type = super().set_span_type(to_wrap, wrapped, instance, output_processor, span, args, kwargs)
+        if self.is_framework_span_in_progess() and span_type is not None:
+            span_type = span_type+".modelapi"
+            span.set_attribute("span.type", span_type)
+        return span_type
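For orientation: with this change, skip_processor returns a list of processor names to suppress ("attributes", "events", or "events.<event name>") instead of a boolean, and set_span_type centralizes how span.type is derived from the output processor. A minimal sketch of a custom handler built on that contract; the class name and the skipped event names are hypothetical examples, not part of the package:

from monocle_apptrace.instrumentation.common.span_handler import SpanHandler

class RedactingSpanHandler(SpanHandler):  # hypothetical example handler
    def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
        # Drop the raw prompt/response events but keep attributes and the
        # remaining events (e.g. "metadata"); "events.<name>" targets one event.
        return ["events.data.input", "events.data.output"]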
monocle_apptrace/instrumentation/common/utils.py
CHANGED
@@ -362,6 +362,21 @@ def get_llm_type(instance):
     except:
         pass

+def get_exception_status_code(arguments):
+    if arguments['exception'] is not None and hasattr(arguments['exception'], 'code'):
+        return arguments['exception'].code
+    else:
+        return 'error'
+
+def get_exception_message(arguments):
+    if arguments['exception'] is not None:
+        if hasattr(arguments['exception'], 'message'):
+            return arguments['exception'].message
+        else:
+            return arguments['exception'].__str__()
+    else:
+        return ''
+
 def patch_instance_method(obj, method_name, func):
     """
     Patch a special method (like __iter__) for a single instance.
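A small illustration of how the two new helpers behave, given an arguments dictionary shaped like the one hydrate_events builds above (the exception class here is invented for the example):

from monocle_apptrace.instrumentation.common.utils import (
    get_exception_message,
    get_exception_status_code,
)

class RateLimitError(Exception):  # stand-in exception for the example
    code = 429

arguments = {"instance": None, "args": (), "kwargs": {}, "result": None,
             "exception": RateLimitError("too many requests")}

print(get_exception_status_code(arguments))  # 429; falls back to 'error' when there is no .code
print(get_exception_message(arguments))      # 'too many requests'; uses str() when there is no .message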
monocle_apptrace/instrumentation/common/wrapper.py
CHANGED
@@ -30,12 +30,22 @@ def pre_process_span(name, tracer, handler, add_workflow_span, to_wrap, wrapped,
         SpanHandler.set_workflow_properties(span, to_wrap)
     else:
         SpanHandler.set_non_workflow_properties(span)
-
+    try:
+        handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
+    except Exception as e:
+        logger.info(f"Warning: Error occurred in pre_task_processing: {e}")

-def post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span):
+def post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span, ex = None):
     if not (SpanHandler.is_root_span(span) or get_value(ADD_NEW_WORKFLOW) == True):
-
-
+        try:
+            handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span, ex)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in hydrate_span: {e}")
+
+        try:
+            handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in post_task_processing: {e}")

 def get_span_name(to_wrap, instance):
     if to_wrap.get("span_name"):
@@ -58,9 +68,15 @@ def monocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap
                 return_value, span_status = monocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, False, args, kwargs)
                 span.set_status(span_status)
             else:
-
-
-
+                ex:Exception = None
+                try:
+                    with SpanHandler.workflow_type(to_wrap, span):
+                        return_value = wrapped(*args, **kwargs)
+                except Exception as e:
+                    ex = e
+                    raise
+                finally:
+                    post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span, ex)
             span_status = span.status
     else:
         span = tracer.start_span(name)
@@ -72,13 +88,15 @@ def monocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap
             post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, ret_val, span)
             span.end()

-
-
-
-
-            to_wrap.get("output_processor").get("
-
-
+        try:
+            with SpanHandler.workflow_type(to_wrap, span):
+                return_value = wrapped(*args, **kwargs)
+        finally:
+            if to_wrap.get("output_processor") and to_wrap.get("output_processor").get("response_processor"):
+                # Process the stream
+                to_wrap.get("output_processor").get("response_processor")(to_wrap, return_value, post_process_span_internal)
+            else:
+                span.end()
         span_status = span.status
     return return_value, span_status

@@ -86,7 +104,10 @@ def monocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, inst
     return_value = None
     token = None
     try:
-
+        try:
+            handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in pre_tracing: {e}")
         if to_wrap.get('skip_span', False) or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
             return_value = wrapped(*args, **kwargs)
         else:
@@ -98,8 +119,10 @@
             detach(token)
         return return_value
     finally:
-
-
+        try:
+            handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in post_tracing: {e}")

 async def amonocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, add_workflow_span, args, kwargs):
     # Main span processing logic
@@ -115,10 +138,16 @@ async def amonocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler,
                 return_value, span_status = await amonocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, False, args, kwargs)
                 span.set_status(span_status)
             else:
-
-
+                ex:Exception = None
+                try:
+                    with SpanHandler.workflow_type(to_wrap, span):
+                        return_value = await wrapped(*args, **kwargs)
+                except Exception as e:
+                    ex = e
+                    raise
+                finally:
+                    post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span, ex)
             span_status = span.status
-            post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span)
     else:
         span = tracer.start_span(name)

@@ -129,14 +158,15 @@
             post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, ret_val, span)
             span.end()

-
-
-
-
-
-
-
-
+        try:
+            with SpanHandler.workflow_type(to_wrap, span):
+                return_value = await wrapped(*args, **kwargs)
+        finally:
+            if to_wrap.get("output_processor") and to_wrap.get("output_processor").get("response_processor"):
+                # Process the stream
+                to_wrap.get("output_processor").get("response_processor")(to_wrap, return_value, post_process_span_internal)
+            else:
+                span.end()
         span_status = span.status
         return return_value, span.status

@@ -144,7 +174,10 @@ async def amonocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrappe
     return_value = None
     token = None
     try:
-
+        try:
+            handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in pre_tracing: {e}")
         if to_wrap.get('skip_span', False) or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
             return_value = await wrapped(*args, **kwargs)
         else:
@@ -156,7 +189,10 @@
             detach(token)
         return return_value
     finally:
-
+        try:
+            handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in post_tracing: {e}")

 @with_tracer_wrapper
 def task_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
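The wrapper changes above all follow one pattern: handler callbacks (pre_tracing, pre/post_task_processing, hydrate_span, post_tracing) are individually guarded so an instrumentation bug only logs a warning, while an exception raised by the wrapped call is captured, re-raised to the caller, and still passed to post-processing so the span records the failure. A stripped-down sketch of that control flow (simplified, not the actual wrapper code):

def run_with_span(wrapped, post_process, *args, **kwargs):
    ex = None
    return_value = None
    try:
        return_value = wrapped(*args, **kwargs)   # the application call
        return return_value
    except Exception as e:
        ex = e                                    # remember the failure for the span
        raise                                     # the caller still sees the exception
    finally:
        try:
            post_process(return_value, ex)        # hydrate attributes/events even on error
        except Exception as e:
            print(f"Warning: Error occurred in post processing: {e}")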
monocle_apptrace/instrumentation/common/wrapper_method.py
CHANGED
@@ -10,6 +10,7 @@ from monocle_apptrace.instrumentation.metamodel.langchain.methods import (
 from monocle_apptrace.instrumentation.metamodel.llamaindex.methods import (LLAMAINDEX_METHODS, )
 from monocle_apptrace.instrumentation.metamodel.haystack.methods import (HAYSTACK_METHODS, )
 from monocle_apptrace.instrumentation.metamodel.openai.methods import (OPENAI_METHODS,)
+from monocle_apptrace.instrumentation.metamodel.openai._helper import OpenAISpanHandler
 from monocle_apptrace.instrumentation.metamodel.langgraph.methods import LANGGRAPH_METHODS
 from monocle_apptrace.instrumentation.metamodel.flask.methods import (FLASK_METHODS, )
 from monocle_apptrace.instrumentation.metamodel.flask._helper import FlaskSpanHandler, FlaskResponseSpanHandler
@@ -76,5 +77,6 @@ MONOCLE_SPAN_HANDLERS: Dict[str, SpanHandler] = {
     "flask_handler": FlaskSpanHandler(),
     "flask_response_handler": FlaskResponseSpanHandler(),
     "request_handler": RequestSpanHandler(),
-    "non_framework_handler": NonFrameworkSpanHandler()
+    "non_framework_handler": NonFrameworkSpanHandler(),
+    "openai_handler": OpenAISpanHandler(),
 }
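The "openai_handler" key registered above is what the "span_handler" entries in the openai/methods.py hunks further down resolve to at wrapping time. A sketch of how a wrapped-method definition ties the two together; the package path and the import location of task_wrapper are assumptions for illustration, not copied from the wheel:

from monocle_apptrace.instrumentation.common.wrapper import task_wrapper  # assumed import location

EXAMPLE_OPENAI_METHOD = {                                  # illustrative entry only
    "package": "openai.resources.chat.completions",        # assumed package path
    "object": "Completions",
    "method": "create",
    "wrapper_method": task_wrapper,
    "span_handler": "openai_handler",   # looked up in MONOCLE_SPAN_HANDLERS
    "output_processor": None,           # the real entries use INFERENCE / RETRIEVAL dicts
}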
monocle_apptrace/instrumentation/metamodel/haystack/_helper.py
CHANGED
@@ -121,7 +121,7 @@ def update_span_from_llm_response(response, instance):
     if response is not None and isinstance(response, dict):
         if "meta" in response:
             token_usage = response["meta"][0]["usage"]
-
+        elif "replies" in response: # and "meta" in response["replies"][0]:
             token_usage = response["replies"][0].meta["usage"]
     if token_usage is not None:
         temperature = instance.__dict__.get("temperature", None)
monocle_apptrace/instrumentation/metamodel/openai/_helper.py
CHANGED
@@ -10,7 +10,7 @@ from monocle_apptrace.instrumentation.common.utils import (
     get_nested_value,
     try_option,
 )
-
+from monocle_apptrace.instrumentation.common.span_handler import NonFrameworkSpanHandler

 logger = logging.getLogger(__name__)

@@ -114,4 +114,12 @@ def get_inference_type(instance):
     if inference_type.unwrap_or(None):
         return 'azure_openai'
     else:
-        return 'openai'
+        return 'openai'
+
+class OpenAISpanHandler(NonFrameworkSpanHandler):
+    # If openAI is being called by Teams AI SDK, then retain the metadata part of the span events
+    def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
+        if self.is_framework_span_in_progess() and self.get_workflow_name_in_progress() == "workflow.teams_ai":
+            return ["attributes", "events.data.input", "events.data.output"]
+        else:
+            return super().skip_processor(to_wrap, wrapped, instance, span, args, kwargs)
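How the list returned by OpenAISpanHandler.skip_processor is consumed: per the hydrate_events change earlier in this diff, an entry names either a whole processor ("attributes", "events") or a single event as "events.<event name>". A toy illustration using the event names from these processors:

skip_processors = ["attributes", "events.data.input", "events.data.output"]
events = [{"name": "data.input"}, {"name": "data.output"}, {"name": "metadata"}]

emitted = [e["name"] for e in events if "events." + e["name"] not in skip_processors]
print(emitted)  # ['metadata']: token-usage metadata is retained, raw payloads are not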
monocle_apptrace/instrumentation/metamodel/openai/methods.py
CHANGED
@@ -12,7 +12,7 @@ OPENAI_METHODS = [
         "object": "Completions",
         "method": "create",
         "wrapper_method": task_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     },
     {
@@ -20,7 +20,7 @@ OPENAI_METHODS = [
         "object": "AsyncCompletions",
         "method": "create",
         "wrapper_method": atask_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     },
     {
@@ -28,7 +28,7 @@ OPENAI_METHODS = [
         "object": "Embeddings",
         "method": "create",
         "wrapper_method": task_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": RETRIEVAL
     },
     {
@@ -36,7 +36,7 @@ OPENAI_METHODS = [
         "object": "AsyncEmbeddings",
         "method": "create",
         "wrapper_method": atask_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": RETRIEVAL
     },
     {
@@ -44,7 +44,7 @@ OPENAI_METHODS = [
         "object": "Responses",
         "method": "create",
         "wrapper_method": task_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     },
     {
@@ -52,7 +52,7 @@ OPENAI_METHODS = [
         "object": "AsyncResponses",
         "method": "create",
         "wrapper_method": atask_wrapper,
-        "span_handler": "
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     }

monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py
CHANGED
@@ -4,6 +4,8 @@ from monocle_apptrace.instrumentation.common.utils import (
     get_keys_as_tuple,
     get_nested_value,
     try_option,
+    get_exception_message,
+    get_exception_status_code
 )
 def capture_input(arguments):
     """
@@ -57,11 +59,41 @@ def capture_prompt_info(arguments):
     except Exception as e:
         return f"Error capturing prompt: {str(e)}"

-def
-    if
-
-
-
+def get_status_code(arguments):
+    if arguments["exception"] is not None:
+        return get_exception_status_code(arguments)
+    elif hasattr(arguments["result"], "status"):
+        return arguments["result"].status
+    else:
+        return 'success'
+
+def get_status(arguments):
+    if arguments["exception"] is not None:
+        return 'error'
+    elif get_status_code(arguments) == 'success':
+        return 'success'
+    else:
+        return 'error'
+
+def get_response(arguments) -> str:
+    status = get_status_code(arguments)
+    response:str = ""
+    if status == 'success':
+        if hasattr(arguments["result"], "message"):
+            response = arguments["result"].message.content
+        else:
+            response = str(arguments["result"])
+    else:
+        if arguments["exception"] is not None:
+            response = get_exception_message(arguments)
+        elif hasattr(arguments["result"], "error"):
+            response = arguments["result"].error
+    return response
+
+def check_status(arguments):
+    status = get_status_code(arguments)
+    if status != 'success':
+        raise MonocleSpanException(f"{status}")

 def extract_provider_name(instance):
     provider_url: Option[str] = try_option(getattr, instance._client.base_url, 'host')
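A quick illustration of how these helpers resolve status and response text from the accessor arguments; the result objects below are stand-ins built with SimpleNamespace, since the helpers only rely on status/message/error attributes being present:

from types import SimpleNamespace
from monocle_apptrace.instrumentation.metamodel.teamsai._helper import (
    get_response, get_status, get_status_code,
)

ok = {"result": SimpleNamespace(status="success",
                                message=SimpleNamespace(content="Hello from the planner")),
      "exception": None}
failed = {"result": None, "exception": TimeoutError("model call timed out")}

print(get_status_code(ok))   # 'success'
print(get_response(ok))      # 'Hello from the planner'
print(get_status(failed))    # 'error'
print(get_response(failed))  # 'model call timed out'
# check_status(failed) would raise MonocleSpanException('error')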
monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py
CHANGED
@@ -28,42 +28,22 @@ ACTIONPLANNER_OUTPUT_PROCESSOR = {
             {
                 "attribute": "tokenizer",
                 "accessor": lambda arguments: arguments["instance"]._options.tokenizer.__class__.__name__ if hasattr(arguments["instance"], "_options") else "GPTTokenizer"
+            },
+            {
+                "attribute": "prompt_name",
+                "accessor": _helper.capture_prompt_info
+            },
+            {
+                "attribute": "validator",
+                "accessor": lambda arguments: arguments["kwargs"].get("validator").__class__.__name__ if arguments.get("kwargs", {}).get("validator") else "DefaultResponseValidator"
+            },
+            {
+                "attribute": "memory_type",
+                "accessor": lambda arguments: arguments["kwargs"].get("memory").__class__.__name__ if arguments.get("kwargs", {}).get("memory") else "unknown"
             }
         ]
     ],
     "events": [
-        {
-            "name": "data.input",
-            "_comment": "input configuration to ActionPlanner",
-            "attributes": [
-                {
-                    "attribute": "prompt_name",
-                    "accessor": _helper.capture_prompt_info
-                },
-                {
-                    "attribute": "validator",
-                    "accessor": lambda arguments: arguments["kwargs"].get("validator").__class__.__name__ if arguments.get("kwargs", {}).get("validator") else "DefaultResponseValidator"
-                },
-                {
-                    "attribute": "memory_type",
-                    "accessor": lambda arguments: arguments["kwargs"].get("memory").__class__.__name__ if arguments.get("kwargs", {}).get("memory") else "unknown"
-                }
-            ]
-        },
-        {
-            "name": "data.output",
-            "_comment": "output from ActionPlanner",
-            "attributes": [
-                {
-                    "attribute": "status",
-                    "accessor": lambda arguments: _helper.status_check(arguments)
-                },
-                {
-                    "attribute": "response",
-                    "accessor": lambda arguments: arguments["result"].message.content if hasattr(arguments["result"], "message") else str(arguments["result"])
-                }
-            ]
-        },
         {
             "name": "metadata",
             "attributes": [
monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py
CHANGED
@@ -3,7 +3,7 @@ from monocle_apptrace.instrumentation.metamodel.teamsai import (
 )
 from monocle_apptrace.instrumentation.common.utils import get_llm_type
 TEAMAI_OUTPUT_PROCESSOR = {
-    "type": "inference",
+    "type": "inference.framework",
     "attributes": [
         [
             {
@@ -52,25 +52,23 @@ TEAMAI_OUTPUT_PROCESSOR = {
             "name": "data.output",
             "_comment": "output from Teams AI",
             "attributes": [
+                {
+                    "attribute": "status",
+                    "accessor": lambda arguments: _helper.get_status(arguments)
+                },
+                {
+                    "attribute": "status_code",
+                    "accessor": lambda arguments: _helper.get_status_code(arguments)
+                },
                 {
                     "attribute": "response",
-                    "accessor": lambda arguments:
+                    "accessor": lambda arguments: _helper.get_response(arguments)
+                },
+                {
+                    "attribute": "check_status",
+                    "accessor": lambda arguments: _helper.check_status(arguments)
                 }
             ]
         },
-        # {
-        #     "name": "metadata",
-        #     "attributes": [
-        #         {
-        #             "_comment": "metadata from Teams AI response",
-        #             "accessor": lambda arguments: {
-        #                 "prompt_tokens": arguments["result"].get("usage", {}).get("prompt_tokens", 0),
-        #                 "completion_tokens": arguments["result"].get("usage", {}).get("completion_tokens", 0),
-        #                 "total_tokens": arguments["result"].get("usage", {}).get("total_tokens", 0),
-        #                 "latency_ms": arguments.get("latency_ms")
-        #             }
-        #         }
-        #     ]
-        # }
     ]
 }
{monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b2.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: monocle_apptrace
-Version: 0.4.
+Version: 0.4.0b2
 Summary: package with monocle genAI tracing
 Project-URL: Homepage, https://github.com/monocle2ai/monocle
 Project-URL: Issues, https://github.com/monocle2ai/monocle/issues
@@ -20,6 +20,7 @@ Requires-Dist: boto3==1.35.19; extra == 'aws'
 Provides-Extra: azure
 Requires-Dist: azure-storage-blob==12.22.0; extra == 'azure'
 Provides-Extra: dev
+Requires-Dist: anthropic-haystack; extra == 'dev'
 Requires-Dist: anthropic==0.49.0; extra == 'dev'
 Requires-Dist: azure-storage-blob==12.22.0; extra == 'dev'
 Requires-Dist: boto3==1.34.131; extra == 'dev'
{monocle_apptrace-0.4.0b1.dist-info → monocle_apptrace-0.4.0b2.dist-info}/RECORD
CHANGED
@@ -12,13 +12,13 @@ monocle_apptrace/exporters/azure/blob_exporter_opendal.py,sha256=wQUtciyFMD28tpW
 monocle_apptrace/exporters/okahu/okahu_exporter.py,sha256=wFkHd87nOXzFMRejrUiO6N0mNIWkxAyRzob6o82lr_c,4765
 monocle_apptrace/instrumentation/__init__.py,sha256=oa412OuokRm9Vf3XlCJLqpZjz9ZcuxAKxnEBvOK7u2M,21
 monocle_apptrace/instrumentation/common/__init__.py,sha256=oNEcgw4N36_XzPeN1gc7wxhPjVg-Vhh8EjvUIQZ7pDM,224
-monocle_apptrace/instrumentation/common/constants.py,sha256=
+monocle_apptrace/instrumentation/common/constants.py,sha256=XxxPUg0tQGQLM12Z3yzLa-shgU8ZZwlAGsFg9MT7mao,3208
 monocle_apptrace/instrumentation/common/instrumentor.py,sha256=CwQT1oiPyzv_xe8HhdOH7Ucmd18r2Wk3ortOTx6bhpQ,16324
-monocle_apptrace/instrumentation/common/span_handler.py,sha256=
+monocle_apptrace/instrumentation/common/span_handler.py,sha256=ajW1hkb_gUefJ5dMOVyXY1GcdHxpFkYajNJrmjq9iVg,12965
 monocle_apptrace/instrumentation/common/tracing.md,sha256=6Lr8QGxEFHKhj-mMvLV3xjFnplKSs6HEdwl0McPK47M,7577
-monocle_apptrace/instrumentation/common/utils.py,sha256=
-monocle_apptrace/instrumentation/common/wrapper.py,sha256=
-monocle_apptrace/instrumentation/common/wrapper_method.py,sha256=
+monocle_apptrace/instrumentation/common/utils.py,sha256=wAN3J6H5wxQZgOrK31oWyq1mzxLogkG-Du31GoPcPIo,13519
+monocle_apptrace/instrumentation/common/wrapper.py,sha256=YF2f7j9Z1RPmY69OwdUIhyR8hCLSEk-xRGWbZlowl3c,12079
+monocle_apptrace/instrumentation/common/wrapper_method.py,sha256=ig98if45QDU-N76uNAxcb1O7xL8YpwtxJLcb5Dh71bc,4013
 monocle_apptrace/instrumentation/metamodel/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/aiohttp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/aiohttp/_helper.py,sha256=h-zrif2vgPL9JbCf1eKHbKBYHAxMdHxOdY-soIDGti8,2361
@@ -40,16 +40,16 @@ monocle_apptrace/instrumentation/metamodel/flask/_helper.py,sha256=Z0mBGDXP_-Dcc
 monocle_apptrace/instrumentation/metamodel/flask/methods.py,sha256=dWCMEDk-HWHiD0vlMoAVYbIFclstmVkUpRrCtqDWyFE,739
 monocle_apptrace/instrumentation/metamodel/flask/entities/http.py,sha256=wIudpNk6-DY72k0p90XtvjKt8BilvOd-87Q2iqJnWa8,1525
 monocle_apptrace/instrumentation/metamodel/haystack/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/haystack/_helper.py,sha256=
+monocle_apptrace/instrumentation/metamodel/haystack/_helper.py,sha256=HjPZUA9K2dPUcQQNzMN3jX8UV0OHF9N7tQ6kSl1OvxM,5439
 monocle_apptrace/instrumentation/metamodel/haystack/methods.py,sha256=Zd70ycMQ5qWsjPXnQL6qoThNKrQA80P6t11sFyEbQR4,1585
 monocle_apptrace/instrumentation/metamodel/haystack/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py,sha256=
+monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py,sha256=FzUg66WRYAcPbWqdq8iqfv7inkry9L_MrC1o5wd96bg,3217
 monocle_apptrace/instrumentation/metamodel/haystack/entities/retrieval.py,sha256=nq3lsk2qFxXqwrAHsBt8zrh4ZVGAJABkPtylrjUCCqc,2357
 monocle_apptrace/instrumentation/metamodel/langchain/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/langchain/_helper.py,sha256=CziW8KUmRqGCi_y2fcC9YMsP2wP11OMUitqKupSXStg,5315
 monocle_apptrace/instrumentation/metamodel/langchain/methods.py,sha256=sQLrji0NLuG8i8q5UdbgzPUjWh_WmromfvWL3pGZdCk,2941
 monocle_apptrace/instrumentation/metamodel/langchain/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py,sha256=
+monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py,sha256=RrLPoYLfn3AB9-siwo1NFLT2aPDJtJm9Hgoj6ad2S7Y,2774
 monocle_apptrace/instrumentation/metamodel/langchain/entities/retrieval.py,sha256=r4UqTCT5vOfkbz9lwoTRoiMkUUJtPMwqOYbqo53A6K8,2039
 monocle_apptrace/instrumentation/metamodel/langgraph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/langgraph/_helper.py,sha256=-XmAbhkgqwaunFZa-BP0zWZ3e-uD-ihSszbn5Cz75yc,2043
@@ -61,11 +61,11 @@ monocle_apptrace/instrumentation/metamodel/llamaindex/_helper.py,sha256=1Rq2sfoL
 monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py,sha256=p2mDYF_Izgz9M919n3Do5tlc5XRAZt_rnkFRqqIO5yo,3001
 monocle_apptrace/instrumentation/metamodel/llamaindex/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/llamaindex/entities/agent.py,sha256=g7IEwFMLjYvxljX7iHoYSPJW6k-wC7Z3i_y2qlNEZcs,1338
-monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py,sha256=
+monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py,sha256=XJI6CbGdsogQLQC41KEJqkRe1THEN_vu8pA_H5RGNww,2737
 monocle_apptrace/instrumentation/metamodel/llamaindex/entities/retrieval.py,sha256=QBF1nrqog5KHh925jiY2V-kejL6iVLKUowZmqUDoiJ4,1870
 monocle_apptrace/instrumentation/metamodel/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/openai/_helper.py,sha256=
-monocle_apptrace/instrumentation/metamodel/openai/methods.py,sha256=
+monocle_apptrace/instrumentation/metamodel/openai/_helper.py,sha256=8kQhU-QuqQR_SPJs9KKE-32jBJyk2kB91VNtirYX1yI,4878
+monocle_apptrace/instrumentation/metamodel/openai/methods.py,sha256=jpqZyfiJbzMz1r3W3fwMCGiQsbiDSkhqgADJextGxFQ,1796
 monocle_apptrace/instrumentation/metamodel/openai/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py,sha256=JV6favZumCWBx5GCgirQ-txACeOGzb8N_-SZdSY0nOE,7910
 monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py,sha256=LU7aec302ZqPrs9MzFWU-JTnhK8OpYfgQKMmktlD6-8,1457
@@ -74,14 +74,14 @@ monocle_apptrace/instrumentation/metamodel/requests/_helper.py,sha256=GS03VbT9Li
 monocle_apptrace/instrumentation/metamodel/requests/methods.py,sha256=O7lkglRvV97zqnCu6r2JwvW8WQqi4uvlpmNkAPpXigE,440
 monocle_apptrace/instrumentation/metamodel/requests/entities/http.py,sha256=TlY4NZtPleewbF5W0qV61L2ByDOf44EOZhKQgwzRIKc,1669
 monocle_apptrace/instrumentation/metamodel/teamsai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py,sha256=
+monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py,sha256=mrN-AQ0Nn59_jKidyvibP76JI17SdVFAdpBw_ouuu2U,3821
 monocle_apptrace/instrumentation/metamodel/teamsai/methods.py,sha256=i0Rz6Gn_Skey9uuCtJftNfMjGBbMMhuMMQcx7TPYQXU,2214
 monocle_apptrace/instrumentation/metamodel/teamsai/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py,sha256=
-monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py,sha256=
-monocle_apptrace-0.4.
-monocle_apptrace-0.4.
-monocle_apptrace-0.4.
-monocle_apptrace-0.4.
-monocle_apptrace-0.4.
+monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py,sha256=6rb75bFKh_hvgD8dJzcIeXeJjLjhm9nXuxHI3F-icqo,2405
+monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py,sha256=M5uPEbP5c57txrd7BDRXhK5rvRJfyNyvqavtkXuPjXU,2738
+monocle_apptrace-0.4.0b2.dist-info/METADATA,sha256=Zjevvr0oyO-NM8uz0h5JTcTUNXtHnPV0LrLtiOtZk0E,6413
+monocle_apptrace-0.4.0b2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+monocle_apptrace-0.4.0b2.dist-info/licenses/LICENSE,sha256=ay9trLiP5I7ZsFXo6AqtkLYdRqe5S9r-DrPOvsNlZrg,9136
+monocle_apptrace-0.4.0b2.dist-info/licenses/NOTICE,sha256=9jn4xtwM_uUetJMx5WqGnhrR7MIhpoRlpokjSTlyt8c,112
+monocle_apptrace-0.4.0b2.dist-info/RECORD,,
File without changes
File without changes
File without changes