monocle-apptrace 0.3.1__py3-none-any.whl → 0.4.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.

Potentially problematic release: this version of monocle-apptrace might be problematic; see the registry listing for more details.

Files changed (33)
  1. monocle_apptrace/exporters/aws/s3_exporter.py +3 -1
  2. monocle_apptrace/exporters/azure/blob_exporter.py +2 -2
  3. monocle_apptrace/exporters/base_exporter.py +10 -4
  4. monocle_apptrace/exporters/file_exporter.py +19 -4
  5. monocle_apptrace/exporters/monocle_exporters.py +3 -3
  6. monocle_apptrace/exporters/okahu/okahu_exporter.py +5 -2
  7. monocle_apptrace/instrumentation/common/constants.py +5 -1
  8. monocle_apptrace/instrumentation/common/instrumentor.py +24 -13
  9. monocle_apptrace/instrumentation/common/span_handler.py +33 -18
  10. monocle_apptrace/instrumentation/common/utils.py +62 -54
  11. monocle_apptrace/instrumentation/common/wrapper.py +177 -40
  12. monocle_apptrace/instrumentation/common/wrapper_method.py +10 -5
  13. monocle_apptrace/instrumentation/metamodel/aiohttp/__init__.py +0 -0
  14. monocle_apptrace/instrumentation/metamodel/aiohttp/_helper.py +66 -0
  15. monocle_apptrace/instrumentation/metamodel/aiohttp/entities/http.py +51 -0
  16. monocle_apptrace/instrumentation/metamodel/aiohttp/methods.py +13 -0
  17. monocle_apptrace/instrumentation/metamodel/flask/_helper.py +7 -2
  18. monocle_apptrace/instrumentation/metamodel/flask/entities/http.py +0 -1
  19. monocle_apptrace/instrumentation/metamodel/haystack/_helper.py +17 -4
  20. monocle_apptrace/instrumentation/metamodel/haystack/methods.py +8 -1
  21. monocle_apptrace/instrumentation/metamodel/llamaindex/_helper.py +13 -9
  22. monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py +14 -0
  23. monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py +174 -26
  24. monocle_apptrace/instrumentation/metamodel/requests/_helper.py +1 -1
  25. monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py +19 -1
  26. monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py +1 -1
  27. monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py +24 -18
  28. monocle_apptrace/instrumentation/metamodel/teamsai/methods.py +42 -8
  29. {monocle_apptrace-0.3.1.dist-info → monocle_apptrace-0.4.0b1.dist-info}/METADATA +1 -1
  30. {monocle_apptrace-0.3.1.dist-info → monocle_apptrace-0.4.0b1.dist-info}/RECORD +33 -29
  31. {monocle_apptrace-0.3.1.dist-info → monocle_apptrace-0.4.0b1.dist-info}/WHEEL +0 -0
  32. {monocle_apptrace-0.3.1.dist-info → monocle_apptrace-0.4.0b1.dist-info}/licenses/LICENSE +0 -0
  33. {monocle_apptrace-0.3.1.dist-info → monocle_apptrace-0.4.0b1.dist-info}/licenses/NOTICE +0 -0

monocle_apptrace/instrumentation/common/wrapper.py

@@ -6,78 +6,168 @@ from opentelemetry.context import set_value, attach, detach, get_value
  from monocle_apptrace.instrumentation.common.span_handler import SpanHandler
  from monocle_apptrace.instrumentation.common.utils import (
      get_fully_qualified_class_name,
+     set_scopes,
      with_tracer_wrapper,
      set_scope,
-     remove_scope,
-     async_wrapper
+     remove_scope
  )
  from monocle_apptrace.instrumentation.common.constants import WORKFLOW_TYPE_KEY, ADD_NEW_WORKFLOW
  logger = logging.getLogger(__name__)

- def wrapper_processor(async_task: bool, tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
-     # Some Langchain objects are wrapped elsewhere, so we ignore them here
-     if instance.__class__.__name__ in ("AgentExecutor"):
-         return wrapped(*args, **kwargs)
+ def get_auto_close_span(to_wrap, kwargs):
+     try:
+         if to_wrap.get("output_processor") and to_wrap.get("output_processor").get("is_auto_close"):
+             return to_wrap.get("output_processor").get("is_auto_close")(kwargs)
+         return True
+     except Exception as e:
+         logger.warning("Warning: Error occurred in get_auto_close_span: %s", str(e))
+         return True
+
+ def pre_process_span(name, tracer, handler, add_workflow_span, to_wrap, wrapped, instance, args, kwargs, span, source_path):
+     SpanHandler.set_default_monocle_attributes(span, source_path)
+     if SpanHandler.is_root_span(span) or add_workflow_span:
+         # This is a direct API call of a non-framework type
+         SpanHandler.set_workflow_properties(span, to_wrap)
+     else:
+         SpanHandler.set_non_workflow_properties(span)
+         handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)

+ def post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span):
+     if not (SpanHandler.is_root_span(span) or get_value(ADD_NEW_WORKFLOW) == True):
+         handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span)
+         handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
+
+ def get_span_name(to_wrap, instance):
      if to_wrap.get("span_name"):
          name = to_wrap.get("span_name")
      else:
          name = get_fully_qualified_class_name(instance)
+     return name
+
+ def monocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, add_workflow_span, args, kwargs):
+     # Main span processing logic
+     name = get_span_name(to_wrap, instance)
+     return_value = None
+     span_status = None
+     if(get_auto_close_span(to_wrap, kwargs)):
+         with tracer.start_as_current_span(name) as span:
+             pre_process_span(name, tracer, handler, add_workflow_span, to_wrap, wrapped, instance, args, kwargs, span, source_path)
+
+             if SpanHandler.is_root_span(span) or add_workflow_span:
+                 # Recursive call for the actual span
+                 return_value, span_status = monocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, False, args, kwargs)
+                 span.set_status(span_status)
+             else:
+                 with SpanHandler.workflow_type(to_wrap):
+                     return_value = wrapped(*args, **kwargs)
+                 post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span)
+                 span_status = span.status
+     else:
+         span = tracer.start_span(name)
+
+         pre_process_span(name, tracer, handler, add_workflow_span, to_wrap, wrapped, instance, args, kwargs, span, source_path)
+
+         def post_process_span_internal(ret_val):
+             nonlocal handler, to_wrap, wrapped, instance, args, kwargs, span
+             post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, ret_val, span)
+             span.end()
+
+         with SpanHandler.workflow_type(to_wrap):
+             return_value = wrapped(*args, **kwargs)
+         if to_wrap.get("output_processor") and to_wrap.get("output_processor").get("response_processor"):
+             # Process the stream
+             to_wrap.get("output_processor").get("response_processor")(to_wrap, return_value, post_process_span_internal)
+         else:
+             span.end()
+         span_status = span.status
+     return return_value, span_status

+ def monocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
      return_value = None
      token = None
      try:
          handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
          if to_wrap.get('skip_span', False) or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
-             if async_task:
-                 return_value = async_wrapper(wrapped, None, None, None, *args, **kwargs)
-             else:
-                 return_value = wrapped(*args, **kwargs)
+             return_value = wrapped(*args, **kwargs)
          else:
              add_workflow_span = get_value(ADD_NEW_WORKFLOW) == True
              token = attach(set_value(ADD_NEW_WORKFLOW, False))
              try:
-                 return_value = span_processor(name, async_task, tracer, handler, add_workflow_span,
-                                               to_wrap, wrapped, instance, args, kwargs)
+                 return_value, span_status = monocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, add_workflow_span, args, kwargs)
              finally:
                  detach(token)
          return return_value
      finally:
          handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)

- def span_processor(name: str, async_task: bool, tracer: Tracer, handler: SpanHandler, add_workflow_span:bool,
-                    to_wrap, wrapped, instance, args, kwargs):
-     # For singleton spans, eg OpenAI inference generate a workflow span to format the workflow specific attributes
+
+ async def amonocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, add_workflow_span, args, kwargs):
+     # Main span processing logic
+     name = get_span_name(to_wrap, instance)
      return_value = None
-     with tracer.start_as_current_span(name) as span:
-         # Since Spanhandler can be overridden, ensure we set default monocle attributes.
-         SpanHandler.set_default_monocle_attributes(span)
-         if SpanHandler.is_root_span(span) or add_workflow_span:
-             # This is a direct API call of a non-framework type, call the span_processor recursively for the actual span
-             SpanHandler.set_workflow_properties(span, to_wrap)
-             return_value = span_processor(name, async_task, tracer, handler, False, to_wrap, wrapped, instance, args, kwargs)
+     span_status = None
+     if(get_auto_close_span(to_wrap, kwargs)):
+         with tracer.start_as_current_span(name) as span:
+             pre_process_span(name, tracer, handler, add_workflow_span, to_wrap, wrapped, instance, args, kwargs, span, source_path)
+
+             if SpanHandler.is_root_span(span) or add_workflow_span:
+                 # Recursive call for the actual span
+                 return_value, span_status = await amonocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, False, args, kwargs)
+                 span.set_status(span_status)
+             else:
+                 with SpanHandler.workflow_type(to_wrap):
+                     return_value = await wrapped(*args, **kwargs)
+                 span_status = span.status
+                 post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span)
+     else:
+         span = tracer.start_span(name)
+
+         pre_process_span(name, tracer, handler, add_workflow_span, to_wrap, wrapped, instance, args, kwargs, span, source_path)
+
+         def post_process_span_internal(ret_val):
+             nonlocal handler, to_wrap, wrapped, instance, args, kwargs, span
+             post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, ret_val, span)
+             span.end()
+
+         with SpanHandler.workflow_type(to_wrap):
+             return_value = await wrapped(*args, **kwargs)
+
+         if to_wrap.get("output_processor") and to_wrap.get("output_processor").get("response_processor"):
+             # Process the stream
+             to_wrap.get("output_processor").get("response_processor")(to_wrap, return_value, post_process_span_internal)
+         else:
+             span.end()
+         span_status = span.status
+     return return_value, span.status
+
+ async def amonocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
+     return_value = None
+     token = None
+     try:
+         handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+         if to_wrap.get('skip_span', False) or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
+             return_value = await wrapped(*args, **kwargs)
          else:
-             with SpanHandler.workflow_type(to_wrap):
-                 SpanHandler.set_non_workflow_properties(span)
-                 handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
-                 if async_task:
-                     return_value = async_wrapper(wrapped, None, None, None, *args, **kwargs)
-                 else:
-                     return_value = wrapped(*args, **kwargs)
-                 handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span)
-                 handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
-     return return_value
+             add_workflow_span = get_value(ADD_NEW_WORKFLOW) == True
+             token = attach(set_value(ADD_NEW_WORKFLOW, False))
+             try:
+                 return_value, span_status = await amonocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, add_workflow_span, args, kwargs)
+             finally:
+                 detach(token)
+         return return_value
+     finally:
+         handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)

  @with_tracer_wrapper
- def task_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
-     return wrapper_processor(False, tracer, handler, to_wrap, wrapped, instance, args, kwargs)
+ def task_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
+     return monocle_wrapper(tracer, handler, to_wrap, wrapped, instance, source_path, args, kwargs)

  @with_tracer_wrapper
- async def atask_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
-     return wrapper_processor(True, tracer, handler, to_wrap, wrapped, instance, args, kwargs)
+ async def atask_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
+     return await amonocle_wrapper(tracer, handler, to_wrap, wrapped, instance, source_path, args, kwargs)

  @with_tracer_wrapper
- def scope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
+ def scope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
      scope_name = to_wrap.get('scope_name', None)
      if scope_name:
          token = set_scope(scope_name)

@@ -87,8 +177,55 @@ def scope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instan
      return return_value

  @with_tracer_wrapper
- async def ascope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, args, kwargs):
+ async def ascope_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
      scope_name = to_wrap.get('scope_name', None)
      scope_value = to_wrap.get('scope_value', None)
-     return_value = async_wrapper(wrapped, scope_name, scope_value, None, *args, **kwargs)
-     return return_value
+     token = None
+     try:
+         if scope_name:
+             token = set_scope(scope_name, scope_value)
+         return_value = await wrapped(*args, **kwargs)
+         return return_value
+     finally:
+         if token:
+             remove_scope(token)
+
+ @with_tracer_wrapper
+ def scopes_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
+     scope_values = to_wrap.get('scope_values', None)
+     scope_values = evaluate_scope_values(args, kwargs, scope_values)
+     token = None
+     try:
+         if scope_values:
+             token = set_scopes(scope_values)
+         return_value = wrapped(*args, **kwargs)
+         return return_value
+     finally:
+         if token:
+             remove_scope(token)
+
+ @with_tracer_wrapper
+ async def ascopes_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
+     scope_values = to_wrap.get('scope_values', None)
+     scope_values = evaluate_scope_values(args, kwargs, scope_values)
+     token = None
+     try:
+         if scope_values:
+             token = set_scopes(scope_values)
+         return_value = await wrapped(*args, **kwargs)
+         return return_value
+     finally:
+         if token:
+             remove_scope(token)
+
+ def evaluate_scope_values(args, kwargs, scope_values):
+     if callable(scope_values):
+         try:
+             scope_values = scope_values(args, kwargs)
+         except Exception as e:
+             logger.warning("Warning: Error occurred in evaluate_scope_values: %s", str(e))
+             scope_values = None
+     if isinstance(scope_values, dict):
+         return scope_values
+     return None
+
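
The deferred-close path introduced here is worth calling out: when an entry's output_processor supplies an is_auto_close callable that returns False for a given call, monocle_wrapper_span_processor starts the span with tracer.start_span() instead of a context manager and hands the return value, together with a span-closing callback, to the processor's response_processor; the span ends only when that callback fires. The sketch below illustrates that contract with purely hypothetical names (FakeStream, EXAMPLE_STREAMING_PROCESSOR are not part of monocle-apptrace); the OpenAI inference processor also changed substantially in this release (openai/entities/inference.py, +174/-26), which is presumably where this hook is exercised.

# Illustrative sketch only; shows the shape of the is_auto_close / response_processor
# contract used by monocle_wrapper_span_processor above.

class FakeStream:
    """Stand-in for a streaming result; invokes a hook once fully consumed."""
    def __init__(self, chunks):
        self.chunks = chunks
        self.on_complete = lambda result: None   # replaced by the instrumentation

    def __iter__(self):
        yield from self.chunks
        self.on_complete(self.chunks)            # ends the span after the last chunk

def is_auto_close(kwargs):
    # Close the span immediately unless the caller asked for a streaming response.
    return not kwargs.get("stream", False)

def response_processor(to_wrap, stream, close_span):
    # The wrapper ignores our return value, so the closing hook must be attached to
    # the object the caller will iterate; close_span(result) runs post_process_span
    # with that result and then span.end().
    stream.on_complete = close_span

EXAMPLE_STREAMING_PROCESSOR = {
    "is_auto_close": is_auto_close,
    "response_processor": response_processor,
}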

monocle_apptrace/instrumentation/common/wrapper_method.py

@@ -17,7 +17,8 @@ from monocle_apptrace.instrumentation.metamodel.requests.methods import (REQUEST
  from monocle_apptrace.instrumentation.metamodel.requests._helper import RequestSpanHandler
  from monocle_apptrace.instrumentation.metamodel.teamsai.methods import (TEAMAI_METHODS, )
  from monocle_apptrace.instrumentation.metamodel.anthropic.methods import (ANTHROPIC_METHODS, )
-
+ from monocle_apptrace.instrumentation.metamodel.aiohttp.methods import (AIOHTTP_METHODS, )
+ from monocle_apptrace.instrumentation.metamodel.aiohttp._helper import aiohttpSpanHandler
  class WrapperMethod:
      def __init__(
          self,

@@ -29,7 +30,8 @@ class WrapperMethod:
          wrapper_method = task_wrapper,
          span_handler = 'default',
          scope_name: str = None,
-         span_type: str = None
+         span_type: str = None,
+         scope_values = None,
      ):
          self.package = package
          self.object = object_name

@@ -37,10 +39,11 @@ class WrapperMethod:
          self.span_name = span_name
          self.output_processor=output_processor
          self.span_type = span_type
+         self.scope_values = scope_values

          self.span_handler:SpanHandler.__class__ = span_handler
          self.scope_name = scope_name
-         if scope_name:
+         if scope_name and not scope_values:
              self.wrapper_method = scope_wrapper
          else:
              self.wrapper_method = wrapper_method

@@ -56,17 +59,19 @@ class WrapperMethod:
              'wrapper_method': self.wrapper_method,
              'span_handler': self.span_handler,
              'scope_name': self.scope_name,
-             'span_type': self.span_type
+             'span_type': self.span_type,
+             'scope_values': self.scope_values,
          }
          return instance_dict

      def get_span_handler(self) -> SpanHandler:
          return self.span_handler()

- DEFAULT_METHODS_LIST = LANGCHAIN_METHODS + LLAMAINDEX_METHODS + HAYSTACK_METHODS + BOTOCORE_METHODS + FLASK_METHODS + REQUESTS_METHODS + LANGGRAPH_METHODS + OPENAI_METHODS + TEAMAI_METHODS + ANTHROPIC_METHODS
+ DEFAULT_METHODS_LIST = LANGCHAIN_METHODS + LLAMAINDEX_METHODS + HAYSTACK_METHODS + BOTOCORE_METHODS + FLASK_METHODS + REQUESTS_METHODS + LANGGRAPH_METHODS + OPENAI_METHODS + TEAMAI_METHODS + ANTHROPIC_METHODS + AIOHTTP_METHODS

  MONOCLE_SPAN_HANDLERS: Dict[str, SpanHandler] = {
      "default": SpanHandler(),
+     "aiohttp_handler": aiohttpSpanHandler(),
      "botocore_handler": BotoCoreSpanHandler(),
      "flask_handler": FlaskSpanHandler(),
      "flask_response_handler": FlaskResponseSpanHandler(),

monocle_apptrace/instrumentation/metamodel/aiohttp/_helper.py (new file)

@@ -0,0 +1,66 @@
+ import logging
+ from threading import local
+ from monocle_apptrace.instrumentation.common.utils import extract_http_headers, clear_http_scopes, try_option, Option, MonocleSpanException
+ from monocle_apptrace.instrumentation.common.span_handler import SpanHandler
+ from monocle_apptrace.instrumentation.common.constants import HTTP_SUCCESS_CODES
+ from urllib.parse import unquote
+
+ logger = logging.getLogger(__name__)
+ MAX_DATA_LENGTH = 1000
+ token_data = local()
+ token_data.current_token = None
+
+ def get_route(args) -> str:
+     route_path: Option[str] = try_option(getattr, args[0], 'path')
+     return route_path.unwrap_or("")
+
+ def get_method(args) -> str:
+     # return args[0]['method'] if 'method' in args[0] else ""
+     http_method: Option[str] = try_option(getattr, args[0], 'method')
+     return http_method.unwrap_or("")
+
+ def get_params(args) -> dict:
+     params: Option[str] = try_option(getattr, args[0], 'query_string')
+     return unquote(params.unwrap_or(""))
+
+ def get_body(args) -> dict:
+     return ""
+
+ def extract_response(result) -> str:
+     if hasattr(result, 'text'):
+         response = result.text[0:max(result.text.__len__(), MAX_DATA_LENGTH)]
+     else:
+         response = ""
+     return response
+
+ def extract_status(result) -> str:
+     status = f"{result.status}" if hasattr(result, 'status') else ""
+     if status not in HTTP_SUCCESS_CODES:
+         error_message = extract_response(result)
+         raise MonocleSpanException(f"error: {status} - {error_message}")
+     return status
+
+ def aiohttp_pre_tracing(args):
+     token_data.current_token = extract_http_headers(args[0].headers)
+
+ def aiohttp_post_tracing():
+     clear_http_scopes(token_data.current_token)
+     token_data.current_token = None
+
+ def aiohttp_skip_span(args) -> bool:
+     if get_method(args) == "HEAD":
+         return True
+     return False
+
+ class aiohttpSpanHandler(SpanHandler):
+
+     def pre_tracing(self, to_wrap, wrapped, instance, args, kwargs):
+         aiohttp_pre_tracing(args)
+         return super().pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+
+     def post_tracing(self, to_wrap, wrapped, instance, args, kwargs, return_value):
+         aiohttp_post_tracing()
+         return super().post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+
+     def skip_span(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
+         return aiohttp_skip_span(args)

monocle_apptrace/instrumentation/metamodel/aiohttp/entities/http.py (new file)

@@ -0,0 +1,51 @@
+ from monocle_apptrace.instrumentation.metamodel.aiohttp import _helper
+ AIO_HTTP_PROCESSOR = {
+     "type": "http.process",
+     "attributes": [
+         [
+             {
+                 "_comment": "request method, request URI",
+                 "attribute": "method",
+                 "accessor": lambda arguments: _helper.get_method(arguments['args'])
+             },
+             {
+                 "_comment": "request method, request URI",
+                 "attribute": "route",
+                 "accessor": lambda arguments: _helper.get_route(arguments['args'])
+             },
+             {
+                 "_comment": "request method, request URI",
+                 "attribute": "body",
+                 "accessor": lambda arguments: _helper.get_body(arguments['args'])
+             },
+         ]
+     ],
+     "events": [
+         {
+             "name": "data.input",
+             "attributes": [
+                 {
+                     "_comment": "route params",
+                     "attribute": "params",
+                     "accessor": lambda arguments: _helper.get_params(arguments['args'])
+                 }
+             ]
+         },
+         {
+             "name": "data.output",
+             "attributes": [
+                 {
+                     "_comment": "status from HTTP response",
+                     "attribute": "status",
+                     "accessor": lambda arguments: _helper.extract_status(arguments['result'])
+                 },
+                 {
+                     "_comment": "this is result from LLM",
+                     "attribute": "response",
+                     "accessor": lambda arguments: _helper.extract_response(arguments['result'])
+                 }
+             ]
+         }
+
+     ]
+ }

monocle_apptrace/instrumentation/metamodel/aiohttp/methods.py (new file)

@@ -0,0 +1,13 @@
+ from monocle_apptrace.instrumentation.common.wrapper import atask_wrapper
+ from monocle_apptrace.instrumentation.metamodel.aiohttp.entities.http import AIO_HTTP_PROCESSOR
+
+ AIOHTTP_METHODS = [
+     {
+         "package": "aiohttp.web_app",
+         "object": "Application",
+         "method": "_handle",
+         "wrapper_method": atask_wrapper,
+         "span_handler": "aiohttp_handler",
+         "output_processor": AIO_HTTP_PROCESSOR
+     }
+ ]
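
The single AIOHTTP_METHODS entry wraps aiohttp.web_app.Application._handle, the coroutine that dispatches every incoming request, so each request to a standard aiohttp server produces an http.process span once Monocle's instrumentation is active; per the helper above, HEAD requests are skipped and non-2xx statuses raise MonocleSpanException so the span is marked as an error. Below is a minimal server of the kind this entry observes; enabling the instrumentation itself is outside this diff.

# Minimal aiohttp application; each GET /hello would be traced through
# Application._handle once Monocle instrumentation is set up.
from aiohttp import web

async def hello(request: web.Request) -> web.Response:
    name = request.query.get("name", "world")
    return web.Response(text=f"hello {name}")

app = web.Application()
app.router.add_get("/hello", hello)

if __name__ == "__main__":
    web.run_app(app, port=8080)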

monocle_apptrace/instrumentation/metamodel/flask/_helper.py

@@ -2,6 +2,8 @@ import logging
  from threading import local
  from monocle_apptrace.instrumentation.common.utils import extract_http_headers, clear_http_scopes
  from monocle_apptrace.instrumentation.common.span_handler import SpanHandler
+ from monocle_apptrace.instrumentation.common.constants import HTTP_SUCCESS_CODES
+ from monocle_apptrace.instrumentation.common.utils import MonocleSpanException
  from urllib.parse import unquote
  from opentelemetry.context import get_current
  from opentelemetry.trace import Span, get_current_span

@@ -28,12 +30,15 @@ def get_body(args) -> dict:
  def extract_response(instance) -> str:
      if hasattr(instance, 'data') and hasattr(instance, 'content_length'):
          response = instance.data[0:max(instance.content_length, MAX_DATA_LENGTH)]
-     else:
+     else:
          response = ""
      return response

  def extract_status(instance) -> str:
-     status = instance.status if hasattr(instance, 'status') else ""
+     status = f"{instance.status_code}" if hasattr(instance, 'status_code') else ""
+     if status not in HTTP_SUCCESS_CODES:
+         error_message = extract_response(instance)
+         raise MonocleSpanException(f"error: {status} - {error_message}")
      return status

  def flask_pre_tracing(args):

monocle_apptrace/instrumentation/metamodel/flask/entities/http.py

@@ -18,7 +18,6 @@ FLASK_HTTP_PROCESSOR = {
  }

  FLASK_RESPONSE_PROCESSOR = {
-     "type": "http.process",
      "events": [
          {
              "name": "data.input",

monocle_apptrace/instrumentation/metamodel/haystack/_helper.py

@@ -1,4 +1,5 @@
  import logging
+
  from monocle_apptrace.instrumentation.common.utils import (
      Option,
      get_keys_as_tuple,

@@ -11,13 +12,19 @@ logger = logging.getLogger(__name__)
  def extract_messages(kwargs):
      try:
          messages = []
+         system_message, user_message = None,None
          if isinstance(kwargs, dict):
              if 'system_prompt' in kwargs and kwargs['system_prompt']:
                  system_message = kwargs['system_prompt']
-                 messages.append({"system" : system_message})
              if 'prompt' in kwargs and kwargs['prompt']:
                  user_message = extract_question_from_prompt(kwargs['prompt'])
+             if 'messages' in kwargs and len(kwargs['messages'])>1:
+                 system_message = kwargs['messages'][0].text
+                 user_message = kwargs['messages'][1].text
+             if system_message and user_message:
+                 messages.append({"system": system_message})
                  messages.append({"user": user_message})
+
          return [str(message) for message in messages]
      except Exception as e:
          logger.warning("Warning: Error occurred in extract_messages: %s", str(e))

@@ -52,6 +59,8 @@ def extract_assistant_message(response):
          reply = response["replies"][0]
          if hasattr(reply, 'content'):
              return [reply.content]
+         if hasattr(reply, 'text'):
+             return [reply.text]
          return [reply]
      except Exception as e:
          logger.warning("Warning: Error occurred in extract_assistant_message: %s", str(e))

@@ -108,15 +117,19 @@ def extract_embeding_model(instance):

  def update_span_from_llm_response(response, instance):
      meta_dict = {}
-     if response is not None and isinstance(response, dict) and "meta" in response:
-         token_usage = response["meta"][0]["usage"]
+     token_usage = None
+     if response is not None and isinstance(response, dict):
+         if "meta" in response:
+             token_usage = response["meta"][0]["usage"]
+         if "replies" in response:
+             token_usage = response["replies"][0].meta["usage"]
      if token_usage is not None:
          temperature = instance.__dict__.get("temperature", None)
          meta_dict.update({"temperature": temperature})
          meta_dict.update(
              {"completion_tokens": token_usage.get("completion_tokens") or token_usage.get("output_tokens")})
          meta_dict.update({"prompt_tokens": token_usage.get("prompt_tokens") or token_usage.get("input_tokens")})
-         meta_dict.update({"total_tokens": token_usage.get("total_tokens")})
+         meta_dict.update({"total_tokens": token_usage.get("total_tokens") or token_usage.get("completion_tokens")+token_usage.get("prompt_tokens")})
      return meta_dict

monocle_apptrace/instrumentation/metamodel/haystack/methods.py

@@ -36,5 +36,12 @@ HAYSTACK_METHODS = [
          "object": "Pipeline",
          "method": "run",
          "wrapper_method": task_wrapper
-     }
+     },
+     {
+         "package": "haystack_integrations.components.generators.anthropic",
+         "object": "AnthropicChatGenerator",
+         "method": "run",
+         "wrapper_method": task_wrapper,
+         "output_processor": INFERENCE
+     },
  ]
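
The added entry routes AnthropicChatGenerator.run through the shared INFERENCE output processor, so Haystack pipelines using the Anthropic chat generator now emit inference spans with the message and token-usage details handled by the updated _helper above. An assumed usage example follows (requires the anthropic-haystack integration and an ANTHROPIC_API_KEY; the model name is illustrative).

# Assumed usage; imports follow Haystack 2.x and the integration package named in
# the entry above.
from haystack.dataclasses import ChatMessage
from haystack_integrations.components.generators.anthropic import AnthropicChatGenerator

generator = AnthropicChatGenerator(model="claude-3-5-sonnet-20240620")
result = generator.run(messages=[ChatMessage.from_user("Summarize OpenTelemetry in one line.")])
print(result["replies"][0].text)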

monocle_apptrace/instrumentation/metamodel/llamaindex/_helper.py

@@ -96,12 +96,19 @@ def extract_query_from_content(content):


  def extract_provider_name(instance):
-     provider_url = try_option(getattr, instance, 'api_base').and_then(lambda url: urlparse(url).hostname)
-     return provider_url
+     if hasattr(instance,'api_base'):
+         provider_url: Option[str]= try_option(getattr, instance, 'api_base').and_then(lambda url: urlparse(url).hostname)
+     if hasattr(instance,'_client'):
+         provider_url:Option[str] = try_option(getattr, instance._client.base_url,'host')
+     return provider_url.unwrap_or(None)


  def extract_inference_endpoint(instance):
-     inference_endpoint = try_option(getattr, instance._client.sdk_configuration, 'server_url').map(str)
+     if hasattr(instance,'_client'):
+         if hasattr(instance._client,'sdk_configuration'):
+             inference_endpoint: Option[str] = try_option(getattr, instance._client.sdk_configuration, 'server_url').map(str)
+         if hasattr(instance._client,'base_url'):
+             inference_endpoint: Option[str] = try_option(getattr, instance._client, 'base_url').map(str)
      return inference_endpoint.unwrap_or(extract_provider_name(instance))


@@ -163,10 +170,7 @@ def update_span_from_llm_response(response, instance):
      if token_usage is not None:
          temperature = instance.__dict__.get("temperature", None)
          meta_dict.update({"temperature": temperature})
-         if getattr(token_usage, "completion_tokens", None):
-             meta_dict.update({"completion_tokens": getattr(token_usage, "completion_tokens")})
-         if getattr(token_usage, "prompt_tokens", None):
-             meta_dict.update({"prompt_tokens": getattr(token_usage, "prompt_tokens")})
-         if getattr(token_usage, "total_tokens", None):
-             meta_dict.update({"total_tokens": getattr(token_usage, "total_tokens")})
+         meta_dict.update({"completion_tokens": getattr(token_usage, "completion_tokens",None) or getattr(token_usage,"output_tokens",None)})
+         meta_dict.update({"prompt_tokens": getattr(token_usage, "prompt_tokens",None) or getattr(token_usage,"input_tokens",None)})
+         meta_dict.update({"total_tokens": getattr(token_usage, "total_tokens",None) or getattr(token_usage,"output_tokens",None)+getattr(token_usage,"input_tokens",None)})
      return meta_dict

monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py

@@ -84,5 +84,19 @@ LLAMAINDEX_METHODS = [
          "method": "chat",
          "wrapper_method": task_wrapper,
          "output_processor": AGENT
+     },
+     {
+         "package": "llama_index.llms.anthropic",
+         "object": "Anthropic",
+         "method": "chat",
+         "wrapper_method": task_wrapper,
+         "output_processor": INFERENCE
+     },
+     {
+         "package": "llama_index.llms.anthropic",
+         "object": "Anthropic",
+         "method": "achat",
+         "wrapper_method": atask_wrapper,
+         "output_processor": INFERENCE
      }
  ]
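
These two entries cover both the synchronous chat and asynchronous achat paths of the LlamaIndex Anthropic LLM, wrapped with task_wrapper and atask_wrapper respectively. An assumed usage example follows (requires llama-index-llms-anthropic and an Anthropic API key; the model name is illustrative).

# Assumed usage; both llm.chat(...) and await llm.achat(...) would now emit
# inference spans.
from llama_index.core.llms import ChatMessage
from llama_index.llms.anthropic import Anthropic

llm = Anthropic(model="claude-3-5-sonnet-20240620")
response = llm.chat([ChatMessage(role="user", content="Give a one-line summary of distributed tracing.")])
print(response.message.content)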