monocle-apptrace 0.4.1__py3-none-any.whl → 0.5.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of monocle-apptrace might be problematic.

Files changed (74)
  1. monocle_apptrace/__main__.py +1 -1
  2. monocle_apptrace/exporters/file_exporter.py +123 -36
  3. monocle_apptrace/instrumentation/common/__init__.py +16 -1
  4. monocle_apptrace/instrumentation/common/constants.py +6 -1
  5. monocle_apptrace/instrumentation/common/instrumentor.py +19 -152
  6. monocle_apptrace/instrumentation/common/method_wrappers.py +380 -0
  7. monocle_apptrace/instrumentation/common/span_handler.py +39 -24
  8. monocle_apptrace/instrumentation/common/utils.py +20 -14
  9. monocle_apptrace/instrumentation/common/wrapper.py +10 -9
  10. monocle_apptrace/instrumentation/common/wrapper_method.py +40 -1
  11. monocle_apptrace/instrumentation/metamodel/a2a/__init__.py +0 -0
  12. monocle_apptrace/instrumentation/metamodel/a2a/_helper.py +37 -0
  13. monocle_apptrace/instrumentation/metamodel/a2a/entities/__init__.py +0 -0
  14. monocle_apptrace/instrumentation/metamodel/a2a/entities/inference.py +112 -0
  15. monocle_apptrace/instrumentation/metamodel/a2a/methods.py +22 -0
  16. monocle_apptrace/instrumentation/metamodel/aiohttp/_helper.py +6 -11
  17. monocle_apptrace/instrumentation/metamodel/anthropic/_helper.py +35 -18
  18. monocle_apptrace/instrumentation/metamodel/anthropic/entities/inference.py +14 -10
  19. monocle_apptrace/instrumentation/metamodel/azfunc/_helper.py +13 -11
  20. monocle_apptrace/instrumentation/metamodel/azfunc/entities/http.py +5 -0
  21. monocle_apptrace/instrumentation/metamodel/azureaiinference/_helper.py +88 -8
  22. monocle_apptrace/instrumentation/metamodel/azureaiinference/entities/inference.py +22 -8
  23. monocle_apptrace/instrumentation/metamodel/botocore/_helper.py +92 -16
  24. monocle_apptrace/instrumentation/metamodel/botocore/entities/inference.py +13 -8
  25. monocle_apptrace/instrumentation/metamodel/botocore/handlers/botocore_span_handler.py +1 -1
  26. monocle_apptrace/instrumentation/metamodel/fastapi/__init__.py +0 -0
  27. monocle_apptrace/instrumentation/metamodel/fastapi/_helper.py +82 -0
  28. monocle_apptrace/instrumentation/metamodel/fastapi/entities/__init__.py +0 -0
  29. monocle_apptrace/instrumentation/metamodel/fastapi/entities/http.py +44 -0
  30. monocle_apptrace/instrumentation/metamodel/fastapi/methods.py +23 -0
  31. monocle_apptrace/instrumentation/metamodel/finish_types.py +387 -0
  32. monocle_apptrace/instrumentation/metamodel/flask/_helper.py +6 -11
  33. monocle_apptrace/instrumentation/metamodel/gemini/__init__.py +0 -0
  34. monocle_apptrace/instrumentation/metamodel/gemini/_helper.py +120 -0
  35. monocle_apptrace/instrumentation/metamodel/gemini/entities/__init__.py +0 -0
  36. monocle_apptrace/instrumentation/metamodel/gemini/entities/inference.py +83 -0
  37. monocle_apptrace/instrumentation/metamodel/gemini/entities/retrieval.py +43 -0
  38. monocle_apptrace/instrumentation/metamodel/gemini/methods.py +24 -0
  39. monocle_apptrace/instrumentation/metamodel/haystack/_helper.py +15 -8
  40. monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py +5 -10
  41. monocle_apptrace/instrumentation/metamodel/haystack/methods.py +7 -0
  42. monocle_apptrace/instrumentation/metamodel/lambdafunc/_helper.py +78 -0
  43. monocle_apptrace/instrumentation/metamodel/lambdafunc/entities/http.py +51 -0
  44. monocle_apptrace/instrumentation/metamodel/lambdafunc/methods.py +23 -0
  45. monocle_apptrace/instrumentation/metamodel/lambdafunc/wrapper.py +23 -0
  46. monocle_apptrace/instrumentation/metamodel/langchain/_helper.py +127 -19
  47. monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py +15 -10
  48. monocle_apptrace/instrumentation/metamodel/langgraph/_helper.py +67 -10
  49. monocle_apptrace/instrumentation/metamodel/langgraph/entities/inference.py +127 -20
  50. monocle_apptrace/instrumentation/metamodel/langgraph/langgraph_processor.py +43 -0
  51. monocle_apptrace/instrumentation/metamodel/langgraph/methods.py +29 -5
  52. monocle_apptrace/instrumentation/metamodel/llamaindex/_helper.py +227 -16
  53. monocle_apptrace/instrumentation/metamodel/llamaindex/entities/agent.py +127 -10
  54. monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py +13 -8
  55. monocle_apptrace/instrumentation/metamodel/llamaindex/llamaindex_processor.py +51 -0
  56. monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py +68 -1
  57. monocle_apptrace/instrumentation/metamodel/mcp/__init__.py +0 -0
  58. monocle_apptrace/instrumentation/metamodel/mcp/_helper.py +118 -0
  59. monocle_apptrace/instrumentation/metamodel/mcp/entities/__init__.py +0 -0
  60. monocle_apptrace/instrumentation/metamodel/mcp/entities/inference.py +48 -0
  61. monocle_apptrace/instrumentation/metamodel/mcp/mcp_processor.py +13 -0
  62. monocle_apptrace/instrumentation/metamodel/mcp/methods.py +21 -0
  63. monocle_apptrace/instrumentation/metamodel/openai/_helper.py +83 -16
  64. monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py +103 -92
  65. monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py +1 -1
  66. monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py +41 -22
  67. monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py +1 -1
  68. monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py +5 -9
  69. monocle_apptrace/instrumentation/metamodel/teamsai/sample.json +0 -4
  70. {monocle_apptrace-0.4.1.dist-info → monocle_apptrace-0.5.0b1.dist-info}/METADATA +14 -3
  71. {monocle_apptrace-0.4.1.dist-info → monocle_apptrace-0.5.0b1.dist-info}/RECORD +74 -44
  72. {monocle_apptrace-0.4.1.dist-info → monocle_apptrace-0.5.0b1.dist-info}/WHEEL +0 -0
  73. {monocle_apptrace-0.4.1.dist-info → monocle_apptrace-0.5.0b1.dist-info}/licenses/LICENSE +0 -0
  74. {monocle_apptrace-0.4.1.dist-info → monocle_apptrace-0.5.0b1.dist-info}/licenses/NOTICE +0 -0
monocle_apptrace/instrumentation/common/wrapper.py
@@ -5,7 +5,6 @@ from opentelemetry.context import set_value, attach, detach, get_value
 
  from monocle_apptrace.instrumentation.common.span_handler import SpanHandler
  from monocle_apptrace.instrumentation.common.utils import (
- get_fully_qualified_class_name,
  set_scopes,
  with_tracer_wrapper,
  set_scope,
@@ -36,7 +35,7 @@ def pre_process_span(name, tracer, handler, add_workflow_span, to_wrap, wrapped,
  except Exception as e:
  logger.info(f"Warning: Error occurred in pre_task_processing: {e}")
 
- def post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span, parent_span=None, ex = None):
+ def post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span, parent_span, ex):
  if not (SpanHandler.is_root_span(span) or get_value(ADD_NEW_WORKFLOW) == True):
  try:
  handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span, parent_span, ex)
@@ -44,7 +43,7 @@ def post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_
  logger.info(f"Warning: Error occurred in hydrate_span: {e}")
 
  try:
- handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
+ handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, ex, span, parent_span)
  except Exception as e:
  logger.info(f"Warning: Error occurred in post_task_processing: {e}")
 
@@ -52,7 +51,7 @@ def get_span_name(to_wrap, instance):
  if to_wrap.get("span_name"):
  name = to_wrap.get("span_name")
  else:
- name = get_fully_qualified_class_name(instance)
+ name = to_wrap.get("package", "") + "." + to_wrap.get("object", "") + "." + to_wrap.get("method", "")
  return name
 
  def monocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, add_workflow_span, args, kwargs):
@@ -93,10 +92,11 @@ def monocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap
 
  def monocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
  return_value = None
+ pre_trace_token = None
  token = None
  try:
  try:
- handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+ pre_trace_token = handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
  except Exception as e:
  logger.info(f"Warning: Error occurred in pre_tracing: {e}")
  if to_wrap.get('skip_span', False) or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
@@ -111,7 +111,7 @@ def monocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, inst
  return return_value
  finally:
  try:
- handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+ handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value, token=pre_trace_token)
  except Exception as e:
  logger.info(f"Warning: Error occurred in post_tracing: {e}")
 
@@ -141,7 +141,7 @@ async def amonocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler,
  raise
  finally:
  def post_process_span_internal(ret_val):
- post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, ret_val, span, parent_span,ex)
+ post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, ret_val, span, parent_span, ex)
  if not auto_close_span:
  span.end()
  if ex is None and not auto_close_span and to_wrap.get("output_processor") and to_wrap.get("output_processor").get("response_processor"):
@@ -154,9 +154,10 @@ async def amonocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler,
  async def amonocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
  return_value = None
  token = None
+ pre_trace_token = None
  try:
  try:
- handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+ pre_trace_token = handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
  except Exception as e:
  logger.info(f"Warning: Error occurred in pre_tracing: {e}")
  if to_wrap.get('skip_span', False) or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
@@ -171,7 +172,7 @@ async def amonocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrappe
  return return_value
  finally:
  try:
- handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+ handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value, pre_trace_token)
  except Exception as e:
  logger.info(f"Warning: Error occurred in post_tracing: {e}")
 
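Note on the hunks above: pre_tracing now returns a value that the wrapper keeps as pre_trace_token and hands back to post_tracing, replacing the module-level thread-local state the HTTP helpers used before (see the aiohttp and azfunc hunks further down). A minimal sketch of a custom handler written against this new contract; the class name and the header capture are illustrative only, not part of the package:

from monocle_apptrace.instrumentation.common.span_handler import SpanHandler

class ExampleHeaderSpanHandler(SpanHandler):  # hypothetical example, not shipped in the wheel
    def pre_tracing(self, to_wrap, wrapped, instance, args, kwargs):
        # Whatever is returned here is stored by the wrapper as pre_trace_token.
        headers = getattr(args[0], "headers", {}) if args else {}
        return dict(headers)

    def post_tracing(self, to_wrap, wrapped, instance, args, kwargs, return_value, token):
        # The same token comes back here for cleanup, so no thread-local storage is needed.
        if token:
            token.clear()
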
monocle_apptrace/instrumentation/common/wrapper_method.py
@@ -9,10 +9,12 @@ from monocle_apptrace.instrumentation.metamodel.langchain.methods import (
  LANGCHAIN_METHODS,
  )
  from monocle_apptrace.instrumentation.metamodel.llamaindex.methods import (LLAMAINDEX_METHODS, )
+ from monocle_apptrace.instrumentation.metamodel.llamaindex.llamaindex_processor import LlamaIndexToolHandler, LlamaIndexAgentHandler, LlamaIndexSingleAgenttToolHandlerWrapper
  from monocle_apptrace.instrumentation.metamodel.haystack.methods import (HAYSTACK_METHODS, )
  from monocle_apptrace.instrumentation.metamodel.openai.methods import (OPENAI_METHODS,)
  from monocle_apptrace.instrumentation.metamodel.openai._helper import OpenAISpanHandler
  from monocle_apptrace.instrumentation.metamodel.langgraph.methods import LANGGRAPH_METHODS
+ from monocle_apptrace.instrumentation.metamodel.langgraph.langgraph_processor import LanggraphAgentHandler, LanggraphToolHandler
  from monocle_apptrace.instrumentation.metamodel.flask.methods import (FLASK_METHODS, )
  from monocle_apptrace.instrumentation.metamodel.flask._helper import FlaskSpanHandler, FlaskResponseSpanHandler
  from monocle_apptrace.instrumentation.metamodel.requests.methods import (REQUESTS_METHODS, )
@@ -23,6 +25,15 @@ from monocle_apptrace.instrumentation.metamodel.aiohttp.methods import (AIOHTTP_
  from monocle_apptrace.instrumentation.metamodel.aiohttp._helper import aiohttpSpanHandler
  from monocle_apptrace.instrumentation.metamodel.azfunc._helper import (azureSpanHandler)
  from monocle_apptrace.instrumentation.metamodel.azfunc.methods import AZFUNC_HTTP_METHODS
+ from monocle_apptrace.instrumentation.metamodel.gemini.methods import GEMINI_METHODS
+ from monocle_apptrace.instrumentation.metamodel.fastapi.methods import FASTAPI_METHODS
+ from monocle_apptrace.instrumentation.metamodel.fastapi._helper import FastAPISpanHandler, FastAPIResponseSpanHandler
+ from monocle_apptrace.instrumentation.metamodel.lambdafunc._helper import lambdaSpanHandler
+ from monocle_apptrace.instrumentation.metamodel.lambdafunc.methods import LAMBDA_HTTP_METHODS
+ from monocle_apptrace.instrumentation.metamodel.mcp.methods import MCP_METHODS
+ from monocle_apptrace.instrumentation.metamodel.mcp.mcp_processor import MCPAgentHandler
+ from monocle_apptrace.instrumentation.metamodel.a2a.methods import A2A_CLIENT_METHODS
+
  class WrapperMethod:
  def __init__(
  self,
@@ -71,7 +82,26 @@ class WrapperMethod:
  def get_span_handler(self) -> SpanHandler:
  return self.span_handler()
 
- DEFAULT_METHODS_LIST = LANGCHAIN_METHODS + LLAMAINDEX_METHODS + HAYSTACK_METHODS + BOTOCORE_METHODS + FLASK_METHODS + REQUESTS_METHODS + LANGGRAPH_METHODS + OPENAI_METHODS + TEAMAI_METHODS + ANTHROPIC_METHODS + AIOHTTP_METHODS + AZURE_AI_INFERENCE_METHODS + AZFUNC_HTTP_METHODS
+ DEFAULT_METHODS_LIST = (
+ LANGCHAIN_METHODS +
+ LLAMAINDEX_METHODS +
+ HAYSTACK_METHODS +
+ BOTOCORE_METHODS +
+ FLASK_METHODS +
+ REQUESTS_METHODS +
+ LANGGRAPH_METHODS +
+ OPENAI_METHODS +
+ TEAMAI_METHODS +
+ ANTHROPIC_METHODS +
+ AIOHTTP_METHODS +
+ AZURE_AI_INFERENCE_METHODS +
+ AZFUNC_HTTP_METHODS +
+ GEMINI_METHODS +
+ FASTAPI_METHODS +
+ LAMBDA_HTTP_METHODS +
+ MCP_METHODS +
+ A2A_CLIENT_METHODS
+ )
 
  MONOCLE_SPAN_HANDLERS: Dict[str, SpanHandler] = {
  "default": SpanHandler(),
@@ -83,4 +113,13 @@ MONOCLE_SPAN_HANDLERS: Dict[str, SpanHandler] = {
  "non_framework_handler": NonFrameworkSpanHandler(),
  "openai_handler": OpenAISpanHandler(),
  "azure_func_handler": azureSpanHandler(),
+ "mcp_agent_handler": MCPAgentHandler(),
+ "fastapi_handler": FastAPISpanHandler(),
+ "fastapi_response_handler": FastAPIResponseSpanHandler(),
+ "langgraph_agent_handler": LanggraphAgentHandler(),
+ "langgraph_tool_handler": LanggraphToolHandler(),
+ "llamaindex_tool_handler": LlamaIndexToolHandler(),
+ "llamaindex_agent_handler": LlamaIndexAgentHandler(),
+ "llamaindex_single_agent_tool_handler": LlamaIndexSingleAgenttToolHandlerWrapper(),
+ "lambda_func_handler": lambdaSpanHandler(),
  }
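With the new integrations wired in, DEFAULT_METHODS_LIST and MONOCLE_SPAN_HANDLERS are the two registries that grow in this release. A quick way to inspect them after installing 0.5.0b1, assuming the method entries are plain dicts with a "package" key (as the a2a method table further down suggests):

from monocle_apptrace.instrumentation.common.wrapper_method import (
    DEFAULT_METHODS_LIST,
    MONOCLE_SPAN_HANDLERS,
)

# Packages instrumented out of the box in 0.5.0b1 (now including a2a, fastapi, gemini, mcp, ...).
packages = sorted({entry.get("package", "") for entry in DEFAULT_METHODS_LIST})
print(packages)

# Named span handlers, including the new mcp/fastapi/langgraph/llamaindex/lambda entries.
print(sorted(MONOCLE_SPAN_HANDLERS))
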
monocle_apptrace/instrumentation/metamodel/a2a/_helper.py
@@ -0,0 +1,37 @@
+ from opentelemetry.context import get_value
+ from monocle_apptrace.instrumentation.common.utils import resolve_from_alias
+ import logging
+ import json
+
+ logger = logging.getLogger(__name__)
+
+ def get_url(arguments):
+ """Get the URL of the tool from the instance."""
+ return arguments["instance"].url
+
+ def get_method(arguments):
+ """Get the method of the tool from the instance."""
+ return arguments["args"][0].method
+
+ def get_params_arguments(arguments):
+ """Get the params of the tool from the instance."""
+ return arguments["args"][0].params.message.parts[0].root.text
+
+ def get_role(arguments):
+ """Get the role of the tool from the instance."""
+ return arguments["args"][0].params.message.role.value
+
+ def get_status(arguments):
+ """Get the status of the tool from the result."""
+ return arguments["result"].root.result.status.state.value
+
+ def get_response(arguments):
+ """Get the response of the tool from the result."""
+ ret_val = []
+ for artifact in arguments["result"].root.result.artifacts:
+ if artifact.parts:
+ for part in artifact.parts:
+ if part.root.text:
+ ret_val.append(part.root.text)
+ return ret_val
+ # return arguments["result"].root.result.artifacts[0].parts[0].root.text
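These accessors are called with the same "arguments" dictionary the other metamodel helpers receive (keys such as "instance", "args", "kwargs", "result", "exception"). A small self-contained sketch with stand-in objects; the dummy namespaces mimic only the attribute paths the helpers read and are not a2a SDK types:

from types import SimpleNamespace
from monocle_apptrace.instrumentation.metamodel.a2a._helper import (
    get_url, get_method, get_params_arguments, get_role,
)

# Fake request object exposing just the attributes the accessors touch.
request = SimpleNamespace(
    method="message/send",
    params=SimpleNamespace(
        message=SimpleNamespace(
            role=SimpleNamespace(value="user"),
            parts=[SimpleNamespace(root=SimpleNamespace(text="hello agent"))],
        )
    ),
)
arguments = {
    "instance": SimpleNamespace(url="http://localhost:9999"),
    "args": (request,),
    "kwargs": {},
    "result": None,
    "exception": None,
}

print(get_url(arguments))               # http://localhost:9999
print(get_method(arguments))            # message/send
print(get_params_arguments(arguments))  # hello agent
print(get_role(arguments))              # user
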
monocle_apptrace/instrumentation/metamodel/a2a/entities/inference.py
@@ -0,0 +1,112 @@
+ from monocle_apptrace.instrumentation.metamodel.a2a import _helper
+
+ A2A_CLIENT = {
+ "type": "agentic.invocation",
+ "attributes": [
+ [
+ {
+ "attribute": "type",
+ "accessor": lambda arguments: "agent2agent.server"
+ },
+ {
+ "attribute": "url",
+ "accessor": lambda arguments: _helper.get_url(arguments)
+ },
+ {
+ "attribute": "method",
+ "accessor": lambda arguments: _helper.get_method(arguments)
+ }
+ ]
+ ],
+ "events": [
+ {
+ "name": "data.input",
+ "attributes": [
+ {
+ "_comment": "this is a2a input",
+ "attribute": "input",
+ "accessor": lambda arguments: _helper.get_params_arguments(arguments)
+ },
+ {
+ "_comment": "this is a2a input",
+ "attribute": "role",
+ "accessor": lambda arguments: _helper.get_role(arguments)
+ },
+ ],
+ },
+ {
+ "name": "data.output",
+ "attributes": [
+ {
+ "_comment": "this is a2a output",
+ "attribute": "status",
+ "accessor": lambda arguments: _helper.get_status(arguments, "status")
+ },
+ {
+ "_comment": "this is a2a output",
+ "attribute": "response",
+ "accessor": lambda arguments: _helper.get_response(arguments)
+ },
+ ],
+ },
+ ],
+ }
+
+ # A2A_RESOLVE = {
+ # "type": "a2a.resolve",
+ # "attributes": [
+ # [
+ # # {
+ # # "_comment": "tool type",
+ # # "attribute": "type",
+ # # "accessor": lambda arguments:'tool.mcp'
+ # # },
+ # {
+ # "_comment": "name of the tool",
+ # "attribute": "name",
+ # "accessor": lambda arguments: _helper.log(arguments),
+ # },
+ # {
+ # "_comment": "tool description",
+ # "attribute": "agent_description",
+ # "accessor": lambda arguments: arguments["result"].description
+ # },
+ # {
+ # "_comment": "tool name",
+ # "attribute": "agent_name",
+ # "accessor": lambda arguments: arguments["result"].name
+ # }
+ # # {
+ # # "_comment": "tool type",
+ # # "attribute": "type",
+ # # "accessor": lambda arguments: _helper.get_type(arguments),
+ # # },
+ # ]
+ # ],
+ # "events": [
+ # # {
+ # # "name": "data.input",
+ # # "attributes": [
+ # # {
+ # # "_comment": "this is Tool input",
+ # # "attribute": "input",
+ # # "accessor": lambda arguments: _helper.get_params_arguments(
+ # # arguments
+ # # ),
+ # # },
+ # # ],
+ # # },
+ # # {
+ # # "name": "data.output",
+ # # "attributes": [
+ # # {
+ # # "_comment": "this is Tool output",
+ # # "attribute": "output",
+ # # "accessor": lambda arguments: _helper.get_output_text(arguments)
+ # # },
+ # # ],
+ # # },
+ # ],
+ # }
+
+
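The A2A_CLIENT mapping follows the same metamodel shape as the other output processors in this release: a span type, groups of attribute accessors, and named events whose accessors all receive the one "arguments" dict. The sketch below is not the library's hydration code (that lives in SpanHandler.hydrate_span); it only illustrates how such a processor is structured and consumed:

def evaluate_output_processor(processor, arguments):
    """Illustrative walk over an output_processor dict such as A2A_CLIENT."""
    attributes = {"span.type": processor.get("type")}
    for group in processor.get("attributes", []):
        for item in group:
            try:
                attributes[item["attribute"]] = item["accessor"](arguments)
            except Exception:
                attributes[item["attribute"]] = None
    events = {}
    for event in processor.get("events", []):
        events[event["name"]] = {}
        for item in event.get("attributes", []):
            name = item.get("attribute", "metadata")
            try:
                events[event["name"]][name] = item["accessor"](arguments)
            except Exception:
                events[event["name"]][name] = None
    return attributes, events
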
monocle_apptrace/instrumentation/metamodel/a2a/methods.py
@@ -0,0 +1,22 @@
+ from monocle_apptrace.instrumentation.common.wrapper import task_wrapper, atask_wrapper
+ from monocle_apptrace.instrumentation.metamodel.a2a.entities.inference import A2A_CLIENT
+
+ A2A_CLIENT_METHODS = [
+ # {
+ # "package": "a2a.client.client",
+ # "object": "A2ACardResolver",
+ # "method": "get_agent_card",
+ # "wrapper_method": atask_wrapper,
+ # # "span_handler": "mcp_agent_handler",
+ # "output_processor": A2A_RESOLVE,
+ # },
+ {
+ "package": "a2a.client.client",
+ "object": "A2AClient",
+ "method": "send_message",
+ "wrapper_method": atask_wrapper,
+ "output_processor": A2A_CLIENT,
+ },
+ ]
+
+
monocle_apptrace/instrumentation/metamodel/aiohttp/_helper.py
@@ -7,8 +7,6 @@ from urllib.parse import unquote
 
  logger = logging.getLogger(__name__)
  MAX_DATA_LENGTH = 1000
- token_data = local()
- token_data.current_token = None
 
  def get_route(args) -> str:
  route_path: Option[str] = try_option(getattr, args[0], 'path')
@@ -41,11 +39,10 @@ def extract_status(result) -> str:
  return status
 
  def aiohttp_pre_tracing(args):
- token_data.current_token = extract_http_headers(args[0].headers)
+ return extract_http_headers(args[0].headers)
 
- def aiohttp_post_tracing():
- clear_http_scopes(token_data.current_token)
- token_data.current_token = None
+ def aiohttp_post_tracing(token):
+ clear_http_scopes(token)
 
  def aiohttp_skip_span(args) -> bool:
  if get_method(args) == "HEAD":
@@ -55,12 +52,10 @@ def aiohttp_skip_span(args) -> bool:
  class aiohttpSpanHandler(SpanHandler):
 
  def pre_tracing(self, to_wrap, wrapped, instance, args, kwargs):
- aiohttp_pre_tracing(args)
- return super().pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+ return aiohttp_pre_tracing(args)
 
- def post_tracing(self, to_wrap, wrapped, instance, args, kwargs, return_value):
- aiohttp_post_tracing()
- return super().post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+ def post_tracing(self, to_wrap, wrapped, instance, args, kwargs, return_value, token):
+ aiohttp_post_tracing(token)
 
  def skip_span(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
  return aiohttp_skip_span(args)
monocle_apptrace/instrumentation/metamodel/anthropic/_helper.py
@@ -3,14 +3,18 @@ This module provides utility functions for extracting system, user,
  and assistant messages from various input formats.
  """
 
+ import json
  import logging
  from monocle_apptrace.instrumentation.common.utils import (
  Option,
+ get_json_dumps,
  get_keys_as_tuple,
  get_nested_value,
+ get_status_code,
  try_option,
  get_exception_message,
  )
+ from monocle_apptrace.instrumentation.metamodel.finish_types import map_anthropic_finish_reason_to_finish_type
 
 
  logger = logging.getLogger(__name__)
@@ -30,12 +34,13 @@ def extract_messages(kwargs):
  """Extract system and user messages"""
  try:
  messages = []
+ if "system" in kwargs and isinstance(kwargs["system"], str):
+ messages.append({"system": kwargs["system"]})
  if 'messages' in kwargs and len(kwargs['messages']) >0:
  for msg in kwargs['messages']:
  if msg.get('content') and msg.get('role'):
  messages.append({msg['role']: msg['content']})
-
- return [str(message) for message in messages]
+ return [get_json_dumps(message) for message in messages]
  except Exception as e:
  logger.warning("Warning: Error occurred in extract_messages: %s", str(e))
  return []
@@ -48,28 +53,24 @@ def get_exception_status_code(arguments):
  else:
  return 'success'
 
- def get_status_code(arguments):
- if arguments["exception"] is not None:
- return get_exception_status_code(arguments)
- elif hasattr(arguments["result"], "status"):
- return arguments["result"].status
- else:
- return 'success'
-
  def extract_assistant_message(arguments):
  try:
  status = get_status_code(arguments)
- response: str = ""
+ response = arguments["result"]
  if status == 'success':
- if arguments['result'] is not None and hasattr(arguments['result'],"content") and len(arguments['result'].content) >0:
- if hasattr(arguments['result'].content[0],"text"):
- response = arguments['result'].content[0].text
+ messages = []
+ role = response.role if hasattr(response, 'role') else "assistant"
+ if response is not None and hasattr(response,"content") and len(response.content) >0:
+ if hasattr(response.content[0],"text"):
+ messages.append({role: response.content[0].text})
+ # return first message if list is not empty
+ return get_json_dumps(messages[0]) if messages else ""
  else:
  if arguments["exception"] is not None:
- response = get_exception_message(arguments)
+ return get_exception_message(arguments)
  elif hasattr(arguments["result"], "error"):
- response = arguments["result"].error
- return response
+ return arguments["result"].error
+
  except (IndexError, AttributeError) as e:
  logger.warning("Warning: Error occurred in extract_assistant_message: %s", str(e))
  return None
@@ -86,4 +87,20 @@ def update_span_from_llm_response(response):
  meta_dict.update({"completion_tokens": getattr(response.usage, "output_tokens", 0)})
  meta_dict.update({"prompt_tokens": getattr(response.usage, "input_tokens", 0)})
  meta_dict.update({"total_tokens": getattr(response.usage, "input_tokens", 0)+getattr(response.usage, "output_tokens", 0)})
- return meta_dict
+ return meta_dict
+
+ def extract_finish_reason(arguments):
+ """Extract stop_reason from Anthropic response (Claude)."""
+ try:
+ # Arguments may be a dict with 'result' or just the response object
+ response = arguments.get("result") if isinstance(arguments, dict) else arguments
+ if response is not None and hasattr(response, "stop_reason"):
+ return response.stop_reason
+ except Exception as e:
+ logger.warning("Warning: Error occurred in extract_finish_reason: %s", str(e))
+ return None
+ return None
+
+ def map_finish_reason_to_finish_type(finish_reason):
+ """Map Anthropic stop_reason to finish_type, similar to OpenAI mapping."""
+ return map_anthropic_finish_reason_to_finish_type(finish_reason)
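A short usage sketch for the two new helpers above. The response object is a dummy stand-in for an Anthropic Message, and the concrete finish_type string depends on the tables in the new finish_types module (added in this release but not shown in this hunk):

from types import SimpleNamespace
from monocle_apptrace.instrumentation.metamodel.anthropic._helper import (
    extract_finish_reason,
    map_finish_reason_to_finish_type,
)

fake_response = SimpleNamespace(stop_reason="end_turn")  # dummy Message-like object
arguments = {"result": fake_response, "exception": None}

reason = extract_finish_reason(arguments)                # "end_turn"
print(reason, map_finish_reason_to_finish_type(reason))  # mapped finish_type from finish_types
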
monocle_apptrace/instrumentation/metamodel/anthropic/entities/inference.py
@@ -1,9 +1,7 @@
  from monocle_apptrace.instrumentation.metamodel.anthropic import (
  _helper,
  )
- from monocle_apptrace.instrumentation.common.utils import (resolve_from_alias, get_llm_type,
- get_status, get_status_code
- )
+ from monocle_apptrace.instrumentation.common.utils import (get_error_message, resolve_from_alias)
 
  INFERENCE = {
  "type": "inference",
@@ -12,7 +10,7 @@ INFERENCE = {
  {
  "_comment": "provider type ,name , deployment , inference_endpoint",
  "attribute": "type",
- "accessor": lambda arguments: 'inference.' + (get_llm_type(arguments['instance']) or 'generic')
+ "accessor": lambda arguments: 'inference.anthropic'
 
  },
  {
@@ -55,12 +53,8 @@ INFERENCE = {
  "name": "data.output",
  "attributes": [
  {
- "attribute": "status",
- "accessor": lambda arguments: get_status(arguments)
- },
- {
- "attribute": "status_code",
- "accessor": lambda arguments: _helper.get_status_code(arguments)
+ "attribute": "error_code",
+ "accessor": lambda arguments: get_error_message(arguments)
  },
  {
  "_comment": "this is result from LLM",
@@ -75,6 +69,16 @@ INFERENCE = {
  {
  "_comment": "this is metadata usage from LLM",
  "accessor": lambda arguments: _helper.update_span_from_llm_response(arguments['result'])
+ },
+ {
+ "_comment": "finish reason from Anthropic response",
+ "attribute": "finish_reason",
+ "accessor": lambda arguments: _helper.extract_finish_reason(arguments)
+ },
+ {
+ "_comment": "finish type mapped from finish reason",
+ "attribute": "finish_type",
+ "accessor": lambda arguments: _helper.map_finish_reason_to_finish_type(_helper.extract_finish_reason(arguments))
  }
  ]
  }
monocle_apptrace/instrumentation/metamodel/azfunc/_helper.py
@@ -8,8 +8,6 @@ from urllib.parse import unquote, urlparse, ParseResult
 
  logger = logging.getLogger(__name__)
  MAX_DATA_LENGTH = 1000
- token_data = local()
- token_data.current_token = None
 
  def get_url(kwargs) -> ParseResult:
  url_str = try_option(getattr, kwargs['req'], 'url')
@@ -19,6 +17,13 @@ def get_url(kwargs) -> ParseResult:
  else:
  return None
 
+ def get_function_name(kwargs) -> str:
+ context = kwargs.get('context', None)
+ if context is not None and hasattr(context, 'function_name'):
+ return context.function_name
+ return ""
+
+
  def get_route(kwargs) -> str:
  url:ParseResult = get_url(kwargs)
  if url is not None:
@@ -61,18 +66,15 @@ def extract_status(result) -> str:
 
  def azure_func_pre_tracing(kwargs):
  headers = kwargs['req'].headers if hasattr(kwargs['req'], 'headers') else {}
- token_data.current_token = extract_http_headers(headers)
+ return extract_http_headers(headers)
 
- def azure_func_post_tracing():
- clear_http_scopes(token_data.current_token)
- token_data.current_token = None
+ def azure_func_post_tracing(token):
+ clear_http_scopes(token)
 
  class azureSpanHandler(SpanHandler):
 
  def pre_tracing(self, to_wrap, wrapped, instance, args, kwargs):
- azure_func_pre_tracing(kwargs)
- return super().pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+ return azure_func_pre_tracing(kwargs)
 
- def post_tracing(self, to_wrap, wrapped, instance, args, kwargs, return_value):
- azure_func_post_tracing()
- return super().post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+ def post_tracing(self, to_wrap, wrapped, instance, args, kwargs, return_value, token):
+ azure_func_post_tracing(token)
monocle_apptrace/instrumentation/metamodel/azfunc/entities/http.py
@@ -18,6 +18,11 @@ AZFUNC_HTTP_PROCESSOR = {
  "attribute": "body",
  "accessor": lambda arguments: _helper.get_body(arguments['kwargs'])
  },
+ {
+ "_comment": "request function name",
+ "attribute": "function_name",
+ "accessor": lambda arguments: _helper.get_function_name(arguments['kwargs'])
+ }
  ]
  ],
  "events": [