monocle-apptrace 0.4.0b1__tar.gz → 0.4.0b2__tar.gz

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
Files changed (99)
  1. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/CHANGELOG.md +6 -0
  2. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/PKG-INFO +2 -1
  3. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/pyproject.toml +2 -1
  4. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/common/constants.py +4 -4
  5. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/common/span_handler.py +52 -28
  6. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/common/utils.py +15 -0
  7. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/common/wrapper.py +66 -30
  8. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/common/wrapper_method.py +3 -1
  9. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/haystack/_helper.py +1 -1
  10. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py +1 -1
  11. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py +1 -1
  12. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py +1 -1
  13. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/openai/_helper.py +10 -2
  14. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/openai/methods.py +6 -6
  15. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py +37 -5
  16. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py +12 -32
  17. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py +14 -16
  18. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/.gitignore +0 -0
  19. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/CODEOWNERS.md +0 -0
  20. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/CODE_OF_CONDUCT.md +0 -0
  21. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/CONTRIBUTING.md +0 -0
  22. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/COPYRIGHT.template +0 -0
  23. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/LICENSE +0 -0
  24. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/MAINTAINER.md +0 -0
  25. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/Monocle_User_Guide.md +0 -0
  26. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/Monocle_committer_guide.md +0 -0
  27. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/Monocle_contributor_guide.md +0 -0
  28. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/NOTICE +0 -0
  29. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/README.md +0 -0
  30. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/SECURITY.md +0 -0
  31. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/README.md +0 -0
  32. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/__init__.py +0 -0
  33. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/__main__.py +0 -0
  34. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/exporters/aws/s3_exporter.py +0 -0
  35. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/exporters/aws/s3_exporter_opendal.py +0 -0
  36. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/exporters/azure/blob_exporter.py +0 -0
  37. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/exporters/azure/blob_exporter_opendal.py +0 -0
  38. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/exporters/base_exporter.py +0 -0
  39. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/exporters/exporter_processor.py +0 -0
  40. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/exporters/file_exporter.py +0 -0
  41. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/exporters/monocle_exporters.py +0 -0
  42. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/exporters/okahu/okahu_exporter.py +0 -0
  43. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/__init__.py +0 -0
  44. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/common/__init__.py +0 -0
  45. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/common/instrumentor.py +0 -0
  46. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/common/tracing.md +0 -0
  47. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/__init__.py +0 -0
  48. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/aiohttp/__init__.py +0 -0
  49. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/aiohttp/_helper.py +0 -0
  50. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/aiohttp/entities/http.py +0 -0
  51. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/aiohttp/methods.py +0 -0
  52. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/anthropic/__init__.py +0 -0
  53. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/anthropic/_helper.py +0 -0
  54. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/anthropic/entities/__init__.py +0 -0
  55. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/anthropic/entities/inference.py +0 -0
  56. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/anthropic/methods.py +0 -0
  57. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/botocore/__init__.py +0 -0
  58. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/botocore/_helper.py +0 -0
  59. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/botocore/entities/__init__.py +0 -0
  60. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/botocore/entities/inference.py +0 -0
  61. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/botocore/handlers/botocore_span_handler.py +0 -0
  62. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/botocore/methods.py +0 -0
  63. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/flask/__init__.py +0 -0
  64. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/flask/_helper.py +0 -0
  65. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/flask/entities/http.py +0 -0
  66. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/flask/methods.py +0 -0
  67. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/haystack/__init__.py +0 -0
  68. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/haystack/entities/__init__.py +0 -0
  69. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/haystack/entities/retrieval.py +0 -0
  70. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/haystack/methods.py +0 -0
  71. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/langchain/__init__.py +0 -0
  72. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/langchain/_helper.py +0 -0
  73. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/langchain/entities/__init__.py +0 -0
  74. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/langchain/entities/retrieval.py +0 -0
  75. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/langchain/methods.py +0 -0
  76. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/langgraph/__init__.py +0 -0
  77. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/langgraph/_helper.py +0 -0
  78. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/langgraph/entities/__init__.py +0 -0
  79. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/langgraph/entities/inference.py +0 -0
  80. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/langgraph/methods.py +0 -0
  81. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/llamaindex/__init__.py +0 -0
  82. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/llamaindex/_helper.py +0 -0
  83. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/llamaindex/entities/__init__.py +0 -0
  84. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/llamaindex/entities/agent.py +0 -0
  85. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/llamaindex/entities/retrieval.py +0 -0
  86. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/llamaindex/methods.py +0 -0
  87. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/openai/__init__.py +0 -0
  88. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/openai/entities/__init__.py +0 -0
  89. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/openai/entities/inference.py +0 -0
  90. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/openai/entities/retrieval.py +0 -0
  91. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/requests/__init__.py +0 -0
  92. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/requests/_helper.py +0 -0
  93. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/requests/entities/http.py +0 -0
  94. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/requests/methods.py +0 -0
  95. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/teamsai/__init__.py +0 -0
  96. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/teamsai/entities/__init__.py +0 -0
  97. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/__init__.py +0 -0
  98. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/src/monocle_apptrace/instrumentation/metamodel/teamsai/methods.py +0 -0
  99. {monocle_apptrace-0.4.0b1 → monocle_apptrace-0.4.0b2}/tox.ini +0 -0
CHANGELOG.md
@@ -1,3 +1,9 @@
+## Version 0.4.0b2 (2025-05-21)
+
+- Add Span error handling ([#186](https://github.com/monocle2ai/monocle/pull/186))
+- Add teams ai enhancements ([#184](https://github.com/monocle2ai/monocle/pull/184))
+
+
 ## Version 0.4.0b1 (2025-05-14)
 
 - Added conversation id in scope for teams ai bot ([#180](https://github.com/monocle2ai/monocle/pull/180))
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: monocle_apptrace
-Version: 0.4.0b1
+Version: 0.4.0b2
 Summary: package with monocle genAI tracing
 Project-URL: Homepage, https://github.com/monocle2ai/monocle
 Project-URL: Issues, https://github.com/monocle2ai/monocle/issues
@@ -20,6 +20,7 @@ Requires-Dist: boto3==1.35.19; extra == 'aws'
 Provides-Extra: azure
 Requires-Dist: azure-storage-blob==12.22.0; extra == 'azure'
 Provides-Extra: dev
+Requires-Dist: anthropic-haystack; extra == 'dev'
 Requires-Dist: anthropic==0.49.0; extra == 'dev'
 Requires-Dist: azure-storage-blob==12.22.0; extra == 'dev'
 Requires-Dist: boto3==1.34.131; extra == 'dev'
pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "monocle_apptrace"
-version = "0.4.0b1"
+version = "0.4.0b2"
 authors = []
 description = "package with monocle genAI tracing"
 readme = "README.md"
@@ -74,6 +74,7 @@ dev = [
     'langgraph==0.2.68',
     'opendal==0.45.14',
     'anthropic==0.49.0',
+    'anthropic-haystack'
 ]
 
 azure = [
src/monocle_apptrace/instrumentation/common/constants.py
@@ -14,10 +14,10 @@ GITHUB_CODESPACE_IDENTIFIER_ENV_NAME = "GITHUB_REPOSITORY"
 # Azure naming reference can be found here
 # https://learn.microsoft.com/en-us/azure/cloud-adoption-framework/ready/azure-best-practices/resource-abbreviations
 # https://docs.aws.amazon.com/resource-explorer/latest/userguide/supported-resource-types.html#services-lookoutmetrics
-AZURE_FUNCTION_NAME = "azure.func"
-AZURE_APP_SERVICE_NAME = "azure.asp"
-AZURE_ML_SERVICE_NAME = "azure.mlw"
-AWS_LAMBDA_SERVICE_NAME = "aws.lambda"
+AZURE_FUNCTION_NAME = "azure_func"
+AZURE_APP_SERVICE_NAME = "azure_webapp"
+AZURE_ML_SERVICE_NAME = "azure_ml"
+AWS_LAMBDA_SERVICE_NAME = "aws_lambda"
 GITHUB_CODESPACE_SERVICE_NAME = "github_codespace"
 
 # Env variables to identify infra service type
src/monocle_apptrace/instrumentation/common/span_handler.py
@@ -42,8 +42,17 @@ class SpanHandler:
     def skip_span(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
         return False
 
-    def skip_processor(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
-        return False
+    def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
+        return []
+
+    def set_span_type(self, to_wrap, wrapped, instance, output_processor, span:Span, args, kwargs) -> str:
+        span_type:str = None
+        if 'type' in output_processor:
+            span_type = output_processor['type']
+            span.set_attribute("span.type", span_type)
+        else:
+            logger.warning("type of span not found or incorrect written in entity json")
+        return span_type
 
     def pre_task_processing(self, to_wrap, wrapped, instance, args,kwargs, span):
         if "pipeline" in to_wrap['package']:
@@ -69,30 +78,32 @@ class SpanHandler:
         workflow_name = SpanHandler.get_workflow_name(span=span)
         if workflow_name:
             span.set_attribute("workflow.name", workflow_name)
+        span.set_attribute("span.type", "generic")
 
     def post_task_processing(self, to_wrap, wrapped, instance, args, kwargs, result, span:Span):
-        if span.status.status_code == StatusCode.UNSET:
-            span.set_status(StatusCode.OK)
+        pass
 
-    def hydrate_span(self, to_wrap, wrapped, instance, args, kwargs, result, span) -> bool:
-        detected_error_in_attribute = self.hydrate_attributes(to_wrap, wrapped, instance, args, kwargs, result, span)
-        detected_error_in_event = self.hydrate_events(to_wrap, wrapped, instance, args, kwargs, result, span)
-        if detected_error_in_attribute or detected_error_in_event:
-            span.set_attribute(MONOCLE_DETECTED_SPAN_ERROR, True)
+    def hydrate_span(self, to_wrap, wrapped, instance, args, kwargs, result, span, ex:Exception = None) -> bool:
+        try:
+            detected_error_in_attribute = self.hydrate_attributes(to_wrap, wrapped, instance, args, kwargs, result, span)
+            detected_error_in_event = self.hydrate_events(to_wrap, wrapped, instance, args, kwargs, result, span, ex)
+            if detected_error_in_attribute or detected_error_in_event:
+                span.set_attribute(MONOCLE_DETECTED_SPAN_ERROR, True)
+        finally:
+            if span.status.status_code == StatusCode.UNSET and ex is None:
+                span.set_status(StatusCode.OK)
 
     def hydrate_attributes(self, to_wrap, wrapped, instance, args, kwargs, result, span:Span) -> bool:
        detected_error:bool = False
        span_index = 0
        if SpanHandler.is_root_span(span):
            span_index = 2 # root span will have workflow and hosting entities pre-populated
-       if not self.skip_processor(to_wrap, wrapped, instance, args, kwargs) and (
-               'output_processor' in to_wrap and to_wrap["output_processor"] is not None):
+       if 'output_processor' in to_wrap and to_wrap["output_processor"] is not None:
            output_processor=to_wrap['output_processor']
-           if 'type' in output_processor:
-               span.set_attribute("span.type", output_processor['type'])
-           else:
-               logger.warning("type of span not found or incorrect written in entity json")
-           if 'attributes' in output_processor:
+           self.set_span_type(to_wrap, wrapped, instance, output_processor, span, args, kwargs)
+           skip_processors:list[str] = self.skip_processor(to_wrap, wrapped, instance, span, args, kwargs) or []
+
+           if 'attributes' in output_processor and 'attributes' not in skip_processors:
                for processors in output_processor["attributes"]:
                    for processor in processors:
                        attribute = processor.get('attribute')
@@ -113,10 +124,6 @@ class SpanHandler:
                        else:
                            logger.debug(f"{' and '.join([key for key in ['attribute', 'accessor'] if not processor.get(key)])} not found or incorrect in entity JSON")
                    span_index += 1
-           else:
-               logger.debug("attributes not found or incorrect written in entity json")
-       else:
-           span.set_attribute("span.type", "generic")
 
        # set scopes as attributes by calling get_scopes()
        # scopes is a Mapping[str:object], iterate directly with .items()
@@ -127,16 +134,19 @@ class SpanHandler:
        span.set_attribute("entity.count", span_index)
        return detected_error
 
-    def hydrate_events(self, to_wrap, wrapped, instance, args, kwargs, ret_result, span) -> bool:
+    def hydrate_events(self, to_wrap, wrapped, instance, args, kwargs, ret_result, span, ex:Exception=None) -> bool:
        detected_error:bool = False
-       if not self.skip_processor(to_wrap, wrapped, instance, args, kwargs) and (
-               'output_processor' in to_wrap and to_wrap["output_processor"] is not None):
+       if 'output_processor' in to_wrap and to_wrap["output_processor"] is not None:
            output_processor=to_wrap['output_processor']
-           arguments = {"instance": instance, "args": args, "kwargs": kwargs, "result": ret_result}
-           if 'events' in output_processor:
+           skip_processors:list[str] = self.skip_processor(to_wrap, wrapped, instance, span, args, kwargs) or []
+
+           arguments = {"instance": instance, "args": args, "kwargs": kwargs, "result": ret_result, "exception":ex}
+           if 'events' in output_processor and 'events' not in skip_processors:
                events = output_processor['events']
                for event in events:
                    event_name = event.get("name")
+                   if 'events.'+event_name in skip_processors:
+                       continue
                    event_attributes = {}
                    attributes = event.get("attributes", [])
                    for attribute in attributes:
@@ -231,7 +241,7 @@
 
     @staticmethod
     @contextmanager
-    def workflow_type(to_wrap=None):
+    def workflow_type(to_wrap=None, span:Span=None):
         token = SpanHandler.attach_workflow_type(to_wrap)
         try:
             yield
@@ -241,6 +251,20 @@
 
 class NonFrameworkSpanHandler(SpanHandler):
 
+    def get_workflow_name_in_progress(self) -> str:
+        return get_value(WORKFLOW_TYPE_KEY)
+
+    def is_framework_span_in_progess(self) -> bool:
+        return self.get_workflow_name_in_progress() in WORKFLOW_TYPE_MAP.values()
+
     # If the language framework is being executed, then skip generating direct openAI attributes and events
-    def skip_processor(self, to_wrap, wrapped, instance, args, kwargs) -> bool:
-        return get_value(WORKFLOW_TYPE_KEY) in WORKFLOW_TYPE_MAP.values()
+    def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
+        if self.is_framework_span_in_progess():
+            return ["attributes", "events"]
+
+    def set_span_type(self, to_wrap, wrapped, instance, output_processor, span:Span, args, kwargs) -> str:
+        span_type = super().set_span_type(to_wrap, wrapped, instance, output_processor, span, args, kwargs)
+        if self.is_framework_span_in_progess() and span_type is not None:
+            span_type = span_type+".modelapi"
+            span.set_attribute("span.type", span_type)
+        return span_type
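
Note on the span_handler.py changes above (commentary added here, not part of the released diff): skip_processor now returns a list of output-processor sections to suppress instead of a boolean, and span-type assignment is factored into an overridable set_span_type. A minimal sketch of a custom handler using the new signature; the subclass name and the to_wrap key it checks are illustrative only.

from monocle_apptrace.instrumentation.common.span_handler import SpanHandler

class MyCustomSpanHandler(SpanHandler):  # hypothetical subclass, for illustration
    def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
        # Return the output_processor sections to suppress for this span:
        # "attributes", "events", or a single event such as "events.data.input".
        if to_wrap.get("suppress_payloads"):  # "suppress_payloads" is a made-up config key
            return ["events.data.input", "events.data.output"]
        return []
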
src/monocle_apptrace/instrumentation/common/utils.py
@@ -362,6 +362,21 @@ def get_llm_type(instance):
     except:
         pass
 
+def get_exception_status_code(arguments):
+    if arguments['exception'] is not None and hasattr(arguments['exception'], 'code'):
+        return arguments['exception'].code
+    else:
+        return 'error'
+
+def get_exception_message(arguments):
+    if arguments['exception'] is not None:
+        if hasattr(arguments['exception'], 'message'):
+            return arguments['exception'].message
+        else:
+            return arguments['exception'].__str__()
+    else:
+        return ''
+
 def patch_instance_method(obj, method_name, func):
     """
     Patch a special method (like __iter__) for a single instance.
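
A quick illustration of the two helpers added to utils.py above. FakeAPIError is a made-up stand-in; the accessors receive an arguments dict with an "exception" entry, as shown in the hydrate_events change.

from monocle_apptrace.instrumentation.common.utils import (
    get_exception_message,
    get_exception_status_code,
)

class FakeAPIError(Exception):  # illustrative only
    def __init__(self, code, message):
        super().__init__(message)
        self.code = code
        self.message = message

arguments = {"exception": FakeAPIError(429, "rate limited")}
print(get_exception_status_code(arguments))  # 429 ('error' when the exception has no .code)
print(get_exception_message(arguments))      # 'rate limited' (str(exception) when there is no .message)
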
src/monocle_apptrace/instrumentation/common/wrapper.py
@@ -30,12 +30,22 @@ def pre_process_span(name, tracer, handler, add_workflow_span, to_wrap, wrapped,
         SpanHandler.set_workflow_properties(span, to_wrap)
     else:
         SpanHandler.set_non_workflow_properties(span)
-    handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
+    try:
+        handler.pre_task_processing(to_wrap, wrapped, instance, args, kwargs, span)
+    except Exception as e:
+        logger.info(f"Warning: Error occurred in pre_task_processing: {e}")
 
-def post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span):
+def post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span, ex = None):
     if not (SpanHandler.is_root_span(span) or get_value(ADD_NEW_WORKFLOW) == True):
-        handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span)
-        handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
+        try:
+            handler.hydrate_span(to_wrap, wrapped, instance, args, kwargs, return_value, span, ex)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in hydrate_span: {e}")
+
+        try:
+            handler.post_task_processing(to_wrap, wrapped, instance, args, kwargs, return_value, span)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in post_task_processing: {e}")
 
 def get_span_name(to_wrap, instance):
     if to_wrap.get("span_name"):
@@ -58,9 +68,15 @@ def monocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap
             return_value, span_status = monocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, False, args, kwargs)
             span.set_status(span_status)
         else:
-            with SpanHandler.workflow_type(to_wrap):
-                return_value = wrapped(*args, **kwargs)
-            post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span)
+            ex:Exception = None
+            try:
+                with SpanHandler.workflow_type(to_wrap, span):
+                    return_value = wrapped(*args, **kwargs)
+            except Exception as e:
+                ex = e
+                raise
+            finally:
+                post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span, ex)
         span_status = span.status
     else:
         span = tracer.start_span(name)
@@ -72,13 +88,15 @@ def monocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap
             post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, ret_val, span)
             span.end()
 
-        with SpanHandler.workflow_type(to_wrap):
-            return_value = wrapped(*args, **kwargs)
-        if to_wrap.get("output_processor") and to_wrap.get("output_processor").get("response_processor"):
-            # Process the stream
-            to_wrap.get("output_processor").get("response_processor")(to_wrap, return_value, post_process_span_internal)
-        else:
-            span.end()
+        try:
+            with SpanHandler.workflow_type(to_wrap, span):
+                return_value = wrapped(*args, **kwargs)
+        finally:
+            if to_wrap.get("output_processor") and to_wrap.get("output_processor").get("response_processor"):
+                # Process the stream
+                to_wrap.get("output_processor").get("response_processor")(to_wrap, return_value, post_process_span_internal)
+            else:
+                span.end()
         span_status = span.status
     return return_value, span_status
 
@@ -86,7 +104,10 @@ def monocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, inst
     return_value = None
     token = None
     try:
-        handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+        try:
+            handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in pre_tracing: {e}")
         if to_wrap.get('skip_span', False) or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
             return_value = wrapped(*args, **kwargs)
         else:
@@ -98,8 +119,10 @@ def monocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, inst
             detach(token)
         return return_value
     finally:
-        handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
-
+        try:
+            handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in post_tracing: {e}")
 
 async def amonocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, add_workflow_span, args, kwargs):
     # Main span processing logic
@@ -115,10 +138,16 @@ async def amonocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler,
             return_value, span_status = await amonocle_wrapper_span_processor(tracer, handler, to_wrap, wrapped, instance, source_path, False, args, kwargs)
             span.set_status(span_status)
         else:
-            with SpanHandler.workflow_type(to_wrap):
-                return_value = await wrapped(*args, **kwargs)
+            ex:Exception = None
+            try:
+                with SpanHandler.workflow_type(to_wrap, span):
+                    return_value = await wrapped(*args, **kwargs)
+            except Exception as e:
+                ex = e
+                raise
+            finally:
+                post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span, ex)
         span_status = span.status
-        post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, return_value, span)
     else:
         span = tracer.start_span(name)
 
@@ -129,14 +158,15 @@ async def amonocle_wrapper_span_processor(tracer: Tracer, handler: SpanHandler,
             post_process_span(handler, to_wrap, wrapped, instance, args, kwargs, ret_val, span)
             span.end()
 
-        with SpanHandler.workflow_type(to_wrap):
-            return_value = await wrapped(*args, **kwargs)
-
-        if to_wrap.get("output_processor") and to_wrap.get("output_processor").get("response_processor"):
-            # Process the stream
-            to_wrap.get("output_processor").get("response_processor")(to_wrap, return_value, post_process_span_internal)
-        else:
-            span.end()
+        try:
+            with SpanHandler.workflow_type(to_wrap, span):
+                return_value = await wrapped(*args, **kwargs)
+        finally:
+            if to_wrap.get("output_processor") and to_wrap.get("output_processor").get("response_processor"):
+                # Process the stream
+                to_wrap.get("output_processor").get("response_processor")(to_wrap, return_value, post_process_span_internal)
+            else:
+                span.end()
         span_status = span.status
     return return_value, span.status
 
@@ -144,7 +174,10 @@ async def amonocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrappe
     return_value = None
     token = None
     try:
-        handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+        try:
+            handler.pre_tracing(to_wrap, wrapped, instance, args, kwargs)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in pre_tracing: {e}")
         if to_wrap.get('skip_span', False) or handler.skip_span(to_wrap, wrapped, instance, args, kwargs):
             return_value = await wrapped(*args, **kwargs)
         else:
@@ -156,7 +189,10 @@ async def amonocle_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrappe
             detach(token)
         return return_value
     finally:
-        handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+        try:
+            handler.post_tracing(to_wrap, wrapped, instance, args, kwargs, return_value)
+        except Exception as e:
+            logger.info(f"Warning: Error occurred in post_tracing: {e}")
 
 @with_tracer_wrapper
 def task_wrapper(tracer: Tracer, handler: SpanHandler, to_wrap, wrapped, instance, source_path, args, kwargs):
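
The wrapper changes above follow one pattern: the wrapped call's exception is captured and re-raised, span hydration still runs in a finally block, and failures inside the instrumentation hooks are logged rather than propagated. A condensed, generic sketch of that pattern (names are simplified; the real wrappers pass the full to_wrap/handler context):

import logging

logger = logging.getLogger(__name__)

def traced_call(span, wrapped, post_process, *args, **kwargs):
    ex = None
    return_value = None
    try:
        return_value = wrapped(*args, **kwargs)   # application errors still reach the caller
    except Exception as e:
        ex = e
        raise
    finally:
        try:
            post_process(span, return_value, ex)  # hydrate attributes/events even on failure
        except Exception as hook_error:
            # instrumentation problems must never mask the application error
            logger.info(f"Warning: Error occurred in post processing: {hook_error}")
    return return_value
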
src/monocle_apptrace/instrumentation/common/wrapper_method.py
@@ -10,6 +10,7 @@ from monocle_apptrace.instrumentation.metamodel.langchain.methods import (
 from monocle_apptrace.instrumentation.metamodel.llamaindex.methods import (LLAMAINDEX_METHODS, )
 from monocle_apptrace.instrumentation.metamodel.haystack.methods import (HAYSTACK_METHODS, )
 from monocle_apptrace.instrumentation.metamodel.openai.methods import (OPENAI_METHODS,)
+from monocle_apptrace.instrumentation.metamodel.openai._helper import OpenAISpanHandler
 from monocle_apptrace.instrumentation.metamodel.langgraph.methods import LANGGRAPH_METHODS
 from monocle_apptrace.instrumentation.metamodel.flask.methods import (FLASK_METHODS, )
 from monocle_apptrace.instrumentation.metamodel.flask._helper import FlaskSpanHandler, FlaskResponseSpanHandler
@@ -76,5 +77,6 @@ MONOCLE_SPAN_HANDLERS: Dict[str, SpanHandler] = {
     "flask_handler": FlaskSpanHandler(),
     "flask_response_handler": FlaskResponseSpanHandler(),
     "request_handler": RequestSpanHandler(),
-    "non_framework_handler": NonFrameworkSpanHandler()
+    "non_framework_handler": NonFrameworkSpanHandler(),
+    "openai_handler": OpenAISpanHandler(),
 }
src/monocle_apptrace/instrumentation/metamodel/haystack/_helper.py
@@ -121,7 +121,7 @@ def update_span_from_llm_response(response, instance):
     if response is not None and isinstance(response, dict):
         if "meta" in response:
             token_usage = response["meta"][0]["usage"]
-        if "replies" in response:
+        elif "replies" in response: # and "meta" in response["replies"][0]:
             token_usage = response["replies"][0].meta["usage"]
     if token_usage is not None:
         temperature = instance.__dict__.get("temperature", None)
src/monocle_apptrace/instrumentation/metamodel/haystack/entities/inference.py
@@ -4,7 +4,7 @@ from monocle_apptrace.instrumentation.metamodel.haystack import (
 from monocle_apptrace.instrumentation.common.utils import get_llm_type
 
 INFERENCE = {
-    "type": "inference",
+    "type": "inference.framework",
     "attributes": [
         [
             {
src/monocle_apptrace/instrumentation/metamodel/langchain/entities/inference.py
@@ -4,7 +4,7 @@ from monocle_apptrace.instrumentation.metamodel.langchain import (
 from monocle_apptrace.instrumentation.common.utils import resolve_from_alias, get_llm_type
 
 INFERENCE = {
-    "type": "inference",
+    "type": "inference.framework",
     "attributes": [
         [
             {
src/monocle_apptrace/instrumentation/metamodel/llamaindex/entities/inference.py
@@ -4,7 +4,7 @@ from monocle_apptrace.instrumentation.metamodel.llamaindex import (
 from monocle_apptrace.instrumentation.common.utils import resolve_from_alias, get_llm_type
 
 INFERENCE = {
-    "type": "inference",
+    "type": "inference.framework",
     "attributes": [
         [
             {
src/monocle_apptrace/instrumentation/metamodel/openai/_helper.py
@@ -10,7 +10,7 @@ from monocle_apptrace.instrumentation.common.utils import (
     get_nested_value,
     try_option,
 )
-
+from monocle_apptrace.instrumentation.common.span_handler import NonFrameworkSpanHandler
 
 logger = logging.getLogger(__name__)
 
@@ -114,4 +114,12 @@ def get_inference_type(instance):
     if inference_type.unwrap_or(None):
         return 'azure_openai'
     else:
-        return 'openai'
+        return 'openai'
+
+class OpenAISpanHandler(NonFrameworkSpanHandler):
+    # If openAI is being called by Teams AI SDK, then retain the metadata part of the span events
+    def skip_processor(self, to_wrap, wrapped, instance, span, args, kwargs) -> list[str]:
+        if self.is_framework_span_in_progess() and self.get_workflow_name_in_progress() == "workflow.teams_ai":
+            return ["attributes", "events.data.input", "events.data.output"]
+        else:
+            return super().skip_processor(to_wrap, wrapped, instance, span, args, kwargs)
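
Rough sketch of how the skip list returned above is interpreted; this mirrors the hydrate_events logic in span_handler.py, and the helper function below is illustrative rather than package code. Inside a Teams AI workflow the OpenAI span keeps its metadata event (token usage) while the duplicated data.input/data.output payloads and attributes are dropped.

def events_to_emit(configured_events, skip_processors):
    # "events" suppresses the whole section; "events.<name>" suppresses one event.
    if "events" in skip_processors:
        return []
    return [e for e in configured_events if "events." + e["name"] not in skip_processors]

configured = [{"name": "data.input"}, {"name": "data.output"}, {"name": "metadata"}]
print(events_to_emit(configured, ["attributes", "events.data.input", "events.data.output"]))
# -> [{'name': 'metadata'}]
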
src/monocle_apptrace/instrumentation/metamodel/openai/methods.py
@@ -12,7 +12,7 @@ OPENAI_METHODS = [
         "object": "Completions",
         "method": "create",
         "wrapper_method": task_wrapper,
-        "span_handler": "non_framework_handler",
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     },
     {
@@ -20,7 +20,7 @@ OPENAI_METHODS = [
         "object": "AsyncCompletions",
         "method": "create",
         "wrapper_method": atask_wrapper,
-        "span_handler": "non_framework_handler",
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     },
     {
@@ -28,7 +28,7 @@ OPENAI_METHODS = [
         "object": "Embeddings",
         "method": "create",
         "wrapper_method": task_wrapper,
-        "span_handler": "non_framework_handler",
+        "span_handler": "openai_handler",
         "output_processor": RETRIEVAL
     },
     {
@@ -36,7 +36,7 @@ OPENAI_METHODS = [
         "object": "AsyncEmbeddings",
         "method": "create",
         "wrapper_method": atask_wrapper,
-        "span_handler": "non_framework_handler",
+        "span_handler": "openai_handler",
         "output_processor": RETRIEVAL
     },
     {
@@ -44,7 +44,7 @@ OPENAI_METHODS = [
         "object": "Responses",
         "method": "create",
         "wrapper_method": task_wrapper,
-        "span_handler": "non_framework_handler",
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     },
     {
@@ -52,7 +52,7 @@ OPENAI_METHODS = [
         "object": "AsyncResponses",
         "method": "create",
         "wrapper_method": atask_wrapper,
-        "span_handler": "non_framework_handler",
+        "span_handler": "openai_handler",
         "output_processor": INFERENCE
     }
 
src/monocle_apptrace/instrumentation/metamodel/teamsai/_helper.py
@@ -4,6 +4,8 @@ from monocle_apptrace.instrumentation.common.utils import (
     get_keys_as_tuple,
     get_nested_value,
     try_option,
+    get_exception_message,
+    get_exception_status_code
 )
 def capture_input(arguments):
     """
@@ -57,11 +59,41 @@ def capture_prompt_info(arguments):
     except Exception as e:
         return f"Error capturing prompt: {str(e)}"
 
-def status_check(arguments):
-    if hasattr(arguments["result"], "error") and arguments["result"].error is not None:
-        error_msg:str = arguments["result"].error
-        error_code:str = arguments["result"].status if hasattr(arguments["result"], "status") else "unknown"
-        raise MonocleSpanException(f"Error: {error_code} - {error_msg}")
+def get_status_code(arguments):
+    if arguments["exception"] is not None:
+        return get_exception_status_code(arguments)
+    elif hasattr(arguments["result"], "status"):
+        return arguments["result"].status
+    else:
+        return 'success'
+
+def get_status(arguments):
+    if arguments["exception"] is not None:
+        return 'error'
+    elif get_status_code(arguments) == 'success':
+        return 'success'
+    else:
+        return 'error'
+
+def get_response(arguments) -> str:
+    status = get_status_code(arguments)
+    response:str = ""
+    if status == 'success':
+        if hasattr(arguments["result"], "message"):
+            response = arguments["result"].message.content
+        else:
+            response = str(arguments["result"])
+    else:
+        if arguments["exception"] is not None:
+            response = get_exception_message(arguments)
+        elif hasattr(arguments["result"], "error"):
+            response = arguments["result"].error
+    return response
+
+def check_status(arguments):
+    status = get_status_code(arguments)
+    if status != 'success':
+        raise MonocleSpanException(f"{status}")
 
 def extract_provider_name(instance):
     provider_url: Option[str] = try_option(getattr, instance._client.base_url, 'host')
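
Illustration of the new Teams AI status helpers above; the fake result and exception objects are made up, and as of 0.4.0b2 the functions live in the module shown in this hunk.

from types import SimpleNamespace
from monocle_apptrace.instrumentation.metamodel.teamsai._helper import (
    get_response,
    get_status,
    get_status_code,
)

ok = {"result": SimpleNamespace(status="success", message=SimpleNamespace(content="Hello!")),
      "exception": None}
print(get_status(ok), get_status_code(ok), get_response(ok))    # success success Hello!

failed = {"result": None, "exception": TimeoutError("model call timed out")}
print(get_status(failed), get_status_code(failed))              # error error (no .code on the exception)
print(get_response(failed))                                     # model call timed out
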
src/monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/actionplanner_output_processor.py
@@ -28,42 +28,22 @@ ACTIONPLANNER_OUTPUT_PROCESSOR = {
             {
                 "attribute": "tokenizer",
                 "accessor": lambda arguments: arguments["instance"]._options.tokenizer.__class__.__name__ if hasattr(arguments["instance"], "_options") else "GPTTokenizer"
+            },
+            {
+                "attribute": "prompt_name",
+                "accessor": _helper.capture_prompt_info
+            },
+            {
+                "attribute": "validator",
+                "accessor": lambda arguments: arguments["kwargs"].get("validator").__class__.__name__ if arguments.get("kwargs", {}).get("validator") else "DefaultResponseValidator"
+            },
+            {
+                "attribute": "memory_type",
+                "accessor": lambda arguments: arguments["kwargs"].get("memory").__class__.__name__ if arguments.get("kwargs", {}).get("memory") else "unknown"
             }
         ]
     ],
     "events": [
-        {
-            "name": "data.input",
-            "_comment": "input configuration to ActionPlanner",
-            "attributes": [
-                {
-                    "attribute": "prompt_name",
-                    "accessor": _helper.capture_prompt_info
-                },
-                {
-                    "attribute": "validator",
-                    "accessor": lambda arguments: arguments["kwargs"].get("validator").__class__.__name__ if arguments.get("kwargs", {}).get("validator") else "DefaultResponseValidator"
-                },
-                {
-                    "attribute": "memory_type",
-                    "accessor": lambda arguments: arguments["kwargs"].get("memory").__class__.__name__ if arguments.get("kwargs", {}).get("memory") else "unknown"
-                }
-            ]
-        },
-        {
-            "name": "data.output",
-            "_comment": "output from ActionPlanner",
-            "attributes": [
-                {
-                    "attribute": "status",
-                    "accessor": lambda arguments: _helper.status_check(arguments)
-                },
-                {
-                    "attribute": "response",
-                    "accessor": lambda arguments: arguments["result"].message.content if hasattr(arguments["result"], "message") else str(arguments["result"])
-                }
-            ]
-        },
         {
             "name": "metadata",
             "attributes": [
src/monocle_apptrace/instrumentation/metamodel/teamsai/entities/inference/teamsai_output_processor.py
@@ -3,7 +3,7 @@ from monocle_apptrace.instrumentation.metamodel.teamsai import (
 )
 from monocle_apptrace.instrumentation.common.utils import get_llm_type
 TEAMAI_OUTPUT_PROCESSOR = {
-    "type": "inference",
+    "type": "inference.framework",
     "attributes": [
         [
             {
@@ -52,25 +52,23 @@ TEAMAI_OUTPUT_PROCESSOR = {
             "name": "data.output",
             "_comment": "output from Teams AI",
             "attributes": [
+                {
+                    "attribute": "status",
+                    "accessor": lambda arguments: _helper.get_status(arguments)
+                },
+                {
+                    "attribute": "status_code",
+                    "accessor": lambda arguments: _helper.get_status_code(arguments)
+                },
                 {
                     "attribute": "response",
-                    "accessor": lambda arguments: arguments["result"].message.content if hasattr(arguments["result"], "message") else str(arguments["result"])
+                    "accessor": lambda arguments: _helper.get_response(arguments)
+                },
+                {
+                    "attribute": "check_status",
+                    "accessor": lambda arguments: _helper.check_status(arguments)
                 }
             ]
         },
-        # {
-        #     "name": "metadata",
-        #     "attributes": [
-        #         {
-        #             "_comment": "metadata from Teams AI response",
-        #             "accessor": lambda arguments: {
-        #                 "prompt_tokens": arguments["result"].get("usage", {}).get("prompt_tokens", 0),
-        #                 "completion_tokens": arguments["result"].get("usage", {}).get("completion_tokens", 0),
-        #                 "total_tokens": arguments["result"].get("usage", {}).get("total_tokens", 0),
-        #                 "latency_ms": arguments.get("latency_ms")
-        #             }
-        #         }
-        #     ]
-        # }
     ]
 }
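
For reference, the reworked data.output event would roughly carry attributes like the following on a failed call. Values are illustrative, and check_status contributes no attribute of its own: it raises MonocleSpanException for a non-success status so the span is marked as an error.

# Illustrative shape only, not captured output.
example_data_output_event = {
    "name": "data.output",
    "attributes": {
        "status": "error",
        "status_code": 429,
        "response": "rate limited",
    },
}
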