microsoft-agents-a365-observability-extensions-openai 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,9 @@
1
+ # Copyright (c) Microsoft. All rights reserved.
2
+
3
+ """
4
+ Wraps the OpenAI Agents SDK tracer to integrate with the Microsoft Agent 365 Telemetry Solution.
5
+ """
6
+
7
+ from .trace_instrumentor import OpenAIAgentsTraceInstrumentor
8
+
9
+ __all__ = ["OpenAIAgentsTraceInstrumentor"]
@@ -0,0 +1,34 @@
1
+ # Copyright (c) Microsoft. All rights reserved.
2
+
3
+ # Span Attribute Types
4
+ from microsoft_agents_a365.observability.core.constants import (
5
+ EXECUTE_TOOL_OPERATION_NAME,
6
+ INVOKE_AGENT_OPERATION_NAME,
7
+ )
8
+ from microsoft_agents_a365.observability.core.inference_operation_type import InferenceOperationType
9
+
10
# Span-kind values stored in the gen_ai operation-name span attribute.
# Agent/tool kinds reuse the core library's operation names so spans
# emitted here align with Microsoft Agent 365 telemetry conventions.
GEN_AI_SPAN_KIND_AGENT_KEY = INVOKE_AGENT_OPERATION_NAME
GEN_AI_SPAN_KIND_TOOL_KEY = EXECUTE_TOOL_OPERATION_NAME
GEN_AI_SPAN_KIND_CHAIN_KEY = "chain"
# Lower-cased inference (chat) operation name taken from the core enum.
GEN_AI_SPAN_KIND_LLM_KEY = InferenceOperationType.CHAT.value.lower()
GEN_AI_SPAN_KIND_RETRIEVER_KEY = "retriever"
GEN_AI_SPAN_KIND_EMBEDDING_KEY = "embedding"
GEN_AI_SPAN_KIND_RERANKER_KEY = "reranker"
GEN_AI_SPAN_KIND_GUARDRAIL_KEY = "guardrail"
GEN_AI_SPAN_KIND_EVALUATOR_KEY = "evaluator"
GEN_AI_SPAN_KIND_UNKNOWN_KEY = "unknown"

# PREFIXES
# Key fragments appended to attribute-name prefixes when flattening
# messages, tool calls, token counts, and agent-graph metadata into
# flat span attribute keys.
GEN_AI_MESSAGE_ROLE = "message_role"
GEN_AI_MESSAGE_CONTENT = "message_content"
GEN_AI_MESSAGE_CONTENTS = "message_contents"
GEN_AI_MESSAGE_CONTENT_TYPE = "content_type"
GEN_AI_MESSAGE_TOOL_CALLS = "message_tool_calls"
GEN_AI_MESSAGE_TOOL_CALL_ID = "message_tool_id"
GEN_AI_MESSAGE_TOOL_CALL_NAME = "message_tool_name"
GEN_AI_TOOL_JSON_SCHEMA = "tool_json_schema"
# Token-count attribute names: total, cached prompt reads, reasoning tokens.
GEN_AI_LLM_TOKEN_COUNT_TOTAL = "llm_token_count_total"
GEN_AI_LLM_TOKEN_COUNT_PROMPT_DETAILS_CACHED_READ = "llm_token_count_prompt_details_cached_read"
GEN_AI_LLM_TOKEN_COUNT_COMPLETION_DETAILS_REASONING = "llm_token_count_completion_details_reasoning"
# Agent hand-off graph node identifiers.
GEN_AI_GRAPH_NODE_ID = "graph_node_id"
GEN_AI_GRAPH_NODE_PARENT_ID = "graph_node_parent_id"
@@ -0,0 +1,70 @@
1
+ # Copyright (c) Microsoft. All rights reserved.
2
+
3
+ # Wrapper for OpenAI Agents SDK
4
+
5
+ import logging
6
+ from collections.abc import Collection
7
+ from typing import Any, cast
8
+
9
+ import opentelemetry.trace as optel_trace
10
+ from agents import set_trace_processors
11
+ from microsoft_agents_a365.observability.core import get_tracer, get_tracer_provider, is_configured
12
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
13
+ from opentelemetry.trace import Tracer
14
+
15
+ from .trace_processor import OpenAIAgentsTraceProcessor
16
+
17
+ logging.basicConfig(level=logging.INFO)
18
+ logger = logging.getLogger(__name__)
19
+
20
+ _instruments = ("openai-agents >= 0.2.6",)
21
+
22
+
23
class OpenAIAgentsTraceInstrumentor(BaseInstrumentor):
    """Custom trace instrumentor for the OpenAI Agents SDK using Microsoft Agent 365.

    Replaces the SDK's trace processors with an ``OpenAIAgentsTraceProcessor``
    bound to the Microsoft Agent 365 tracer, so OpenAI Agents SDK traces and
    spans are forwarded to Microsoft Agent 365's tracing scopes.
    """

    def __init__(self):
        """Initialize the OpenAIAgentsTraceInstrumentor.

        Raises:
            RuntimeError: If Microsoft Agent 365 is not configured.
        """
        # Fail fast: without a configured telemetry pipeline the processor
        # would forward spans into the void.
        if not is_configured():
            raise RuntimeError(
                "Microsoft Agent 365 is not configured yet. Please configure Microsoft Agent 365 before initializing this instrumentor."
            )
        super().__init__()

    def instrumentation_dependencies(self) -> Collection[str]:
        """Return the package specifiers this instrumentor applies to."""
        return _instruments

    def _instrument(self, **kwargs: Any) -> None:
        """Instruments the OpenAI Agents SDK with Microsoft Agent 365 tracing.

        Keyword Args:
            tracer_name: Optional tracer name passed to ``get_tracer``.
            tracer_version: Optional tracer version passed to ``get_tracer``.
        """
        # Falsy values (empty string, None, absent key) normalize to None.
        tracer_name = kwargs.get("tracer_name") or None
        tracer_version = kwargs.get("tracer_version") or None

        # Prefer the configured Microsoft Agent 365 tracer; fall back to the
        # plain OpenTelemetry API tracer on any failure.
        try:
            tracer = get_tracer(tracer_name, tracer_version)
        except Exception:
            tracer = optel_trace.get_tracer(tracer_name, tracer_version)

        # Touch the tracer provider (Agent 365 first, OTel as fallback) so it
        # is initialized before spans start flowing; the instance itself is
        # not needed here.
        try:
            get_tracer_provider()
        except Exception:
            optel_trace.get_tracer_provider()

        set_trace_processors([OpenAIAgentsTraceProcessor(cast(Tracer, tracer))])

    def _uninstrument(self, **kwargs: Any) -> None:
        """No-op: nothing is restored on uninstrument."""
@@ -0,0 +1,212 @@
1
+ # Copyright (c) Microsoft. All rights reserved.
2
+
3
+ # Processor for OpenAI Agents SDK
4
+
5
+ from __future__ import annotations
6
+
7
+ import logging
8
+ from collections import OrderedDict
9
+ from datetime import datetime
10
+ from typing import TYPE_CHECKING, Any, assert_never
11
+
12
+ from agents import MCPListToolsSpanData
13
+ from agents.tracing import Span, Trace, TracingProcessor
14
+ from agents.tracing.span_data import (
15
+ AgentSpanData,
16
+ FunctionSpanData,
17
+ GenerationSpanData,
18
+ HandoffSpanData,
19
+ ResponseSpanData,
20
+ )
21
+ from microsoft_agents_a365.observability.core.constants import (
22
+ CUSTOM_PARENT_SPAN_ID_KEY,
23
+ EXECUTE_TOOL_OPERATION_NAME,
24
+ GEN_AI_INPUT_MESSAGES_KEY,
25
+ GEN_AI_OPERATION_NAME_KEY,
26
+ GEN_AI_OUTPUT_MESSAGES_KEY,
27
+ GEN_AI_REQUEST_MODEL_KEY,
28
+ GEN_AI_SYSTEM_KEY,
29
+ INVOKE_AGENT_OPERATION_NAME,
30
+ )
31
+ from microsoft_agents_a365.observability.core.utils import as_utc_nano, safe_json_dumps
32
+ from opentelemetry import trace as ot_trace
33
+ from opentelemetry.context import attach, detach
34
+ from opentelemetry.trace import Span as OtelSpan
35
+ from opentelemetry.trace import (
36
+ Status,
37
+ StatusCode,
38
+ Tracer,
39
+ set_span_in_context,
40
+ )
41
+
42
+ from openai.types.responses import (
43
+ Response,
44
+ )
45
+
46
+ from .constants import (
47
+ GEN_AI_GRAPH_NODE_ID,
48
+ GEN_AI_GRAPH_NODE_PARENT_ID,
49
+ )
50
+ from .utils import (
51
+ get_attributes_from_function_span_data,
52
+ get_attributes_from_generation_span_data,
53
+ get_attributes_from_input,
54
+ get_attributes_from_mcp_list_tool_span_data,
55
+ get_attributes_from_response,
56
+ get_span_kind,
57
+ get_span_name,
58
+ get_span_status,
59
+ )
60
+
61
+ logger = logging.getLogger(__name__)
62
+
63
+
64
+ """
65
+ Custom Trace Processor for OpenAI Agents SDK
66
+ """
67
+
68
+
69
class OpenAIAgentsTraceProcessor(TracingProcessor):
    """Custom trace processor mirroring OpenAI Agents SDK spans as OTel spans.

    Each SDK span gets a matching OpenTelemetry span created from the given
    tracer on start; attributes are populated on span end according to the
    SDK span-data type.
    """

    # Upper bound on remembered, not-yet-completed handoffs (see __init__).
    _MAX_HANDOFFS_IN_FLIGHT = 1000

    def __init__(self, tracer: Tracer) -> None:
        self._tracer = tracer
        # Root OTel span per SDK trace id.
        self._root_spans: dict[str, OtelSpan] = {}
        # Live OTel span per SDK span id.
        self._otel_spans: dict[str, OtelSpan] = {}
        # Context-attach tokens per SDK span id; detached on span end.
        self._tokens: dict[str, object] = {}
        # This captures in flight handoffs. Once the handoff is complete, the
        # entry is deleted. If the handoff does not complete, the entry stays
        # in the dict. Use an OrderedDict and _MAX_HANDOFFS_IN_FLIGHT to cap
        # the size of the dict in case there are large numbers of orphaned
        # handoffs.
        self._reverse_handoffs_dict: OrderedDict[str, str] = OrderedDict()

    # helper
    def _stamp_custom_parent(self, otel_span: OtelSpan, trace_id: str) -> None:
        """Record the trace's root span id on ``otel_span`` as a custom attribute."""
        root = self._root_spans.get(trace_id)
        if not root:
            return
        sc = root.get_span_context()
        pid_hex = "0x" + ot_trace.format_span_id(sc.span_id)
        otel_span.set_attribute(CUSTOM_PARENT_SPAN_ID_KEY, pid_hex)

    def on_trace_start(self, trace: Trace) -> None:
        """Called when a trace is started.

        Args:
            trace: The trace that started.
        """

    def on_trace_end(self, trace: Trace) -> None:
        """Called when a trace is finished; ends the tracked root span with OK status.

        Args:
            trace: The trace that finished.
        """
        if root_span := self._root_spans.pop(trace.trace_id, None):
            root_span.set_status(Status(StatusCode.OK))
            root_span.end()

    def on_span_start(self, span: Span[Any]) -> None:
        """Called when a span is started; opens the mirroring OTel span.

        Args:
            span: The span that started.
        """
        if not span.started_at:
            return
        start_time = datetime.fromisoformat(span.started_at)
        # Parent is the mirrored parent span, or the trace's root span.
        parent_span = (
            self._otel_spans.get(span.parent_id)
            if span.parent_id
            else self._root_spans.get(span.trace_id)
        )
        context = set_span_in_context(parent_span) if parent_span else None
        otel_span = self._tracer.start_span(
            name=get_span_name(span),
            context=context,
            start_time=as_utc_nano(start_time),
            attributes={
                GEN_AI_OPERATION_NAME_KEY: get_span_kind(span.span_data),
                GEN_AI_SYSTEM_KEY: "openai",
            },
        )
        self._otel_spans[span.span_id] = otel_span
        # Make the new span current so nested SDK work parents onto it.
        self._tokens[span.span_id] = attach(set_span_in_context(otel_span))

    def on_span_end(self, span: Span[Any]) -> None:
        """Called when a span is finished. Should not block or raise exceptions.

        Populates attributes from the SDK span data, then ends the OTel span.

        Args:
            span: The span that finished.
        """
        if token := self._tokens.pop(span.span_id, None):
            detach(token)  # type: ignore[arg-type]
        if not (otel_span := self._otel_spans.pop(span.span_id, None)):
            return
        otel_span.update_name(get_span_name(span))

        data = span.span_data

        # DATA TYPES AS PER OPENAI AGENTS SDK
        if isinstance(data, ResponseSpanData):
            if hasattr(data, "response") and isinstance(response := data.response, Response):
                otel_span.set_attribute(GEN_AI_OUTPUT_MESSAGES_KEY, response.model_dump_json())
                for k, v in get_attributes_from_response(response):
                    otel_span.set_attribute(k, v)
            if hasattr(data, "input") and (input := data.input):
                if isinstance(input, str):
                    otel_span.set_attribute(GEN_AI_INPUT_MESSAGES_KEY, input)
                elif isinstance(input, list):
                    otel_span.set_attribute(GEN_AI_INPUT_MESSAGES_KEY, safe_json_dumps(input))
                    for k, v in get_attributes_from_input(input):
                        otel_span.set_attribute(k, v)
                elif TYPE_CHECKING:
                    assert_never(input)
        elif isinstance(data, GenerationSpanData):
            gen_attrs = dict(get_attributes_from_generation_span_data(data))
            for k, v in gen_attrs.items():
                otel_span.set_attribute(k, v)
            self._stamp_custom_parent(otel_span, span.trace_id)
            # BUG FIX: derive the name from locally computed values. The OTel
            # API Span type exposes no `attributes` mapping, and the request
            # model attribute is only present when the SDK reported a model,
            # so the previous direct `otel_span.attributes[...]` lookups could
            # fail here.
            operation = get_span_kind(data)
            model = gen_attrs.get(GEN_AI_REQUEST_MODEL_KEY)
            otel_span.update_name(f"{operation} {model}" if model else operation)
        elif isinstance(data, FunctionSpanData):
            for k, v in get_attributes_from_function_span_data(data):
                otel_span.set_attribute(k, v)
            self._stamp_custom_parent(otel_span, span.trace_id)
            # BUG FIX: FunctionSpanData carries the tool name in `name`
            # (as used by get_attributes_from_function_span_data), not
            # `function_name`.
            otel_span.update_name(f"{EXECUTE_TOOL_OPERATION_NAME} {data.name}")
        elif isinstance(data, MCPListToolsSpanData):
            for k, v in get_attributes_from_mcp_list_tool_span_data(data):
                otel_span.set_attribute(k, v)
        elif isinstance(data, HandoffSpanData):
            # Set this dict to find the parent node when the agent span starts
            if data.to_agent and data.from_agent:
                key = f"{data.to_agent}:{span.trace_id}"
                self._reverse_handoffs_dict[key] = data.from_agent
                # Cap the size of the dict (evict oldest entries first)
                while len(self._reverse_handoffs_dict) > self._MAX_HANDOFFS_IN_FLIGHT:
                    self._reverse_handoffs_dict.popitem(last=False)
        elif isinstance(data, AgentSpanData):
            otel_span.set_attribute(GEN_AI_GRAPH_NODE_ID, data.name)
            # Lookup the parent node if exists
            key = f"{data.name}:{span.trace_id}"
            if parent_node := self._reverse_handoffs_dict.pop(key, None):
                otel_span.set_attribute(GEN_AI_GRAPH_NODE_PARENT_ID, parent_node)
            otel_span.update_name(f"{INVOKE_AGENT_OPERATION_NAME} {get_span_name(span)}")

        end_time: int | None = None
        if span.ended_at:
            try:
                end_time = as_utc_nano(datetime.fromisoformat(span.ended_at))
            except ValueError:
                # Malformed timestamp: let the SDK pick the end time.
                pass
        otel_span.set_status(status=get_span_status(span))
        otel_span.end(end_time)

    def force_flush(self) -> None:
        """Forces an immediate flush of all queued spans/traces. No-op here."""

    def shutdown(self) -> None:
        """Called when the application stops. No-op here."""
@@ -0,0 +1,536 @@
1
+ # Copyright (c) Microsoft. All rights reserved.
2
+
3
# -------------------------------------------------- #
# Helper functions for mapping OpenAI Agents SDK span #
# data onto Microsoft Agent 365 span attributes.      #
# -------------------------------------------------- #
6
+
7
+ from collections.abc import Iterable, Iterator, Mapping
8
+ from typing import TYPE_CHECKING, Any, assert_never
9
+ from urllib.parse import urlparse
10
+
11
+ from agents import MCPListToolsSpanData
12
+ from agents.tracing import Span
13
+ from agents.tracing.span_data import (
14
+ AgentSpanData,
15
+ CustomSpanData,
16
+ FunctionSpanData,
17
+ GenerationSpanData,
18
+ GuardrailSpanData,
19
+ HandoffSpanData,
20
+ ResponseSpanData,
21
+ SpanData,
22
+ )
23
+ from microsoft_agents_a365.observability.core.constants import (
24
+ GEN_AI_CHOICE,
25
+ GEN_AI_EXECUTION_PAYLOAD_KEY,
26
+ GEN_AI_INPUT_MESSAGES_KEY,
27
+ GEN_AI_OUTPUT_MESSAGES_KEY,
28
+ GEN_AI_PROVIDER_NAME_KEY,
29
+ GEN_AI_REQUEST_MODEL_KEY,
30
+ GEN_AI_RESPONSE_FINISH_REASONS_KEY,
31
+ GEN_AI_RESPONSE_ID_KEY,
32
+ GEN_AI_SYSTEM_KEY,
33
+ GEN_AI_TOOL_ARGS_KEY,
34
+ GEN_AI_TOOL_CALL_ID_KEY,
35
+ GEN_AI_TOOL_CALL_RESULT_KEY,
36
+ GEN_AI_TOOL_NAME_KEY,
37
+ GEN_AI_USAGE_INPUT_TOKENS_KEY,
38
+ GEN_AI_USAGE_OUTPUT_TOKENS_KEY,
39
+ )
40
+ from microsoft_agents_a365.observability.core.utils import safe_json_dumps
41
+ from opentelemetry.trace import (
42
+ Status,
43
+ StatusCode,
44
+ )
45
+ from opentelemetry.util.types import AttributeValue
46
+
47
+ from openai.types.responses import (
48
+ EasyInputMessageParam,
49
+ FunctionTool,
50
+ Response,
51
+ ResponseCustomToolCall,
52
+ ResponseCustomToolCallOutputParam,
53
+ ResponseCustomToolCallParam,
54
+ ResponseFunctionToolCall,
55
+ ResponseFunctionToolCallParam,
56
+ ResponseInputContentParam,
57
+ ResponseInputItemParam,
58
+ ResponseOutputItem,
59
+ ResponseOutputMessage,
60
+ ResponseOutputMessageParam,
61
+ ResponseOutputRefusal,
62
+ ResponseOutputText,
63
+ ResponseUsage,
64
+ Tool,
65
+ )
66
+ from openai.types.responses.response_input_item_param import FunctionCallOutput, Message
67
+ from openai.types.responses.response_output_message_param import Content
68
+
69
+ from .constants import (
70
+ GEN_AI_LLM_TOKEN_COUNT_COMPLETION_DETAILS_REASONING,
71
+ GEN_AI_LLM_TOKEN_COUNT_PROMPT_DETAILS_CACHED_READ,
72
+ GEN_AI_LLM_TOKEN_COUNT_TOTAL,
73
+ GEN_AI_MESSAGE_CONTENT,
74
+ GEN_AI_MESSAGE_CONTENT_TYPE,
75
+ GEN_AI_MESSAGE_CONTENTS,
76
+ GEN_AI_MESSAGE_ROLE,
77
+ GEN_AI_MESSAGE_TOOL_CALL_ID,
78
+ GEN_AI_MESSAGE_TOOL_CALL_NAME,
79
+ GEN_AI_MESSAGE_TOOL_CALLS,
80
+ GEN_AI_SPAN_KIND_AGENT_KEY,
81
+ GEN_AI_SPAN_KIND_CHAIN_KEY,
82
+ GEN_AI_SPAN_KIND_LLM_KEY,
83
+ GEN_AI_SPAN_KIND_TOOL_KEY,
84
+ GEN_AI_TOOL_JSON_SCHEMA,
85
+ )
86
+
87
+
88
def get_span_name(obj: Span[Any]) -> str:
    """Return a human-readable name for an SDK span.

    Preference order: the span data's string ``name``, a handoff label,
    then the span data's ``type`` tag.
    """
    data = obj.span_data
    name = getattr(data, "name", None)
    if isinstance(name, str):
        return name
    if isinstance(data, HandoffSpanData) and data.to_agent:
        return f"handoff to {data.to_agent}"
    return data.type  # type: ignore[no-any-return]
94
+
95
+
96
def get_span_kind(obj: SpanData) -> str:
    """Map an SDK span-data type to its Microsoft Agent 365 span-kind string.

    Unrecognized span-data types fall back to the chain kind.
    """
    kind_by_type = (
        (AgentSpanData, GEN_AI_SPAN_KIND_AGENT_KEY),
        (FunctionSpanData, GEN_AI_SPAN_KIND_TOOL_KEY),
        (GenerationSpanData, GEN_AI_SPAN_KIND_LLM_KEY),
        (ResponseSpanData, GEN_AI_SPAN_KIND_LLM_KEY),
        (HandoffSpanData, GEN_AI_SPAN_KIND_TOOL_KEY),
        (CustomSpanData, GEN_AI_SPAN_KIND_CHAIN_KEY),
        (GuardrailSpanData, GEN_AI_SPAN_KIND_CHAIN_KEY),
    )
    for span_type, kind in kind_by_type:
        if isinstance(obj, span_type):
            return kind
    return GEN_AI_SPAN_KIND_CHAIN_KEY
112
+
113
+
114
def get_attributes_from_input(
    obj: Iterable[ResponseInputItemParam],
    msg_idx: int = 1,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield flattened input-message attributes, dispatching on each item's ``type`` tag.

    Items without a ``type`` key that carry ``role``/``content`` are treated
    as plain messages.
    """
    for index, entry in enumerate(obj, msg_idx):
        prefix = f"{GEN_AI_INPUT_MESSAGES_KEY}.{index}."
        if "type" not in entry:
            if "role" in entry and "content" in entry:
                # Normalize an untyped role/content dict into a message param.
                normalized = {  # type: ignore[misc, arg-type]
                    "type": "message",
                    "role": entry["role"],  # type: ignore[typeddict-item]
                    "content": entry["content"],  # type: ignore[typeddict-item]
                }
                yield from get_attributes_from_message_param(normalized, prefix)
        elif entry["type"] == "message":
            yield from get_attributes_from_message_param(entry, prefix)
        elif entry["type"] == "function_call":
            yield f"{prefix}{GEN_AI_MESSAGE_ROLE}", "assistant"
            yield from get_attributes_from_response_function_tool_call_param(
                entry,
                f"{prefix}{GEN_AI_MESSAGE_TOOL_CALLS}.0.",
            )
        elif entry["type"] == "function_call_output":
            yield from get_attributes_from_function_call_output(entry, prefix)
        elif entry["type"] == "custom_tool_call":
            yield f"{prefix}{GEN_AI_MESSAGE_ROLE}", "assistant"
            yield from get_attributes_from_response_custom_tool_call_param(
                entry,
                f"{prefix}{GEN_AI_MESSAGE_TOOL_CALLS}.0.",
            )
        elif entry["type"] == "custom_tool_call_output":
            yield from get_attributes_from_response_custom_tool_call_output_param(entry, prefix)
        elif TYPE_CHECKING and entry["type"] is not None:
            assert_never(entry["type"])
150
+
151
+
152
def get_attributes_from_message_param(
    obj: EasyInputMessageParam | Message | ResponseOutputMessageParam,
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield role and content attributes for a message-style input param."""
    yield f"{prefix}{GEN_AI_MESSAGE_ROLE}", obj["role"]
    content = obj.get("content")
    if not content:
        return
    if isinstance(content, str):
        yield f"{prefix}{GEN_AI_MESSAGE_CONTENT}", content
    elif isinstance(content, list):
        yield from get_attributes_from_message_content_list(content, prefix)
162
+
163
+
164
def get_attributes_from_response_function_tool_call_param(
    obj: ResponseFunctionToolCallParam,
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield call-id, name, and non-empty arguments for a function tool-call param."""
    yield f"{prefix}{GEN_AI_MESSAGE_TOOL_CALL_ID}", obj["call_id"]
    yield f"{prefix}{GEN_AI_MESSAGE_TOOL_CALL_NAME}", obj["name"]
    arguments = obj["arguments"]
    # Skip the empty-object placeholder; it carries no information.
    if arguments != "{}":
        yield f"{prefix}{GEN_AI_TOOL_ARGS_KEY}", arguments
172
+
173
+
174
def get_attributes_from_response_custom_tool_call_param(
    obj: ResponseCustomToolCallParam,
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield id, name, and JSON-wrapped input attributes for a custom tool-call param."""
    call_id = obj.get("call_id")
    if call_id is not None:
        yield f"{prefix}{GEN_AI_TOOL_CALL_ID_KEY}", call_id
    name = obj.get("name")
    if name is not None:
        yield f"{prefix}{GEN_AI_TOOL_NAME_KEY}", name
    input_data = obj.get("input")
    if input_data is not None:
        yield f"{prefix}{GEN_AI_TOOL_ARGS_KEY}", safe_json_dumps({"input": input_data})
187
+
188
+
189
def get_attributes_from_response_custom_tool_call_output_param(
    obj: ResponseCustomToolCallOutputParam,
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield tool role, call id, and result attributes for a custom tool-call output."""
    yield f"{prefix}{GEN_AI_MESSAGE_ROLE}", "tool"
    call_id = obj.get("call_id")
    if call_id is not None:
        yield f"{prefix}{GEN_AI_TOOL_CALL_ID_KEY}", call_id
    output = obj.get("output")
    if output is not None:
        yield f"{prefix}{GEN_AI_TOOL_CALL_RESULT_KEY}", output
198
+
199
+
200
def get_attributes_from_function_call_output(
    obj: FunctionCallOutput,
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield tool role, call id, and result attributes for a function-call output."""
    yield f"{prefix}{GEN_AI_MESSAGE_ROLE}", "tool"
    for attr_key, field in (
        (GEN_AI_TOOL_CALL_ID_KEY, "call_id"),
        (GEN_AI_TOOL_CALL_RESULT_KEY, "output"),
    ):
        yield f"{prefix}{attr_key}", obj[field]
207
+
208
+
209
def get_attributes_from_generation_span_data(
    obj: GenerationSpanData,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield provider, model, request-config, message, and usage attributes for a generation span."""
    yield GEN_AI_PROVIDER_NAME_KEY, "openai"
    if isinstance(obj.model, str):
        yield GEN_AI_REQUEST_MODEL_KEY, obj.model
    if isinstance(obj.model_config, dict):
        # Drop unset config entries before serializing.
        params = {key: value for key, value in obj.model_config.items() if value is not None}
        if params:
            yield GEN_AI_EXECUTION_PAYLOAD_KEY, safe_json_dumps(params)
            base_url = params.get("base_url")
            # Only stamp the system when the request provably targets OpenAI.
            if base_url and urlparse(base_url).hostname == "api.openai.com":
                yield GEN_AI_SYSTEM_KEY, "openai"
    yield from _get_attributes_from_chat_completions_input(obj.input)
    yield from _get_attributes_from_chat_completions_output(obj.output)
    yield from _get_attributes_from_chat_completions_usage(obj.usage)
226
+
227
+
228
def get_attributes_from_mcp_list_tool_span_data(
    obj: MCPListToolsSpanData,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield the MCP list-tools result, JSON-serialized, as the output attribute."""
    yield GEN_AI_OUTPUT_MESSAGES_KEY, safe_json_dumps(obj.result)
232
+
233
+
234
def _get_attributes_from_chat_completions_input(
    obj: Iterable[Mapping[str, Any]] | None,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield the raw JSON payload plus per-message attributes for chat-completions input."""
    if not obj:
        return
    try:
        # Best effort: the payload may contain values that fail to serialize.
        yield GEN_AI_INPUT_MESSAGES_KEY, safe_json_dumps(obj)
    except Exception:
        pass
    yield from get_attributes_from_chat_completions_message_dicts(
        obj,
        f"{GEN_AI_INPUT_MESSAGES_KEY}.",
    )
247
+
248
+
249
def _get_attributes_from_chat_completions_output(
    obj: Iterable[Mapping[str, Any]] | None,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield output-message attributes from chat-completions output.

    Accepts either an iterable of message/choice dicts or a single response
    mapping; for a mapping only its ``id`` is extracted, since iterating it
    would walk its keys, not messages.
    """
    if not obj:
        return
    try:
        yield GEN_AI_OUTPUT_MESSAGES_KEY, safe_json_dumps(obj)
    except Exception:
        pass

    if isinstance(obj, Mapping):
        # BUG FIX: the previous code fell through and iterated the mapping's
        # string keys as if they were message dicts, raising AttributeError
        # on the first `key.get(...)` call.
        if "id" in obj:
            yield GEN_AI_RESPONSE_ID_KEY, obj["id"]
        return

    # Materialize once so one-shot iterators are not exhausted between the
    # finish-reason pass and the message-attribute pass below.
    messages = list(obj)

    # Collect all finish_reason values
    finish_reasons = [
        reason for message in messages if (reason := message.get("finish_reason")) is not None
    ]
    if finish_reasons:
        yield GEN_AI_RESPONSE_FINISH_REASONS_KEY, ",".join(finish_reasons)

    yield from get_attributes_from_chat_completions_message_dicts(
        messages,
        f"{GEN_AI_OUTPUT_MESSAGES_KEY}.",
    )
273
+
274
+
275
def get_attributes_from_chat_completions_message_dicts(
    obj: Iterable[Mapping[str, Any]],
    prefix: str = "",
    msg_idx: int = 0,
    tool_call_idx: int = 0,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield flattened role/content/tool-call attributes for each message dict.

    Note: ``tool_call_idx`` keeps counting across messages rather than
    resetting per message.
    """
    if not isinstance(obj, Iterable):
        return
    for message in obj:
        msg_prefix = f"{prefix}{msg_idx}."
        role = message.get("role")
        if isinstance(role, str):
            yield f"{msg_prefix}{GEN_AI_MESSAGE_ROLE}", role
        content = message.get("content")
        if content:
            yield from get_attributes_from_chat_completions_message_content(
                content,
                msg_prefix,
            )
        tool_call_id = message.get("tool_call_id")
        if isinstance(tool_call_id, str):
            yield f"{msg_prefix}{GEN_AI_MESSAGE_TOOL_CALL_ID}", tool_call_id
        tool_calls = message.get("tool_calls")
        if isinstance(tool_calls, Iterable):
            for tool_call in tool_calls:
                yield from _get_attributes_from_chat_completions_tool_call_dict(
                    tool_call,
                    f"{msg_prefix}{GEN_AI_MESSAGE_TOOL_CALLS}.{tool_call_idx}.",
                )
                tool_call_idx += 1
        msg_idx += 1
301
+
302
+
303
def get_attributes_from_chat_completions_message_content(
    obj: str | Iterable[Mapping[str, Any]],
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield content attributes for string or structured message content."""
    if isinstance(obj, str):
        yield f"{prefix}{GEN_AI_MESSAGE_CONTENT}", obj
        return
    if not isinstance(obj, Iterable):
        return
    for index, part in enumerate(obj):
        # Non-mapping parts carry no extractable fields; skip them.
        if isinstance(part, Mapping):
            yield from _get_attributes_from_chat_completions_message_content_item(
                part,
                f"{prefix}{GEN_AI_MESSAGE_CONTENTS}.{index}.",
            )
317
+
318
+
319
def _get_attributes_from_chat_completions_message_content_item(
    obj: Mapping[str, Any],
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield the text of a ``text``-typed content item, when present and non-empty."""
    if obj.get("type") != "text":
        return
    text = obj.get("text")
    if text:
        yield f"{prefix}{GEN_AI_OUTPUT_MESSAGES_KEY}", text
325
+
326
+
327
def _get_attributes_from_chat_completions_tool_call_dict(
    obj: Mapping[str, Any],
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield id, name, and non-empty arguments for a chat-completions tool-call dict."""
    if call_id := obj.get("id"):
        yield f"{prefix}{GEN_AI_TOOL_CALL_ID_KEY}", call_id
    function = obj.get("function")
    if not function:
        return
    if name := function.get("name"):
        yield f"{prefix}{GEN_AI_TOOL_NAME_KEY}", name
    arguments = function.get("arguments")
    # Skip absent arguments and the empty-object placeholder.
    if arguments and arguments != "{}":
        yield f"{prefix}{GEN_AI_TOOL_ARGS_KEY}", arguments
339
+
340
+
341
def _get_attributes_from_chat_completions_usage(
    obj: Mapping[str, Any] | None,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield input/output token counts from a chat-completions usage mapping."""
    if not obj:
        return
    for attr_key, usage_key in (
        (GEN_AI_USAGE_INPUT_TOKENS_KEY, "input_tokens"),
        (GEN_AI_USAGE_OUTPUT_TOKENS_KEY, "output_tokens"),
    ):
        # Zero/absent counts are skipped, matching truthiness semantics.
        if count := obj.get(usage_key):
            yield attr_key, count
350
+
351
+
352
+ def _convert_to_primitive(value: Any) -> bool | str | bytes | int | float:
353
+ if isinstance(value, (bool, str, bytes, int, float)):
354
+ return value
355
+ if isinstance(value, (list, tuple)):
356
+ return safe_json_dumps(value)
357
+ if isinstance(value, dict):
358
+ return safe_json_dumps(value)
359
+ return str(value)
360
+
361
+
362
def get_attributes_from_function_span_data(
    obj: FunctionSpanData,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield tool name, input, and primitive-coerced output for a function span."""
    yield GEN_AI_TOOL_NAME_KEY, obj.name
    tool_input, tool_output = obj.input, obj.output
    if tool_input:
        yield GEN_AI_INPUT_MESSAGES_KEY, tool_input
    if tool_output is not None:
        yield GEN_AI_OUTPUT_MESSAGES_KEY, _convert_to_primitive(tool_output)
370
+
371
+
372
def get_attributes_from_message_content_list(
    obj: Iterable[ResponseInputContentParam | Content],
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield content-type and value attributes for each text/refusal content part."""
    for index, part in enumerate(obj):
        part_type = part["type"]
        if part_type in ("input_text", "output_text"):
            value = part["text"]
        elif part_type == "refusal":
            value = part["refusal"]
        else:
            if TYPE_CHECKING:
                assert_never(part_type)
            continue
        base = f"{prefix}{GEN_AI_INPUT_MESSAGES_KEY}.{index}."
        yield f"{base}{GEN_AI_MESSAGE_CONTENT_TYPE}", "text"
        yield f"{base}{GEN_AI_OUTPUT_MESSAGES_KEY}", value
391
+
392
+
393
def get_attributes_from_response(obj: Response) -> Iterator[tuple[str, AttributeValue]]:
    """Yield tool schemas, usage, outputs, instructions, model, and request payload for a Response."""
    yield from get_attributes_from_tools(obj.tools)
    yield from get_attributes_from_usage(obj.usage)
    yield from get_attributes_from_response_output(obj.output)
    if isinstance(obj.instructions, str):
        yield from _get_attributes_from_response_instruction(obj.instructions)
    # TODO: handle list-valued instructions.
    yield GEN_AI_REQUEST_MODEL_KEY, obj.model
    # Serialize the request minus the fields surfaced individually above.
    payload = obj.model_dump(
        exclude_none=True,
        exclude={"object", "tools", "usage", "output", "error", "status"},
    )
    yield GEN_AI_EXECUTION_PAYLOAD_KEY, safe_json_dumps(payload)
407
+
408
+
409
def get_attributes_from_tools(
    tools: Iterable[Tool] | None,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield a JSON-schema attribute per function tool; other tool kinds are skipped."""
    for index, tool in enumerate(tools or ()):
        if not isinstance(tool, FunctionTool):
            continue
        schema = {
            "type": "function",
            "function": {
                "name": tool.name,
                "description": tool.description,
                "parameters": tool.parameters,
                "strict": tool.strict,
            },
        }
        yield f"{GEN_AI_CHOICE}.{index}.{GEN_AI_TOOL_JSON_SCHEMA}", safe_json_dumps(schema)
432
+
433
+
434
def get_attributes_from_response_output(
    obj: Iterable[ResponseOutputItem],
    msg_idx: int = 0,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield flattened output-message attributes for each response output item.

    Args:
        obj: Response output items (messages and tool calls).
        msg_idx: Starting index used in output-message attribute keys.
    """
    tool_call_idx = 0
    for item in obj:
        msg_prefix = f"{GEN_AI_OUTPUT_MESSAGES_KEY}.{msg_idx}."
        if item.type == "message":
            yield from _get_attributes_from_message(item, msg_prefix)
            msg_idx += 1
        elif item.type == "function_call":
            yield f"{msg_prefix}{GEN_AI_MESSAGE_ROLE}", "assistant"
            yield from _get_attributes_from_function_tool_call(
                item,
                f"{msg_prefix}{GEN_AI_MESSAGE_TOOL_CALLS}.{tool_call_idx}.",
            )
            tool_call_idx += 1
        elif item.type == "custom_tool_call":
            # BUG FIX: the previous implementation referenced `prefix` set in
            # an earlier branch/iteration here — NameError when a custom tool
            # call was the first item, stale index otherwise. Build the
            # prefix from the current message index instead.
            yield f"{msg_prefix}{GEN_AI_MESSAGE_ROLE}", "assistant"
            yield from _get_attributes_from_response_custom_tool_call(
                item,
                f"{msg_prefix}{GEN_AI_MESSAGE_TOOL_CALLS}.0.",
            )
        elif TYPE_CHECKING:
            assert_never(item)
457
+
458
+
459
def _get_attributes_from_response_instruction(
    instructions: str | None,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield the system instructions as input message 0."""
    if not instructions:
        return
    base = f"{GEN_AI_INPUT_MESSAGES_KEY}.0."
    yield f"{base}{GEN_AI_MESSAGE_ROLE}", "system"
    yield f"{base}{GEN_AI_OUTPUT_MESSAGES_KEY}", instructions
466
+
467
+
468
def _get_attributes_from_function_tool_call(
    obj: ResponseFunctionToolCall,
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield call id, tool name, and non-empty arguments for a function tool call."""
    yield f"{prefix}{GEN_AI_TOOL_CALL_ID_KEY}", obj.call_id
    yield f"{prefix}{GEN_AI_TOOL_NAME_KEY}", obj.name
    arguments = obj.arguments
    # Skip the empty-object placeholder; it carries no information.
    if arguments != "{}":
        yield f"{prefix}{GEN_AI_TOOL_ARGS_KEY}", arguments
476
+
477
+
478
def _get_attributes_from_response_custom_tool_call(
    obj: ResponseCustomToolCall,
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield call id, tool name, and JSON-wrapped input for a custom tool call."""
    for attr_key, value in (
        (GEN_AI_TOOL_CALL_ID_KEY, obj.call_id),
        (GEN_AI_TOOL_NAME_KEY, obj.name),
    ):
        if value is not None:
            yield f"{prefix}{attr_key}", value
    if obj.input is not None:
        yield f"{prefix}{GEN_AI_TOOL_ARGS_KEY}", safe_json_dumps({"input": obj.input})
491
+
492
+
493
def _get_attributes_from_message(
    obj: ResponseOutputMessage,
    prefix: str = "",
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield role plus text/refusal content attributes for an output message."""
    yield f"{prefix}{GEN_AI_MESSAGE_ROLE}", obj.role
    for index, part in enumerate(obj.content):
        if isinstance(part, ResponseOutputText):
            value = part.text
        elif isinstance(part, ResponseOutputRefusal):
            value = part.refusal
        else:
            if TYPE_CHECKING:
                assert_never(part)
            continue
        base = f"{prefix}{GEN_AI_OUTPUT_MESSAGES_KEY}.{index}."
        yield f"{base}{GEN_AI_MESSAGE_CONTENT_TYPE}", "text"
        yield f"{base}{GEN_AI_OUTPUT_MESSAGES_KEY}", value
513
+
514
+
515
def get_attributes_from_usage(
    obj: ResponseUsage | None,
) -> Iterator[tuple[str, AttributeValue]]:
    """Yield token counts: output, input, total, cached-prompt, and reasoning."""
    if not obj:
        return
    yield from (
        (GEN_AI_USAGE_OUTPUT_TOKENS_KEY, obj.output_tokens),
        (GEN_AI_USAGE_INPUT_TOKENS_KEY, obj.input_tokens),
        (GEN_AI_LLM_TOKEN_COUNT_TOTAL, obj.total_tokens),
        (GEN_AI_LLM_TOKEN_COUNT_PROMPT_DETAILS_CACHED_READ, obj.input_tokens_details.cached_tokens),
        (
            GEN_AI_LLM_TOKEN_COUNT_COMPLETION_DETAILS_REASONING,
            obj.output_tokens_details.reasoning_tokens,
        ),
    )
528
+
529
+
530
def get_span_status(obj: Span[Any]) -> Status:
    """Map an SDK span's error (if any) onto an OpenTelemetry Status."""
    error = getattr(obj, "error", None)
    if not error:
        return Status(StatusCode.OK)
    detail = f"{error.get('message')}: {error.get('data')}"
    return Status(status_code=StatusCode.ERROR, description=detail)
@@ -0,0 +1,70 @@
1
+ Metadata-Version: 2.4
2
+ Name: microsoft-agents-a365-observability-extensions-openai
3
+ Version: 0.1.0
4
+ Summary: OpenAI Agents SDK observability and tracing extensions for Microsoft Agent 365
5
+ Author-email: Microsoft <support@microsoft.com>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/microsoft/Agent365-python
8
+ Project-URL: Repository, https://github.com/microsoft/Agent365-python
9
+ Project-URL: Issues, https://github.com/microsoft/Agent365-python/issues
10
+ Project-URL: Documentation, https://github.com/microsoft/Agent365-python/tree/main/libraries/microsoft-agents-a365-observability-extensions-openai
11
+ Keywords: observability,telemetry,tracing,opentelemetry,openai,agents,ai
12
+ Classifier: Development Status :: 3 - Alpha
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.11
16
+ Classifier: Programming Language :: Python :: 3.12
17
+ Classifier: Operating System :: OS Independent
18
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
19
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
20
+ Classifier: Topic :: System :: Monitoring
21
+ Requires-Python: >=3.11
22
+ Description-Content-Type: text/markdown
23
+ Requires-Dist: microsoft-agents-a365-observability-core>=0.0.0
24
+ Requires-Dist: openai-agents>=0.2.6
25
+ Requires-Dist: opentelemetry-api>=1.36.0
26
+ Requires-Dist: opentelemetry-sdk>=1.36.0
27
+ Requires-Dist: opentelemetry-instrumentation>=0.47b0
28
+ Provides-Extra: dev
29
+ Requires-Dist: pytest>=7.0.0; extra == "dev"
30
+ Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
31
+ Requires-Dist: ruff>=0.1.0; extra == "dev"
32
+ Requires-Dist: black>=23.0.0; extra == "dev"
33
+ Requires-Dist: mypy>=1.0.0; extra == "dev"
34
+ Provides-Extra: test
35
+ Requires-Dist: pytest>=7.0.0; extra == "test"
36
+ Requires-Dist: pytest-asyncio>=0.21.0; extra == "test"
37
+
38
+ # microsoft-agents-a365-observability-extensions-openai
39
+
40
+ [![PyPI](https://img.shields.io/pypi/v/microsoft-agents-a365-observability-extensions-openai?label=PyPI&logo=pypi)](https://pypi.org/project/microsoft-agents-a365-observability-extensions-openai)
41
+ [![PyPI Downloads](https://img.shields.io/pypi/dm/microsoft-agents-a365-observability-extensions-openai?label=Downloads&logo=pypi)](https://pypi.org/project/microsoft-agents-a365-observability-extensions-openai)
42
+
43
+ Observability extensions for OpenAI Agents SDK. This package provides OpenTelemetry tracing integration for OpenAI Agents-based applications with automatic instrumentation for agent workflows and tool invocations.
44
+
45
+ ## Installation
46
+
47
+ ```bash
48
+ pip install microsoft-agents-a365-observability-extensions-openai
49
+ ```
50
+
51
+ ## Usage
52
+
53
+ For usage examples and detailed documentation, see the [Observability documentation](https://learn.microsoft.com/microsoft-agent-365/developer/observability?tabs=python) on Microsoft Learn.
54
+
55
+ ## Support
56
+
57
+ For issues, questions, or feedback:
58
+
59
+ - File issues in the [GitHub Issues](https://github.com/microsoft/Agent365-python/issues) section
60
+ - See the [main documentation](../../../README.md) for more information
61
+
62
+ ## Trademarks
63
+
64
+ *Microsoft, Windows, Microsoft Azure and/or other Microsoft products and services referenced in the documentation may be either trademarks or registered trademarks of Microsoft in the United States and/or other countries. The licenses for this project do not grant you rights to use any Microsoft names, logos, or trademarks. Microsoft's general trademark guidelines can be found at http://go.microsoft.com/fwlink/?LinkID=254653.*
65
+
66
+ ## License
67
+
68
+ Copyright (c) Microsoft Corporation. All rights reserved.
69
+
70
+ Licensed under the MIT License - see the [LICENSE](../../../LICENSE.md) file for details.
@@ -0,0 +1,9 @@
1
+ microsoft_agents_a365/observability/extensions/openai/__init__.py,sha256=C525c5EXggrjZp2rbQGtak2S5u4I3TC9rYZWxh1MRR8,262
2
+ microsoft_agents_a365/observability/extensions/openai/constants.py,sha256=eAptrhnCpRx0PnCEr_HmmdaHt9CYDLbPwzSDtAy3NSM,1511
3
+ microsoft_agents_a365/observability/extensions/openai/trace_instrumentor.py,sha256=qvZY7vwDfD4n0sRu06VMGhX2UUqxyiXyQj_yEfSGIeQ,2450
4
+ microsoft_agents_a365/observability/extensions/openai/trace_processor.py,sha256=aOP6uEz0ECX4C4wu1GUW-TkHVMKzMWMc00nziOhIsVA,7973
5
+ microsoft_agents_a365/observability/extensions/openai/utils.py,sha256=xi7akBeJHnj5F6ayJCUUZS0SRwWJYjOKeMv0p0zwBeo,19607
6
+ microsoft_agents_a365_observability_extensions_openai-0.1.0.dist-info/METADATA,sha256=T29kR5x9GAEExWVGXzWK6U-y12J2pmK-9oAGDpE6Cv4,3627
7
+ microsoft_agents_a365_observability_extensions_openai-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
8
+ microsoft_agents_a365_observability_extensions_openai-0.1.0.dist-info/top_level.txt,sha256=G3c2_4sy5_EM_BWO67SbK2tKj4G8XFn-QXRbh8g9Lgk,22
9
+ microsoft_agents_a365_observability_extensions_openai-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+