paid-python 0.3.3__tar.gz → 0.3.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (125)
  1. {paid_python-0.3.3 → paid_python-0.3.5}/PKG-INFO +51 -2
  2. {paid_python-0.3.3 → paid_python-0.3.5}/README.md +50 -0
  3. {paid_python-0.3.3 → paid_python-0.3.5}/pyproject.toml +5 -2
  4. paid_python-0.3.5/src/paid/_vendor/opentelemetry/__init__.py +0 -0
  5. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/__init__.py +0 -0
  6. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/__init__.py +54 -0
  7. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/shared/__init__.py +399 -0
  8. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/shared/chat_wrappers.py +1192 -0
  9. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/shared/completion_wrappers.py +292 -0
  10. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/shared/config.py +15 -0
  11. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py +311 -0
  12. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/shared/event_emitter.py +108 -0
  13. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/shared/event_models.py +41 -0
  14. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py +68 -0
  15. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/shared/span_utils.py +0 -0
  16. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/utils.py +190 -0
  17. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/v0/__init__.py +176 -0
  18. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/v1/__init__.py +358 -0
  19. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/v1/assistant_wrappers.py +329 -0
  20. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py +134 -0
  21. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/v1/responses_wrappers.py +996 -0
  22. paid_python-0.3.5/src/paid/_vendor/opentelemetry/instrumentation/openai/version.py +1 -0
  23. paid_python-0.3.5/src/paid/py.typed +0 -0
  24. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/autoinstrumentation.py +2 -1
  25. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/tracing.py +8 -2
  26. {paid_python-0.3.3 → paid_python-0.3.5}/LICENSE +0 -0
  27. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/__init__.py +0 -0
  28. /paid_python-0.3.3/src/paid/py.typed → /paid_python-0.3.5/src/paid/_vendor/__init__.py +0 -0
  29. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/agents/__init__.py +0 -0
  30. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/agents/client.py +0 -0
  31. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/agents/raw_client.py +0 -0
  32. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/client.py +0 -0
  33. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/contacts/__init__.py +0 -0
  34. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/contacts/client.py +0 -0
  35. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/contacts/raw_client.py +0 -0
  36. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/__init__.py +0 -0
  37. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/api_error.py +0 -0
  38. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/client_wrapper.py +0 -0
  39. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/datetime_utils.py +0 -0
  40. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/file.py +0 -0
  41. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/force_multipart.py +0 -0
  42. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/http_client.py +0 -0
  43. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/http_response.py +0 -0
  44. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/jsonable_encoder.py +0 -0
  45. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/pydantic_utilities.py +0 -0
  46. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/query_encoder.py +0 -0
  47. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/remove_none_from_dict.py +0 -0
  48. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/request_options.py +0 -0
  49. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/core/serialization.py +0 -0
  50. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/customers/__init__.py +0 -0
  51. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/customers/client.py +0 -0
  52. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/customers/raw_client.py +0 -0
  53. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/environment.py +0 -0
  54. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/errors/__init__.py +0 -0
  55. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/errors/bad_request_error.py +0 -0
  56. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/errors/forbidden_error.py +0 -0
  57. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/errors/not_found_error.py +0 -0
  58. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/logger.py +0 -0
  59. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/orders/__init__.py +0 -0
  60. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/orders/client.py +0 -0
  61. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/orders/lines/__init__.py +0 -0
  62. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/orders/lines/client.py +0 -0
  63. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/orders/lines/raw_client.py +0 -0
  64. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/orders/raw_client.py +0 -0
  65. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/__init__.py +0 -0
  66. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/context_data.py +0 -0
  67. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/context_manager.py +0 -0
  68. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/distributed_tracing.py +0 -0
  69. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/signal.py +0 -0
  70. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/__init__.py +0 -0
  71. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/anthropic/__init__.py +0 -0
  72. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/anthropic/anthropicWrapper.py +0 -0
  73. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/bedrock/__init__.py +0 -0
  74. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/bedrock/bedrockWrapper.py +0 -0
  75. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/gemini/__init__.py +0 -0
  76. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/gemini/geminiWrapper.py +0 -0
  77. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/langchain/__init__.py +0 -0
  78. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/langchain/paidLangChainCallback.py +0 -0
  79. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/llamaindex/__init__.py +0 -0
  80. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/llamaindex/llamaIndexWrapper.py +0 -0
  81. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/mistral/__init__.py +0 -0
  82. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/mistral/mistralWrapper.py +0 -0
  83. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/openai/__init__.py +0 -0
  84. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/openai/openAiWrapper.py +0 -0
  85. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/openai_agents/__init__.py +0 -0
  86. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/openai_agents/openaiAgentsHook.py +0 -0
  87. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/tracing/wrappers/utils.py +0 -0
  88. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/__init__.py +0 -0
  89. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/address.py +0 -0
  90. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/agent.py +0 -0
  91. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/agent_attribute.py +0 -0
  92. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/agent_price_point.py +0 -0
  93. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/agent_price_point_tiers.py +0 -0
  94. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/agent_update.py +0 -0
  95. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/api_error.py +0 -0
  96. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/billing_frequency.py +0 -0
  97. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/charge_type.py +0 -0
  98. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/contact.py +0 -0
  99. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/cost_amount.py +0 -0
  100. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/cost_trace.py +0 -0
  101. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/cost_traces_response.py +0 -0
  102. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/creation_source.py +0 -0
  103. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/creation_state.py +0 -0
  104. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/customer.py +0 -0
  105. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/customer_update.py +0 -0
  106. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/entitlement_usage.py +0 -0
  107. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/error.py +0 -0
  108. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/order.py +0 -0
  109. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/order_line.py +0 -0
  110. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/order_line_attribute.py +0 -0
  111. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/order_line_attribute_create.py +0 -0
  112. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/order_line_attribute_pricing.py +0 -0
  113. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/order_line_create.py +0 -0
  114. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/pagination_meta.py +0 -0
  115. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/price_point.py +0 -0
  116. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/pricing.py +0 -0
  117. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/pricing_model_type.py +0 -0
  118. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/salutation.py +0 -0
  119. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/signal.py +0 -0
  120. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/tax_exempt_status.py +0 -0
  121. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/types/tier.py +0 -0
  122. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/usage/__init__.py +0 -0
  123. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/usage/client.py +0 -0
  124. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/usage/raw_client.py +0 -0
  125. {paid_python-0.3.3 → paid_python-0.3.5}/src/paid/version.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: paid-python
- Version: 0.3.3
+ Version: 0.3.5
  Summary:
  Requires-Python: >=3.9,<3.14
  Classifier: Intended Audience :: Developers
@@ -26,7 +26,6 @@ Requires-Dist: opentelemetry-api (>=1.23.0)
  Requires-Dist: opentelemetry-exporter-otlp-proto-http (>=1.23.0)
  Requires-Dist: opentelemetry-instrumentation-anthropic (>=0.47.0)
  Requires-Dist: opentelemetry-instrumentation-langchain (>=0.47.0)
- Requires-Dist: opentelemetry-instrumentation-openai (>=0.47.0)
  Requires-Dist: opentelemetry-sdk (>=1.23.0)
  Requires-Dist: pydantic (>=1.9.0)
  Requires-Dist: pydantic-core (>=2.18.0)
@@ -123,6 +122,56 @@ export PAID_LOG_LEVEL=DEBUG

  Defaults to ERROR.

+ ## Environment Variables
+
+ The Paid SDK supports the following environment variables for configuration:
+
+ ### `PAID_API_KEY`
+
+ Your Paid API key for authentication. This is used as a fallback when you don't explicitly pass the `token` parameter to the `Paid()` client or `initialize_tracing()`.
+
+ ```bash
+ export PAID_API_KEY="your_api_key_here"
+ ```
+
+ You can then initialize the client without passing the token:
+
+ ```python
+ from paid import Paid
+
+ # API key is read from PAID_API_KEY environment variable
+ client = Paid()
+ ```
+
+ ### `PAID_ENABLED`
+
+ Controls whether Paid tracing is enabled. Set to `false` (case-insensitive) to disable all tracing functionality.
+
+ ```bash
+ export PAID_ENABLED=false
+ ```
+
+ This is useful for:
+ - Development/testing environments where tracing isn't needed
+ - Temporarily disabling tracing without modifying code
+ - Feature flagging in different deployment environments
+
+ Defaults to `true` if not set.
+
+ ### `PAID_LOG_LEVEL`
+
+ Sets the logging level for Paid SDK operations. See the [Logging](#logging) section for details.
+
+ ### `PAID_OTEL_COLLECTOR_ENDPOINT`
+
+ Overrides the default OpenTelemetry collector endpoint URL. Only needed if you want to route traces to a custom endpoint.
+
+ ```bash
+ export PAID_OTEL_COLLECTOR_ENDPOINT="https://your-custom-endpoint.com:4318/v1/traces"
+ ```
+
+ Defaults to `https://collector.agentpaid.io:4318/v1/traces`.
+
  ## Cost Tracking via OTEL tracing

  ### Simple Decorator and Context Manager Methods
@@ -87,6 +87,56 @@ export PAID_LOG_LEVEL=DEBUG

  Defaults to ERROR.

+ ## Environment Variables
+
+ The Paid SDK supports the following environment variables for configuration:
+
+ ### `PAID_API_KEY`
+
+ Your Paid API key for authentication. This is used as a fallback when you don't explicitly pass the `token` parameter to the `Paid()` client or `initialize_tracing()`.
+
+ ```bash
+ export PAID_API_KEY="your_api_key_here"
+ ```
+
+ You can then initialize the client without passing the token:
+
+ ```python
+ from paid import Paid
+
+ # API key is read from PAID_API_KEY environment variable
+ client = Paid()
+ ```
+
+ ### `PAID_ENABLED`
+
+ Controls whether Paid tracing is enabled. Set to `false` (case-insensitive) to disable all tracing functionality.
+
+ ```bash
+ export PAID_ENABLED=false
+ ```
+
+ This is useful for:
+ - Development/testing environments where tracing isn't needed
+ - Temporarily disabling tracing without modifying code
+ - Feature flagging in different deployment environments
+
+ Defaults to `true` if not set.
+
+ ### `PAID_LOG_LEVEL`
+
+ Sets the logging level for Paid SDK operations. See the [Logging](#logging) section for details.
+
+ ### `PAID_OTEL_COLLECTOR_ENDPOINT`
+
+ Overrides the default OpenTelemetry collector endpoint URL. Only needed if you want to route traces to a custom endpoint.
+
+ ```bash
+ export PAID_OTEL_COLLECTOR_ENDPOINT="https://your-custom-endpoint.com:4318/v1/traces"
+ ```
+
+ Defaults to `https://collector.agentpaid.io:4318/v1/traces`.
+
  ## Cost Tracking via OTEL tracing

  ### Simple Decorator and Context Manager Methods
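The variables documented above can also be combined programmatically, for example in a test harness. The sketch below only relies on behavior stated in the README section above (`Paid()` falling back to `PAID_API_KEY`, `PAID_ENABLED` disabling tracing, `PAID_LOG_LEVEL` defaulting to ERROR); setting them from Python before importing the SDK is an illustration, not a documented API.

```python
# Minimal sketch: configuring the Paid SDK purely through environment variables.
import os

os.environ.setdefault("PAID_API_KEY", "your_api_key_here")  # fallback token for Paid()
os.environ["PAID_ENABLED"] = "false"      # disable all tracing, e.g. in CI
os.environ["PAID_LOG_LEVEL"] = "DEBUG"    # verbose SDK logging (default is ERROR)

from paid import Paid

client = Paid()  # token resolved from PAID_API_KEY, no argument needed
```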
@@ -3,7 +3,7 @@ name = "paid-python"

  [tool.poetry]
  name = "paid-python"
- version = "0.3.3"
+ version = "0.3.5"
  description = ""
  readme = "README.md"
  authors = []
@@ -44,7 +44,7 @@ opentelemetry-exporter-otlp-proto-http = ">=1.23.0"
  mutagen = ">=1.47.0"
  python-dotenv = ">=0.15.0"
  opentelemetry-instrumentation-anthropic = ">=0.47.0"
- opentelemetry-instrumentation-openai = ">=0.47.0"
+ # opentelemetry-instrumentation-openai is vendored from https://github.com/paid-ai/openllmetry-instrumentation-patch (patched version)
  opentelemetry-instrumentation-langchain = ">=0.47.0"
  openinference-instrumentation-openai-agents = ">=1.0.0"
  openinference-instrumentation-bedrock = ">=0.1.0"
@@ -87,8 +87,11 @@ module = [
  "agents",
  "agents.*",
  "langchain_core.*",
+ "paid._vendor", # remove once openai instrumentor is upstream
+ "paid._vendor.*", # remove once openai instrumentor is upstream
  ]
  ignore_missing_imports = true
+ ignore_errors = true # remove once openai instrumentor is upstream

  [tool.ruff]
  line-length = 120
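For code that uses the SDK's internals, the practical effect of this vendoring is an import-path change rather than an API change. A hedged sketch, assuming the patched copy keeps the upstream module's public name (the vendored `__init__.py` later in this diff confirms this for `OpenAIInstrumentor`):

```python
# Previously the instrumentor came from the upstream distribution
# (opentelemetry-instrumentation-openai), e.g.:
#   from opentelemetry.instrumentation.openai import OpenAIInstrumentor
#
# With that dependency removed, the same class ships inside paid-python:
from paid._vendor.opentelemetry.instrumentation.openai import OpenAIInstrumentor
```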
@@ -0,0 +1,54 @@
+ from typing import Callable, Collection, Optional
+
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+ from paid._vendor.opentelemetry.instrumentation.openai.shared.config import Config
+ from paid._vendor.opentelemetry.instrumentation.openai.utils import is_openai_v1
+ from typing_extensions import Coroutine
+
+ _instruments = ("openai >= 0.27.0",)
+
+
+ class OpenAIInstrumentor(BaseInstrumentor):
+     """An instrumentor for OpenAI's client library."""
+
+     def __init__(
+         self,
+         enrich_assistant: bool = False,
+         exception_logger=None,
+         get_common_metrics_attributes: Callable[[], dict] = lambda: {},
+         upload_base64_image: Optional[
+             Callable[[str, str, str, str], Coroutine[None, None, str]]
+         ] = lambda *args: "",
+         enable_trace_context_propagation: bool = True,
+         use_legacy_attributes: bool = True,
+     ):
+         super().__init__()
+         Config.enrich_assistant = enrich_assistant
+         Config.exception_logger = exception_logger
+         Config.get_common_metrics_attributes = get_common_metrics_attributes
+         Config.upload_base64_image = upload_base64_image
+         Config.enable_trace_context_propagation = enable_trace_context_propagation
+         Config.use_legacy_attributes = use_legacy_attributes
+
+     def instrumentation_dependencies(self) -> Collection[str]:
+         return _instruments
+
+     def _instrument(self, **kwargs):
+         if is_openai_v1():
+             from paid._vendor.opentelemetry.instrumentation.openai.v1 import OpenAIV1Instrumentor
+
+             OpenAIV1Instrumentor().instrument(**kwargs)
+         else:
+             from paid._vendor.opentelemetry.instrumentation.openai.v0 import OpenAIV0Instrumentor
+
+             OpenAIV0Instrumentor().instrument(**kwargs)
+
+     def _uninstrument(self, **kwargs):
+         if is_openai_v1():
+             from paid._vendor.opentelemetry.instrumentation.openai.v1 import OpenAIV1Instrumentor
+
+             OpenAIV1Instrumentor().uninstrument(**kwargs)
+         else:
+             from paid._vendor.opentelemetry.instrumentation.openai.v0 import OpenAIV0Instrumentor
+
+             OpenAIV0Instrumentor().uninstrument(**kwargs)
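The class above follows the standard `BaseInstrumentor` contract, so enabling it looks the same as with the upstream package. A minimal sketch using only the constructor arguments and methods shown above; whether paid-python's own `autoinstrumentation.py` calls it exactly this way is not visible in this diff:

```python
from paid._vendor.opentelemetry.instrumentation.openai import OpenAIInstrumentor

# Keyword arguments mirror the __init__ signature above; all are optional.
instrumentor = OpenAIInstrumentor(
    enrich_assistant=False,
    enable_trace_context_propagation=True,
    use_legacy_attributes=True,
)
instrumentor.instrument()    # dispatches to the v1 or v0 sub-instrumentor based on is_openai_v1()
# ... later, to undo the patching:
instrumentor.uninstrument()
```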
@@ -0,0 +1,399 @@
+ import json
+ import logging
+ import types
+ import openai
+ import pydantic
+ from importlib.metadata import version
+
+ from paid._vendor.opentelemetry.instrumentation.openai.shared.config import Config
+ from paid._vendor.opentelemetry.instrumentation.openai.utils import (
+     dont_throw,
+     is_openai_v1,
+ )
+ from opentelemetry.semconv._incubating.attributes import (
+     gen_ai_attributes as GenAIAttributes,
+     openai_attributes as OpenAIAttributes,
+ )
+ from opentelemetry.semconv_ai import SpanAttributes
+ from opentelemetry.trace.propagation import set_span_in_context
+ from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
+
+ OPENAI_LLM_USAGE_TOKEN_TYPES = ["prompt_tokens", "completion_tokens"]
+ PROMPT_FILTER_KEY = "prompt_filter_results"
+ PROMPT_ERROR = "prompt_error"
+
+ _PYDANTIC_VERSION = version("pydantic")
+
+
+ logger = logging.getLogger(__name__)
+
+
+ def _set_span_attribute(span, name, value):
+     if value is None or value == "":
+         return
+
+     if hasattr(openai, "NOT_GIVEN") and value == openai.NOT_GIVEN:
+         return
+
+     span.set_attribute(name, value)
+
+
+ def _set_client_attributes(span, instance):
+     if not span.is_recording():
+         return
+
+     if not is_openai_v1():
+         return
+
+     client = instance._client # pylint: disable=protected-access
+     if isinstance(client, (openai.AsyncOpenAI, openai.OpenAI)):
+         _set_span_attribute(
+             span, SpanAttributes.LLM_OPENAI_API_BASE, str(client.base_url)
+         )
+     if isinstance(client, (openai.AsyncAzureOpenAI, openai.AzureOpenAI)):
+         _set_span_attribute(
+             span, SpanAttributes.LLM_OPENAI_API_VERSION, client._api_version
+         ) # pylint: disable=protected-access
+
+
+ def _set_api_attributes(span):
+     if not span.is_recording():
+         return
+
+     if is_openai_v1():
+         return
+
+     base_url = openai.base_url if hasattr(openai, "base_url") else openai.api_base
+
+     _set_span_attribute(span, SpanAttributes.LLM_OPENAI_API_BASE, base_url)
+     _set_span_attribute(span, SpanAttributes.LLM_OPENAI_API_TYPE, openai.api_type)
+     _set_span_attribute(span, SpanAttributes.LLM_OPENAI_API_VERSION, openai.api_version)
+
+     return
+
+
+ def _set_functions_attributes(span, functions):
+     if not functions:
+         return
+
+     for i, function in enumerate(functions):
+         prefix = f"{SpanAttributes.LLM_REQUEST_FUNCTIONS}.{i}"
+         _set_span_attribute(span, f"{prefix}.name", function.get("name"))
+         _set_span_attribute(span, f"{prefix}.description", function.get("description"))
+         _set_span_attribute(
+             span, f"{prefix}.parameters", json.dumps(function.get("parameters"))
+         )
+
+
+ def set_tools_attributes(span, tools):
+     if not tools:
+         return
+
+     for i, tool in enumerate(tools):
+         function = tool.get("function")
+         if not function:
+             continue
+
+         prefix = f"{SpanAttributes.LLM_REQUEST_FUNCTIONS}.{i}"
+         _set_span_attribute(span, f"{prefix}.name", function.get("name"))
+         _set_span_attribute(span, f"{prefix}.description", function.get("description"))
+         _set_span_attribute(
+             span, f"{prefix}.parameters", json.dumps(function.get("parameters"))
+         )
+
+
+ def _set_request_attributes(span, kwargs, instance=None):
+     if not span.is_recording():
+         return
+
+     _set_api_attributes(span)
+
+     base_url = _get_openai_base_url(instance) if instance else ""
+     vendor = _get_vendor_from_url(base_url)
+     _set_span_attribute(span, GenAIAttributes.GEN_AI_SYSTEM, vendor)
+
+     model = kwargs.get("model")
+     if vendor == "AWS" and model and "." in model:
+         model = _cross_region_check(model)
+     elif vendor == "OpenRouter":
+         model = _extract_model_name_from_provider_format(model)
+
+     _set_span_attribute(span, GenAIAttributes.GEN_AI_REQUEST_MODEL, model)
+     _set_span_attribute(
+         span, GenAIAttributes.GEN_AI_REQUEST_MAX_TOKENS, kwargs.get("max_tokens")
+     )
+     _set_span_attribute(
+         span, GenAIAttributes.GEN_AI_REQUEST_TEMPERATURE, kwargs.get("temperature")
+     )
+     _set_span_attribute(span, GenAIAttributes.GEN_AI_REQUEST_TOP_P, kwargs.get("top_p"))
+     _set_span_attribute(
+         span, SpanAttributes.LLM_FREQUENCY_PENALTY, kwargs.get("frequency_penalty")
+     )
+     _set_span_attribute(
+         span, SpanAttributes.LLM_PRESENCE_PENALTY, kwargs.get("presence_penalty")
+     )
+     _set_span_attribute(span, SpanAttributes.LLM_USER, kwargs.get("user"))
+     _set_span_attribute(span, SpanAttributes.LLM_HEADERS, str(kwargs.get("headers")))
+     # The new OpenAI SDK removed the `headers` and create new field called `extra_headers`
+     if kwargs.get("extra_headers") is not None:
+         _set_span_attribute(
+             span, SpanAttributes.LLM_HEADERS, str(kwargs.get("extra_headers"))
+         )
+     _set_span_attribute(
+         span, SpanAttributes.LLM_IS_STREAMING, kwargs.get("stream") or False
+     )
+     _set_span_attribute(
+         span, OpenAIAttributes.OPENAI_REQUEST_SERVICE_TIER, kwargs.get("service_tier")
+     )
+     if response_format := kwargs.get("response_format"):
+         # backward-compatible check for
+         # openai.types.shared_params.response_format_json_schema.ResponseFormatJSONSchema
+         if (
+             isinstance(response_format, dict)
+             and response_format.get("type") == "json_schema"
+             and response_format.get("json_schema")
+         ):
+             schema = dict(response_format.get("json_schema")).get("schema")
+             if schema:
+                 _set_span_attribute(
+                     span,
+                     SpanAttributes.LLM_REQUEST_STRUCTURED_OUTPUT_SCHEMA,
+                     json.dumps(schema),
+                 )
+         elif (
+             isinstance(response_format, pydantic.BaseModel)
+             or (
+                 hasattr(response_format, "model_json_schema")
+                 and callable(response_format.model_json_schema)
+             )
+         ):
+             _set_span_attribute(
+                 span,
+                 SpanAttributes.LLM_REQUEST_STRUCTURED_OUTPUT_SCHEMA,
+                 json.dumps(response_format.model_json_schema()),
+             )
+         else:
+             schema = None
+             try:
+                 schema = json.dumps(pydantic.TypeAdapter(response_format).json_schema())
+             except Exception:
+                 try:
+                     schema = json.dumps(response_format)
+                 except Exception:
+                     pass
+
+             if schema:
+                 _set_span_attribute(
+                     span,
+                     SpanAttributes.LLM_REQUEST_STRUCTURED_OUTPUT_SCHEMA,
+                     schema,
+                 )
+
+
+ @dont_throw
+ def _set_response_attributes(span, response):
+     if not span.is_recording():
+         return
+
+     if "error" in response:
+         _set_span_attribute(
+             span,
+             f"{GenAIAttributes.GEN_AI_PROMPT}.{PROMPT_ERROR}",
+             json.dumps(response.get("error")),
+         )
+         return
+
+     response_model = response.get("model")
+     if response_model:
+         response_model = _extract_model_name_from_provider_format(response_model)
+     _set_span_attribute(span, GenAIAttributes.GEN_AI_RESPONSE_MODEL, response_model)
+     _set_span_attribute(span, GenAIAttributes.GEN_AI_RESPONSE_ID, response.get("id"))
+
+     _set_span_attribute(
+         span,
+         SpanAttributes.LLM_OPENAI_RESPONSE_SYSTEM_FINGERPRINT,
+         response.get("system_fingerprint"),
+     )
+     _set_span_attribute(
+         span,
+         OpenAIAttributes.OPENAI_RESPONSE_SERVICE_TIER,
+         response.get("service_tier"),
+     )
+     _log_prompt_filter(span, response)
+     usage = response.get("usage")
+     if not usage:
+         return
+
+     if is_openai_v1() and not isinstance(usage, dict):
+         usage = usage.__dict__
+
+     _set_span_attribute(
+         span, SpanAttributes.LLM_USAGE_TOTAL_TOKENS, usage.get("total_tokens")
+     )
+     _set_span_attribute(
+         span,
+         GenAIAttributes.GEN_AI_USAGE_OUTPUT_TOKENS,
+         usage.get("completion_tokens"),
+     )
+     _set_span_attribute(
+         span, GenAIAttributes.GEN_AI_USAGE_INPUT_TOKENS, usage.get("prompt_tokens")
+     )
+     prompt_tokens_details = dict(usage.get("prompt_tokens_details", {}))
+     _set_span_attribute(
+         span,
+         SpanAttributes.LLM_USAGE_CACHE_READ_INPUT_TOKENS,
+         prompt_tokens_details.get("cached_tokens", 0),
+     )
+     return
+
+
+ def _log_prompt_filter(span, response_dict):
+     if response_dict.get("prompt_filter_results"):
+         _set_span_attribute(
+             span,
+             f"{GenAIAttributes.GEN_AI_PROMPT}.{PROMPT_FILTER_KEY}",
+             json.dumps(response_dict.get("prompt_filter_results")),
+         )
+
+
+ @dont_throw
+ def _set_span_stream_usage(span, prompt_tokens, completion_tokens):
+     if not span.is_recording():
+         return
+
+     if isinstance(completion_tokens, int) and completion_tokens >= 0:
+         _set_span_attribute(
+             span, GenAIAttributes.GEN_AI_USAGE_OUTPUT_TOKENS, completion_tokens
+         )
+
+     if isinstance(prompt_tokens, int) and prompt_tokens >= 0:
+         _set_span_attribute(span, GenAIAttributes.GEN_AI_USAGE_INPUT_TOKENS, prompt_tokens)
+
+     if (
+         isinstance(prompt_tokens, int)
+         and isinstance(completion_tokens, int)
+         and completion_tokens + prompt_tokens >= 0
+     ):
+         _set_span_attribute(
+             span,
+             SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+             completion_tokens + prompt_tokens,
+         )
+
+
+ def _get_openai_base_url(instance):
+     if hasattr(instance, "_client"):
+         client = instance._client # pylint: disable=protected-access
+         if isinstance(client, (openai.AsyncOpenAI, openai.OpenAI)):
+             return str(client.base_url)
+
+     return ""
+
+
+ def _get_vendor_from_url(base_url):
+     if not base_url:
+         return "openai"
+
+     if "openai.azure.com" in base_url:
+         return "Azure"
+     elif "amazonaws.com" in base_url or "bedrock" in base_url:
+         return "AWS"
+     elif "googleapis.com" in base_url or "vertex" in base_url:
+         return "Google"
+     elif "openrouter.ai" in base_url:
+         return "OpenRouter"
+
+     return "openai"
+
+
+ def _cross_region_check(value):
+     if not value or "." not in value:
+         return value
+
+     prefixes = ["us", "us-gov", "eu", "apac"]
+     if any(value.startswith(prefix + ".") for prefix in prefixes):
+         parts = value.split(".")
+         if len(parts) > 2:
+             return parts[2]
+         else:
+             return value
+     else:
+         vendor, model = value.split(".", 1)
+         return model
+
+
+ def _extract_model_name_from_provider_format(model_name):
+     """
+     Extract model name from provider/model format.
+     E.g., 'openai/gpt-4o' -> 'gpt-4o', 'anthropic/claude-3-sonnet' -> 'claude-3-sonnet'
+     """
+     if not model_name:
+         return model_name
+
+     if "/" in model_name:
+         parts = model_name.split("/")
+         return parts[-1] # Return the last part (actual model name)
+
+     return model_name
+
+
+ def is_streaming_response(response):
+     if is_openai_v1():
+         return isinstance(response, openai.Stream) or isinstance(
+             response, openai.AsyncStream
+         )
+
+     return isinstance(response, types.GeneratorType) or isinstance(
+         response, types.AsyncGeneratorType
+     )
+
+
+ def model_as_dict(model):
+     if isinstance(model, dict):
+         return model
+     if _PYDANTIC_VERSION < "2.0.0":
+         return model.dict()
+     if hasattr(model, "model_dump"):
+         return model.model_dump()
+     elif hasattr(model, "parse"): # Raw API response
+         return model_as_dict(model.parse())
+     else:
+         return model
+
+
+ def _token_type(token_type: str):
+     if token_type == "prompt_tokens":
+         return "input"
+     elif token_type == "completion_tokens":
+         return "output"
+
+     return None
+
+
+ def metric_shared_attributes(
+     response_model: str, operation: str, server_address: str, is_streaming: bool = False
+ ):
+     attributes = Config.get_common_metrics_attributes()
+     vendor = _get_vendor_from_url(server_address)
+
+     return {
+         **attributes,
+         GenAIAttributes.GEN_AI_SYSTEM: vendor,
+         GenAIAttributes.GEN_AI_RESPONSE_MODEL: response_model,
+         "gen_ai.operation.name": operation,
+         "server.address": server_address,
+         "stream": is_streaming,
+     }
+
+
+ def propagate_trace_context(span, kwargs):
+     if is_openai_v1():
+         extra_headers = kwargs.get("extra_headers", {})
+         ctx = set_span_in_context(span)
+         TraceContextTextMapPropagator().inject(extra_headers, context=ctx)
+         kwargs["extra_headers"] = extra_headers
+     else:
+         headers = kwargs.get("headers", {})
+         ctx = set_span_in_context(span)
+         TraceContextTextMapPropagator().inject(headers, context=ctx)
+         kwargs["headers"] = headers
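Taken together, the vendor/model helpers in this file normalize model identifiers before they are written to span attributes. The expectations below are hand-traced from the functions above; they are an illustration, not a test file shipped in the package:

```python
# Hand-traced behavior of the helpers defined in shared/__init__.py.
from paid._vendor.opentelemetry.instrumentation.openai.shared import (
    _cross_region_check,
    _extract_model_name_from_provider_format,
    _get_vendor_from_url,
)

assert _get_vendor_from_url("https://openrouter.ai/api/v1") == "OpenRouter"
assert _get_vendor_from_url("") == "openai"  # empty base URL falls back to "openai"

assert _extract_model_name_from_provider_format("openai/gpt-4o") == "gpt-4o"  # provider prefix stripped

assert _cross_region_check("us.anthropic.claude-3-5-sonnet") == "claude-3-5-sonnet"  # region prefix dropped
assert _cross_region_check("anthropic.claude-3-5-sonnet") == "claude-3-5-sonnet"     # vendor prefix dropped
assert _cross_region_check("gpt-4o") == "gpt-4o"  # no dot: returned unchanged
```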