paid-python 0.1.1__tar.gz → 0.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. {paid_python-0.1.1 → paid_python-0.2.0}/PKG-INFO +34 -11
  2. {paid_python-0.1.1 → paid_python-0.2.0}/README.md +32 -10
  3. {paid_python-0.1.1 → paid_python-0.2.0}/pyproject.toml +2 -1
  4. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/client.py +3 -3
  5. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/__init__.py +2 -0
  6. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/autoinstrumentation.py +29 -4
  7. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/context_manager.py +4 -4
  8. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/tracing.py +37 -31
  9. {paid_python-0.1.1 → paid_python-0.2.0}/LICENSE +0 -0
  10. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/__init__.py +0 -0
  11. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/agents/__init__.py +0 -0
  12. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/agents/client.py +0 -0
  13. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/agents/raw_client.py +0 -0
  14. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/contacts/__init__.py +0 -0
  15. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/contacts/client.py +0 -0
  16. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/contacts/raw_client.py +0 -0
  17. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/__init__.py +0 -0
  18. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/api_error.py +0 -0
  19. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/client_wrapper.py +0 -0
  20. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/datetime_utils.py +0 -0
  21. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/file.py +0 -0
  22. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/force_multipart.py +0 -0
  23. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/http_client.py +0 -0
  24. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/http_response.py +0 -0
  25. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/jsonable_encoder.py +0 -0
  26. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/pydantic_utilities.py +0 -0
  27. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/query_encoder.py +0 -0
  28. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/remove_none_from_dict.py +0 -0
  29. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/request_options.py +0 -0
  30. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/core/serialization.py +0 -0
  31. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/customers/__init__.py +0 -0
  32. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/customers/client.py +0 -0
  33. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/customers/raw_client.py +0 -0
  34. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/environment.py +0 -0
  35. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/errors/__init__.py +0 -0
  36. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/errors/bad_request_error.py +0 -0
  37. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/errors/forbidden_error.py +0 -0
  38. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/errors/not_found_error.py +0 -0
  39. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/logger.py +0 -0
  40. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/orders/__init__.py +0 -0
  41. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/orders/client.py +0 -0
  42. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/orders/lines/__init__.py +0 -0
  43. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/orders/lines/client.py +0 -0
  44. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/orders/lines/raw_client.py +0 -0
  45. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/orders/raw_client.py +0 -0
  46. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/py.typed +0 -0
  47. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/distributed_tracing.py +0 -0
  48. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/signal.py +0 -0
  49. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/__init__.py +0 -0
  50. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/anthropic/__init__.py +0 -0
  51. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/anthropic/anthropicWrapper.py +0 -0
  52. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/bedrock/__init__.py +0 -0
  53. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/bedrock/bedrockWrapper.py +0 -0
  54. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/gemini/__init__.py +0 -0
  55. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/gemini/geminiWrapper.py +0 -0
  56. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/langchain/__init__.py +0 -0
  57. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/langchain/paidLangChainCallback.py +0 -0
  58. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/llamaindex/__init__.py +0 -0
  59. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/llamaindex/llamaIndexWrapper.py +0 -0
  60. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/mistral/__init__.py +0 -0
  61. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/mistral/mistralWrapper.py +0 -0
  62. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/openai/__init__.py +0 -0
  63. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/openai/openAiWrapper.py +0 -0
  64. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/openai_agents/__init__.py +0 -0
  65. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/openai_agents/openaiAgentsHook.py +0 -0
  66. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/wrappers/utils.py +0 -0
  67. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/__init__.py +0 -0
  68. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/address.py +0 -0
  69. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/agent.py +0 -0
  70. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/agent_attribute.py +0 -0
  71. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/agent_price_point.py +0 -0
  72. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/agent_price_point_tiers.py +0 -0
  73. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/agent_update.py +0 -0
  74. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/api_error.py +0 -0
  75. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/billing_frequency.py +0 -0
  76. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/charge_type.py +0 -0
  77. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/contact.py +0 -0
  78. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/cost_amount.py +0 -0
  79. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/cost_trace.py +0 -0
  80. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/cost_traces_response.py +0 -0
  81. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/creation_source.py +0 -0
  82. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/creation_state.py +0 -0
  83. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/customer.py +0 -0
  84. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/customer_update.py +0 -0
  85. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/entitlement_usage.py +0 -0
  86. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/error.py +0 -0
  87. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/order.py +0 -0
  88. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/order_line.py +0 -0
  89. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/order_line_attribute.py +0 -0
  90. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/order_line_attribute_create.py +0 -0
  91. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/order_line_attribute_pricing.py +0 -0
  92. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/order_line_create.py +0 -0
  93. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/pagination_meta.py +0 -0
  94. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/price_point.py +0 -0
  95. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/pricing.py +0 -0
  96. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/pricing_model_type.py +0 -0
  97. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/salutation.py +0 -0
  98. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/signal.py +0 -0
  99. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/tax_exempt_status.py +0 -0
  100. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/types/tier.py +0 -0
  101. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/usage/__init__.py +0 -0
  102. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/usage/client.py +0 -0
  103. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/usage/raw_client.py +0 -0
  104. {paid_python-0.1.1 → paid_python-0.2.0}/src/paid/version.py +0 -0
{paid_python-0.1.1 → paid_python-0.2.0}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: paid-python
- Version: 0.1.1
+ Version: 0.2.0
  Summary:
  Requires-Python: >=3.9,<3.14
  Classifier: Intended Audience :: Developers
@@ -25,6 +25,7 @@ Requires-Dist: opentelemetry-api (>=1.23.0)
  Requires-Dist: opentelemetry-exporter-otlp-proto-http (>=1.23.0)
  Requires-Dist: opentelemetry-instrumentation-anthropic (>=0.47.0)
  Requires-Dist: opentelemetry-instrumentation-google-generativeai (>=0.47.0)
+ Requires-Dist: opentelemetry-instrumentation-langchain (>=0.47.0)
  Requires-Dist: opentelemetry-instrumentation-openai (>=0.47.0)
  Requires-Dist: opentelemetry-sdk (>=1.23.0)
  Requires-Dist: pydantic (>=1.9.0)
@@ -162,6 +163,13 @@ Both approaches:
  - Gracefully fall back to normal execution if tracing fails
  - Support the same parameters: `external_customer_id`, `external_agent_id`, `tracing_token`, `store_prompt`, `metadata`

+ * Note - if it happens that you're calling `paid_tracing` from non-main thread, then it's advised to initialize from main thread:
+ ```python
+ from paid.tracing import initialize_tracing
+ initialize_tracing()
+ ```
+ * `initialize_tracing` also accepts optional arguments like OTEL collector endpoint and api key if you want to reroute your tracing somewhere else :)
+
  ### Using the Paid wrappers

  You can track usage costs by using Paid wrappers around your AI provider's SDK.
@@ -182,9 +190,11 @@ Example usage:

  ```python
  from openai import OpenAI
- from paid.tracing import paid_tracing
+ from paid.tracing import paid_tracing, initialize_tracing
  from paid.tracing.wrappers.openai import PaidOpenAI

+ initialize_tracing()
+
  openAIClient = PaidOpenAI(OpenAI(
  # This is the default and can be omitted
  api_key="<OPENAI_API_KEY>",
@@ -212,10 +222,12 @@ You can attach custom metadata to your traces by passing a `metadata` dictionary
  <Tabs>
  <Tab title="Python - Decorator">
  ```python
- from paid.tracing import paid_tracing, signal
+ from paid.tracing import paid_tracing, signal, initialize_tracing
  from paid.tracing.wrappers import PaidOpenAI
  from openai import OpenAI

+ initialize_tracing()
+
  openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

  @paid_tracing(
@@ -243,10 +255,12 @@ You can attach custom metadata to your traces by passing a `metadata` dictionary

  <Tab title="Python - Context Manager">
  ```python
- from paid.tracing import paid_tracing, signal
+ from paid.tracing import paid_tracing, signal, initialize_tracing
  from paid.tracing.wrappers import PaidOpenAI
  from openai import OpenAI

+ initialize_tracing()
+
  openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

  def process_event(event):
@@ -305,14 +319,14 @@ For maximum convenience, you can use OpenTelemetry auto-instrumentation to autom

  ```python
  from paid import Paid
- from paid.tracing import paid_autoinstrument
+ from paid.tracing import paid_autoinstrument, initialize_tracing
  from openai import OpenAI

  # Initialize Paid SDK
  client = Paid(token="PAID_API_KEY")
+ initialize_tracing()

- # Enable auto-instrumentation for all supported libraries
- paid_autoinstrument() # instruments all available: anthropic, gemini, openai, openai-agents, bedrock
+ paid_autoinstrument() # instruments all available: anthropic, gemini, openai, openai-agents, bedrock, langchain

  # Now all OpenAI calls will be automatically traced
  openai_client = OpenAI(api_key="<OPENAI_API_KEY>")
@@ -338,6 +352,7 @@ gemini - Google Generative AI (google-generativeai)
  openai - OpenAI Python SDK
  openai-agents - OpenAI Agents SDK
  bedrock - AWS Bedrock (boto3)
+ langchain - LangChain framework
  ```

  #### Selective Instrumentation
@@ -437,10 +452,12 @@ For such cases, you can pass a tracing token directly to `@paid_tracing()` or co
  The simplest way to implement distributed tracing is to pass the token directly to the decorator or context manager:

  ```python
- from paid.tracing import paid_tracing, signal, generate_tracing_token
+ from paid.tracing import paid_tracing, signal, generate_tracing_token, initialize_tracing
  from paid.tracing.wrappers.openai import PaidOpenAI
  from openai import OpenAI

+ initialize_tracing()
+
  openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

  # Process 1: Generate token and do initial work
@@ -482,10 +499,12 @@ process_part_2()
  Using context manager instead of decorator:

  ```python
- from paid.tracing import paid_tracing, signal, generate_tracing_token
+ from paid.tracing import paid_tracing, signal, generate_tracing_token, initialize_tracing
  from paid.tracing.wrappers.openai import PaidOpenAI
  from openai import OpenAI

+ initialize_tracing()
+
  # Initialize
  openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

@@ -643,9 +662,11 @@ The `@paid_tracing` decorator automatically handles both sync and async function

  ```python
  from openai import AsyncOpenAI
- from paid.tracing import paid_tracing
+ from paid.tracing import paid_tracing, initialize_tracing
  from paid.tracing.wrappers.openai import PaidAsyncOpenAI

+ initialize_tracing()
+
  # Wrap the async OpenAI client
  openai_client = PaidAsyncOpenAI(AsyncOpenAI(api_key="<OPENAI_API_KEY>"))

@@ -669,10 +690,12 @@ await generate_image()
  The `signal()` function works seamlessly in async contexts:

  ```python
- from paid.tracing import paid_tracing, signal
+ from paid.tracing import paid_tracing, signal, initialize_tracing
  from paid.tracing.wrappers.openai import PaidAsyncOpenAI
  from openai import AsyncOpenAI

+ initialize_tracing()
+
  openai_client = PaidAsyncOpenAI(AsyncOpenAI(api_key="<OPENAI_API_KEY>"))

  @paid_tracing("your_external_customer_id", "your_external_agent_id")
{paid_python-0.1.1 → paid_python-0.2.0}/README.md

@@ -127,6 +127,13 @@ Both approaches:
  - Gracefully fall back to normal execution if tracing fails
  - Support the same parameters: `external_customer_id`, `external_agent_id`, `tracing_token`, `store_prompt`, `metadata`

+ * Note - if it happens that you're calling `paid_tracing` from non-main thread, then it's advised to initialize from main thread:
+ ```python
+ from paid.tracing import initialize_tracing
+ initialize_tracing()
+ ```
+ * `initialize_tracing` also accepts optional arguments like OTEL collector endpoint and api key if you want to reroute your tracing somewhere else :)
+
  ### Using the Paid wrappers

  You can track usage costs by using Paid wrappers around your AI provider's SDK.
@@ -147,9 +154,11 @@ Example usage:

  ```python
  from openai import OpenAI
- from paid.tracing import paid_tracing
+ from paid.tracing import paid_tracing, initialize_tracing
  from paid.tracing.wrappers.openai import PaidOpenAI

+ initialize_tracing()
+
  openAIClient = PaidOpenAI(OpenAI(
  # This is the default and can be omitted
  api_key="<OPENAI_API_KEY>",
@@ -177,10 +186,12 @@ You can attach custom metadata to your traces by passing a `metadata` dictionary
  <Tabs>
  <Tab title="Python - Decorator">
  ```python
- from paid.tracing import paid_tracing, signal
+ from paid.tracing import paid_tracing, signal, initialize_tracing
  from paid.tracing.wrappers import PaidOpenAI
  from openai import OpenAI

+ initialize_tracing()
+
  openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

  @paid_tracing(
@@ -208,10 +219,12 @@ You can attach custom metadata to your traces by passing a `metadata` dictionary

  <Tab title="Python - Context Manager">
  ```python
- from paid.tracing import paid_tracing, signal
+ from paid.tracing import paid_tracing, signal, initialize_tracing
  from paid.tracing.wrappers import PaidOpenAI
  from openai import OpenAI

+ initialize_tracing()
+
  openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

  def process_event(event):
@@ -270,14 +283,14 @@ For maximum convenience, you can use OpenTelemetry auto-instrumentation to autom

  ```python
  from paid import Paid
- from paid.tracing import paid_autoinstrument
+ from paid.tracing import paid_autoinstrument, initialize_tracing
  from openai import OpenAI

  # Initialize Paid SDK
  client = Paid(token="PAID_API_KEY")
+ initialize_tracing()

- # Enable auto-instrumentation for all supported libraries
- paid_autoinstrument() # instruments all available: anthropic, gemini, openai, openai-agents, bedrock
+ paid_autoinstrument() # instruments all available: anthropic, gemini, openai, openai-agents, bedrock, langchain

  # Now all OpenAI calls will be automatically traced
  openai_client = OpenAI(api_key="<OPENAI_API_KEY>")
@@ -303,6 +316,7 @@ gemini - Google Generative AI (google-generativeai)
  openai - OpenAI Python SDK
  openai-agents - OpenAI Agents SDK
  bedrock - AWS Bedrock (boto3)
+ langchain - LangChain framework
  ```

  #### Selective Instrumentation
@@ -402,10 +416,12 @@ For such cases, you can pass a tracing token directly to `@paid_tracing()` or co
  The simplest way to implement distributed tracing is to pass the token directly to the decorator or context manager:

  ```python
- from paid.tracing import paid_tracing, signal, generate_tracing_token
+ from paid.tracing import paid_tracing, signal, generate_tracing_token, initialize_tracing
  from paid.tracing.wrappers.openai import PaidOpenAI
  from openai import OpenAI

+ initialize_tracing()
+
  openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

  # Process 1: Generate token and do initial work
@@ -447,10 +463,12 @@ process_part_2()
  Using context manager instead of decorator:

  ```python
- from paid.tracing import paid_tracing, signal, generate_tracing_token
+ from paid.tracing import paid_tracing, signal, generate_tracing_token, initialize_tracing
  from paid.tracing.wrappers.openai import PaidOpenAI
  from openai import OpenAI

+ initialize_tracing()
+
  # Initialize
  openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

@@ -608,9 +626,11 @@ The `@paid_tracing` decorator automatically handles both sync and async function

  ```python
  from openai import AsyncOpenAI
- from paid.tracing import paid_tracing
+ from paid.tracing import paid_tracing, initialize_tracing
  from paid.tracing.wrappers.openai import PaidAsyncOpenAI

+ initialize_tracing()
+
  # Wrap the async OpenAI client
  openai_client = PaidAsyncOpenAI(AsyncOpenAI(api_key="<OPENAI_API_KEY>"))

@@ -634,10 +654,12 @@ await generate_image()
  The `signal()` function works seamlessly in async contexts:

  ```python
- from paid.tracing import paid_tracing, signal
+ from paid.tracing import paid_tracing, signal, initialize_tracing
  from paid.tracing.wrappers.openai import PaidAsyncOpenAI
  from openai import AsyncOpenAI

+ initialize_tracing()
+
  openai_client = PaidAsyncOpenAI(AsyncOpenAI(api_key="<OPENAI_API_KEY>"))

  @paid_tracing("your_external_customer_id", "your_external_agent_id")
{paid_python-0.1.1 → paid_python-0.2.0}/pyproject.toml

@@ -3,7 +3,7 @@ name = "paid-python"

  [tool.poetry]
  name = "paid-python"
- version = "0.1.1"
+ version = "0.2.0"
  description = ""
  readme = "README.md"
  authors = []
@@ -46,6 +46,7 @@ python-dotenv = ">=0.15.0"
  opentelemetry-instrumentation-anthropic = ">=0.47.0"
  opentelemetry-instrumentation-google-generativeai = ">=0.47.0"
  opentelemetry-instrumentation-openai = ">=0.47.0"
+ opentelemetry-instrumentation-langchain = ">=0.47.0"
  openinference-instrumentation-openai-agents = ">=1.0.0"
  openinference-instrumentation-bedrock = ">=0.1.0"

{paid_python-0.1.1 → paid_python-0.2.0}/src/paid/client.py

@@ -17,7 +17,7 @@ from .tracing.distributed_tracing import (
  from .tracing.signal import signal
  from .tracing.tracing import (
  DEFAULT_COLLECTOR_ENDPOINT,
- initialize_tracing_,
+ initialize_tracing,
  trace_async_,
  trace_sync_,
  )
@@ -114,7 +114,7 @@ class Paid:
  stacklevel=2,
  )
  token = self._client_wrapper._get_token()
- initialize_tracing_(token, collector_endpoint=collector_endpoint)
+ initialize_tracing(token, collector_endpoint=collector_endpoint)

  def generate_tracing_token(self) -> int:
  """
@@ -394,7 +394,7 @@ class AsyncPaid:
  stacklevel=2,
  )
  token = self._client_wrapper._get_token()
- initialize_tracing_(token, collector_endpoint=collector_endpoint)
+ initialize_tracing(token, collector_endpoint=collector_endpoint)

  def generate_tracing_token(self) -> int:
  """
{paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/__init__.py

@@ -7,11 +7,13 @@ from .distributed_tracing import (
  unset_tracing_token,
  )
  from .signal import signal
+ from .tracing import initialize_tracing

  __all__ = [
  "generate_tracing_token",
  "paid_autoinstrument",
  "paid_tracing",
+ "initialize_tracing",
  "set_tracing_token",
  "unset_tracing_token",
  "signal",
{paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/autoinstrumentation.py

@@ -8,7 +8,7 @@ sending traces to the Paid collector endpoint.
  from typing import List, Optional

  from . import tracing
- from .tracing import initialize_tracing_
+ from .tracing import initialize_tracing
  from opentelemetry.trace import NoOpTracerProvider

  from paid.logger import logger
@@ -49,6 +49,13 @@ try:
  except ImportError:
  BEDROCK_AVAILABLE = False

+ try:
+ from opentelemetry.instrumentation.langchain import LangchainInstrumentor
+
+ LANGCHAIN_AVAILABLE = True
+ except ImportError:
+ LANGCHAIN_AVAILABLE = False
+

  # Track which instrumentors have been initialized
  _initialized_instrumentors: List[str] = []
@@ -69,6 +76,7 @@ def paid_autoinstrument(libraries: Optional[List[str]] = None) -> None:
  - "openai": OpenAI library
  - "openai-agents": OpenAI Agents SDK
  - "bedrock": AWS Bedrock
+ - "langchain": LangChain library
  If None, all supported libraries that are installed will be instrumented.

  Note:
@@ -94,11 +102,11 @@ def paid_autoinstrument(libraries: Optional[List[str]] = None) -> None:
  # Initialize tracing if not already initialized
  if isinstance(tracing.paid_tracer_provider, NoOpTracerProvider):
  logger.info("Tracing not initialized, initializing automatically")
- initialize_tracing_()
+ initialize_tracing()

  # Default to all supported libraries if none specified
  if libraries is None:
- libraries = ["anthropic", "gemini", "openai", "openai-agents", "bedrock"]
+ libraries = ["anthropic", "gemini", "openai", "openai-agents", "bedrock", "langchain"]

  for library in libraries:
  if library in _initialized_instrumentors:
@@ -115,9 +123,11 @@ def paid_autoinstrument(libraries: Optional[List[str]] = None) -> None:
  _instrument_openai_agents()
  elif library == "bedrock":
  _instrument_bedrock()
+ elif library == "langchain":
+ _instrument_langchain()
  else:
  logger.warning(
- f"Unknown library '{library}' - supported libraries: anthropic, gemini, openai, openai-agents, bedrock"
+ f"Unknown library '{library}' - supported libraries: anthropic, gemini, openai, openai-agents, bedrock, langchain"
  )

  logger.info(f"Auto-instrumentation enabled for: {', '.join(_initialized_instrumentors)}")
@@ -196,3 +206,18 @@ def _instrument_bedrock() -> None:

  _initialized_instrumentors.append("bedrock")
  logger.info("Bedrock auto-instrumentation enabled")
+
+
+ def _instrument_langchain() -> None:
+ """
+ Instrument LangChain using opentelemetry-instrumentation-langchain.
+ """
+ if not LANGCHAIN_AVAILABLE:
+ logger.warning("LangChain instrumentation library not available, skipping instrumentation")
+ return
+
+ # Instrument LangChain with Paid's tracer provider
+ LangchainInstrumentor().instrument(tracer_provider=tracing.paid_tracer_provider)
+
+ _initialized_instrumentors.append("langchain")
+ logger.info("LangChain auto-instrumentation enabled")
{paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/context_manager.py

@@ -4,7 +4,7 @@ import functools
  from typing import Any, Callable, Dict, Optional, Tuple

  from . import distributed_tracing, tracing
- from .tracing import get_paid_tracer, get_token, initialize_tracing_, trace_async_, trace_sync_
+ from .tracing import get_paid_tracer, get_token, initialize_tracing, trace_async_, trace_sync_
  from opentelemetry import trace
  from opentelemetry.context import Context
  from opentelemetry.trace import NonRecordingSpan, Span, SpanContext, Status, StatusCode, TraceFlags
@@ -88,7 +88,7 @@ class paid_tracing:
  ] = None

  if not get_token():
- initialize_tracing_(None, self.collector_endpoint)
+ initialize_tracing(None, self.collector_endpoint)

  def _setup_context(self) -> Optional[Context]:
  """Set up context variables and return OTEL context if needed."""
@@ -190,7 +190,7 @@ class paid_tracing:
  # Auto-initialize tracing if not done
  if get_token() is None:
  try:
- initialize_tracing_(None, self.collector_endpoint)
+ initialize_tracing(None, self.collector_endpoint)
  except Exception as e:
  logger.error(f"Failed to auto-initialize tracing: {e}")
  # Fall back to executing function without tracing
@@ -219,7 +219,7 @@ class paid_tracing:
  # Auto-initialize tracing if not done
  if get_token() is None:
  try:
- initialize_tracing_(None, self.collector_endpoint)
+ initialize_tracing(None, self.collector_endpoint)
  except Exception as e:
  logger.error(f"Failed to auto-initialize tracing: {e}")
  # Fall back to executing function without tracing
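These hunks show that both the decorator path and the context-manager path now auto-initialize through the renamed `initialize_tracing` when no token is set, and fall back to running the wrapped code untraced if initialization fails. A sketch of what that means for callers, relying on auto-initialization only (the function body is illustrative):

```python
from paid.tracing import paid_tracing

@paid_tracing("your_external_customer_id", "your_external_agent_id")
def summarize(text: str) -> str:
    # If tracing cannot be auto-initialized, this still runs -
    # it just executes without a recorded trace.
    return text[:100]

summarize("hello world")
```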
{paid_python-0.1.1 → paid_python-0.2.0}/src/paid/tracing/tracing.py

@@ -155,8 +155,43 @@ class PaidSpanProcessor(SpanProcessor):
  """Called to force flush. Always returns True since there's nothing to flush."""
  return True

+ def setup_graceful_termination():
+ def flush_traces():
+ try:
+ if not isinstance(paid_tracer_provider, NoOpTracerProvider) and not paid_tracer_provider.force_flush(
+ 10000
+ ):
+ logger.error("OTEL force flush : timeout reached")
+ except Exception as e:
+ logger.error(f"Error flushing traces: {e}")
+
+ def create_chained_signal_handler(signum: int):
+ current_handler = signal.getsignal(signum)
+
+ def chained_handler(_signum, frame):
+ logger.warning(f"Received signal {_signum}, flushing traces")
+ flush_traces()
+ # Restore the original handler
+ signal.signal(_signum, current_handler)
+ # Re-raise the signal to let the original handler (or default) handle it
+ os.kill(os.getpid(), _signum)
+
+ return chained_handler

- def initialize_tracing_(api_key: Optional[str] = None, collector_endpoint: Optional[str] = DEFAULT_COLLECTOR_ENDPOINT):
+ try:
+ # This is already done by default OTEL shutdown,
+ # but user might turn that off - so register it explicitly
+ atexit.register(flush_traces)
+
+ # signal handlers
+ for sig in (signal.SIGINT, signal.SIGTERM):
+ signal.signal(sig, create_chained_signal_handler(sig))
+ except Exception as e:
+ logger.warning(f"Could not set up termination handlers: {e}"
+ "\nConsider calling initialize_tracing() from the main thread during app initialization if you don't already")
+
+
+ def initialize_tracing(api_key: Optional[str] = None, collector_endpoint: Optional[str] = DEFAULT_COLLECTOR_ENDPOINT):
  """
  Initialize OpenTelemetry with OTLP exporter for Paid backend.

@@ -203,36 +238,7 @@ def initialize_tracing_(api_key: Optional[str] = None, collector_endpoint: Optio
  span_processor = SimpleSpanProcessor(otlp_exporter)
  paid_tracer_provider.add_span_processor(span_processor)

- # Terminate gracefully and don't lose traces
- def flush_traces():
- try:
- if not isinstance(paid_tracer_provider, NoOpTracerProvider) and not paid_tracer_provider.force_flush(
- 10000
- ):
- logger.error("OTEL force flush : timeout reached")
- except Exception as e:
- logger.error(f"Error flushing traces: {e}")
-
- def create_chained_signal_handler(signum: int):
- current_handler = signal.getsignal(signum)
-
- def chained_handler(_signum, frame):
- logger.warning(f"Received signal {_signum}, flushing traces")
- flush_traces()
- # Restore the original handler
- signal.signal(_signum, current_handler)
- # Re-raise the signal to let the original handler (or default) handle it
- os.kill(os.getpid(), _signum)
-
- return chained_handler
-
- # This is already done by default OTEL shutdown,
- # but user might turn that off - so register it explicitly
- atexit.register(flush_traces)
-
- # Handle signals
- for sig in (signal.SIGINT, signal.SIGTERM):
- signal.signal(sig, create_chained_signal_handler(sig))
+ setup_graceful_termination() # doesn't throw

  logger.info("Paid tracing initialized successfully - collector at %s", collector_endpoint)
  except Exception as e:
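The refactor above moves the flush logic out of `initialize_tracing` into `setup_graceful_termination`, which registers an `atexit` flush and chains new SIGINT/SIGTERM handlers in front of whatever was previously installed. A standalone sketch of that chaining pattern, using generic names rather than the SDK's internals:

```python
import atexit
import os
import signal

def flush_spans() -> None:
    # Stand-in for tracer_provider.force_flush(timeout_ms) in the real code.
    print("flushing pending spans")

def chained_handler_for(signum: int):
    previous = signal.getsignal(signum)  # remember the existing handler

    def handler(received_signum, frame):
        flush_spans()
        # Restore the previous handler, then re-raise the signal so the
        # original behavior (or the default action) still takes place.
        signal.signal(received_signum, previous)
        os.kill(os.getpid(), received_signum)

    return handler

atexit.register(flush_spans)
for sig in (signal.SIGINT, signal.SIGTERM):
    # signal.signal() only works from the main thread, hence the advice
    # to call initialize_tracing() there.
    signal.signal(sig, chained_handler_for(sig))
```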