paid-python 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
paid/client.py CHANGED
@@ -17,7 +17,7 @@ from .tracing.distributed_tracing import (
17
17
  from .tracing.signal import signal
18
18
  from .tracing.tracing import (
19
19
  DEFAULT_COLLECTOR_ENDPOINT,
20
- initialize_tracing_,
20
+ initialize_tracing,
21
21
  trace_async_,
22
22
  trace_sync_,
23
23
  )
@@ -114,7 +114,7 @@ class Paid:
114
114
  stacklevel=2,
115
115
  )
116
116
  token = self._client_wrapper._get_token()
117
- initialize_tracing_(token, collector_endpoint=collector_endpoint)
117
+ initialize_tracing(token, collector_endpoint=collector_endpoint)
118
118
 
119
119
  def generate_tracing_token(self) -> int:
120
120
  """
@@ -394,7 +394,7 @@ class AsyncPaid:
394
394
  stacklevel=2,
395
395
  )
396
396
  token = self._client_wrapper._get_token()
397
- initialize_tracing_(token, collector_endpoint=collector_endpoint)
397
+ initialize_tracing(token, collector_endpoint=collector_endpoint)
398
398
 
399
399
  def generate_tracing_token(self) -> int:
400
400
  """
paid/tracing/__init__.py CHANGED
@@ -7,11 +7,13 @@ from .distributed_tracing import (
7
7
  unset_tracing_token,
8
8
  )
9
9
  from .signal import signal
10
+ from .tracing import initialize_tracing
10
11
 
11
12
  __all__ = [
12
13
  "generate_tracing_token",
13
14
  "paid_autoinstrument",
14
15
  "paid_tracing",
16
+ "initialize_tracing",
15
17
  "set_tracing_token",
16
18
  "unset_tracing_token",
17
19
  "signal",
@@ -8,7 +8,7 @@ sending traces to the Paid collector endpoint.
8
8
  from typing import List, Optional
9
9
 
10
10
  from . import tracing
11
- from .tracing import initialize_tracing_
11
+ from .tracing import initialize_tracing
12
12
  from opentelemetry.trace import NoOpTracerProvider
13
13
 
14
14
  from paid.logger import logger
@@ -49,6 +49,13 @@ try:
49
49
  except ImportError:
50
50
  BEDROCK_AVAILABLE = False
51
51
 
52
+ try:
53
+ from opentelemetry.instrumentation.langchain import LangchainInstrumentor
54
+
55
+ LANGCHAIN_AVAILABLE = True
56
+ except ImportError:
57
+ LANGCHAIN_AVAILABLE = False
58
+
52
59
 
53
60
  # Track which instrumentors have been initialized
54
61
  _initialized_instrumentors: List[str] = []
@@ -69,6 +76,7 @@ def paid_autoinstrument(libraries: Optional[List[str]] = None) -> None:
69
76
  - "openai": OpenAI library
70
77
  - "openai-agents": OpenAI Agents SDK
71
78
  - "bedrock": AWS Bedrock
79
+ - "langchain": LangChain library
72
80
  If None, all supported libraries that are installed will be instrumented.
73
81
 
74
82
  Note:
@@ -94,11 +102,11 @@ def paid_autoinstrument(libraries: Optional[List[str]] = None) -> None:
94
102
  # Initialize tracing if not already initialized
95
103
  if isinstance(tracing.paid_tracer_provider, NoOpTracerProvider):
96
104
  logger.info("Tracing not initialized, initializing automatically")
97
- initialize_tracing_()
105
+ initialize_tracing()
98
106
 
99
107
  # Default to all supported libraries if none specified
100
108
  if libraries is None:
101
- libraries = ["anthropic", "gemini", "openai", "openai-agents", "bedrock"]
109
+ libraries = ["anthropic", "gemini", "openai", "openai-agents", "bedrock", "langchain"]
102
110
 
103
111
  for library in libraries:
104
112
  if library in _initialized_instrumentors:
@@ -115,9 +123,11 @@ def paid_autoinstrument(libraries: Optional[List[str]] = None) -> None:
115
123
  _instrument_openai_agents()
116
124
  elif library == "bedrock":
117
125
  _instrument_bedrock()
126
+ elif library == "langchain":
127
+ _instrument_langchain()
118
128
  else:
119
129
  logger.warning(
120
- f"Unknown library '{library}' - supported libraries: anthropic, gemini, openai, openai-agents, bedrock"
130
+ f"Unknown library '{library}' - supported libraries: anthropic, gemini, openai, openai-agents, bedrock, langchain"
121
131
  )
122
132
 
123
133
  logger.info(f"Auto-instrumentation enabled for: {', '.join(_initialized_instrumentors)}")
@@ -196,3 +206,18 @@ def _instrument_bedrock() -> None:
196
206
 
197
207
  _initialized_instrumentors.append("bedrock")
198
208
  logger.info("Bedrock auto-instrumentation enabled")
209
+
210
+
211
+ def _instrument_langchain() -> None:
212
+ """
213
+ Instrument LangChain using opentelemetry-instrumentation-langchain.
214
+ """
215
+ if not LANGCHAIN_AVAILABLE:
216
+ logger.warning("LangChain instrumentation library not available, skipping instrumentation")
217
+ return
218
+
219
+ # Instrument LangChain with Paid's tracer provider
220
+ LangchainInstrumentor().instrument(tracer_provider=tracing.paid_tracer_provider)
221
+
222
+ _initialized_instrumentors.append("langchain")
223
+ logger.info("LangChain auto-instrumentation enabled")
@@ -4,7 +4,7 @@ import functools
4
4
  from typing import Any, Callable, Dict, Optional, Tuple
5
5
 
6
6
  from . import distributed_tracing, tracing
7
- from .tracing import get_paid_tracer, get_token, initialize_tracing_, trace_async_, trace_sync_
7
+ from .tracing import get_paid_tracer, get_token, initialize_tracing, trace_async_, trace_sync_
8
8
  from opentelemetry import trace
9
9
  from opentelemetry.context import Context
10
10
  from opentelemetry.trace import NonRecordingSpan, Span, SpanContext, Status, StatusCode, TraceFlags
@@ -62,7 +62,7 @@ class paid_tracing:
62
62
 
63
63
  def __init__(
64
64
  self,
65
- external_customer_id: str,
65
+ external_customer_id: Optional[str] = None,
66
66
  *,
67
67
  external_agent_id: Optional[str] = None,
68
68
  tracing_token: Optional[int] = None,
@@ -88,7 +88,7 @@ class paid_tracing:
88
88
  ] = None
89
89
 
90
90
  if not get_token():
91
- initialize_tracing_(None, self.collector_endpoint)
91
+ initialize_tracing(None, self.collector_endpoint)
92
92
 
93
93
  def _setup_context(self) -> Optional[Context]:
94
94
  """Set up context variables and return OTEL context if needed."""
@@ -190,7 +190,7 @@ class paid_tracing:
190
190
  # Auto-initialize tracing if not done
191
191
  if get_token() is None:
192
192
  try:
193
- initialize_tracing_(None, self.collector_endpoint)
193
+ initialize_tracing(None, self.collector_endpoint)
194
194
  except Exception as e:
195
195
  logger.error(f"Failed to auto-initialize tracing: {e}")
196
196
  # Fall back to executing function without tracing
@@ -219,7 +219,7 @@ class paid_tracing:
219
219
  # Auto-initialize tracing if not done
220
220
  if get_token() is None:
221
221
  try:
222
- initialize_tracing_(None, self.collector_endpoint)
222
+ initialize_tracing(None, self.collector_endpoint)
223
223
  except Exception as e:
224
224
  logger.error(f"Failed to auto-initialize tracing: {e}")
225
225
  # Fall back to executing function without tracing
paid/tracing/tracing.py CHANGED
@@ -155,8 +155,43 @@ class PaidSpanProcessor(SpanProcessor):
155
155
  """Called to force flush. Always returns True since there's nothing to flush."""
156
156
  return True
157
157
 
158
+ def setup_graceful_termination():
159
+ def flush_traces():
160
+ try:
161
+ if not isinstance(paid_tracer_provider, NoOpTracerProvider) and not paid_tracer_provider.force_flush(
162
+ 10000
163
+ ):
164
+ logger.error("OTEL force flush : timeout reached")
165
+ except Exception as e:
166
+ logger.error(f"Error flushing traces: {e}")
167
+
168
+ def create_chained_signal_handler(signum: int):
169
+ current_handler = signal.getsignal(signum)
170
+
171
+ def chained_handler(_signum, frame):
172
+ logger.warning(f"Received signal {_signum}, flushing traces")
173
+ flush_traces()
174
+ # Restore the original handler
175
+ signal.signal(_signum, current_handler)
176
+ # Re-raise the signal to let the original handler (or default) handle it
177
+ os.kill(os.getpid(), _signum)
178
+
179
+ return chained_handler
158
180
 
159
- def initialize_tracing_(api_key: Optional[str] = None, collector_endpoint: Optional[str] = DEFAULT_COLLECTOR_ENDPOINT):
181
+ try:
182
+ # This is already done by default OTEL shutdown,
183
+ # but user might turn that off - so register it explicitly
184
+ atexit.register(flush_traces)
185
+
186
+ # signal handlers
187
+ for sig in (signal.SIGINT, signal.SIGTERM):
188
+ signal.signal(sig, create_chained_signal_handler(sig))
189
+ except Exception as e:
190
+ logger.warning(f"Could not set up termination handlers: {e}"
191
+ "\nConsider calling initialize_tracing() from the main thread during app initialization if you don't already")
192
+
193
+
194
+ def initialize_tracing(api_key: Optional[str] = None, collector_endpoint: Optional[str] = DEFAULT_COLLECTOR_ENDPOINT):
160
195
  """
161
196
  Initialize OpenTelemetry with OTLP exporter for Paid backend.
162
197
 
@@ -203,36 +238,7 @@ def initialize_tracing_(api_key: Optional[str] = None, collector_endpoint: Optio
203
238
  span_processor = SimpleSpanProcessor(otlp_exporter)
204
239
  paid_tracer_provider.add_span_processor(span_processor)
205
240
 
206
- # Terminate gracefully and don't lose traces
207
- def flush_traces():
208
- try:
209
- if not isinstance(paid_tracer_provider, NoOpTracerProvider) and not paid_tracer_provider.force_flush(
210
- 10000
211
- ):
212
- logger.error("OTEL force flush : timeout reached")
213
- except Exception as e:
214
- logger.error(f"Error flushing traces: {e}")
215
-
216
- def create_chained_signal_handler(signum: int):
217
- current_handler = signal.getsignal(signum)
218
-
219
- def chained_handler(_signum, frame):
220
- logger.warning(f"Received signal {_signum}, flushing traces")
221
- flush_traces()
222
- # Restore the original handler
223
- signal.signal(_signum, current_handler)
224
- # Re-raise the signal to let the original handler (or default) handle it
225
- os.kill(os.getpid(), _signum)
226
-
227
- return chained_handler
228
-
229
- # This is already done by default OTEL shutdown,
230
- # but user might turn that off - so register it explicitly
231
- atexit.register(flush_traces)
232
-
233
- # Handle signals
234
- for sig in (signal.SIGINT, signal.SIGTERM):
235
- signal.signal(sig, create_chained_signal_handler(sig))
241
+ setup_graceful_termination() # doesn't throw
236
242
 
237
243
  logger.info("Paid tracing initialized successfully - collector at %s", collector_endpoint)
238
244
  except Exception as e:
@@ -258,7 +264,7 @@ def get_paid_tracer() -> trace.Tracer:
258
264
 
259
265
 
260
266
  def trace_sync_(
261
- external_customer_id: str,
267
+ external_customer_id: Optional[str],
262
268
  fn: Callable[..., T],
263
269
  external_agent_id: Optional[str] = None,
264
270
  tracing_token: Optional[int] = None,
@@ -316,9 +322,6 @@ def trace_sync_(
316
322
  tracer = get_paid_tracer()
317
323
  logger.info(f"Creating span for external_customer_id: {external_customer_id}")
318
324
  with tracer.start_as_current_span("parent_span", context=ctx) as span:
319
- span.set_attribute("external_customer_id", external_customer_id)
320
- if external_agent_id:
321
- span.set_attribute("external_agent_id", external_agent_id)
322
325
  try:
323
326
  result = fn(*args, **kwargs)
324
327
  span.set_status(Status(StatusCode.OK))
@@ -335,7 +338,7 @@ def trace_sync_(
335
338
 
336
339
 
337
340
  async def trace_async_(
338
- external_customer_id: str,
341
+ external_customer_id: Optional[str],
339
342
  fn: Callable[..., Union[T, Awaitable[T]]],
340
343
  external_agent_id: Optional[str] = None,
341
344
  tracing_token: Optional[int] = None,
@@ -393,9 +396,6 @@ async def trace_async_(
393
396
  tracer = get_paid_tracer()
394
397
  logger.info(f"Creating span for external_customer_id: {external_customer_id}")
395
398
  with tracer.start_as_current_span("parent_span", context=ctx) as span:
396
- span.set_attribute("external_customer_id", external_customer_id)
397
- if external_agent_id:
398
- span.set_attribute("external_agent_id", external_agent_id)
399
399
  try:
400
400
  if asyncio.iscoroutinefunction(fn):
401
401
  result = await fn(*args, **kwargs)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: paid-python
3
- Version: 0.1.0
3
+ Version: 0.2.0
4
4
  Summary:
5
5
  Requires-Python: >=3.9,<3.14
6
6
  Classifier: Intended Audience :: Developers
@@ -25,6 +25,7 @@ Requires-Dist: opentelemetry-api (>=1.23.0)
25
25
  Requires-Dist: opentelemetry-exporter-otlp-proto-http (>=1.23.0)
26
26
  Requires-Dist: opentelemetry-instrumentation-anthropic (>=0.47.0)
27
27
  Requires-Dist: opentelemetry-instrumentation-google-generativeai (>=0.47.0)
28
+ Requires-Dist: opentelemetry-instrumentation-langchain (>=0.47.0)
28
29
  Requires-Dist: opentelemetry-instrumentation-openai (>=0.47.0)
29
30
  Requires-Dist: opentelemetry-sdk (>=1.23.0)
30
31
  Requires-Dist: pydantic (>=1.9.0)
@@ -162,6 +163,13 @@ Both approaches:
162
163
  - Gracefully fall back to normal execution if tracing fails
163
164
  - Support the same parameters: `external_customer_id`, `external_agent_id`, `tracing_token`, `store_prompt`, `metadata`
164
165
 
166
+ * Note - if you're calling `paid_tracing` from a non-main thread, it's advised to initialize tracing from the main thread first:
167
+ ```python
168
+ from paid.tracing import initialize_tracing
169
+ initialize_tracing()
170
+ ```
171
+ * `initialize_tracing` also accepts optional arguments, such as the OTEL collector endpoint and API key, if you want to reroute your tracing somewhere else :)
172
+
165
173
  ### Using the Paid wrappers
166
174
 
167
175
  You can track usage costs by using Paid wrappers around your AI provider's SDK.
@@ -182,9 +190,11 @@ Example usage:
182
190
 
183
191
  ```python
184
192
  from openai import OpenAI
185
- from paid.tracing import paid_tracing
193
+ from paid.tracing import paid_tracing, initialize_tracing
186
194
  from paid.tracing.wrappers.openai import PaidOpenAI
187
195
 
196
+ initialize_tracing()
197
+
188
198
  openAIClient = PaidOpenAI(OpenAI(
189
199
  # This is the default and can be omitted
190
200
  api_key="<OPENAI_API_KEY>",
@@ -205,6 +215,102 @@ def image_generate():
205
215
  image_generate()
206
216
  ```
207
217
 
218
+ ### Passing User Metadata
219
+
220
+ You can attach custom metadata to your traces by passing a `metadata` dictionary to the `paid_tracing()` decorator or context manager. This metadata will be stored with the trace and can be used to filter and query traces later.
221
+
222
+ <Tabs>
223
+ <Tab title="Python - Decorator">
224
+ ```python
225
+ from paid.tracing import paid_tracing, signal, initialize_tracing
226
+ from paid.tracing.wrappers import PaidOpenAI
227
+ from openai import OpenAI
228
+
229
+ initialize_tracing()
230
+
231
+ openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))
232
+
233
+ @paid_tracing(
234
+ "customer_123",
235
+ "agent_123",
236
+ metadata={
237
+ "campaign_id": "campaign_456",
238
+ "environment": "production",
239
+ "user_tier": "enterprise"
240
+ }
241
+ )
242
+ def process_event(event):
243
+ """Process event with custom metadata"""
244
+ response = openai_client.chat.completions.create(
245
+ model="gpt-4",
246
+ messages=[{"role": "user", "content": event.content}]
247
+ )
248
+
249
+ signal("event_processed", enable_cost_tracing=True)
250
+ return response
251
+
252
+ process_event(incoming_event)
253
+ ```
254
+ </Tab>
255
+
256
+ <Tab title="Python - Context Manager">
257
+ ```python
258
+ from paid.tracing import paid_tracing, signal, initialize_tracing
259
+ from paid.tracing.wrappers import PaidOpenAI
260
+ from openai import OpenAI
261
+
262
+ initialize_tracing()
263
+
264
+ openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))
265
+
266
+ def process_event(event):
267
+ """Process event with custom metadata"""
268
+ response = openai_client.chat.completions.create(
269
+ model="gpt-4",
270
+ messages=[{"role": "user", "content": event.content}]
271
+ )
272
+
273
+ signal("event_processed", enable_cost_tracing=True)
274
+ return response
275
+
276
+ # Pass metadata to context manager
277
+ with paid_tracing(
278
+ "customer_123",
279
+ external_agent_id="agent_123",
280
+ metadata={
281
+ "campaign_id": "campaign_456",
282
+ "environment": "production",
283
+ "user_tier": "enterprise"
284
+ }
285
+ ):
286
+ process_event(incoming_event)
287
+ ```
288
+ </Tab>
289
+
290
+ <Tab title="Node.js">
291
+ ```typescript
292
+ // Metadata support is not yet available in the Node.js SDK.
293
+ // Please use Python for passing custom metadata to traces.
294
+ ```
295
+ </Tab>
296
+ </Tabs>
297
+
298
+ #### Querying Traces by Metadata
299
+
300
+ Once you've added metadata to your traces, you can filter traces using the metadata parameter in the traces API endpoint:
301
+
302
+ ```bash
303
+ # Filter by single metadata field
304
+ curl -G "https://api.paid.ai/api/organizations/{orgId}/traces" \
305
+ --data-urlencode 'metadata={"campaign_id":"campaign_456"}' \
306
+ -H "Authorization: Bearer YOUR_API_KEY"
307
+
308
+ # Filter by multiple metadata fields (all must match)
309
+ curl -G "https://api.paid.ai/api/organizations/{orgId}/traces" \
310
+ --data-urlencode 'metadata={"campaign_id":"campaign_456","environment":"production"}' \
311
+ -H "Authorization: Bearer YOUR_API_KEY"
312
+ ```
313
+
208
314
  ### Auto-Instrumentation (OpenTelemetry Instrumentors)
209
315
 
210
316
  For maximum convenience, you can use OpenTelemetry auto-instrumentation to automatically track costs without modifying your AI library calls. This approach uses official OpenTelemetry instrumentors for supported AI libraries.
@@ -213,14 +319,14 @@ For maximum convenience, you can use OpenTelemetry auto-instrumentation to autom
213
319
 
214
320
  ```python
215
321
  from paid import Paid
216
- from paid.tracing import paid_autoinstrument
322
+ from paid.tracing import paid_autoinstrument, initialize_tracing
217
323
  from openai import OpenAI
218
324
 
219
325
  # Initialize Paid SDK
220
326
  client = Paid(token="PAID_API_KEY")
327
+ initialize_tracing()
221
328
 
222
- # Enable auto-instrumentation for all supported libraries
223
- paid_autoinstrument() # instruments all available: anthropic, gemini, openai, openai-agents, bedrock
329
+ paid_autoinstrument() # instruments all available: anthropic, gemini, openai, openai-agents, bedrock, langchain
224
330
 
225
331
  # Now all OpenAI calls will be automatically traced
226
332
  openai_client = OpenAI(api_key="<OPENAI_API_KEY>")
@@ -246,6 +352,7 @@ gemini - Google Generative AI (google-generativeai)
246
352
  openai - OpenAI Python SDK
247
353
  openai-agents - OpenAI Agents SDK
248
354
  bedrock - AWS Bedrock (boto3)
355
+ langchain - LangChain framework
249
356
  ```
250
357
 
251
358
  #### Selective Instrumentation
@@ -345,10 +452,12 @@ For such cases, you can pass a tracing token directly to `@paid_tracing()` or co
345
452
  The simplest way to implement distributed tracing is to pass the token directly to the decorator or context manager:
346
453
 
347
454
  ```python
348
- from paid.tracing import paid_tracing, signal, generate_tracing_token
455
+ from paid.tracing import paid_tracing, signal, generate_tracing_token, initialize_tracing
349
456
  from paid.tracing.wrappers.openai import PaidOpenAI
350
457
  from openai import OpenAI
351
458
 
459
+ initialize_tracing()
460
+
352
461
  openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))
353
462
 
354
463
  # Process 1: Generate token and do initial work
@@ -390,10 +499,12 @@ process_part_2()
390
499
  Using context manager instead of decorator:
391
500
 
392
501
  ```python
393
- from paid.tracing import paid_tracing, signal, generate_tracing_token
502
+ from paid.tracing import paid_tracing, signal, generate_tracing_token, initialize_tracing
394
503
  from paid.tracing.wrappers.openai import PaidOpenAI
395
504
  from openai import OpenAI
396
505
 
506
+ initialize_tracing()
507
+
397
508
  # Initialize
398
509
  openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))
399
510
 
@@ -551,9 +662,11 @@ The `@paid_tracing` decorator automatically handles both sync and async function
551
662
 
552
663
  ```python
553
664
  from openai import AsyncOpenAI
554
- from paid.tracing import paid_tracing
665
+ from paid.tracing import paid_tracing, initialize_tracing
555
666
  from paid.tracing.wrappers.openai import PaidAsyncOpenAI
556
667
 
668
+ initialize_tracing()
669
+
557
670
  # Wrap the async OpenAI client
558
671
  openai_client = PaidAsyncOpenAI(AsyncOpenAI(api_key="<OPENAI_API_KEY>"))
559
672
 
@@ -577,10 +690,12 @@ await generate_image()
577
690
  The `signal()` function works seamlessly in async contexts:
578
691
 
579
692
  ```python
580
- from paid.tracing import paid_tracing, signal
693
+ from paid.tracing import paid_tracing, signal, initialize_tracing
581
694
  from paid.tracing.wrappers.openai import PaidAsyncOpenAI
582
695
  from openai import AsyncOpenAI
583
696
 
697
+ initialize_tracing()
698
+
584
699
  openai_client = PaidAsyncOpenAI(AsyncOpenAI(api_key="<OPENAI_API_KEY>"))
585
700
 
586
701
  @paid_tracing("your_external_customer_id", "your_external_agent_id")
@@ -2,7 +2,7 @@ paid/__init__.py,sha256=D1SeLoeTlySo_vZCZrxFX3y5KhKGrHflphLXoewImfk,1826
2
2
  paid/agents/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
3
3
  paid/agents/client.py,sha256=ojc3H-nx4MqDrb74_i6JE_wjHSJaVAErsIunfNeffMo,23305
4
4
  paid/agents/raw_client.py,sha256=jN9LvPK2-bGeNQzcV3iRmprpegXKtO2JaOEXjnPfz9Y,26833
5
- paid/client.py,sha256=AWo35RlJ5bP2A5bGre3iY1GbBdYW7gu7HlioQIOoltk,23038
5
+ paid/client.py,sha256=2GGQByab__kDKaWeNy4wK_T6RkS36TX_mA6fsO08Ww4,23035
6
6
  paid/contacts/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
7
7
  paid/contacts/client.py,sha256=sNm-yAg4dR9AyYWL7-RC_CuCCvOXX7YlDAUqn47yZhE,14058
8
8
  paid/contacts/raw_client.py,sha256=ZYNWuekHiL2sqK_gHR0IzcrLAopUKRXIqMUi-fuLGe4,19211
@@ -36,12 +36,12 @@ paid/orders/lines/client.py,sha256=GqSwiXdlu49KLHt7uccS_H4nkVQosM1_PQOcPA9v82A,4
36
36
  paid/orders/lines/raw_client.py,sha256=KZN_yBokCOkf1lUb4ZJtX_NZbqmTqCdJNoaIOdWar8I,4590
37
37
  paid/orders/raw_client.py,sha256=650e1Sj2vi9KVJc15M3ENXIKYoth0qMz66dzvXy1Sb4,16245
38
38
  paid/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
39
- paid/tracing/__init__.py,sha256=IP6OkV885_xlK1H68RxGTX_IhpVAkY266zsWgnQKKTs,440
40
- paid/tracing/autoinstrumentation.py,sha256=p57bU87x1Xi-nb_2C4O56tcgfvN2UYZAd9pJ0Vyh9Nw,6765
41
- paid/tracing/context_manager.py,sha256=ZVsmum4np_Eyub1_D0D5ChhvdBWnkFBFqzZHxSsljdU,9716
39
+ paid/tracing/__init__.py,sha256=Pe55koIwqJ6Vv5-9Wqi8xIdwCS2BbxZds-MK5fD-F5Y,506
40
+ paid/tracing/autoinstrumentation.py,sha256=qlLLiP9cWNIXX226dLDLvvlioLCPO-0F81mU_0fBb9s,7604
41
+ paid/tracing/context_manager.py,sha256=Qtl59mjDsacX53LPFFAVXsfeGCEA6CIfxPsvjO-Kmx8,9729
42
42
  paid/tracing/distributed_tracing.py,sha256=CpUWpHai-4LxLLHbGxz41r9h5wLG-dC83YL5Vg29OBI,3967
43
43
  paid/tracing/signal.py,sha256=PfYxF6EFQS8j7RY5_C5NXrCBVu9Hq2E2tyG4fdQScJk,3252
44
- paid/tracing/tracing.py,sha256=Xtue-_vuAKLnZ1l6d-tSj9o0UWmMPgXggTVgXYb1Zz4,16230
44
+ paid/tracing/tracing.py,sha256=w07I7KQ4O8yJdvLs9RDAQVVofVWeR4xNei3zC5-EE6c,16080
45
45
  paid/tracing/wrappers/__init__.py,sha256=IIleLB_JUbzLw7FshrU2VHZAKF3dZHMGy1O5zCBwwqM,1588
46
46
  paid/tracing/wrappers/anthropic/__init__.py,sha256=_x1fjySAQxuT5cIGO_jU09LiGcZH-WQLqKg8mUFAu2w,115
47
47
  paid/tracing/wrappers/anthropic/anthropicWrapper.py,sha256=pGchbOb41CbTxc7H8xXoM-LjR085spqrzXqCVC_rrFk,4913
@@ -98,7 +98,7 @@ paid/usage/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
98
98
  paid/usage/client.py,sha256=280WJuepoovk3BAVbAx2yN2Q_qBdvx3CcPkLu8lXslc,3030
99
99
  paid/usage/raw_client.py,sha256=2acg5C4lxuZodZjepU9QYF0fmBxgG-3ZgXs1zUJG-wM,3709
100
100
  paid/version.py,sha256=QIpDFnOrxMxrs86eL0iNH0mSZ1DO078wWHYY9TYAoew,78
101
- paid_python-0.1.0.dist-info/LICENSE,sha256=Nz4baY1zvv0Qy7lqrQtbaiMhmEeGr2Q7A93aqzpml4c,1071
102
- paid_python-0.1.0.dist-info/METADATA,sha256=liDYbzbDdhe3sBUeAuPaBbD7P99Q3YLYClqGqIu9pYs,18729
103
- paid_python-0.1.0.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
104
- paid_python-0.1.0.dist-info/RECORD,,
101
+ paid_python-0.2.0.dist-info/LICENSE,sha256=Nz4baY1zvv0Qy7lqrQtbaiMhmEeGr2Q7A93aqzpml4c,1071
102
+ paid_python-0.2.0.dist-info/METADATA,sha256=tFvHkGpKqlXPnxZhDhxFm-vETz6RbVihbJZOEdjGPUw,22343
103
+ paid_python-0.2.0.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
104
+ paid_python-0.2.0.dist-info/RECORD,,