paid-python 0.1.1-py3-none-any.whl → 0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- paid/client.py +3 -3
- paid/tracing/__init__.py +2 -0
- paid/tracing/autoinstrumentation.py +29 -4
- paid/tracing/context_manager.py +4 -4
- paid/tracing/tracing.py +37 -31
- {paid_python-0.1.1.dist-info → paid_python-0.2.0.dist-info}/METADATA +34 -11
- {paid_python-0.1.1.dist-info → paid_python-0.2.0.dist-info}/RECORD +9 -9
- {paid_python-0.1.1.dist-info → paid_python-0.2.0.dist-info}/LICENSE +0 -0
- {paid_python-0.1.1.dist-info → paid_python-0.2.0.dist-info}/WHEEL +0 -0
paid/client.py
CHANGED

@@ -17,7 +17,7 @@ from .tracing.distributed_tracing import (
 from .tracing.signal import signal
 from .tracing.tracing import (
 DEFAULT_COLLECTOR_ENDPOINT,
-
+initialize_tracing,
 trace_async_,
 trace_sync_,
 )
@@ -114,7 +114,7 @@ class Paid:
 stacklevel=2,
 )
 token = self._client_wrapper._get_token()
-
+initialize_tracing(token, collector_endpoint=collector_endpoint)

 def generate_tracing_token(self) -> int:
 """
@@ -394,7 +394,7 @@ class AsyncPaid:
 stacklevel=2,
 )
 token = self._client_wrapper._get_token()
-
+initialize_tracing(token, collector_endpoint=collector_endpoint)

 def generate_tracing_token(self) -> int:
 """
paid/tracing/__init__.py
CHANGED

@@ -7,11 +7,13 @@ from .distributed_tracing import (
 unset_tracing_token,
 )
 from .signal import signal
+from .tracing import initialize_tracing

 __all__ = [
 "generate_tracing_token",
 "paid_autoinstrument",
 "paid_tracing",
+"initialize_tracing",
 "set_tracing_token",
 "unset_tracing_token",
 "signal",
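This re-export makes `initialize_tracing` part of the public `paid.tracing` surface. A small sketch of the two import paths that should now resolve to the same function; the no-argument call simply uses the defaults from the new signature:

```python
# Sketch: in 0.2.0 the helper is importable from the package as well as the
# defining submodule.
from paid.tracing import initialize_tracing            # new public re-export
# from paid.tracing.tracing import initialize_tracing  # the defining submodule also exposes it

initialize_tracing()  # defaults: api_key=None, collector_endpoint=DEFAULT_COLLECTOR_ENDPOINT
```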
paid/tracing/autoinstrumentation.py
CHANGED

@@ -8,7 +8,7 @@ sending traces to the Paid collector endpoint.
 from typing import List, Optional

 from . import tracing
-from .tracing import
+from .tracing import initialize_tracing
 from opentelemetry.trace import NoOpTracerProvider

 from paid.logger import logger
@@ -49,6 +49,13 @@ try:
 except ImportError:
 BEDROCK_AVAILABLE = False

+try:
+from opentelemetry.instrumentation.langchain import LangchainInstrumentor
+
+LANGCHAIN_AVAILABLE = True
+except ImportError:
+LANGCHAIN_AVAILABLE = False
+

 # Track which instrumentors have been initialized
 _initialized_instrumentors: List[str] = []
@@ -69,6 +76,7 @@ def paid_autoinstrument(libraries: Optional[List[str]] = None) -> None:
 - "openai": OpenAI library
 - "openai-agents": OpenAI Agents SDK
 - "bedrock": AWS Bedrock
+- "langchain": LangChain library
 If None, all supported libraries that are installed will be instrumented.

 Note:
@@ -94,11 +102,11 @@ def paid_autoinstrument(libraries: Optional[List[str]] = None) -> None:
 # Initialize tracing if not already initialized
 if isinstance(tracing.paid_tracer_provider, NoOpTracerProvider):
 logger.info("Tracing not initialized, initializing automatically")
-
+initialize_tracing()

 # Default to all supported libraries if none specified
 if libraries is None:
-libraries = ["anthropic", "gemini", "openai", "openai-agents", "bedrock"]
+libraries = ["anthropic", "gemini", "openai", "openai-agents", "bedrock", "langchain"]

 for library in libraries:
 if library in _initialized_instrumentors:
@@ -115,9 +123,11 @@ def paid_autoinstrument(libraries: Optional[List[str]] = None) -> None:
 _instrument_openai_agents()
 elif library == "bedrock":
 _instrument_bedrock()
+elif library == "langchain":
+_instrument_langchain()
 else:
 logger.warning(
-f"Unknown library '{library}' - supported libraries: anthropic, gemini, openai, openai-agents, bedrock"
+f"Unknown library '{library}' - supported libraries: anthropic, gemini, openai, openai-agents, bedrock, langchain"
 )

 logger.info(f"Auto-instrumentation enabled for: {', '.join(_initialized_instrumentors)}")
@@ -196,3 +206,18 @@ def _instrument_bedrock() -> None:

 _initialized_instrumentors.append("bedrock")
 logger.info("Bedrock auto-instrumentation enabled")
+
+
+def _instrument_langchain() -> None:
+"""
+Instrument LangChain using opentelemetry-instrumentation-langchain.
+"""
+if not LANGCHAIN_AVAILABLE:
+logger.warning("LangChain instrumentation library not available, skipping instrumentation")
+return
+
+# Instrument LangChain with Paid's tracer provider
+LangchainInstrumentor().instrument(tracer_provider=tracing.paid_tracer_provider)
+
+_initialized_instrumentors.append("langchain")
+logger.info("LangChain auto-instrumentation enabled")
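LangChain instrumentation follows the same optional-import pattern as the other integrations: it only activates when `opentelemetry-instrumentation-langchain` can be imported, and otherwise logs a warning and skips. A minimal sketch of opting in to just the new instrumentor, assuming that dependency is installed:

```python
# Sketch: enable only the LangChain instrumentor added in 0.2.0.
# If opentelemetry-instrumentation-langchain is missing, the SDK logs a
# warning and skips rather than raising.
from paid.tracing import initialize_tracing, paid_autoinstrument

initialize_tracing()                # optional here; paid_autoinstrument() auto-initializes if needed
paid_autoinstrument(["langchain"])  # or paid_autoinstrument() to cover every supported library
```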
paid/tracing/context_manager.py
CHANGED

@@ -4,7 +4,7 @@ import functools
 from typing import Any, Callable, Dict, Optional, Tuple

 from . import distributed_tracing, tracing
-from .tracing import get_paid_tracer, get_token,
+from .tracing import get_paid_tracer, get_token, initialize_tracing, trace_async_, trace_sync_
 from opentelemetry import trace
 from opentelemetry.context import Context
 from opentelemetry.trace import NonRecordingSpan, Span, SpanContext, Status, StatusCode, TraceFlags
@@ -88,7 +88,7 @@ class paid_tracing:
 ] = None

 if not get_token():
-
+initialize_tracing(None, self.collector_endpoint)

 def _setup_context(self) -> Optional[Context]:
 """Set up context variables and return OTEL context if needed."""
@@ -190,7 +190,7 @@ class paid_tracing:
 # Auto-initialize tracing if not done
 if get_token() is None:
 try:
-
+initialize_tracing(None, self.collector_endpoint)
 except Exception as e:
 logger.error(f"Failed to auto-initialize tracing: {e}")
 # Fall back to executing function without tracing
@@ -219,7 +219,7 @@ class paid_tracing:
 # Auto-initialize tracing if not done
 if get_token() is None:
 try:
-
+initialize_tracing(None, self.collector_endpoint)
 except Exception as e:
 logger.error(f"Failed to auto-initialize tracing: {e}")
 # Fall back to executing function without tracing
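The practical effect is that both `@paid_tracing(...)` and the context-manager form now call `initialize_tracing(None, self.collector_endpoint)` on demand when no token is configured, and fall back to running the wrapped function untraced if that fails. A minimal sketch under the assumption that explicit initialization was skipped; the IDs are placeholders taken from the README examples:

```python
# Sketch: with 0.2.0, the decorator auto-initializes tracing on first use and
# degrades to an untraced call if that initialization fails.
from paid.tracing import paid_tracing

@paid_tracing("your_external_customer_id", "your_external_agent_id")
def handle_request() -> str:
    # Placeholder workload; real code would call an instrumented provider SDK here.
    return "ok"

handle_request()
```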
paid/tracing/tracing.py
CHANGED

@@ -155,8 +155,43 @@ class PaidSpanProcessor(SpanProcessor):
 """Called to force flush. Always returns True since there's nothing to flush."""
 return True

+def setup_graceful_termination():
+def flush_traces():
+try:
+if not isinstance(paid_tracer_provider, NoOpTracerProvider) and not paid_tracer_provider.force_flush(
+10000
+):
+logger.error("OTEL force flush : timeout reached")
+except Exception as e:
+logger.error(f"Error flushing traces: {e}")
+
+def create_chained_signal_handler(signum: int):
+current_handler = signal.getsignal(signum)
+
+def chained_handler(_signum, frame):
+logger.warning(f"Received signal {_signum}, flushing traces")
+flush_traces()
+# Restore the original handler
+signal.signal(_signum, current_handler)
+# Re-raise the signal to let the original handler (or default) handle it
+os.kill(os.getpid(), _signum)
+
+return chained_handler

-
+try:
+# This is already done by default OTEL shutdown,
+# but user might turn that off - so register it explicitly
+atexit.register(flush_traces)
+
+# signal handlers
+for sig in (signal.SIGINT, signal.SIGTERM):
+signal.signal(sig, create_chained_signal_handler(sig))
+except Exception as e:
+logger.warning(f"Could not set up termination handlers: {e}"
+"\nConsider calling initialize_tracing() from the main thread during app initialization if you don't already")
+
+
+def initialize_tracing(api_key: Optional[str] = None, collector_endpoint: Optional[str] = DEFAULT_COLLECTOR_ENDPOINT):
 """
 Initialize OpenTelemetry with OTLP exporter for Paid backend.

@@ -203,36 +238,7 @@ def initialize_tracing_(api_key: Optional[str] = None, collector_endpoint: Optio
 span_processor = SimpleSpanProcessor(otlp_exporter)
 paid_tracer_provider.add_span_processor(span_processor)

-#
-def flush_traces():
-try:
-if not isinstance(paid_tracer_provider, NoOpTracerProvider) and not paid_tracer_provider.force_flush(
-10000
-):
-logger.error("OTEL force flush : timeout reached")
-except Exception as e:
-logger.error(f"Error flushing traces: {e}")
-
-def create_chained_signal_handler(signum: int):
-current_handler = signal.getsignal(signum)
-
-def chained_handler(_signum, frame):
-logger.warning(f"Received signal {_signum}, flushing traces")
-flush_traces()
-# Restore the original handler
-signal.signal(_signum, current_handler)
-# Re-raise the signal to let the original handler (or default) handle it
-os.kill(os.getpid(), _signum)
-
-return chained_handler
-
-# This is already done by default OTEL shutdown,
-# but user might turn that off - so register it explicitly
-atexit.register(flush_traces)
-
-# Handle signals
-for sig in (signal.SIGINT, signal.SIGTERM):
-signal.signal(sig, create_chained_signal_handler(sig))
+setup_graceful_termination()  # doesn't throw

 logger.info("Paid tracing initialized successfully - collector at %s", collector_endpoint)
 except Exception as e:
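Moving this logic into `setup_graceful_termination()` also explains the new warning text: CPython only allows `signal.signal()` to be called from the main thread, so the `SIGINT`/`SIGTERM` flush handlers are registered on a best-effort basis and the SDK recommends initializing from the main thread at startup. A minimal sketch of following that advice; the explicit thread check is only illustrative:

```python
# Sketch: initialize from the main thread so the atexit flush and the
# SIGINT/SIGTERM handlers introduced in 0.2.0 can actually be registered.
# signal.signal() raises ValueError outside the main thread; the SDK
# downgrades that to a warning and continues without the signal handlers.
import threading

from paid.tracing import initialize_tracing

if threading.current_thread() is threading.main_thread():
    initialize_tracing()
```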
{paid_python-0.1.1.dist-info → paid_python-0.2.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: paid-python
-Version: 0.
+Version: 0.2.0
 Summary:
 Requires-Python: >=3.9,<3.14
 Classifier: Intended Audience :: Developers
@@ -25,6 +25,7 @@ Requires-Dist: opentelemetry-api (>=1.23.0)
 Requires-Dist: opentelemetry-exporter-otlp-proto-http (>=1.23.0)
 Requires-Dist: opentelemetry-instrumentation-anthropic (>=0.47.0)
 Requires-Dist: opentelemetry-instrumentation-google-generativeai (>=0.47.0)
+Requires-Dist: opentelemetry-instrumentation-langchain (>=0.47.0)
 Requires-Dist: opentelemetry-instrumentation-openai (>=0.47.0)
 Requires-Dist: opentelemetry-sdk (>=1.23.0)
 Requires-Dist: pydantic (>=1.9.0)
@@ -162,6 +163,13 @@ Both approaches:
 - Gracefully fall back to normal execution if tracing fails
 - Support the same parameters: `external_customer_id`, `external_agent_id`, `tracing_token`, `store_prompt`, `metadata`

+* Note - if it happens that you're calling `paid_tracing` from non-main thread, then it's advised to initialize from main thread:
+```python
+from paid.tracing import initialize_tracing
+initialize_tracing()
+```
+* `initialize_tracing` also accepts optional arguments like OTEL collector endpoint and api key if you want to reroute your tracing somewhere else :)
+
 ### Using the Paid wrappers

 You can track usage costs by using Paid wrappers around your AI provider's SDK.
@@ -182,9 +190,11 @@ Example usage:

 ```python
 from openai import OpenAI
-from paid.tracing import paid_tracing
+from paid.tracing import paid_tracing, initialize_tracing
 from paid.tracing.wrappers.openai import PaidOpenAI

+initialize_tracing()
+
 openAIClient = PaidOpenAI(OpenAI(
 # This is the default and can be omitted
 api_key="<OPENAI_API_KEY>",
@@ -212,10 +222,12 @@ You can attach custom metadata to your traces by passing a `metadata` dictionary
 <Tabs>
 <Tab title="Python - Decorator">
 ```python
-from paid.tracing import paid_tracing, signal
+from paid.tracing import paid_tracing, signal, initialize_tracing
 from paid.tracing.wrappers import PaidOpenAI
 from openai import OpenAI

+initialize_tracing()
+
 openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

 @paid_tracing(
@@ -243,10 +255,12 @@ You can attach custom metadata to your traces by passing a `metadata` dictionary

 <Tab title="Python - Context Manager">
 ```python
-from paid.tracing import paid_tracing, signal
+from paid.tracing import paid_tracing, signal, initialize_tracing
 from paid.tracing.wrappers import PaidOpenAI
 from openai import OpenAI

+initialize_tracing()
+
 openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

 def process_event(event):
@@ -305,14 +319,14 @@ For maximum convenience, you can use OpenTelemetry auto-instrumentation to autom

 ```python
 from paid import Paid
-from paid.tracing import paid_autoinstrument
+from paid.tracing import paid_autoinstrument, initialize_tracing
 from openai import OpenAI

 # Initialize Paid SDK
 client = Paid(token="PAID_API_KEY")
+initialize_tracing()

-#
-paid_autoinstrument() # instruments all available: anthropic, gemini, openai, openai-agents, bedrock
+paid_autoinstrument() # instruments all available: anthropic, gemini, openai, openai-agents, bedrock, langchain

 # Now all OpenAI calls will be automatically traced
 openai_client = OpenAI(api_key="<OPENAI_API_KEY>")
@@ -338,6 +352,7 @@ gemini - Google Generative AI (google-generativeai)
 openai - OpenAI Python SDK
 openai-agents - OpenAI Agents SDK
 bedrock - AWS Bedrock (boto3)
+langchain - LangChain framework
 ```

 #### Selective Instrumentation
@@ -437,10 +452,12 @@ For such cases, you can pass a tracing token directly to `@paid_tracing()` or co
 The simplest way to implement distributed tracing is to pass the token directly to the decorator or context manager:

 ```python
-from paid.tracing import paid_tracing, signal, generate_tracing_token
+from paid.tracing import paid_tracing, signal, generate_tracing_token, initialize_tracing
 from paid.tracing.wrappers.openai import PaidOpenAI
 from openai import OpenAI

+initialize_tracing()
+
 openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

 # Process 1: Generate token and do initial work
@@ -482,10 +499,12 @@ process_part_2()
 Using context manager instead of decorator:

 ```python
-from paid.tracing import paid_tracing, signal, generate_tracing_token
+from paid.tracing import paid_tracing, signal, generate_tracing_token, initialize_tracing
 from paid.tracing.wrappers.openai import PaidOpenAI
 from openai import OpenAI

+initialize_tracing()
+
 # Initialize
 openai_client = PaidOpenAI(OpenAI(api_key="<OPENAI_API_KEY>"))

@@ -643,9 +662,11 @@ The `@paid_tracing` decorator automatically handles both sync and async function

 ```python
 from openai import AsyncOpenAI
-from paid.tracing import paid_tracing
+from paid.tracing import paid_tracing, initialize_tracing
 from paid.tracing.wrappers.openai import PaidAsyncOpenAI

+initialize_tracing()
+
 # Wrap the async OpenAI client
 openai_client = PaidAsyncOpenAI(AsyncOpenAI(api_key="<OPENAI_API_KEY>"))

@@ -669,10 +690,12 @@ await generate_image()
 The `signal()` function works seamlessly in async contexts:

 ```python
-from paid.tracing import paid_tracing, signal
+from paid.tracing import paid_tracing, signal, initialize_tracing
 from paid.tracing.wrappers.openai import PaidAsyncOpenAI
 from openai import AsyncOpenAI

+initialize_tracing()
+
 openai_client = PaidAsyncOpenAI(AsyncOpenAI(api_key="<OPENAI_API_KEY>"))

 @paid_tracing("your_external_customer_id", "your_external_agent_id")
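Since the distribution metadata now declares the `opentelemetry-instrumentation-langchain` dependency and bumps the version, a quick way to confirm which side of this diff is installed, using only the standard library:

```python
# Sketch: report the installed distribution version for this package.
from importlib.metadata import version

print(version("paid-python"))  # "0.2.0" for the new wheel described in this diff
```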
{paid_python-0.1.1.dist-info → paid_python-0.2.0.dist-info}/RECORD
CHANGED

@@ -2,7 +2,7 @@ paid/__init__.py,sha256=D1SeLoeTlySo_vZCZrxFX3y5KhKGrHflphLXoewImfk,1826
 paid/agents/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 paid/agents/client.py,sha256=ojc3H-nx4MqDrb74_i6JE_wjHSJaVAErsIunfNeffMo,23305
 paid/agents/raw_client.py,sha256=jN9LvPK2-bGeNQzcV3iRmprpegXKtO2JaOEXjnPfz9Y,26833
-paid/client.py,sha256=
+paid/client.py,sha256=2GGQByab__kDKaWeNy4wK_T6RkS36TX_mA6fsO08Ww4,23035
 paid/contacts/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 paid/contacts/client.py,sha256=sNm-yAg4dR9AyYWL7-RC_CuCCvOXX7YlDAUqn47yZhE,14058
 paid/contacts/raw_client.py,sha256=ZYNWuekHiL2sqK_gHR0IzcrLAopUKRXIqMUi-fuLGe4,19211
@@ -36,12 +36,12 @@ paid/orders/lines/client.py,sha256=GqSwiXdlu49KLHt7uccS_H4nkVQosM1_PQOcPA9v82A,4
 paid/orders/lines/raw_client.py,sha256=KZN_yBokCOkf1lUb4ZJtX_NZbqmTqCdJNoaIOdWar8I,4590
 paid/orders/raw_client.py,sha256=650e1Sj2vi9KVJc15M3ENXIKYoth0qMz66dzvXy1Sb4,16245
 paid/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-paid/tracing/__init__.py,sha256=
-paid/tracing/autoinstrumentation.py,sha256=
-paid/tracing/context_manager.py,sha256=
+paid/tracing/__init__.py,sha256=Pe55koIwqJ6Vv5-9Wqi8xIdwCS2BbxZds-MK5fD-F5Y,506
+paid/tracing/autoinstrumentation.py,sha256=qlLLiP9cWNIXX226dLDLvvlioLCPO-0F81mU_0fBb9s,7604
+paid/tracing/context_manager.py,sha256=Qtl59mjDsacX53LPFFAVXsfeGCEA6CIfxPsvjO-Kmx8,9729
 paid/tracing/distributed_tracing.py,sha256=CpUWpHai-4LxLLHbGxz41r9h5wLG-dC83YL5Vg29OBI,3967
 paid/tracing/signal.py,sha256=PfYxF6EFQS8j7RY5_C5NXrCBVu9Hq2E2tyG4fdQScJk,3252
-paid/tracing/tracing.py,sha256=
+paid/tracing/tracing.py,sha256=w07I7KQ4O8yJdvLs9RDAQVVofVWeR4xNei3zC5-EE6c,16080
 paid/tracing/wrappers/__init__.py,sha256=IIleLB_JUbzLw7FshrU2VHZAKF3dZHMGy1O5zCBwwqM,1588
 paid/tracing/wrappers/anthropic/__init__.py,sha256=_x1fjySAQxuT5cIGO_jU09LiGcZH-WQLqKg8mUFAu2w,115
 paid/tracing/wrappers/anthropic/anthropicWrapper.py,sha256=pGchbOb41CbTxc7H8xXoM-LjR085spqrzXqCVC_rrFk,4913
@@ -98,7 +98,7 @@ paid/usage/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 paid/usage/client.py,sha256=280WJuepoovk3BAVbAx2yN2Q_qBdvx3CcPkLu8lXslc,3030
 paid/usage/raw_client.py,sha256=2acg5C4lxuZodZjepU9QYF0fmBxgG-3ZgXs1zUJG-wM,3709
 paid/version.py,sha256=QIpDFnOrxMxrs86eL0iNH0mSZ1DO078wWHYY9TYAoew,78
-paid_python-0.
-paid_python-0.
-paid_python-0.
-paid_python-0.
+paid_python-0.2.0.dist-info/LICENSE,sha256=Nz4baY1zvv0Qy7lqrQtbaiMhmEeGr2Q7A93aqzpml4c,1071
+paid_python-0.2.0.dist-info/METADATA,sha256=tFvHkGpKqlXPnxZhDhxFm-vETz6RbVihbJZOEdjGPUw,22343
+paid_python-0.2.0.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+paid_python-0.2.0.dist-info/RECORD,,

{paid_python-0.1.1.dist-info → paid_python-0.2.0.dist-info}/LICENSE
File without changes

{paid_python-0.1.1.dist-info → paid_python-0.2.0.dist-info}/WHEEL
File without changes