lucidicai 2.0.1__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. lucidicai/__init__.py +351 -876
  2. lucidicai/api/__init__.py +1 -0
  3. lucidicai/api/client.py +218 -0
  4. lucidicai/api/resources/__init__.py +1 -0
  5. lucidicai/api/resources/dataset.py +192 -0
  6. lucidicai/api/resources/event.py +88 -0
  7. lucidicai/api/resources/session.py +126 -0
  8. lucidicai/client.py +4 -1
  9. lucidicai/core/__init__.py +1 -0
  10. lucidicai/core/config.py +223 -0
  11. lucidicai/core/errors.py +60 -0
  12. lucidicai/core/types.py +35 -0
  13. lucidicai/dataset.py +2 -0
  14. lucidicai/errors.py +6 -0
  15. lucidicai/feature_flag.py +8 -0
  16. lucidicai/sdk/__init__.py +1 -0
  17. lucidicai/sdk/context.py +144 -0
  18. lucidicai/sdk/decorators.py +187 -0
  19. lucidicai/sdk/error_boundary.py +299 -0
  20. lucidicai/sdk/event.py +122 -0
  21. lucidicai/sdk/event_builder.py +304 -0
  22. lucidicai/sdk/features/__init__.py +1 -0
  23. lucidicai/sdk/features/dataset.py +605 -0
  24. lucidicai/sdk/features/feature_flag.py +383 -0
  25. lucidicai/sdk/init.py +271 -0
  26. lucidicai/sdk/shutdown_manager.py +302 -0
  27. lucidicai/telemetry/context_bridge.py +82 -0
  28. lucidicai/telemetry/context_capture_processor.py +25 -9
  29. lucidicai/telemetry/litellm_bridge.py +18 -24
  30. lucidicai/telemetry/lucidic_exporter.py +51 -36
  31. lucidicai/telemetry/utils/model_pricing.py +278 -0
  32. lucidicai/utils/__init__.py +1 -0
  33. lucidicai/utils/images.py +337 -0
  34. lucidicai/utils/logger.py +168 -0
  35. lucidicai/utils/queue.py +393 -0
  36. {lucidicai-2.0.1.dist-info → lucidicai-2.1.0.dist-info}/METADATA +1 -1
  37. {lucidicai-2.0.1.dist-info → lucidicai-2.1.0.dist-info}/RECORD +39 -12
  38. {lucidicai-2.0.1.dist-info → lucidicai-2.1.0.dist-info}/WHEEL +0 -0
  39. {lucidicai-2.0.1.dist-info → lucidicai-2.1.0.dist-info}/top_level.txt +0 -0
lucidicai/utils/queue.py
@@ -0,0 +1,393 @@
+ """Parallel event queue for efficient event processing.
+
+ This module provides a high-performance event queue that processes events
+ in parallel while respecting parent-child dependencies.
+ """
+ import gzip
+ import io
+ import json
+ import queue
+ import threading
+ import time
+ import requests
+ from concurrent.futures import ThreadPoolExecutor, as_completed
+ from datetime import datetime, timezone
+ from typing import Any, Dict, List, Optional, Set, Tuple
+
+ from ..core.config import get_config
+ from ..utils.logger import debug, info, warning, error, truncate_id, truncate_data
+
+
+ class EventQueue:
+ """High-performance parallel event queue."""
+
+ def __init__(self, client):
+ """Initialize the event queue."""
+ self.config = get_config()
+ self._client = client
+
+ # Queue configuration
+ self.max_queue_size = self.config.event_queue.max_queue_size
+ self.flush_interval_ms = self.config.event_queue.flush_interval_ms
+ self.flush_at_count = self.config.event_queue.flush_at_count
+ self.blob_threshold = self.config.event_queue.blob_threshold
+ self.max_workers = self.config.event_queue.max_parallel_workers
+ self.retry_failed = self.config.event_queue.retry_failed
+
+ # Runtime state
+ self._queue = queue.Queue(maxsize=self.max_queue_size)
+ self._stopped = threading.Event()
+ self._flush_event = threading.Event()
+ self._worker: Optional[threading.Thread] = None
+ self._sent_ids: Set[str] = set()
+ # Removed deferred queue - no longer needed since backend handles any order
+
+ # Thread pool for parallel processing
+ self._executor = ThreadPoolExecutor(
+ max_workers=self.max_workers,
+ thread_name_prefix="LucidicSender"
+ )
+
+ # Thread safety
+ self._flush_lock = threading.Lock()
+ self._processing_count = 0
+ self._processing_lock = threading.Lock()
+
+ # Start background worker
+ self._start_worker()
+
+ debug(f"[EventQueue] Initialized with {self.max_workers} parallel workers, batch size: {self.flush_at_count}, flush interval: {self.flush_interval_ms}ms")
+
+ def queue_event(self, event_request: Dict[str, Any]) -> None:
+ """Enqueue an event for background processing."""
+ if "defer_count" not in event_request:
+ event_request["defer_count"] = 0
+
+ try:
+ self._queue.put(event_request, block=True, timeout=0.001)
+
+ event_id = event_request.get('client_event_id', 'unknown')
+ parent_id = event_request.get('client_parent_event_id')
+ debug(f"[EventQueue] Queued event {truncate_id(event_id)} (parent: {truncate_id(parent_id)}), queue size: {self._queue.qsize()}")
+
+ # Wake worker if batch large enough
+ if self._queue.qsize() >= self.flush_at_count:
+ self._flush_event.set()
+
+ except queue.Full:
+ warning(f"[EventQueue] Queue at max size {self.max_queue_size}, dropping event")
+
+ def force_flush(self, timeout_seconds: float = 5.0) -> None:
+ """Flush current queue synchronously (best-effort)."""
+ with self._flush_lock:
+ debug(f"[EventQueue] Force flush requested, queue size: {self._queue.qsize()}")
+
+ # Signal the worker to flush immediately
+ self._flush_event.set()
+
+ # Wait for the queue to be processed
+ end_time = time.time() + timeout_seconds
+ last_size = -1
+ stable_count = 0
+
+ debug(f"[EventQueue] Force flush: entering wait loop, timeout={timeout_seconds}s")
+ iterations = 0
+ start_time = time.time()
+ while time.time() < end_time:
+ iterations += 1
+ if iterations % 20 == 1: # Log every second (20 * 0.05s)
+ debug(f"[EventQueue] Force flush: iteration {iterations}, time left: {end_time - time.time():.1f}s")
+
+ current_size = self._queue.qsize()
+
+ with self._processing_lock:
+ processing = self._processing_count
+
+ # Check if we're done
+ if current_size == 0 and processing == 0:
+ if stable_count >= 2:
+ debug("[EventQueue] Force flush complete")
+ return
+ stable_count += 1
+ debug(f"[EventQueue] Force flush: queue empty, stable_count={stable_count}")
+ else:
+ stable_count = 0
+
+ # Check for progress
+ if current_size == last_size:
+ stable_count += 1
+ if stable_count >= 10: # 0.5 seconds of no progress
+ break
+ else:
+ stable_count = 0
+ last_size = current_size
+
+ self._flush_event.set()
+ time.sleep(0.05)
+
+ # Safety check to prevent infinite loop
+ if time.time() - start_time > timeout_seconds + 1:
+ warning(f"[EventQueue] Force flush: exceeded timeout by >1s, breaking")
+ break
+
+ debug(f"[EventQueue] Force flush: exited wait loop after {time.time() - start_time:.1f}s")
+
+ def is_empty(self) -> bool:
+ """Check if queue is completely empty."""
+ with self._processing_lock:
+ queue_empty = self._queue.empty()
+ not_processing = self._processing_count == 0
+ # No deferred queue to check anymore
+ return queue_empty and not_processing
+
+ def shutdown(self, timeout: float = 5.0) -> None:
+ """Shutdown the event queue."""
+ info(f"[EventQueue] Shutting down with {self._queue.qsize()} events in queue")
+
+ # Flush remaining events
+ self.force_flush(timeout_seconds=timeout)
+
+ # Shutdown executor (timeout param added in Python 3.9+)
+ try:
+ self._executor.shutdown(wait=True, timeout=timeout)
+ except TypeError:
+ # Fallback for older Python versions
+ self._executor.shutdown(wait=True)
+
+ # Signal stop
+ self._stopped.set()
+ self._flush_event.set()
+
+ # Wait for worker
+ if self._worker and self._worker.is_alive():
+ self._worker.join(timeout=timeout)
+
+ # --- Internal Implementation ---
+
+ def _start_worker(self) -> None:
+ """Start the background worker thread."""
+ if self._worker and self._worker.is_alive():
+ return
+
+ self._worker = threading.Thread(
+ target=self._run_loop,
+ name="LucidicEventQueue",
+ daemon=self.config.event_queue.daemon_mode
+ )
+ self._worker.start()
+
+ def _run_loop(self) -> None:
+ """Main worker loop."""
+ while not self._stopped.is_set():
+ batch = self._collect_batch()
+
+ if batch:
+ with self._processing_lock:
+ self._processing_count = len(batch)
+
+ try:
+ self._process_batch(batch)
+ except Exception as e:
+ error(f"[EventQueue] Batch processing error: {e}")
+ finally:
+ with self._processing_lock:
+ self._processing_count = 0
+
+ def _collect_batch(self) -> List[Dict[str, Any]]:
+ """Collect a batch of events from the queue."""
+ batch: List[Dict[str, Any]] = []
+ deadline = time.time() + (self.flush_interval_ms / 1000.0)
+
+ while True:
+ # Check for force flush
+ if self._flush_event.is_set():
+ self._flush_event.clear()
+ # Drain entire queue
+ while not self._queue.empty():
+ try:
+ batch.append(self._queue.get_nowait())
+ except queue.Empty:
+ break
+ if batch:
+ break
+
+ # Check batch size
+ if len(batch) >= self.flush_at_count:
+ break
+
+ # Check deadline
+ remaining_time = deadline - time.time()
+ if remaining_time <= 0:
+ break
+
+ # Try to get an item
+ try:
+ timeout = min(remaining_time, 0.05)
+ item = self._queue.get(block=True, timeout=timeout)
+ batch.append(item)
+ except queue.Empty:
+ if self._stopped.is_set():
+ # Drain remaining on shutdown
+ while not self._queue.empty():
+ try:
+ batch.append(self._queue.get_nowait())
+ except queue.Empty:
+ break
+ break
+ if batch and time.time() >= deadline:
+ break
+
+ return batch
+
+ def _process_batch(self, batch: List[Dict[str, Any]]) -> None:
+ """Process batch with parallel sending."""
+ debug(f"[EventQueue] Processing batch of {len(batch)} events")
+
+ # No need to handle deferred events - we don't defer anymore
+
+ # Group for parallel processing
+ dependency_groups = self._group_by_dependencies(batch)
+
+ # Process each group in parallel
+ for group_index, group in enumerate(dependency_groups):
+ debug(f"[EventQueue] Processing dependency group {group_index + 1}/{len(dependency_groups)} with {len(group)} events in parallel")
+
+ # Submit all events in group for parallel processing
+ futures_to_event = {}
+ for event in group:
+ future = self._executor.submit(self._send_event_safe, event)
+ futures_to_event[future] = event
+
+ # Wait for completion
+ for future in as_completed(futures_to_event):
+ event = futures_to_event[future]
+ try:
+ success = future.result(timeout=30)
+ if success:
+ if event_id := event.get("client_event_id"):
+ self._sent_ids.add(event_id)
+ except Exception as e:
+ debug(f"[EventQueue] Failed to send event: {e}")
+ if self.retry_failed:
+ self._retry_event(event)
+
+ def _group_by_dependencies(self, events: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]:
+ """Group events for parallel processing.
+
+ Since the backend handles events in any order using client-side event IDs,
+ we don't need to check dependencies. Just return all events in one group
+ for maximum parallel processing.
+ """
+ if not events:
+ return []
+
+ # Mark all event IDs as sent for tracking
+ for event in events:
+ if event_id := event.get("client_event_id"):
+ self._sent_ids.add(event_id)
+
+ # Return all events in a single group for parallel processing
+ return [events]
+
+ def _send_event_safe(self, event_request: Dict[str, Any]) -> bool:
+ """Send event with error suppression if configured."""
+ if self.config.error_handling.suppress_errors:
+ try:
+ return self._send_event(event_request)
+ except Exception as e:
+ warning(f"[EventQueue] Suppressed send error: {e}")
+ return False
+ else:
+ return self._send_event(event_request)
+
+ def _send_event(self, event_request: Dict[str, Any]) -> bool:
+ """Send a single event to the backend."""
+ # No dependency checking needed - backend handles events in any order
+
+ # Check for blob offloading
+ payload = event_request.get("payload", {})
+ raw_bytes = json.dumps(payload, separators=(",", ":"), ensure_ascii=False).encode("utf-8")
+ should_offload = len(raw_bytes) > self.blob_threshold
+
+ if should_offload:
+ event_id = event_request.get('client_event_id', 'unknown')
+ debug(f"[EventQueue] Event {truncate_id(event_id)} needs blob storage ({len(raw_bytes)} bytes > {self.blob_threshold} threshold)")
+
+ send_body: Dict[str, Any] = dict(event_request)
+ if should_offload:
+ send_body["needs_blob"] = True
+ send_body["payload"] = self._create_preview(send_body.get("type"), payload)
+ else:
+ send_body["needs_blob"] = False
+
+ # Send event
+ try:
+ response = self._client.make_request("events", "POST", send_body)
+
+ # Handle blob upload if needed
+ if should_offload:
+ blob_url = response.get("blob_url")
+ if blob_url:
+ compressed = self._compress_json(payload)
+ self._upload_blob(blob_url, compressed)
+ debug(f"[EventQueue] Blob uploaded for event {truncate_id(event_request.get('client_event_id'))}")
+ else:
+ error("[EventQueue] No blob_url received for large payload")
+ return False
+
+ return True
+
+ except Exception as e:
+ debug(f"[EventQueue] Failed to send event {truncate_id(event_request.get('client_event_id'))}: {e}")
+ return False
+
+ def _retry_event(self, event: Dict[str, Any]) -> None:
+ """Retry a failed event."""
+ event["retry_count"] = event.get("retry_count", 0) + 1
+ if event["retry_count"] <= 3:
+ try:
+ self._queue.put_nowait(event)
+ except queue.Full:
+ pass
+
+ @staticmethod
+ def _compress_json(payload: Dict[str, Any]) -> bytes:
+ """Compress JSON payload using gzip."""
+ raw = json.dumps(payload, separators=(",", ":"), ensure_ascii=False).encode("utf-8")
+ buf = io.BytesIO()
+ with gzip.GzipFile(fileobj=buf, mode="wb") as gz:
+ gz.write(raw)
+ return buf.getvalue()
+
+ def _upload_blob(self, blob_url: str, data: bytes) -> None:
+ """Upload compressed blob to presigned URL."""
+ headers = {"Content-Type": "application/json", "Content-Encoding": "gzip"}
+ resp = requests.put(blob_url, data=data, headers=headers)
+ resp.raise_for_status()
+
+ @staticmethod
+ def _create_preview(event_type: Optional[str], payload: Dict[str, Any]) -> Dict[str, Any]:
+ """Create preview of large payload for logging."""
+ try:
+ t = (event_type or "generic").lower()
+
+ if t == "llm_generation":
+ req = payload.get("request", {})
+ return {
+ "request": {
+ "model": str(req.get("model", ""))[:200],
+ "provider": str(req.get("provider", ""))[:200],
+ "messages": "truncated"
+ },
+ "response": {"output": "truncated"}
+ }
+ elif t == "function_call":
+ return {
+ "function_name": str(payload.get("function_name", ""))[:200],
+ "arguments": "truncated"
+ }
+ else:
+ return {"details": "preview_unavailable"}
+
+ except Exception:
+ return {"details": "preview_error"}
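Note on the blob-offload path added above: EventQueue._send_event measures the compact JSON encoding of each payload against config.event_queue.blob_threshold, sends only a truncated preview inline when the payload is too large, and gzips the full payload (EventQueue._compress_json) for a PUT to the presigned blob_url returned by the events endpoint. A standalone sketch of that size check and compression step follows; the payload and the 64 KB threshold are assumed values for illustration only, not defaults taken from core/config.py.

import gzip
import io
import json

# Hypothetical oversized payload; mirrors the check in EventQueue._send_event.
payload = {"request": {"model": "gpt-4o", "messages": ["lorem ipsum"] * 5000}}
BLOB_THRESHOLD = 64 * 1024  # assumed threshold; the real value comes from get_config()

raw = json.dumps(payload, separators=(",", ":"), ensure_ascii=False).encode("utf-8")
should_offload = len(raw) > BLOB_THRESHOLD  # large payloads get needs_blob=True plus a preview

# Same gzip step as EventQueue._compress_json; the compressed bytes would be
# uploaded to the presigned blob_url with Content-Encoding: gzip.
buf = io.BytesIO()
with gzip.GzipFile(fileobj=buf, mode="wb") as gz:
    gz.write(raw)
print(should_offload, len(raw), len(buf.getvalue()))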
{lucidicai-2.0.1.dist-info → lucidicai-2.1.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lucidicai
- Version: 2.0.1
+ Version: 2.1.0
  Summary: Lucidic AI Python SDK
  Author: Andy Liang
  Author-email: andy@lucidic.ai
{lucidicai-2.0.1.dist-info → lucidicai-2.1.0.dist-info}/RECORD
@@ -1,14 +1,14 @@
- lucidicai/__init__.py,sha256=3gm2pFPTndvfx1zalGWqBdbdkUE_DDvAPbtsoIV5720,35879
+ lucidicai/__init__.py,sha256=qAza0IBGUQS3e-_uXzRvqVxUH7XliA5XYieTEpXnGT0,11992
  lucidicai/action.py,sha256=sPRd1hTIVXDqnvG9ZXWEipUFh0bsXcE0Fm7RVqmVccM,237
- lucidicai/client.py,sha256=dRSwOAGth_b-RRBjLuxhPI75ULpQHP7M-KfP9X-XYjY,22172
+ lucidicai/client.py,sha256=IIhlY6Mfwy47FeMxzpvIygCaqcI1FnqiXiVU6M4QEiE,22327
  lucidicai/constants.py,sha256=zN8O7TjoRHRlaGa9CZUWppS73rhzKGwaEkF9XMTV0Cg,1160
  lucidicai/context.py,sha256=ruEXAndSv0gQ-YEXLlC4Fx6NNbaylfp_dZxbpwmLZSA,4622
- lucidicai/dataset.py,sha256=IgWCUhoclq1ZzSNc22UHd3fLs0hJv9A81OQizjbHtiE,3951
+ lucidicai/dataset.py,sha256=wu25X02JyWkht_yQabgQpGZFfzbNTxG6tf5k9ol8Amo,4005
  lucidicai/decorators.py,sha256=obpHbGLhRd-yIL5xIqzjNmf-ZKCIIx5vlYnMpCcJ7Uo,5416
- lucidicai/errors.py,sha256=XT9UiYVoi88VsxrD2RU96l6mwCmxSeICOWhghB0iJ7Y,2058
+ lucidicai/errors.py,sha256=IjnGag21aEsWryJ8hSqRMPftMeteHLQHQVZuQWl0ynM,2254
  lucidicai/event.py,sha256=ObPXS22QIB-n4eHxzEimTtrlOxC1L6_eQVUAx4ZIT7s,2089
  lucidicai/event_queue.py,sha256=7Y8hkrm0a7EGCBN2oW_XWd-GkJ9Cihnu2Gyk6FMftks,20065
- lucidicai/feature_flag.py,sha256=Hfcoqqb5VimuaY1Q0NXl08elxQWG97KqzRpaMfE4PYA,11841
+ lucidicai/feature_flag.py,sha256=JRvIKUtF9d49o6L8laSg-LUfqSw-Q8QoTqin4z-wIVs,12005
  lucidicai/image_upload.py,sha256=6SRudg-BpInM2gzMx1Yf1Rz_Zyh8inwoJ7U4pBw7ruY,3807
  lucidicai/lru.py,sha256=PXiDSoUCOxjamG1QlQx6pDbQCm8h5hKAnnr_NI0PEgE,618
  lucidicai/model_pricing.py,sha256=Dxi6e0WjcIyCTkVX7K7f0pJ5rPu7nSt3lOmgzAUQl1o,12402
@@ -17,6 +17,16 @@ lucidicai/singleton.py,sha256=SKiNBgt_Wb5cCWbMt3IWjRAQw3v153LTRgqvDj8poF8,1457
  lucidicai/state.py,sha256=4Tb1X6l2or6w_e62FYSuEeghAv3xXm5gquKwzCpvdok,235
  lucidicai/step.py,sha256=_oBIyTBZBvNkUkYHIrwWd75KMSlMtR9Ws2Lo71Lyff8,2522
  lucidicai/streaming.py,sha256=QOLAzhwxetvx711J8VcphY5kXWPJz9XEBJrmHveRKMc,9796
+ lucidicai/api/__init__.py,sha256=UOYuFZupG0TgzMAxbLNgpodDXhDRXBgMva8ZblgBN9Y,31
+ lucidicai/api/client.py,sha256=czD3sg4wgyGQTVVlnSi3wpeCt90_D4eOuK8nBlpiv4U,7276
+ lucidicai/api/resources/__init__.py,sha256=Wc8-JfL82wkE7eB8PHplqYvaEG2oXNXXhRyEPeduJeE,27
+ lucidicai/api/resources/dataset.py,sha256=6UnMUd-y__TOAjUJLjbc0lZJRTy_gHkyoE82OvjFoN4,5583
+ lucidicai/api/resources/event.py,sha256=GyyNL3_k53EbmvTdgJEABexiuJnoX61hxWey7DYmlYY,2434
+ lucidicai/api/resources/session.py,sha256=w7b4kkbWdbaNbwuMBFgEeVmDfaYozBf9OK8B8L9B1m8,3730
+ lucidicai/core/__init__.py,sha256=b0YQkd8190Y_GgwUcmf0tOiSLARd7L4kq4jwfhhGAyI,39
+ lucidicai/core/config.py,sha256=m5kl9wiVp5J0DW6ES--GzsMgyykSYtaCi3D-2tW650M,7972
+ lucidicai/core/errors.py,sha256=aRfdXABiTWFTiWELgu2Dz_wxVSggcBFqX7Q-toCy_fY,2130
+ lucidicai/core/types.py,sha256=KabcTBQe7SemigccKfJSDiJmjSJDJJvvtefSd8pfrJI,702
  lucidicai/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lucidicai/providers/anthropic_handler.py,sha256=GZEa4QOrjZ9ftu_qTwY3L410HwKzkXgN7omYRsEQ4LU,10174
  lucidicai/providers/base_providers.py,sha256=nrZVr4Y9xcAiMn4uAN3t3k6DlHNTvlXrA4qQg7lANOQ,544
@@ -34,12 +44,24 @@ lucidicai/providers/otel_provider.py,sha256=ixLc80-_Vag0EO_92wj2m3_lg6HXyIpz9Md4
  lucidicai/providers/pydantic_ai_handler.py,sha256=Yhd9VTJhq292ZzJF04O_jYGRh-1bzs70BzQdo7a2Z9M,28269
  lucidicai/providers/text_storage.py,sha256=L62MMJ8E23TDqDTUv2aRntdKMCItsXV7XjY6cFwx2DE,1503
  lucidicai/providers/universal_image_interceptor.py,sha256=7d-hw4xihRwvvA1AP8-vqYNChtmVXKmn09MN4pDS7KQ,12126
+ lucidicai/sdk/__init__.py,sha256=UrkV9FYbZkBxaX9qwxGbCJdXp-JqMpn0_u-huO9Y-ec,32
+ lucidicai/sdk/context.py,sha256=ruEXAndSv0gQ-YEXLlC4Fx6NNbaylfp_dZxbpwmLZSA,4622
+ lucidicai/sdk/decorators.py,sha256=B5BXG9Sn5ruUkxFq10L1rrCR_wzYUPlYeu5aqyXetMM,8393
+ lucidicai/sdk/error_boundary.py,sha256=IPr5wS9rS7ZQNgEaBwK53UaixAm6L2rijKKFfxcxjUI,9190
+ lucidicai/sdk/event.py,sha256=jadK8bZ_kkpycx5zHC5tlNUqL_yCk2WJ6REuFrSrIVI,3564
+ lucidicai/sdk/event_builder.py,sha256=oMvt39m07ZLmPllJTWwRxpinJUz9_AD17yNE6wQRoDA,10423
+ lucidicai/sdk/init.py,sha256=gxWfK_c22BcAwnv9LZWqmS_G303_rRAhA0zt2nWKdvc,9289
+ lucidicai/sdk/shutdown_manager.py,sha256=I5ylR96QHQ_SfP1euAiM0qQ-I7upCPMW1HUNvoj7hCw,12090
+ lucidicai/sdk/features/__init__.py,sha256=23KUF2EZBzsaH9JUFDGNXZb_3PSfc35VZfD59gAfyR0,26
+ lucidicai/sdk/features/dataset.py,sha256=qFGnu8Wm1yhaflBhtm-5veN-KaoxGLBL5xWEifkrsY0,19416
+ lucidicai/sdk/features/feature_flag.py,sha256=SzuzHiVnbticD6Ojn0_i9xQKui2s9QUFPJ7LixzAtf4,13844
  lucidicai/telemetry/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lucidicai/telemetry/base_provider.py,sha256=nrZVr4Y9xcAiMn4uAN3t3k6DlHNTvlXrA4qQg7lANOQ,544
- lucidicai/telemetry/context_capture_processor.py,sha256=k4_uTaoOhLeUAZsyUcDExVNXadk0nR4R1hJW096EVwY,2472
+ lucidicai/telemetry/context_bridge.py,sha256=NwyclZvPcZHZtIvLSrY3oO8WQ_J1JSuHWIr36gxA7xk,2989
+ lucidicai/telemetry/context_capture_processor.py,sha256=kzKWpg5m0OMUP5we6g453FjckWwA_jAVjOKCfiyKVN8,3651
  lucidicai/telemetry/extract.py,sha256=30Iqvnr9I0EkD61GRCMN0Zpk3fLmRYcuVajWjRz0z9I,6814
- lucidicai/telemetry/litellm_bridge.py,sha256=CFXVu8nduBtJEhv21maNMrkXV_x5ropy--7qr6HAjh8,16542
- lucidicai/telemetry/lucidic_exporter.py,sha256=PfUB5a5o6av1YbYj52WQ-I71q8PBja3xvYTRWItFDPc,11029
+ lucidicai/telemetry/litellm_bridge.py,sha256=QXUNwFI3GyvCQtnsnnSnmh2BfdnfnbKIDjfc_Rviau4,16436
+ lucidicai/telemetry/lucidic_exporter.py,sha256=ghzPVGJlR3yPtRNMtnvlqcMuQCNTkU-oRLyu_YiuzQU,11892
  lucidicai/telemetry/lucidic_span_processor.py,sha256=-jo7Muuslo3ZCSAysLsDGBqJijQSpIOvJHPbPNjP4iQ,31029
  lucidicai/telemetry/openai_agents_instrumentor.py,sha256=__wIbeglMnEEf4AGTQ--FXeWCKmz2yy8SBupwprEdZA,12694
  lucidicai/telemetry/opentelemetry_converter.py,sha256=xOHCqoTyO4hUkL6k7fxy84PbljPpYep6ET9ZqbkJehc,17665
@@ -50,9 +72,14 @@ lucidicai/telemetry/pydantic_ai_handler.py,sha256=WPa3tFcVgVnPPO3AxcNOTbNkmODLgN
  lucidicai/telemetry/telemetry_init.py,sha256=8RMzZeeHYvaJKaM5KeSt0svaUAqODHmLstECjgHr8fc,8660
  lucidicai/telemetry/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lucidicai/telemetry/utils/image_storage.py,sha256=4Z59ZpVexr7-lcExfr8GsqXe0y2VZmr8Yjwa-3DeOxU,1457
+ lucidicai/telemetry/utils/model_pricing.py,sha256=Dxi6e0WjcIyCTkVX7K7f0pJ5rPu7nSt3lOmgzAUQl1o,12402
  lucidicai/telemetry/utils/text_storage.py,sha256=L62MMJ8E23TDqDTUv2aRntdKMCItsXV7XjY6cFwx2DE,1503
  lucidicai/telemetry/utils/universal_image_interceptor.py,sha256=vARgMk1hVSF--zfi5b8qBpJJOESuD17YlH9xqxmB9Uw,15954
- lucidicai-2.0.1.dist-info/METADATA,sha256=DOyezEU2bp3jBJOiNkXIOOZu55NRdLXztk95jZf9rwA,902
- lucidicai-2.0.1.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
- lucidicai-2.0.1.dist-info/top_level.txt,sha256=vSSdM3lclF4I5tyVC0xxUk8eIRnnYXMe1hW-eO91HUo,10
- lucidicai-2.0.1.dist-info/RECORD,,
+ lucidicai/utils/__init__.py,sha256=ZiGtmJaF0ph9iIFIgQiAreVuYM_1o7qu9VySK1NblTw,22
+ lucidicai/utils/images.py,sha256=YHFjeKHRxzWu0IsuNwKw303egPsd99AShaD4WND1lJk,12325
+ lucidicai/utils/logger.py,sha256=R3B3gSee64F6UVHUrShihBq_O7W7bgfrBiVDXTO3Isg,4777
+ lucidicai/utils/queue.py,sha256=iBhazYt9EPTpyuexfDyPjvJT-2ODaAbCBbGYvLVl8wM,15815
+ lucidicai-2.1.0.dist-info/METADATA,sha256=5olKiUoS21uLWjjoOkbJaQGY1J0FDJSTKMVGmOcoTEI,902
+ lucidicai-2.1.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+ lucidicai-2.1.0.dist-info/top_level.txt,sha256=vSSdM3lclF4I5tyVC0xxUk8eIRnnYXMe1hW-eO91HUo,10
+ lucidicai-2.1.0.dist-info/RECORD,,