lucidicai 2.1.2__py3-none-any.whl → 3.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lucidicai/__init__.py +32 -390
- lucidicai/api/client.py +260 -92
- lucidicai/api/resources/__init__.py +16 -1
- lucidicai/api/resources/dataset.py +422 -82
- lucidicai/api/resources/event.py +399 -27
- lucidicai/api/resources/experiment.py +108 -0
- lucidicai/api/resources/feature_flag.py +78 -0
- lucidicai/api/resources/prompt.py +84 -0
- lucidicai/api/resources/session.py +545 -38
- lucidicai/client.py +395 -480
- lucidicai/core/config.py +73 -48
- lucidicai/core/errors.py +3 -3
- lucidicai/sdk/bound_decorators.py +321 -0
- lucidicai/sdk/context.py +20 -2
- lucidicai/sdk/decorators.py +283 -74
- lucidicai/sdk/event.py +538 -36
- lucidicai/sdk/event_builder.py +2 -4
- lucidicai/sdk/features/dataset.py +408 -232
- lucidicai/sdk/features/feature_flag.py +344 -3
- lucidicai/sdk/init.py +50 -279
- lucidicai/sdk/session.py +502 -0
- lucidicai/sdk/shutdown_manager.py +103 -46
- lucidicai/session_obj.py +321 -0
- lucidicai/telemetry/context_capture_processor.py +13 -6
- lucidicai/telemetry/extract.py +60 -63
- lucidicai/telemetry/litellm_bridge.py +3 -44
- lucidicai/telemetry/lucidic_exporter.py +143 -131
- lucidicai/telemetry/openai_agents_instrumentor.py +2 -2
- lucidicai/telemetry/openai_patch.py +7 -6
- lucidicai/telemetry/telemetry_manager.py +183 -0
- lucidicai/telemetry/utils/model_pricing.py +21 -30
- lucidicai/telemetry/utils/provider.py +77 -0
- lucidicai/utils/images.py +30 -14
- lucidicai/utils/queue.py +2 -2
- lucidicai/utils/serialization.py +27 -0
- {lucidicai-2.1.2.dist-info → lucidicai-3.0.0.dist-info}/METADATA +1 -1
- {lucidicai-2.1.2.dist-info → lucidicai-3.0.0.dist-info}/RECORD +39 -30
- {lucidicai-2.1.2.dist-info → lucidicai-3.0.0.dist-info}/WHEEL +0 -0
- {lucidicai-2.1.2.dist-info → lucidicai-3.0.0.dist-info}/top_level.txt +0 -0
lucidicai/telemetry/openai_patch.py
CHANGED

@@ -230,7 +230,7 @@ class OpenAIResponsesPatcher:
                 attrs['gen_ai.request.response_format'] = text_format.__name__

             if instructions:
-                # Never truncate -
+                # Never truncate - large payloads are handled via blob storage
                 attrs['gen_ai.request.instructions'] = str(instructions)

         elif 'responses.create' in span_name:
@@ -269,13 +269,13 @@ class OpenAIResponsesPatcher:
                 messages = [{"role": "user", "content": messages}]

             # Always set message attributes for proper event creation
-            #
+            # Large payloads are handled via blob storage
             for i, msg in enumerate(messages):
                 if isinstance(msg, dict):
                     role = msg.get('role', 'user')
                     content = msg.get('content', '')
                     attrs[f'gen_ai.prompt.{i}.role'] = role
-                    # Always include full content -
+                    # Always include full content - large payloads use blob storage
                     attrs[f'gen_ai.prompt.{i}.content'] = str(content)

         return attrs
@@ -332,7 +332,7 @@ class OpenAIResponsesPatcher:

         # Set completion attributes if we have output
         if output_text:
-            # Never truncate -
+            # Never truncate - large payloads are handled via blob storage
             span.set_attribute("gen_ai.completion.0.role", "assistant")
             span.set_attribute("gen_ai.completion.0.content", output_text)
             debug(f"[OpenAI Patch] Set completion: {output_text[:100]}")
@@ -394,8 +394,9 @@ class OpenAIResponsesPatcher:
         for obj, method_name, original_method in self._client_refs:
             try:
                 setattr(obj, method_name, original_method)
-            except:
-
+            except (AttributeError, ReferenceError) as e:
+                # Client might have been garbage collected
+                logger.debug(f"[OpenAI Patch] Could not restore {method_name}: {e}")

         self._client_refs.clear()
         self._is_patched = False
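The last hunk above narrows a bare `except:` in the unpatch path to the two failures that can realistically occur while restoring monkey-patched client methods. A minimal sketch of the same pattern follows; the `(obj, method_name, original_method)` tuple layout is taken from the hunk, everything else is illustrative:

```python
import logging

logger = logging.getLogger("Lucidic")


def restore_patched_methods(client_refs: list) -> None:
    """Restore originals saved as (obj, method_name, original_method) tuples."""
    for obj, method_name, original_method in client_refs:
        try:
            setattr(obj, method_name, original_method)
        except (AttributeError, ReferenceError) as exc:
            # The patched client may be gone (garbage collected) or the attribute
            # may no longer be settable; skip it rather than swallow everything.
            logger.debug("Could not restore %s: %s", method_name, exc)
    client_refs.clear()
```

Narrowing the clause also stops the loop from silently eating `KeyboardInterrupt` and `SystemExit`, which a bare `except:` would catch.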
lucidicai/telemetry/telemetry_manager.py
ADDED

@@ -0,0 +1,183 @@
+"""Telemetry Manager - Singleton for shared OpenTelemetry infrastructure.
+
+Manages the global TracerProvider and client registry for multi-client support.
+Telemetry is shared across all LucidicAI client instances, with spans routed
+to the correct client based on client_id captured in span attributes.
+"""
+
+import logging
+import threading
+from typing import Dict, List, Optional, TYPE_CHECKING, Any
+
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor
+
+from .lucidic_exporter import LucidicSpanExporter
+from .context_capture_processor import ContextCaptureProcessor
+from .telemetry_init import instrument_providers
+
+if TYPE_CHECKING:
+    from ..client import LucidicAI
+
+logger = logging.getLogger("Lucidic")
+
+
+class TelemetryManager:
+    """Singleton manager for shared OpenTelemetry infrastructure.
+
+    This class manages a single TracerProvider that is shared across all
+    LucidicAI client instances. Spans are routed to the correct client
+    based on the client_id captured in span attributes at creation time.
+    """
+
+    _instance: Optional["TelemetryManager"] = None
+    _lock = threading.Lock()
+
+    def __new__(cls) -> "TelemetryManager":
+        if cls._instance is None:
+            with cls._lock:
+                # Double-check locking
+                if cls._instance is None:
+                    cls._instance = super().__new__(cls)
+                    cls._instance._initialized = False
+        return cls._instance
+
+    def __init__(self):
+        # Only initialize once
+        if self._initialized:
+            return
+
+        self._tracer_provider: Optional[TracerProvider] = None
+        self._exporter: Optional[LucidicSpanExporter] = None
+        self._context_processor: Optional[ContextCaptureProcessor] = None
+        self._instrumentors: Dict[str, Any] = {}
+        self._client_registry: Dict[str, "LucidicAI"] = {}
+        self._registry_lock = threading.Lock()
+        self._init_lock = threading.Lock()
+        self._initialized = True
+
+    @property
+    def tracer_provider(self) -> Optional[TracerProvider]:
+        """Get the shared TracerProvider."""
+        return self._tracer_provider
+
+    @property
+    def is_telemetry_initialized(self) -> bool:
+        """Check if telemetry has been initialized."""
+        return self._tracer_provider is not None
+
+    def ensure_initialized(self, providers: List[str]) -> None:
+        """Initialize telemetry infrastructure if not already done.
+
+        This method is idempotent - calling it multiple times with different
+        providers will add new providers to the instrumentation.
+
+        Args:
+            providers: List of provider names to instrument (e.g., ["openai", "anthropic"])
+        """
+        with self._init_lock:
+            if self._tracer_provider is None:
+                # First initialization - create TracerProvider
+                logger.info("[Telemetry] Initializing shared TracerProvider")
+
+                self._tracer_provider = TracerProvider()
+
+                # Add context capture processor (captures session_id, parent_event_id, client_id)
+                self._context_processor = ContextCaptureProcessor()
+                self._tracer_provider.add_span_processor(self._context_processor)
+
+                # Add our exporter via BatchSpanProcessor
+                self._exporter = LucidicSpanExporter()
+                export_processor = BatchSpanProcessor(self._exporter)
+                self._tracer_provider.add_span_processor(export_processor)
+
+                logger.info("[Telemetry] TracerProvider initialized with Lucidic exporter")
+
+            # Instrument providers (idempotent - only instruments new ones)
+            if providers:
+                new_instrumentors = instrument_providers(
+                    providers,
+                    self._tracer_provider,
+                    self._instrumentors
+                )
+                self._instrumentors.update(new_instrumentors)
+
+    def register_client(self, client: "LucidicAI") -> None:
+        """Register a client with the telemetry system.
+
+        This allows the exporter to route spans to the correct client
+        based on the client_id captured in span attributes.
+
+        Args:
+            client: The LucidicAI client to register
+        """
+        with self._registry_lock:
+            self._client_registry[client._client_id] = client
+            if self._exporter:
+                self._exporter.register_client(client)
+            logger.debug(f"[Telemetry] Registered client {client._client_id[:8]}...")
+
+    def unregister_client(self, client_id: str) -> None:
+        """Unregister a client from the telemetry system.
+
+        Args:
+            client_id: The client ID to unregister
+        """
+        with self._registry_lock:
+            self._client_registry.pop(client_id, None)
+            if self._exporter:
+                self._exporter.unregister_client(client_id)
+            logger.debug(f"[Telemetry] Unregistered client {client_id[:8]}...")
+
+    def get_client(self, client_id: str) -> Optional["LucidicAI"]:
+        """Get a registered client by ID.
+
+        Args:
+            client_id: The client ID to look up
+
+        Returns:
+            The client if found, None otherwise
+        """
+        with self._registry_lock:
+            return self._client_registry.get(client_id)
+
+    def force_flush(self, timeout_millis: int = 5000) -> bool:
+        """Force flush all pending spans.
+
+        Args:
+            timeout_millis: Maximum time to wait for flush
+
+        Returns:
+            True if successful, False otherwise
+        """
+        if self._tracer_provider:
+            return self._tracer_provider.force_flush(timeout_millis=timeout_millis)
+        return True
+
+    def shutdown(self) -> None:
+        """Shutdown the telemetry system."""
+        if self._tracer_provider:
+            logger.info("[Telemetry] Shutting down telemetry system")
+            self._tracer_provider.shutdown()
+            self._tracer_provider = None
+            self._exporter = None
+            self._context_processor = None
+
+        with self._registry_lock:
+            self._client_registry.clear()
+
+
+# Module-level singleton accessor
+_telemetry_manager: Optional[TelemetryManager] = None
+
+
+def get_telemetry_manager() -> TelemetryManager:
+    """Get the singleton TelemetryManager instance.
+
+    Returns:
+        The TelemetryManager singleton
+    """
+    global _telemetry_manager
+    if _telemetry_manager is None:
+        _telemetry_manager = TelemetryManager()
+    return _telemetry_manager
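A hedged usage sketch of the new manager, using only the API visible in this hunk (`get_telemetry_manager`, `ensure_initialized`, `register_client`, `force_flush`, `unregister_client`, `shutdown`). The `FakeClient` stand-in is illustrative; a real `LucidicAI` client presumably supplies `_client_id` itself, and this is not the SDK's documented entry point.

```python
"""Sketch only, based on the module added above."""
from lucidicai.telemetry.telemetry_manager import get_telemetry_manager


class FakeClient:
    """Stand-in for a LucidicAI client; register_client() only needs _client_id here."""
    _client_id = "0123456789abcdef"


manager = get_telemetry_manager()        # process-wide singleton (double-checked locking)
manager.ensure_initialized([])           # idempotent; pass e.g. ["openai"] to also instrument providers

client = FakeClient()
manager.register_client(client)          # spans carrying this client_id route to this client

# ... run instrumented LLM calls here ...

manager.force_flush(timeout_millis=5000)  # drain the BatchSpanProcessor
manager.unregister_client(client._client_id)
manager.shutdown()
```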
lucidicai/telemetry/utils/model_pricing.py
CHANGED

@@ -4,8 +4,12 @@ logger = logging.getLogger("Lucidic")

 MODEL_PRICING = {

+    # gpt 5.x pricing
+    "gpt-5.2": {"input": 1.75, "output": 14.0},
+    "gpt-5.1": {"input": 1.25, "output": 10.0},
+
     # OpenAI GPT-5 Series (Verified 2025)
-    "gpt-5": {"input":
+    "gpt-5": {"input": 1.25, "output": 10.0},
     "gpt-5-mini": {"input": 0.250, "output": 2.0},
     "gpt-5-nano": {"input": 0.05, "output": 0.4},

@@ -44,6 +48,11 @@ MODEL_PRICING = {
     "text-davinci-003": {"input": 20.0, "output": 20.0},
     "text-davinci-002": {"input": 20.0, "output": 20.0},
     "code-davinci-002": {"input": 20.0, "output": 20.0},
+
+    # Claude 4.5 models
+    "claude-sonnet-4-5": {"input": 3.0, "output": 15.0},
+    "claude-haiku-4-5": {"input": 1.0, "output": 5.0},
+    "claude-opus-4-5": {"input": 5.0, "output": 25.0},

     # Claude 4 Models (2025) - Verified
     "claude-4-opus": {"input": 15.0, "output": 75.0},
@@ -74,6 +83,11 @@ MODEL_PRICING = {
     "claude-instant": {"input": 0.8, "output": 2.4},
     "claude-instant-1": {"input": 0.8, "output": 2.4},
     "claude-instant-1.2": {"input": 0.8, "output": 2.4},
+
+    # Gemini 3 series
+    "gemini-3-flash-preview": {"input": 0.5, "output": 3.00},
+    "gemini-3-pro-preview": {"input": 2.0, "output": 12.00},  # different pricing for different input sizes ????
+
     # Google Gemini 2.5 Series (2025) - Verified
     "gemini-2.5-pro": {"input": 1.25, "output": 10.0},  # Up to 200k tokens
@@ -200,35 +214,12 @@ PROVIDER_AVERAGES = {
 }

 def get_provider_from_model(model: str) -> str:
-    """Extract provider name from model string
-
-
-
-
-
-        return "openai"
-    elif any(gemini in model_lower for gemini in ["gemini", "google", "gemma", "palm", "bison"]):
-        return "google"
-    elif any(llama in model_lower for llama in ["llama", "meta"]):
-        return "meta"
-    elif "mistral" in model_lower:
-        return "mistral"
-    elif any(cohere in model_lower for cohere in ["command", "cohere"]):
-        return "cohere"
-    elif "deepseek" in model_lower:
-        return "deepseek"
-    elif any(qwen in model_lower for qwen in ["qwen", "qwq"]):
-        return "qwen"
-    elif any(together in model_lower for together in ["together", "redpajama"]):
-        return "together"
-    elif any(pplx in model_lower for pplx in ["pplx", "perplexity"]):
-        return "perplexity"
-    elif any(grok in model_lower for grok in ["grok", "xAI"]):
-        return "grok"
-    elif "groq" in model_lower:
-        return "groq"
-    else:
-        return "unknown"
+    """Extract provider name from model string.
+
+    This is a backward-compatible alias for detect_provider().
+    """
+    from .provider import detect_provider
+    return detect_provider(model=model)

 def normalize_model_name(model: str) -> str:
     """Normalize model name by stripping dates and provider prefixes"""
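The new entries follow the existing table shape: one dict per model with `input` and `output` rates. For context, here is an illustrative cost helper built on a two-entry excerpt of the table; the per-1M-token unit is an assumption based on how such tables are usually quoted, since this diff does not show the lookup code that consumes MODEL_PRICING.

```python
# Illustrative only: assumes MODEL_PRICING values are USD per 1M tokens.
MODEL_PRICING = {
    "gpt-5": {"input": 1.25, "output": 10.0},
    "claude-sonnet-4-5": {"input": 3.0, "output": 15.0},
}


def estimate_cost(model: str, input_tokens: int, output_tokens: int) -> float:
    """Return an estimated USD cost for one call, or 0.0 for unknown models."""
    pricing = MODEL_PRICING.get(model)
    if pricing is None:
        return 0.0
    return (input_tokens * pricing["input"] + output_tokens * pricing["output"]) / 1_000_000


# Example: 10k prompt tokens + 2k completion tokens on gpt-5
# = (10_000 * 1.25 + 2_000 * 10.0) / 1e6 ≈ $0.0325
print(estimate_cost("gpt-5", 10_000, 2_000))
```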
lucidicai/telemetry/utils/provider.py
ADDED

@@ -0,0 +1,77 @@
+"""Unified provider detection utilities.
+
+Consolidates provider detection logic from:
+- lucidic_exporter.py (_detect_provider_name)
+- model_pricing.py (get_provider_from_model)
+- litellm_bridge.py (_extract_provider)
+"""
+from typing import Any, Dict, Optional
+
+
+# Provider patterns: provider name -> list of substrings to match
+PROVIDER_PATTERNS = {
+    "anthropic": ["claude", "anthropic"],
+    "openai": ["gpt", "openai", "o1", "o3", "o4", "text-davinci", "code-davinci"],
+    "google": ["gemini", "google", "gemma", "palm", "bison"],
+    "meta": ["llama", "meta"],
+    "mistral": ["mistral", "mixtral"],
+    "cohere": ["command", "cohere"],
+    "deepseek": ["deepseek"],
+    "qwen": ["qwen", "qwq"],
+    "together": ["together", "redpajama"],
+    "perplexity": ["pplx", "perplexity"],
+    "grok": ["grok", "xai"],
+    "groq": ["groq"],
+}
+
+
+def detect_provider(
+    model: Optional[str] = None,
+    attributes: Optional[Dict[str, Any]] = None,
+) -> str:
+    """Detect LLM provider from model name or span attributes.
+
+    Checks in order:
+    1. Span attributes (gen_ai.system, service.name) - most reliable
+    2. Model prefix (e.g., "anthropic/claude-3") - common in LiteLLM
+    3. Model name pattern matching - fallback
+
+    Args:
+        model: Model name string (e.g., "gpt-4", "claude-3-opus")
+        attributes: OpenTelemetry span attributes dict
+
+    Returns:
+        Provider name string (e.g., "openai", "anthropic") or "unknown"
+    """
+    # 1. Check attributes first (most reliable source)
+    if attributes:
+        # Direct gen_ai.system attribute
+        if system := attributes.get("gen_ai.system"):
+            return str(system).lower()
+
+        # Service name may contain provider info
+        if service := attributes.get("service.name"):
+            service_lower = str(service).lower()
+            for provider in PROVIDER_PATTERNS:
+                if provider in service_lower:
+                    return provider
+
+    # 2. Check for provider prefix in model (e.g., "anthropic/claude-3")
+    if model and "/" in model:
+        prefix = model.split("/")[0].lower()
+        # Validate it's a known provider
+        if prefix in PROVIDER_PATTERNS:
+            return prefix
+        # Check if prefix matches any provider patterns
+        for provider, patterns in PROVIDER_PATTERNS.items():
+            if any(p in prefix for p in patterns):
+                return provider
+
+    # 3. Fall back to model name pattern matching
+    if model:
+        model_lower = model.lower()
+        for provider, patterns in PROVIDER_PATTERNS.items():
+            if any(pattern in model_lower for pattern in patterns):
+                return provider
+
+    return "unknown"
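A brief usage sketch of the consolidated detector, exercising its three tiers in order (span attributes, LiteLLM-style `provider/model` prefix, bare model name). The import path mirrors the new file; the model strings are just examples.

```python
from lucidicai.telemetry.utils.provider import detect_provider

# 1. Span attributes win when present (gen_ai.system is taken verbatim, lowercased)
print(detect_provider(model="gpt-4", attributes={"gen_ai.system": "OpenAI"}))  # "openai"

# 2. A LiteLLM-style "provider/model" prefix is checked next
print(detect_provider(model="anthropic/claude-3-opus"))                        # "anthropic"

# 3. Otherwise the model name is pattern-matched against PROVIDER_PATTERNS
print(detect_provider(model="gemini-2.5-pro"))                                 # "google"
print(detect_provider(model="some-internal-model"))                            # "unknown"
```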
lucidicai/utils/images.py
CHANGED

@@ -11,7 +11,7 @@ import logging
 import threading
 from typing import List, Dict, Any, Optional, Tuple, Union
 from PIL import Image
-import
+import httpx

 logger = logging.getLogger("Lucidic")

@@ -243,6 +243,25 @@ class ImageHandler:
 class ImageUploader:
     """Handle image uploads to S3."""

+    @staticmethod
+    def _get_http_client():
+        """Get an HTTP client from a registered client.
+
+        Returns:
+            HTTP client or None if no client is available.
+        """
+        try:
+            from ..sdk.shutdown_manager import get_shutdown_manager
+            manager = get_shutdown_manager()
+            with manager._client_lock:
+                # Return first available client's HTTP client
+                for client in manager._clients.values():
+                    if hasattr(client, '_http') and client._http:
+                        return client._http
+        except Exception:
+            pass
+        return None
+
     @staticmethod
     def get_presigned_url(
         agent_id: str,
@@ -251,34 +270,31 @@ class ImageUploader:
         nthscreenshot: Optional[int] = None
     ) -> Tuple[str, str, str]:
         """Get a presigned URL for uploading an image to S3.
-
+
         Args:
             agent_id: The ID of the agent
             session_id: Optional session ID for the image
             event_id: Optional event ID for the image
             nthscreenshot: Optional nth screenshot for the image
-
+
         Returns:
             Tuple of (presigned_url, bucket_name, object_key)
         """
-
-        from ..sdk.init import get_http
-
-        http = get_http()
+        http = ImageUploader._get_http_client()
         if not http:
-            raise RuntimeError("
-
+            raise RuntimeError("No LucidicAI client initialized. Create a LucidicAI client first.")
+
         request_data = {"agent_id": agent_id}
-
+
         if session_id:
             request_data["session_id"] = session_id
-
+
         if event_id:
             request_data["event_id"] = event_id
             if nthscreenshot is None:
                 raise ValueError("nth_screenshot is required when event_id is provided")
             request_data["nth_screenshot"] = nthscreenshot
-
+
         response = http.get('getpresigneduploadurl', params=request_data)
         return response['presigned_url'], response['bucket_name'], response['object_key']

@@ -299,9 +315,9 @@ class ImageUploader:
         image_obj, content_type = ImageHandler.prepare_for_upload(image_data, format)

         # Upload to S3
-        upload_response =
+        upload_response = httpx.put(
             url,
-
+            content=image_obj.getvalue() if hasattr(image_obj, 'getvalue') else image_obj,
             headers={"Content-Type": content_type}
         )
         upload_response.raise_for_status()
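The switch from the removed `get_http()` import to `_get_http_client()` (which borrows the HTTP client of whichever LucidicAI client the shutdown manager knows about) leaves the upload flow itself intact: request a presigned URL, then PUT the bytes with httpx. A condensed sketch of that flow, assuming a LucidicAI client has already been created (otherwise `get_presigned_url` raises the RuntimeError shown above); the agent and session IDs are made up.

```python
import io

import httpx
from PIL import Image

from lucidicai.utils.images import ImageUploader

# Hypothetical IDs, for illustration only.
url, bucket, key = ImageUploader.get_presigned_url(agent_id="agent-123", session_id="sess-456")

# Any image bytes will do; render a tiny placeholder JPEG in memory.
buf = io.BytesIO()
Image.new("RGB", (8, 8), "white").save(buf, format="JPEG")

resp = httpx.put(url, content=buf.getvalue(), headers={"Content-Type": "image/jpeg"})
resp.raise_for_status()
print(f"uploaded to s3://{bucket}/{key}")
```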
lucidicai/utils/queue.py
CHANGED

@@ -9,7 +9,7 @@ import json
 import queue
 import threading
 import time
-import
+import httpx
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from datetime import datetime, timezone
 from typing import Any, Dict, List, Optional, Set, Tuple
@@ -362,7 +362,7 @@ class EventQueue:
     def _upload_blob(self, blob_url: str, data: bytes) -> None:
         """Upload compressed blob to presigned URL."""
         headers = {"Content-Type": "application/json", "Content-Encoding": "gzip"}
-        resp =
+        resp = httpx.put(blob_url, content=data, headers=headers)
         resp.raise_for_status()

     @staticmethod
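For context, `_upload_blob` expects the caller to have already gzip-compressed a JSON payload; the helper just PUTs the bytes with the matching headers. A standalone sketch of that contract (the presigned URL is a placeholder, and `upload_event_blob` is a hypothetical helper, not part of the SDK):

```python
import gzip
import json

import httpx


def upload_event_blob(blob_url: str, payload: dict) -> None:
    """Gzip a JSON payload and PUT it to a presigned URL, mirroring EventQueue._upload_blob."""
    data = gzip.compress(json.dumps(payload).encode("utf-8"))
    headers = {"Content-Type": "application/json", "Content-Encoding": "gzip"}
    resp = httpx.put(blob_url, content=data, headers=headers)
    resp.raise_for_status()


# upload_event_blob("https://example-bucket.s3.amazonaws.com/presigned...", {"event": "demo"})
```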
lucidicai/utils/serialization.py
ADDED

@@ -0,0 +1,27 @@
+"""Serialization utilities for event payloads."""
+import json
+from typing import Any
+from collections.abc import Iterable
+
+
+def serialize_value(value: Any) -> Any:
+    """Serialize a value to JSON-compatible format.
+
+    Recursively converts complex objects to JSON-serializable types.
+
+    Args:
+        value: Any value to serialize
+
+    Returns:
+        JSON-compatible representation of the value
+    """
+    if isinstance(value, (str, int, float, bool)):
+        return value
+    if isinstance(value, dict):
+        return {k: serialize_value(v) for k, v in value.items()}
+    if isinstance(value, Iterable) and not isinstance(value, (str, bytes)):
+        return [serialize_value(v) for v in value]
+    try:
+        return json.loads(json.dumps(value, default=str))
+    except Exception:
+        return str(value)
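A brief usage sketch of `serialize_value`: primitives pass through, dicts and non-string iterables recurse, and anything else round-trips through `json.dumps(..., default=str)` so it degrades to a string form. The dataclass below is illustrative, not part of the SDK.

```python
from dataclasses import dataclass
from datetime import datetime, timezone

from lucidicai.utils.serialization import serialize_value


@dataclass
class Call:
    model: str
    created: datetime


print(serialize_value({"models": ("gpt-5", "claude-sonnet-4-5"), "retries": 3}))
# {'models': ['gpt-5', 'claude-sonnet-4-5'], 'retries': 3}

print(serialize_value(datetime(2025, 1, 1, tzinfo=timezone.utc)))
# '2025-01-01 00:00:00+00:00'

print(serialize_value(Call("gpt-5", datetime(2025, 1, 1, tzinfo=timezone.utc))))
# "Call(model='gpt-5', created=...)" - falls back to str() via the json.dumps default
```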
{lucidicai-2.1.2.dist-info → lucidicai-3.0.0.dist-info}/RECORD
CHANGED

@@ -1,6 +1,6 @@
-lucidicai/__init__.py,sha256=
+lucidicai/__init__.py,sha256=2tzRM0UYAxeeYzqqMrxnKyINWqKtxg3gbX6beieIeFs,1180
 lucidicai/action.py,sha256=sPRd1hTIVXDqnvG9ZXWEipUFh0bsXcE0Fm7RVqmVccM,237
-lucidicai/client.py,sha256=
+lucidicai/client.py,sha256=XxJ4NYfSi1mlgLC9XjLL9TmwpXpgvKMLzi7zzZazuRU,14180
 lucidicai/constants.py,sha256=zN8O7TjoRHRlaGa9CZUWppS73rhzKGwaEkF9XMTV0Cg,1160
 lucidicai/context.py,sha256=ruEXAndSv0gQ-YEXLlC4Fx6NNbaylfp_dZxbpwmLZSA,4622
 lucidicai/dataset.py,sha256=wu25X02JyWkht_yQabgQpGZFfzbNTxG6tf5k9ol8Amo,4005
@@ -13,19 +13,23 @@ lucidicai/image_upload.py,sha256=6SRudg-BpInM2gzMx1Yf1Rz_Zyh8inwoJ7U4pBw7ruY,380
 lucidicai/lru.py,sha256=PXiDSoUCOxjamG1QlQx6pDbQCm8h5hKAnnr_NI0PEgE,618
 lucidicai/model_pricing.py,sha256=Dxi6e0WjcIyCTkVX7K7f0pJ5rPu7nSt3lOmgzAUQl1o,12402
 lucidicai/session.py,sha256=wHnjUPo7ANzJAdz_llA4EXKeCAm0WZR0Ij9dNvdCodY,1729
+lucidicai/session_obj.py,sha256=bSGTJWwhdiCJj_n-LG7TnO5W8UyRPGwPrJ1U9iABMLo,11846
 lucidicai/singleton.py,sha256=SKiNBgt_Wb5cCWbMt3IWjRAQw3v153LTRgqvDj8poF8,1457
 lucidicai/state.py,sha256=4Tb1X6l2or6w_e62FYSuEeghAv3xXm5gquKwzCpvdok,235
 lucidicai/step.py,sha256=_oBIyTBZBvNkUkYHIrwWd75KMSlMtR9Ws2Lo71Lyff8,2522
 lucidicai/streaming.py,sha256=QOLAzhwxetvx711J8VcphY5kXWPJz9XEBJrmHveRKMc,9796
 lucidicai/api/__init__.py,sha256=UOYuFZupG0TgzMAxbLNgpodDXhDRXBgMva8ZblgBN9Y,31
-lucidicai/api/client.py,sha256=
-lucidicai/api/resources/__init__.py,sha256=
-lucidicai/api/resources/dataset.py,sha256=
-lucidicai/api/resources/event.py,sha256=
-lucidicai/api/resources/
+lucidicai/api/client.py,sha256=0Ia5cXo5OKifhP-63TPAWPuy2bWCzR9VN4BpWIMo34w,13212
+lucidicai/api/resources/__init__.py,sha256=DDgviDW3Su-G1ofkZGlaJMc2pzYJqrbBnEruNg1whCM,416
+lucidicai/api/resources/dataset.py,sha256=I6g9ah4vaqEH1jyeouBn7xvC0oAuDNPeyl-bmtNj-T0,17400
+lucidicai/api/resources/event.py,sha256=GTIU5sIbLNTWAHk4rB120xWTRkhnraz9JNfamEygyNo,14267
+lucidicai/api/resources/experiment.py,sha256=fOIKJ5d89bHJBVZ3wjbhY_6XF3kLHz9TE3BVPA5pNpA,3563
+lucidicai/api/resources/feature_flag.py,sha256=ii412DIkZCEAhrXdGydcpQKveqGlFq4NlgdmWQnU83c,2259
+lucidicai/api/resources/prompt.py,sha256=tdMVTaLc3DDRbd_R8Xd5mkvpdwQONfr8OwkJRTE0atE,2495
+lucidicai/api/resources/session.py,sha256=jW_bftHdunhLHl_3-k0nqB5FrtLhlFeCF0tMFE82nNw,20761
 lucidicai/core/__init__.py,sha256=b0YQkd8190Y_GgwUcmf0tOiSLARd7L4kq4jwfhhGAyI,39
-lucidicai/core/config.py,sha256=
-lucidicai/core/errors.py,sha256=
+lucidicai/core/config.py,sha256=06XZPOCpB8YY9nzqt7deR3CP6MAQIKCTZYdSzscAPDY,8730
+lucidicai/core/errors.py,sha256=bYSRPqadXUCPadVLb-2fj63CB6jlAnfDeu2azHB2z8M,2137
 lucidicai/core/types.py,sha256=KabcTBQe7SemigccKfJSDiJmjSJDJJvvtefSd8pfrJI,702
 lucidicai/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lucidicai/providers/anthropic_handler.py,sha256=GZEa4QOrjZ9ftu_qTwY3L410HwKzkXgN7omYRsEQ4LU,10174
@@ -45,26 +49,28 @@ lucidicai/providers/pydantic_ai_handler.py,sha256=Yhd9VTJhq292ZzJF04O_jYGRh-1bzs
 lucidicai/providers/text_storage.py,sha256=L62MMJ8E23TDqDTUv2aRntdKMCItsXV7XjY6cFwx2DE,1503
 lucidicai/providers/universal_image_interceptor.py,sha256=7d-hw4xihRwvvA1AP8-vqYNChtmVXKmn09MN4pDS7KQ,12126
 lucidicai/sdk/__init__.py,sha256=UrkV9FYbZkBxaX9qwxGbCJdXp-JqMpn0_u-huO9Y-ec,32
-lucidicai/sdk/
-lucidicai/sdk/
+lucidicai/sdk/bound_decorators.py,sha256=SzmNZwORhArXL9D8T8BpPltT-jQ-tVpy71t8bJWOIU0,12151
+lucidicai/sdk/context.py,sha256=y58_C9JlBML_xFPUbmAn6WuxsnM03bECJ2pKBWz0TuQ,10386
+lucidicai/sdk/decorators.py,sha256=-MBMCn-vED2x3ioWD3MYo0_FfMdr6LbfpekURH7Cpvw,15660
 lucidicai/sdk/error_boundary.py,sha256=IPr5wS9rS7ZQNgEaBwK53UaixAm6L2rijKKFfxcxjUI,9190
-lucidicai/sdk/event.py,sha256=
-lucidicai/sdk/event_builder.py,sha256=
-lucidicai/sdk/init.py,sha256=
-lucidicai/sdk/
+lucidicai/sdk/event.py,sha256=hpBJfqKteOuQKoZfhxQfbeVOrdmR8wCcQc8P6658VRo,22658
+lucidicai/sdk/event_builder.py,sha256=Z376RKlStM7IBcAm5LKgTDh3x_fjmcvkWltUrjZ6RAc,10304
+lucidicai/sdk/init.py,sha256=9Cr6vFI9_kHBr6eUVR-MTi4xxjzOHLB0TRS1Q62J2mI,4494
+lucidicai/sdk/session.py,sha256=eo1aeDjW5xA_3TUi-nN3i13wBCCqrtBx6MSy8ILhjps,17153
+lucidicai/sdk/shutdown_manager.py,sha256=YKM235RynP7s6qAD3YqtG6e75sjcr1nnL9MkmWlDMq4,14158
 lucidicai/sdk/features/__init__.py,sha256=23KUF2EZBzsaH9JUFDGNXZb_3PSfc35VZfD59gAfyR0,26
-lucidicai/sdk/features/dataset.py,sha256=
-lucidicai/sdk/features/feature_flag.py,sha256=
+lucidicai/sdk/features/dataset.py,sha256=yd1zGZ-HNs6QouNCvjOB1kMAdorP_mCvJ1gCk4XL7GY,23998
+lucidicai/sdk/features/feature_flag.py,sha256=bF05dl0xMi8eyXS8rFaA-cSW5BtDY8oUI6B_xmX6ziM,26864
 lucidicai/telemetry/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lucidicai/telemetry/base_provider.py,sha256=nrZVr4Y9xcAiMn4uAN3t3k6DlHNTvlXrA4qQg7lANOQ,544
 lucidicai/telemetry/context_bridge.py,sha256=NwyclZvPcZHZtIvLSrY3oO8WQ_J1JSuHWIr36gxA7xk,2989
-lucidicai/telemetry/context_capture_processor.py,sha256=
-lucidicai/telemetry/extract.py,sha256=
-lucidicai/telemetry/litellm_bridge.py,sha256=
-lucidicai/telemetry/lucidic_exporter.py,sha256=
+lucidicai/telemetry/context_capture_processor.py,sha256=5zGHp6FT9V9IxgEaTYjqy5qci0tL2g58O1cyIkBStAg,4127
+lucidicai/telemetry/extract.py,sha256=mSv6uhJubUWVcd8X1Fd8TYYEUamNTUFTgC-Fzt0oKUk,6771
+lucidicai/telemetry/litellm_bridge.py,sha256=1gZhnMXdAMFnlpzz_WhdbpqHraEWk9LSAHAEHEFTs2U,15022
+lucidicai/telemetry/lucidic_exporter.py,sha256=Z22vi3aRIsyGGgK3idhweAJQUSTymKVn28quXHdXUSU,14195
 lucidicai/telemetry/lucidic_span_processor.py,sha256=-jo7Muuslo3ZCSAysLsDGBqJijQSpIOvJHPbPNjP4iQ,31029
-lucidicai/telemetry/openai_agents_instrumentor.py,sha256=
-lucidicai/telemetry/openai_patch.py,sha256=
+lucidicai/telemetry/openai_agents_instrumentor.py,sha256=NUSPB3ROBhYrt9ZvvPD1yq2RJ3UoYQ536Y_TSdQHrbk,12773
+lucidicai/telemetry/openai_patch.py,sha256=OPa8T5-WYFUG-yVKMaJS3QJM05l-D3y_kHGRhA0ffWM,17168
 lucidicai/telemetry/openai_uninstrument.py,sha256=zELpoz2BU8O-rdHrg_7NuvjdNoY6swgoqVm5NtTCJRQ,3456
 lucidicai/telemetry/opentelemetry_converter.py,sha256=xOHCqoTyO4hUkL6k7fxy84PbljPpYep6ET9ZqbkJehc,17665
 lucidicai/telemetry/otel_handlers.py,sha256=OCzXuYog6AuwjI4eXy5Sk40DUehyz48QOxuOujXnEVU,20859
@@ -72,16 +78,19 @@ lucidicai/telemetry/otel_init.py,sha256=hjUOX8nEBLrDOuh0UTKFfG-C98yFZHTiP8ql59bm
 lucidicai/telemetry/otel_provider.py,sha256=e5XcpQTd_a5UrMAq-EQcJ0zUJpO7NO16T-BphVUigR4,7513
 lucidicai/telemetry/pydantic_ai_handler.py,sha256=WPa3tFcVgVnPPO3AxcNOTbNkmODLgNOrU2_3GVtWqUw,28261
 lucidicai/telemetry/telemetry_init.py,sha256=YpjcYDcqlWpUDDz76-x2v4K0yz_ToEpuaDz_Hypbr2w,9554
+lucidicai/telemetry/telemetry_manager.py,sha256=XiNv-etC6ZRMyYav0v8HqURD2PGaXbwlY0O86lxfWIk,6587
 lucidicai/telemetry/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lucidicai/telemetry/utils/image_storage.py,sha256=4Z59ZpVexr7-lcExfr8GsqXe0y2VZmr8Yjwa-3DeOxU,1457
-lucidicai/telemetry/utils/model_pricing.py,sha256=
+lucidicai/telemetry/utils/model_pricing.py,sha256=_5izJRMS1moJNccO-N3pxv3VDWUiq9LVgDwZK4LHn9M,11897
+lucidicai/telemetry/utils/provider.py,sha256=Ca6C7n4TGTLfnRgC1f7zr_-aZfg87vCjEjRJkMJyFwE,2682
 lucidicai/telemetry/utils/text_storage.py,sha256=L62MMJ8E23TDqDTUv2aRntdKMCItsXV7XjY6cFwx2DE,1503
 lucidicai/telemetry/utils/universal_image_interceptor.py,sha256=vARgMk1hVSF--zfi5b8qBpJJOESuD17YlH9xqxmB9Uw,15954
 lucidicai/utils/__init__.py,sha256=ZiGtmJaF0ph9iIFIgQiAreVuYM_1o7qu9VySK1NblTw,22
-lucidicai/utils/images.py,sha256=
+lucidicai/utils/images.py,sha256=z8mlIKgFfrIbuk-l4L2rB62uw_uPO79sHPXPY7eLu2A,12891
 lucidicai/utils/logger.py,sha256=R3B3gSee64F6UVHUrShihBq_O7W7bgfrBiVDXTO3Isg,4777
-lucidicai/utils/queue.py,sha256=
-lucidicai
-lucidicai-
-lucidicai-
-lucidicai-
+lucidicai/utils/queue.py,sha256=8DQwnGw7pINEJ0dNSkB0PhdPW-iBQQ-YZg23poe4umE,17323
+lucidicai/utils/serialization.py,sha256=KdOREZd7XBxFBAZ86DePMfYPzSVyKr4RcgUa82aFxrs,820
+lucidicai-3.0.0.dist-info/METADATA,sha256=KY6XOzFlxTySIfjrzQvPa3SI50ecJp0_86qEq-Or4xw,902
+lucidicai-3.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+lucidicai-3.0.0.dist-info/top_level.txt,sha256=vSSdM3lclF4I5tyVC0xxUk8eIRnnYXMe1hW-eO91HUo,10
+lucidicai-3.0.0.dist-info/RECORD,,

{lucidicai-2.1.2.dist-info → lucidicai-3.0.0.dist-info}/WHEEL
File without changes

{lucidicai-2.1.2.dist-info → lucidicai-3.0.0.dist-info}/top_level.txt
File without changes