lucidicai 2.1.3__py3-none-any.whl → 3.0.0__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (38)
  1. lucidicai/__init__.py +32 -390
  2. lucidicai/api/client.py +31 -2
  3. lucidicai/api/resources/__init__.py +16 -1
  4. lucidicai/api/resources/dataset.py +422 -82
  5. lucidicai/api/resources/event.py +399 -27
  6. lucidicai/api/resources/experiment.py +108 -0
  7. lucidicai/api/resources/feature_flag.py +78 -0
  8. lucidicai/api/resources/prompt.py +84 -0
  9. lucidicai/api/resources/session.py +545 -38
  10. lucidicai/client.py +395 -480
  11. lucidicai/core/config.py +73 -48
  12. lucidicai/core/errors.py +3 -3
  13. lucidicai/sdk/bound_decorators.py +321 -0
  14. lucidicai/sdk/context.py +20 -2
  15. lucidicai/sdk/decorators.py +283 -74
  16. lucidicai/sdk/event.py +538 -36
  17. lucidicai/sdk/event_builder.py +2 -4
  18. lucidicai/sdk/features/dataset.py +391 -1
  19. lucidicai/sdk/features/feature_flag.py +344 -3
  20. lucidicai/sdk/init.py +49 -347
  21. lucidicai/sdk/session.py +502 -0
  22. lucidicai/sdk/shutdown_manager.py +103 -46
  23. lucidicai/session_obj.py +321 -0
  24. lucidicai/telemetry/context_capture_processor.py +13 -6
  25. lucidicai/telemetry/extract.py +60 -63
  26. lucidicai/telemetry/litellm_bridge.py +3 -44
  27. lucidicai/telemetry/lucidic_exporter.py +143 -131
  28. lucidicai/telemetry/openai_agents_instrumentor.py +2 -2
  29. lucidicai/telemetry/openai_patch.py +7 -6
  30. lucidicai/telemetry/telemetry_manager.py +183 -0
  31. lucidicai/telemetry/utils/model_pricing.py +21 -30
  32. lucidicai/telemetry/utils/provider.py +77 -0
  33. lucidicai/utils/images.py +27 -11
  34. lucidicai/utils/serialization.py +27 -0
  35. {lucidicai-2.1.3.dist-info → lucidicai-3.0.0.dist-info}/METADATA +1 -1
  36. {lucidicai-2.1.3.dist-info → lucidicai-3.0.0.dist-info}/RECORD +38 -29
  37. {lucidicai-2.1.3.dist-info → lucidicai-3.0.0.dist-info}/WHEEL +0 -0
  38. {lucidicai-2.1.3.dist-info → lucidicai-3.0.0.dist-info}/top_level.txt +0 -0
lucidicai/core/config.py CHANGED
@@ -16,22 +16,71 @@ class Environment(Enum):
     DEBUG = "debug"
 
 
+class Region(Enum):
+    """Supported deployment regions"""
+    US = "us"
+    INDIA = "india"
+
+    @classmethod
+    def from_string(cls, value: str) -> 'Region':
+        """Convert string to Region enum, case-insensitive."""
+        value_lower = value.lower().strip()
+        for region in cls:
+            if region.value == value_lower:
+                return region
+        valid_regions = [r.value for r in cls]
+        raise ValueError(f"Invalid region '{value}'. Valid regions: {', '.join(valid_regions)}")
+
+
+# Region to URL mapping
+REGION_URLS: Dict[str, str] = {
+    Region.US: "https://backend.lucidic.ai/api",
+    Region.INDIA: "https://in.backend.lucidic.ai/api",
+}
+DEFAULT_REGION = Region.US
+DEBUG_URL = "http://localhost:8000/api"
+
+
 @dataclass
 class NetworkConfig:
     """Network and connection settings"""
     base_url: str = "https://backend.lucidic.ai/api"
+    region: Optional[Region] = None
     timeout: int = 30
     max_retries: int = 3
     backoff_factor: float = 0.5
     connection_pool_size: int = 20
     connection_pool_maxsize: int = 100
-
+
     @classmethod
-    def from_env(cls) -> 'NetworkConfig':
-        """Load network configuration from environment variables"""
-        debug = os.getenv("LUCIDIC_DEBUG", "False").lower() == "true"
+    def from_env(cls, region: Optional[str] = None, debug: bool = False) -> 'NetworkConfig':
+        """Load network configuration from environment variables.
+
+        Priority: debug > region argument > LUCIDIC_REGION env var > default
+
+        Args:
+            region: Region string override (e.g., "us", "india")
+            debug: If True, use localhost URL regardless of region
+        """
+        # If debug mode, use localhost (ignores region)
+        if debug:
+            return cls(
+                base_url=DEBUG_URL,
+                region=None,
+                timeout=int(os.getenv("LUCIDIC_TIMEOUT", "30")),
+                max_retries=int(os.getenv("LUCIDIC_MAX_RETRIES", "3")),
+                backoff_factor=float(os.getenv("LUCIDIC_BACKOFF_FACTOR", "0.5")),
+                connection_pool_size=int(os.getenv("LUCIDIC_CONNECTION_POOL_SIZE", "20")),
+                connection_pool_maxsize=int(os.getenv("LUCIDIC_CONNECTION_POOL_MAXSIZE", "100"))
+            )
+
+        # Resolve region: argument > env var > default
+        region_str = region or os.getenv("LUCIDIC_REGION")
+        resolved_region = Region.from_string(region_str) if region_str else DEFAULT_REGION
+
         return cls(
-            base_url="http://localhost:8000/api" if debug else "https://backend.lucidic.ai/api",
+            base_url=REGION_URLS[resolved_region],
+            region=resolved_region,
             timeout=int(os.getenv("LUCIDIC_TIMEOUT", "30")),
             max_retries=int(os.getenv("LUCIDIC_MAX_RETRIES", "3")),
             backoff_factor=float(os.getenv("LUCIDIC_BACKOFF_FACTOR", "0.5")),
@@ -40,31 +89,6 @@ class NetworkConfig:
         )
 
 
-@dataclass
-class EventQueueConfig:
-    """Event queue processing settings"""
-    max_queue_size: int = 100000
-    flush_interval_ms: int = 100
-    flush_at_count: int = 100
-    blob_threshold: int = 65536
-    daemon_mode: bool = True
-    max_parallel_workers: int = 10
-    retry_failed: bool = True
-
-    @classmethod
-    def from_env(cls) -> 'EventQueueConfig':
-        """Load event queue configuration from environment variables"""
-        return cls(
-            max_queue_size=int(os.getenv("LUCIDIC_MAX_QUEUE_SIZE", "100000")),
-            flush_interval_ms=int(os.getenv("LUCIDIC_FLUSH_INTERVAL", "1000")),
-            flush_at_count=int(os.getenv("LUCIDIC_FLUSH_AT", "50")),
-            blob_threshold=int(os.getenv("LUCIDIC_BLOB_THRESHOLD", "65536")),
-            daemon_mode=os.getenv("LUCIDIC_DAEMON_QUEUE", "true").lower() == "true",
-            max_parallel_workers=int(os.getenv("LUCIDIC_MAX_PARALLEL", "10")),
-            retry_failed=os.getenv("LUCIDIC_RETRY_FAILED", "true").lower() == "true"
-        )
-
-
 @dataclass
 class ErrorHandlingConfig:
     """Error handling and suppression settings"""
@@ -110,9 +134,11 @@ class SDKConfig:
     auto_end: bool = True
     production_monitoring: bool = False
 
+    # Blob threshold for large event payloads (default 64KB)
+    blob_threshold: int = 65536
+
     # Sub-configurations
     network: NetworkConfig = field(default_factory=NetworkConfig)
-    event_queue: EventQueueConfig = field(default_factory=EventQueueConfig)
     error_handling: ErrorHandlingConfig = field(default_factory=ErrorHandlingConfig)
     telemetry: TelemetryConfig = field(default_factory=TelemetryConfig)
 
@@ -121,26 +147,31 @@ class SDKConfig:
     debug: bool = False
 
     @classmethod
-    def from_env(cls, **overrides) -> 'SDKConfig':
-        """Create configuration from environment variables with optional overrides"""
+    def from_env(cls, region: Optional[str] = None, **overrides) -> 'SDKConfig':
+        """Create configuration from environment variables with optional overrides.
+
+        Args:
+            region: Region string (e.g., "us", "india"). Priority: arg > env var > default
+            **overrides: Additional configuration overrides
+        """
         from dotenv import load_dotenv
         load_dotenv()
-
+
         debug = os.getenv("LUCIDIC_DEBUG", "False").lower() == "true"
-
+
         config = cls(
             api_key=os.getenv("LUCIDIC_API_KEY"),
            agent_id=os.getenv("LUCIDIC_AGENT_ID"),
            auto_end=os.getenv("LUCIDIC_AUTO_END", "true").lower() == "true",
            production_monitoring=False,
-            network=NetworkConfig.from_env(),
-            event_queue=EventQueueConfig.from_env(),
+            blob_threshold=int(os.getenv("LUCIDIC_BLOB_THRESHOLD", "65536")),
+            network=NetworkConfig.from_env(region=region, debug=debug),
            error_handling=ErrorHandlingConfig.from_env(),
            telemetry=TelemetryConfig.from_env(),
            environment=Environment.DEBUG if debug else Environment.PRODUCTION,
            debug=debug
        )
-
+
        # Apply any overrides
        config.update(**overrides)
        return config
@@ -165,11 +196,8 @@ class SDKConfig:
        if not self.agent_id:
            errors.append("Agent ID is required (LUCIDIC_AGENT_ID)")
 
-        if self.event_queue.max_parallel_workers < 1:
-            errors.append("Max parallel workers must be at least 1")
-
-        if self.event_queue.flush_interval_ms < 10:
-            errors.append("Flush interval must be at least 10ms")
+        if self.blob_threshold < 1024:
+            errors.append("Blob threshold must be at least 1024 bytes")
 
        return errors
 
@@ -181,17 +209,14 @@ class SDKConfig:
            "environment": self.environment.value,
            "debug": self.debug,
            "auto_end": self.auto_end,
+            "blob_threshold": self.blob_threshold,
            "network": {
                "base_url": self.network.base_url,
+                "region": self.network.region.value if self.network.region else None,
                "timeout": self.network.timeout,
                "max_retries": self.network.max_retries,
                "connection_pool_size": self.network.connection_pool_size
            },
-            "event_queue": {
-                "max_workers": self.event_queue.max_parallel_workers,
-                "flush_interval_ms": self.event_queue.flush_interval_ms,
-                "flush_at_count": self.event_queue.flush_at_count
-            },
            "error_handling": {
                "suppress": self.error_handling.suppress_errors,
                "cleanup": self.error_handling.cleanup_on_error
lucidicai/core/errors.py CHANGED
@@ -37,9 +37,9 @@ class FeatureFlagError(LucidicError):
 
 def install_error_handler():
     """Install global handler to create ERROR_TRACEBACK events for uncaught exceptions."""
-    from .sdk.event import create_event
-    from .sdk.init import get_session_id
-    from .context import current_parent_event_id
+    from ..sdk.event import create_event
+    from ..sdk.init import get_session_id
+    from ..sdk.context import current_parent_event_id
     def handle_exception(exc_type, exc_value, exc_traceback):
         try:
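
The import paths are corrected from `.sdk.*` / `.context` to `..sdk.*` because `errors.py` lives in `lucidicai/core/`, one package level below `lucidicai/sdk/`. For readers unfamiliar with the kind of wiring `install_error_handler()` implies, here is a generic sketch of chaining `sys.excepthook`; `report_event` and `install_example_handler` are illustrative stand-ins, not SDK API (the real handler builds an ERROR_TRACEBACK event via `create_event`):

```python
# Generic excepthook-chaining sketch: report uncaught exceptions, then
# defer to whatever hook was installed before.
import sys
import traceback
from typing import Callable

def install_example_handler(report_event: Callable[[str], None]) -> None:
    previous_hook = sys.excepthook

    def handle_exception(exc_type, exc_value, exc_traceback):
        try:
            tb = "".join(traceback.format_exception(exc_type, exc_value, exc_traceback))
            report_event(tb)
        except Exception:
            pass  # never let the error handler itself crash
        previous_hook(exc_type, exc_value, exc_traceback)

    sys.excepthook = handle_exception

# install_example_handler(print) would report the traceback of any
# uncaught exception before the previous hook handles it.
```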
lucidicai/sdk/bound_decorators.py ADDED
@@ -0,0 +1,321 @@
+"""Client-bound decorators for the Lucidic SDK.
+
+These decorators are bound to a specific LucidicAI client instance and only
+track events when the current context belongs to that client.
+"""
+
+import functools
+import inspect
+import json
+import traceback
+from datetime import datetime
+import uuid
+from typing import Any, Callable, Optional, TypeVar, TYPE_CHECKING
+from collections.abc import Iterable
+
+from .context import (
+    current_session_id,
+    current_client,
+    current_parent_event_id,
+    event_context,
+    event_context_async,
+)
+from ..utils.logger import debug, error as log_error, truncate_id
+
+if TYPE_CHECKING:
+    from ..client import LucidicAI
+
+F = TypeVar("F", bound=Callable[..., Any])
+
+
+def _serialize(value: Any) -> Any:
+    """Serialize a value to JSON-compatible format."""
+    if isinstance(value, (str, int, float, bool)):
+        return value
+    if isinstance(value, dict):
+        return {k: _serialize(v) for k, v in value.items()}
+    if isinstance(value, Iterable) and not isinstance(value, (str, bytes)):
+        return [_serialize(v) for v in value]
+    try:
+        return json.loads(json.dumps(value, default=str))
+    except Exception:
+        return str(value)
+
+
+def create_bound_event_decorator(
+    client: "LucidicAI", **decorator_kwargs
+) -> Callable[[F], F]:
+    """Create an event decorator bound to a specific client.
+
+    This decorator only tracks events when:
+    1. The current context has this client active
+    2. There is an active session for this client
+
+    Args:
+        client: The LucidicAI client to bind this decorator to
+        **decorator_kwargs: Additional keyword arguments passed to the event
+
+    Returns:
+        A decorator function that wraps the target function
+    """
+
+    def decorator(func: F) -> F:
+        @functools.wraps(func)
+        def sync_wrapper(*args, **kwargs):
+            # Check if this client is active in the current context
+            active_client = current_client.get(None)
+            if active_client is not client:
+                # Not our client - just execute the function
+                return func(*args, **kwargs)
+
+            session_id = current_session_id.get(None)
+            if not session_id:
+                # No active session - just execute the function
+                return func(*args, **kwargs)
+
+            # Build arguments snapshot
+            sig = inspect.signature(func)
+            bound = sig.bind(*args, **kwargs)
+            bound.apply_defaults()
+            args_dict = {
+                name: _serialize(val) for name, val in bound.arguments.items()
+            }
+
+            parent_id = current_parent_event_id.get(None)
+            pre_event_id = str(uuid.uuid4())
+            debug(
+                f"[Decorator] Starting {func.__name__} with event ID {truncate_id(pre_event_id)}, parent: {truncate_id(parent_id)}"
+            )
+            start_time = datetime.now().astimezone()
+            result = None
+            error: Optional[BaseException] = None
+
+            try:
+                with event_context(pre_event_id):
+                    # Also inject into OpenTelemetry context for instrumentors
+                    from ..telemetry.context_bridge import inject_lucidic_context
+                    from opentelemetry import context as otel_context
+
+                    otel_ctx = inject_lucidic_context()
+                    token = otel_context.attach(otel_ctx)
+                    try:
+                        result = func(*args, **kwargs)
+                    finally:
+                        otel_context.detach(token)
+                    return result
+            except Exception as e:
+                error = e
+                log_error(f"[Decorator] {func.__name__} raised exception: {e}")
+                raise
+            finally:
+                try:
+                    # Store error as return value with type information
+                    if error:
+                        return_val = {
+                            "error": str(error),
+                            "error_type": type(error).__name__,
+                        }
+
+                        # Create a separate error_traceback event for the exception
+                        try:
+                            _emit_event_to_client(
+                                client,
+                                session_id,
+                                type="error_traceback",
+                                error=str(error),
+                                traceback=traceback.format_exc(),
+                                parent_event_id=pre_event_id,
+                            )
+                            debug(
+                                f"[Decorator] Created error_traceback event for {func.__name__}"
+                            )
+                        except Exception as e:
+                            debug(
+                                f"[Decorator] Failed to create error_traceback event: {e}"
+                            )
+                    else:
+                        return_val = _serialize(result)
+
+                    _emit_event_to_client(
+                        client,
+                        session_id,
+                        type="function_call",
+                        event_id=pre_event_id,
+                        parent_event_id=parent_id,
+                        function_name=func.__name__,
+                        arguments=args_dict,
+                        return_value=return_val,
+                        error=str(error) if error else None,
+                        duration=(datetime.now().astimezone() - start_time).total_seconds(),
+                        **decorator_kwargs,
+                    )
+                    debug(
+                        f"[Decorator] Created function_call event for {func.__name__}"
+                    )
+                except Exception as e:
+                    log_error(f"[Decorator] Failed to create function_call event: {e}")
+
+        @functools.wraps(func)
+        async def async_wrapper(*args, **kwargs):
+            # Check if this client is active in the current context
+            active_client = current_client.get(None)
+            if active_client is not client:
+                # Not our client - just execute the function
+                return await func(*args, **kwargs)
+
+            session_id = current_session_id.get(None)
+            if not session_id:
+                # No active session - just execute the function
+                return await func(*args, **kwargs)
+
+            # Build arguments snapshot
+            sig = inspect.signature(func)
+            bound = sig.bind(*args, **kwargs)
+            bound.apply_defaults()
+            args_dict = {
+                name: _serialize(val) for name, val in bound.arguments.items()
+            }
+
+            parent_id = current_parent_event_id.get(None)
+            pre_event_id = str(uuid.uuid4())
+            debug(
+                f"[Decorator] Starting {func.__name__} with event ID {truncate_id(pre_event_id)}, parent: {truncate_id(parent_id)}"
+            )
+            start_time = datetime.now().astimezone()
+            result = None
+            error: Optional[BaseException] = None
+
+            try:
+                async with event_context_async(pre_event_id):
+                    # Also inject into OpenTelemetry context for instrumentors
+                    from ..telemetry.context_bridge import inject_lucidic_context
+                    from opentelemetry import context as otel_context
+
+                    otel_ctx = inject_lucidic_context()
+                    token = otel_context.attach(otel_ctx)
+                    try:
+                        result = await func(*args, **kwargs)
+                    finally:
+                        otel_context.detach(token)
+                    return result
+            except Exception as e:
+                error = e
+                log_error(f"[Decorator] {func.__name__} raised exception: {e}")
+                raise
+            finally:
+                try:
+                    # Store error as return value with type information
+                    if error:
+                        return_val = {
+                            "error": str(error),
+                            "error_type": type(error).__name__,
+                        }
+
+                        # Create a separate error_traceback event for the exception
+                        try:
+                            await _aemit_event_to_client(
+                                client,
+                                session_id,
+                                type="error_traceback",
+                                error=str(error),
+                                traceback=traceback.format_exc(),
+                                parent_event_id=pre_event_id,
+                            )
+                            debug(
+                                f"[Decorator] Created error_traceback event for {func.__name__}"
+                            )
+                        except Exception as e:
+                            debug(
+                                f"[Decorator] Failed to create error_traceback event: {e}"
+                            )
+                    else:
+                        return_val = _serialize(result)
+
+                    await _aemit_event_to_client(
+                        client,
+                        session_id,
+                        type="function_call",
+                        event_id=pre_event_id,
+                        parent_event_id=parent_id,
+                        function_name=func.__name__,
+                        arguments=args_dict,
+                        return_value=return_val,
+                        error=str(error) if error else None,
+                        duration=(datetime.now().astimezone() - start_time).total_seconds(),
+                        **decorator_kwargs,
+                    )
+                    debug(
+                        f"[Decorator] Created function_call event for {func.__name__}"
+                    )
+                except Exception as e:
+                    log_error(f"[Decorator] Failed to create function_call event: {e}")
+
+        if inspect.iscoroutinefunction(func):
+            return async_wrapper  # type: ignore
+        return sync_wrapper  # type: ignore
+
+    return decorator
+
+
+def _emit_event_to_client(
+    client: "LucidicAI",
+    session_id: str,
+    type: str,
+    **event_data,
+) -> Optional[str]:
+    """Emit an event using the client's event resource.
+
+    Args:
+        client: The LucidicAI client
+        session_id: The session ID to associate the event with
+        type: The event type (e.g., "function_call", "error_traceback")
+        **event_data: Additional event data
+
+    Returns:
+        The event ID if created successfully, None otherwise
+    """
+    try:
+        event_payload = {
+            "type": type,
+            "session_id": session_id,
+            **event_data,
+        }
+
+        # Use the client's event resource
+        response = client._resources["events"].create_event(event_payload)
+        return response.get("event_id") if response else None
+    except Exception as e:
+        debug(f"[Decorator] Failed to emit event: {e}")
+        return None
+
+
+async def _aemit_event_to_client(
+    client: "LucidicAI",
+    session_id: str,
+    type: str,
+    **event_data,
+) -> Optional[str]:
+    """Emit an event using the client's event resource (async).
+
+    Args:
+        client: The LucidicAI client
+        session_id: The session ID to associate the event with
+        type: The event type (e.g., "function_call", "error_traceback")
+        **event_data: Additional event data
+
+    Returns:
+        The event ID if created successfully, None otherwise
+    """
+    try:
+        event_payload = {
+            "type": type,
+            "session_id": session_id,
+            **event_data,
+        }
+
+        # Use the client's event resource
+        response = await client._resources["events"].acreate_event(event_payload)
+        return response.get("event_id") if response else None
+    except Exception as e:
+        debug(f"[Decorator] Failed to emit async event: {e}")
+        return None
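
`bound_decorators.py` is new in 3.0.0: the wrapper is a no-op unless the decorated call runs with this specific client and an active session bound to the current context, which is what makes concurrent multi-client setups safe. A rough usage sketch; how the client is constructed and how a session becomes active are assumptions (see `client.py` and `sdk/session.py` elsewhere in this release), and only `create_bound_event_decorator` itself comes from this file:

```python
# Illustrative use of the bound decorator factory added in this file.
from lucidicai.client import LucidicAI
from lucidicai.sdk.bound_decorators import create_bound_event_decorator

client = LucidicAI()  # assumed constructor; real init likely needs api_key/agent_id
event = create_bound_event_decorator(client)

@event
def charge(customer_id: str, amount_cents: int) -> dict:
    return {"customer_id": customer_id, "charged": amount_cents}

# With this client bound to the current context and a session active,
# the call is recorded as a function_call event (arguments, return value,
# duration, parent event); otherwise charge() simply runs untracked.
charge("cus_123", 4200)
```

Because the check is identity-based (`active_client is not client`), a decorator created by one client never records calls made while another client is active.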
lucidicai/sdk/context.py CHANGED
@@ -8,11 +8,14 @@ session under concurrency.
 
 from contextlib import contextmanager, asynccontextmanager
 import contextvars
-from typing import Optional, Iterator, AsyncIterator, Callable, Any, Dict
+from typing import Optional, Iterator, AsyncIterator, Callable, Any, Dict, TYPE_CHECKING
 import logging
 import os
 import threading
 
+if TYPE_CHECKING:
+    from ..client import LucidicAI
+
 
 # Context variable for the active Lucidic session id
 current_session_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar(
@@ -20,12 +23,27 @@ current_session_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextV
 )
 
 
-# NEW: Context variable for parent event nesting
+# Context variable for parent event nesting
 current_parent_event_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar(
     "lucidic.parent_event_id", default=None
 )
 
 
+# Context variable for the active client (for multi-client support)
+current_client: contextvars.ContextVar[Optional["LucidicAI"]] = contextvars.ContextVar(
+    "lucidic.client", default=None
+)
+
+
+def get_active_client() -> Optional["LucidicAI"]:
+    """Get the currently active LucidicAI client from context.
+
+    Returns:
+        The active client, or None if no client is bound to the current context.
+    """
+    return current_client.get(None)
+
+
 def set_active_session(session_id: Optional[str]) -> None:
     """Bind the given session id to the current execution context.
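
`current_client` joins the existing context variables so each thread or asyncio task resolves its own client, which underpins the multi-client support referenced throughout this release. A small sketch of that isolation, setting the variable directly for illustration only (the SDK presumably manages it when a client or session is activated) and using plain `object()` stand-ins instead of real `LucidicAI` clients:

```python
# Each asyncio task gets its own copy of the context, so current_client
# bindings made in one task are invisible to the other.
import asyncio

from lucidicai.sdk.context import current_client, get_active_client

async def worker(name: str, client: object) -> None:
    token = current_client.set(client)   # bind this task's client
    try:
        await asyncio.sleep(0)           # yield so the two tasks interleave
        assert get_active_client() is client, f"{name} saw another task's client"
    finally:
        current_client.reset(token)

async def main() -> None:
    await asyncio.gather(worker("a", object()), worker("b", object()))

asyncio.run(main())
```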