lucidicai 2.0.2__py3-none-any.whl → 2.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lucidicai/__init__.py +367 -899
- lucidicai/api/__init__.py +1 -0
- lucidicai/api/client.py +218 -0
- lucidicai/api/resources/__init__.py +1 -0
- lucidicai/api/resources/dataset.py +192 -0
- lucidicai/api/resources/event.py +88 -0
- lucidicai/api/resources/session.py +126 -0
- lucidicai/core/__init__.py +1 -0
- lucidicai/core/config.py +223 -0
- lucidicai/core/errors.py +60 -0
- lucidicai/core/types.py +35 -0
- lucidicai/sdk/__init__.py +1 -0
- lucidicai/sdk/context.py +231 -0
- lucidicai/sdk/decorators.py +187 -0
- lucidicai/sdk/error_boundary.py +299 -0
- lucidicai/sdk/event.py +126 -0
- lucidicai/sdk/event_builder.py +304 -0
- lucidicai/sdk/features/__init__.py +1 -0
- lucidicai/sdk/features/dataset.py +605 -0
- lucidicai/sdk/features/feature_flag.py +383 -0
- lucidicai/sdk/init.py +361 -0
- lucidicai/sdk/shutdown_manager.py +302 -0
- lucidicai/telemetry/context_bridge.py +82 -0
- lucidicai/telemetry/context_capture_processor.py +25 -9
- lucidicai/telemetry/litellm_bridge.py +20 -24
- lucidicai/telemetry/lucidic_exporter.py +99 -60
- lucidicai/telemetry/openai_patch.py +295 -0
- lucidicai/telemetry/openai_uninstrument.py +87 -0
- lucidicai/telemetry/telemetry_init.py +16 -1
- lucidicai/telemetry/utils/model_pricing.py +278 -0
- lucidicai/utils/__init__.py +1 -0
- lucidicai/utils/images.py +337 -0
- lucidicai/utils/logger.py +168 -0
- lucidicai/utils/queue.py +393 -0
- {lucidicai-2.0.2.dist-info → lucidicai-2.1.1.dist-info}/METADATA +1 -1
- {lucidicai-2.0.2.dist-info → lucidicai-2.1.1.dist-info}/RECORD +38 -9
- {lucidicai-2.0.2.dist-info → lucidicai-2.1.1.dist-info}/WHEEL +0 -0
- {lucidicai-2.0.2.dist-info → lucidicai-2.1.1.dist-info}/top_level.txt +0 -0
lucidicai/core/config.py
ADDED
@@ -0,0 +1,223 @@
"""Centralized configuration management for Lucidic SDK.

This module provides a single source of truth for all SDK configuration,
including environment variables, defaults, and runtime settings.
"""
import os
from dataclasses import dataclass, field
from typing import Optional, Dict, Any, List
from enum import Enum


class Environment(Enum):
    """SDK environment modes"""
    PRODUCTION = "production"
    DEVELOPMENT = "development"
    DEBUG = "debug"


@dataclass
class NetworkConfig:
    """Network and connection settings"""
    base_url: str = "https://backend.lucidic.ai/api"
    timeout: int = 30
    max_retries: int = 3
    backoff_factor: float = 0.5
    connection_pool_size: int = 20
    connection_pool_maxsize: int = 100

    @classmethod
    def from_env(cls) -> 'NetworkConfig':
        """Load network configuration from environment variables"""
        debug = os.getenv("LUCIDIC_DEBUG", "False").lower() == "true"
        return cls(
            base_url="http://localhost:8000/api" if debug else "https://backend.lucidic.ai/api",
            timeout=int(os.getenv("LUCIDIC_TIMEOUT", "30")),
            max_retries=int(os.getenv("LUCIDIC_MAX_RETRIES", "3")),
            backoff_factor=float(os.getenv("LUCIDIC_BACKOFF_FACTOR", "0.5")),
            connection_pool_size=int(os.getenv("LUCIDIC_CONNECTION_POOL_SIZE", "20")),
            connection_pool_maxsize=int(os.getenv("LUCIDIC_CONNECTION_POOL_MAXSIZE", "100"))
        )


@dataclass
class EventQueueConfig:
    """Event queue processing settings"""
    max_queue_size: int = 100000
    flush_interval_ms: int = 100
    flush_at_count: int = 100
    blob_threshold: int = 65536
    daemon_mode: bool = True
    max_parallel_workers: int = 10
    retry_failed: bool = True

    @classmethod
    def from_env(cls) -> 'EventQueueConfig':
        """Load event queue configuration from environment variables"""
        return cls(
            max_queue_size=int(os.getenv("LUCIDIC_MAX_QUEUE_SIZE", "100000")),
            flush_interval_ms=int(os.getenv("LUCIDIC_FLUSH_INTERVAL", "1000")),
            flush_at_count=int(os.getenv("LUCIDIC_FLUSH_AT", "50")),
            blob_threshold=int(os.getenv("LUCIDIC_BLOB_THRESHOLD", "65536")),
            daemon_mode=os.getenv("LUCIDIC_DAEMON_QUEUE", "true").lower() == "true",
            max_parallel_workers=int(os.getenv("LUCIDIC_MAX_PARALLEL", "10")),
            retry_failed=os.getenv("LUCIDIC_RETRY_FAILED", "true").lower() == "true"
        )


@dataclass
class ErrorHandlingConfig:
    """Error handling and suppression settings"""
    suppress_errors: bool = True
    cleanup_on_error: bool = True
    log_suppressed: bool = True
    capture_uncaught: bool = True

    @classmethod
    def from_env(cls) -> 'ErrorHandlingConfig':
        """Load error handling configuration from environment variables"""
        return cls(
            suppress_errors=os.getenv("LUCIDIC_SUPPRESS_ERRORS", "true").lower() == "true",
            cleanup_on_error=os.getenv("LUCIDIC_CLEANUP_ON_ERROR", "true").lower() == "true",
            log_suppressed=os.getenv("LUCIDIC_LOG_SUPPRESSED", "true").lower() == "true",
            capture_uncaught=os.getenv("LUCIDIC_CAPTURE_UNCAUGHT", "true").lower() == "true"
        )


@dataclass
class TelemetryConfig:
    """Telemetry and instrumentation settings"""
    providers: List[str] = field(default_factory=list)
    verbose: bool = False

    @classmethod
    def from_env(cls) -> 'TelemetryConfig':
        """Load telemetry configuration from environment variables"""
        return cls(
            providers=[],  # Set during initialization
            verbose=os.getenv("LUCIDIC_VERBOSE", "False").lower() == "true"
        )


@dataclass
class SDKConfig:
    """Main SDK configuration container"""
    # Required settings
    api_key: Optional[str] = None
    agent_id: Optional[str] = None

    # Feature flags
    auto_end: bool = True
    production_monitoring: bool = False

    # Sub-configurations
    network: NetworkConfig = field(default_factory=NetworkConfig)
    event_queue: EventQueueConfig = field(default_factory=EventQueueConfig)
    error_handling: ErrorHandlingConfig = field(default_factory=ErrorHandlingConfig)
    telemetry: TelemetryConfig = field(default_factory=TelemetryConfig)

    # Runtime settings
    environment: Environment = Environment.PRODUCTION
    debug: bool = False

    @classmethod
    def from_env(cls, **overrides) -> 'SDKConfig':
        """Create configuration from environment variables with optional overrides"""
        from dotenv import load_dotenv
        load_dotenv()

        debug = os.getenv("LUCIDIC_DEBUG", "False").lower() == "true"

        config = cls(
            api_key=os.getenv("LUCIDIC_API_KEY"),
            agent_id=os.getenv("LUCIDIC_AGENT_ID"),
            auto_end=os.getenv("LUCIDIC_AUTO_END", "true").lower() == "true",
            production_monitoring=False,
            network=NetworkConfig.from_env(),
            event_queue=EventQueueConfig.from_env(),
            error_handling=ErrorHandlingConfig.from_env(),
            telemetry=TelemetryConfig.from_env(),
            environment=Environment.DEBUG if debug else Environment.PRODUCTION,
            debug=debug
        )

        # Apply any overrides
        config.update(**overrides)
        return config

    def update(self, **kwargs):
        """Update configuration with new values"""
        for key, value in kwargs.items():
            # Only update if value is not None (to preserve env defaults)
            if value is not None:
                if hasattr(self, key):
                    setattr(self, key, value)
                elif key == "providers" and hasattr(self.telemetry, "providers"):
                    self.telemetry.providers = value

    def validate(self) -> List[str]:
        """Validate configuration and return list of errors"""
        errors = []

        if not self.api_key:
            errors.append("API key is required (LUCIDIC_API_KEY)")

        if not self.agent_id:
            errors.append("Agent ID is required (LUCIDIC_AGENT_ID)")

        if self.event_queue.max_parallel_workers < 1:
            errors.append("Max parallel workers must be at least 1")

        if self.event_queue.flush_interval_ms < 10:
            errors.append("Flush interval must be at least 10ms")

        return errors

    def to_dict(self) -> Dict[str, Any]:
        """Convert configuration to dictionary"""
        return {
            "api_key": self.api_key[:8] + "..." if self.api_key else None,
            "agent_id": self.agent_id,
            "environment": self.environment.value,
            "debug": self.debug,
            "auto_end": self.auto_end,
            "network": {
                "base_url": self.network.base_url,
                "timeout": self.network.timeout,
                "max_retries": self.network.max_retries,
                "connection_pool_size": self.network.connection_pool_size
            },
            "event_queue": {
                "max_workers": self.event_queue.max_parallel_workers,
                "flush_interval_ms": self.event_queue.flush_interval_ms,
                "flush_at_count": self.event_queue.flush_at_count
            },
            "error_handling": {
                "suppress": self.error_handling.suppress_errors,
                "cleanup": self.error_handling.cleanup_on_error
            }
        }


# Global configuration instance
_config: Optional[SDKConfig] = None


def get_config() -> SDKConfig:
    """Get the current SDK configuration"""
    global _config
    if _config is None:
        _config = SDKConfig.from_env()
    return _config


def set_config(config: SDKConfig):
    """Set the SDK configuration"""
    global _config
    _config = config


def reset_config():
    """Reset configuration to defaults"""
    global _config
    _config = None
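Outside the diff itself, here is a minimal, hypothetical sketch of how this configuration module might be consumed. The import path, call signatures, and behavior notes come from the file above; the surrounding application code is an assumption, not part of the package.

# Hypothetical usage sketch (not part of the diff): load, validate, and install config.
from lucidicai.core.config import SDKConfig, get_config, set_config

# Reads LUCIDIC_* environment variables; keyword overrides win when they are not None.
config = SDKConfig.from_env(auto_end=False)

problems = config.validate()
if problems:
    raise RuntimeError(f"Invalid Lucidic configuration: {problems}")

set_config(config)             # install as the module-level singleton
print(get_config().to_dict())  # api_key is truncated to its first 8 characters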
lucidicai/core/errors.py
ADDED
@@ -0,0 +1,60 @@
from typing import Optional
import sys
import traceback


class LucidicError(Exception):
    """Base exception for all Lucidic SDK errors"""
    pass


class APIKeyVerificationError(LucidicError):
    """Exception for API key verification errors"""
    def __init__(self, message):
        super().__init__(f"Could not verify Lucidic API key: {message}")

class LucidicNotInitializedError(LucidicError):
    """Exception for calling Lucidic functions before Lucidic Client is initialized (lai.init())"""
    def __init__(self):
        super().__init__("Client is not initialized. Make sure to call lai.init() to initialize the client before calling other functions.")

class PromptError(LucidicError):
    "Exception for errors related to prompt management"
    def __init__(self, message: str):
        super().__init__(f"Error getting Lucidic prompt: {message}")

class InvalidOperationError(LucidicError):
    "Exception for errors resulting from attempting an invalid operation"
    def __init__(self, message: str):
        super().__init__(f"An invalid Lucidic operation was attempted: {message}")


class FeatureFlagError(LucidicError):
    """Exception for feature flag fetch failures"""
    def __init__(self, message: str):
        super().__init__(f"Failed to fetch feature flag: {message}")


def install_error_handler():
    """Install global handler to create ERROR_TRACEBACK events for uncaught exceptions."""
    from .sdk.event import create_event
    from .sdk.init import get_session_id
    from .context import current_parent_event_id

    def handle_exception(exc_type, exc_value, exc_traceback):
        try:
            if get_session_id():
                tb = ''.join(traceback.format_exception(exc_type, exc_value, exc_traceback))
                create_event(
                    type="error_traceback",
                    error=str(exc_value),
                    traceback=tb
                )
        except Exception:
            pass
        try:
            sys.__excepthook__(exc_type, exc_value, exc_traceback)
        except Exception:
            pass

    sys.excepthook = handle_exception
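A short, hypothetical sketch of how the exception hierarchy above could surface in caller code. Only the exception classes and their message formatting come from the file; the verify_key helper is an illustrative assumption. install_error_handler() additionally rewires sys.excepthook so uncaught exceptions become "error_traceback" events while a session is active, and is not exercised here.

from lucidicai.core.errors import APIKeyVerificationError, LucidicError

def verify_key(api_key: str) -> None:
    # Hypothetical caller-side check; real verification happens elsewhere in the SDK.
    if not api_key:
        raise APIKeyVerificationError("no API key provided")

try:
    verify_key("")
except LucidicError as exc:
    print(exc)  # Could not verify Lucidic API key: no API key provided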
lucidicai/core/types.py
ADDED
@@ -0,0 +1,35 @@
"""Type definitions for the Lucidic SDK."""
from enum import Enum
from typing import Literal


class EventType(Enum):
    """Supported event types."""
    LLM_GENERATION = "llm_generation"
    FUNCTION_CALL = "function_call"
    ERROR_TRACEBACK = "error_traceback"
    GENERIC = "generic"


# Provider type literals
ProviderType = Literal[
    "openai",
    "anthropic",
    "langchain",
    "pydantic_ai",
    "openai_agents",
    "litellm",
    "bedrock",
    "aws_bedrock",
    "amazon_bedrock",
    "vertexai",
    "vertex_ai",
    "google",
    "google_generativeai",
    "cohere",
    "groq",
]


# Deprecated type aliases (for backward compatibility)
StepType = EventType  # Steps are now events
lucidicai/sdk/__init__.py
ADDED
@@ -0,0 +1 @@
"""SDK module for Lucidic AI."""
lucidicai/sdk/context.py
ADDED
@@ -0,0 +1,231 @@
"""Async-safe and thread-safe context helpers for session (and step, extensible).

This module exposes context variables and helpers to bind a Lucidic
session to the current execution context (threads/async tasks), so
OpenTelemetry spans can be deterministically attributed to the correct
session under concurrency.
"""

from contextlib import contextmanager, asynccontextmanager
import contextvars
from typing import Optional, Iterator, AsyncIterator, Callable, Any, Dict
import logging
import os
import threading


# Context variable for the active Lucidic session id
current_session_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar(
    "lucidic.session_id", default=None
)


# NEW: Context variable for parent event nesting
current_parent_event_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar(
    "lucidic.parent_event_id", default=None
)


def set_active_session(session_id: Optional[str]) -> None:
    """Bind the given session id to the current execution context.

    Sets both contextvar and thread-local storage when in a thread.
    """
    from .init import set_thread_session, is_main_thread

    current_session_id.set(session_id)

    # Also set thread-local storage if we're in a non-main thread
    if session_id and not is_main_thread():
        set_thread_session(session_id)


def clear_active_session() -> None:
    """Clear any active session binding in the current execution context.

    Clears both contextvar and thread-local storage when in a thread.
    """
    from .init import clear_thread_session, is_main_thread

    current_session_id.set(None)

    # Also clear thread-local storage if we're in a non-main thread
    if not is_main_thread():
        clear_thread_session()


@contextmanager
def bind_session(session_id: str) -> Iterator[None]:
    """Context manager to temporarily bind an active session id.

    Handles both thread-local and context variable storage for proper isolation.
    """
    from .init import set_thread_session, clear_thread_session, is_main_thread

    token = current_session_id.set(session_id)

    # If we're in a non-main thread, also set thread-local storage
    thread_local_set = False
    if not is_main_thread():
        set_thread_session(session_id)
        thread_local_set = True

    try:
        yield
    finally:
        if thread_local_set:
            clear_thread_session()
        current_session_id.reset(token)


@asynccontextmanager
async def bind_session_async(session_id: str) -> AsyncIterator[None]:
    """Async context manager to temporarily bind an active session id."""
    from .init import set_task_session, clear_task_session

    token = current_session_id.set(session_id)

    # Also set task-local for async isolation
    set_task_session(session_id)

    try:
        yield
    finally:
        clear_task_session()
        current_session_id.reset(token)


# NEW: Parent event context managers
@contextmanager
def event_context(event_id: str) -> Iterator[None]:
    token = current_parent_event_id.set(event_id)
    try:
        yield
    finally:
        current_parent_event_id.reset(token)


@asynccontextmanager
async def event_context_async(event_id: str) -> AsyncIterator[None]:
    token = current_parent_event_id.set(event_id)
    try:
        yield
    finally:
        current_parent_event_id.reset(token)


@contextmanager
def session(**init_params) -> Iterator[None]:
    """All-in-one context manager: init → bind → yield → clear → end.

    Notes:
    - Ignores any provided auto_end parameter and ends the session on context exit.
    - If LUCIDIC_DEBUG is true, logs a warning about ignoring auto_end.
    - Handles thread-local storage for proper thread isolation.
    """
    # Lazy import to avoid circular imports
    import lucidicai as lai  # type: ignore
    from .init import set_thread_session, clear_thread_session, is_main_thread

    # Force auto_end to False inside a context manager to control explicit end
    user_auto_end = init_params.get('auto_end', None)
    init_params = dict(init_params)
    init_params['auto_end'] = False

    if os.getenv('LUCIDIC_DEBUG', 'False') == 'True' and user_auto_end is not None:
        logging.getLogger('Lucidic').warning('session(...) ignores auto_end and will end the session at context exit')

    session_id = lai.init(**init_params)
    token = current_session_id.set(session_id)

    # If we're in a non-main thread, also set thread-local storage
    thread_local_set = False
    if not is_main_thread():
        set_thread_session(session_id)
        thread_local_set = True

    try:
        yield
    finally:
        if thread_local_set:
            clear_thread_session()
        current_session_id.reset(token)
        try:
            # Pass session_id explicitly to avoid context issues
            lai.end_session(session_id=session_id)
        except Exception:
            # Avoid masking the original exception from the with-block
            pass


@asynccontextmanager
async def session_async(**init_params) -> AsyncIterator[None]:
    """Async counterpart of session(...)."""
    import lucidicai as lai  # type: ignore
    from .init import set_task_session, clear_task_session

    user_auto_end = init_params.get('auto_end', None)
    init_params = dict(init_params)
    init_params['auto_end'] = False

    if os.getenv('LUCIDIC_DEBUG', 'False') == 'True' and user_auto_end is not None:
        logging.getLogger('Lucidic').warning('session_async(...) ignores auto_end and will end the session at context exit')

    session_id = lai.init(**init_params)
    token = current_session_id.set(session_id)

    # Set task-local session for true isolation in async
    set_task_session(session_id)

    try:
        yield
    finally:
        # Clear task-local session first
        clear_task_session()
        current_session_id.reset(token)
        try:
            # Pass session_id explicitly to avoid context issues in async
            lai.end_session(session_id=session_id)
        except Exception:
            pass


def run_session(fn: Callable[..., Any], *fn_args: Any, init_params: Optional[Dict[str, Any]] = None, **fn_kwargs: Any) -> Any:
    """Run a callable within a full Lucidic session lifecycle context."""
    with session(**(init_params or {})):
        return fn(*fn_args, **fn_kwargs)


def run_in_session(session_id: str, fn: Callable[..., Any], *fn_args: Any, **fn_kwargs: Any) -> Any:
    """Run a callable with a bound session id. Does not end the session."""
    with bind_session(session_id):
        return fn(*fn_args, **fn_kwargs)


def thread_worker_with_session(session_id: str, target: Callable[..., Any], *args, **kwargs) -> Any:
    """Wrapper for thread worker functions that ensures proper session isolation.

    Use this as the target function for threads to ensure each thread gets
    its own session context without bleeding from the parent thread.

    Example:
        thread = Thread(
            target=thread_worker_with_session,
            args=(session_id, actual_worker_function, arg1, arg2),
            kwargs={'key': 'value'}
        )
    """
    from .init import set_thread_session, clear_thread_session

    # Set thread-local session immediately
    set_thread_session(session_id)

    try:
        # Also bind to contextvar for compatibility
        with bind_session(session_id):
            return target(*args, **kwargs)
    finally:
        # Clean up thread-local storage
        clear_thread_session()
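A hedged usage sketch of the context helpers above. That lai.init() returns a session id, accepts an auto_end keyword, and that lai.end_session(session_id=...) exists are all visible in this file; the session_name parameter and run_pipeline function are illustrative assumptions.

import threading

import lucidicai as lai
from lucidicai.sdk.context import session, thread_worker_with_session

def run_pipeline() -> None:
    ...  # application work; instrumented spans attach to the bound session

# All-in-one lifecycle: init -> bind -> yield -> end, even if the body raises.
with session(session_name="batch-job"):  # session_name is an assumed init parameter
    run_pipeline()

# Reuse an existing session id in a worker thread without ending it there.
session_id = lai.init(session_name="fan-out", auto_end=False)
worker = threading.Thread(
    target=thread_worker_with_session,
    args=(session_id, run_pipeline),
)
worker.start()
worker.join()
lai.end_session(session_id=session_id)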