lucidicai 1.3.5__py3-none-any.whl → 2.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lucidicai/__init__.py +475 -398
- lucidicai/client.py +328 -50
- lucidicai/constants.py +7 -37
- lucidicai/context.py +25 -0
- lucidicai/dataset.py +114 -0
- lucidicai/decorators.py +96 -325
- lucidicai/errors.py +39 -0
- lucidicai/event.py +50 -59
- lucidicai/event_queue.py +466 -0
- lucidicai/feature_flag.py +344 -0
- lucidicai/session.py +9 -71
- lucidicai/singleton.py +20 -17
- lucidicai/streaming.py +15 -50
- lucidicai/telemetry/context_capture_processor.py +65 -0
- lucidicai/telemetry/extract.py +192 -0
- lucidicai/telemetry/litellm_bridge.py +80 -45
- lucidicai/telemetry/lucidic_exporter.py +125 -142
- lucidicai/telemetry/telemetry_init.py +189 -0
- {lucidicai-1.3.5.dist-info → lucidicai-2.0.2.dist-info}/METADATA +1 -1
- {lucidicai-1.3.5.dist-info → lucidicai-2.0.2.dist-info}/RECORD +22 -16
- {lucidicai-1.3.5.dist-info → lucidicai-2.0.2.dist-info}/WHEEL +0 -0
- {lucidicai-1.3.5.dist-info → lucidicai-2.0.2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,344 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import logging
|
|
3
|
+
import time
|
|
4
|
+
from typing import Union, List, Dict, Any, Optional, overload, Tuple, Literal
|
|
5
|
+
from dotenv import load_dotenv
|
|
6
|
+
|
|
7
|
+
from .client import Client
|
|
8
|
+
from .errors import APIKeyVerificationError, FeatureFlagError
|
|
9
|
+
|
|
10
|
+
logger = logging.getLogger("Lucidic")
|
|
11
|
+
|
|
12
|
+
# Cache implementation
class FeatureFlagCache:
    """In-memory TTL cache for feature flag values.

    Maps cache key -> (value, absolute expiry timestamp). Entries are
    evicted lazily on read. Not thread-safe; assumes GIL-serialized access.

    Note: a cached value of None is indistinguishable from a cache miss
    (``get`` returns None for both), so None-valued flags are never
    effectively cached.
    """

    def __init__(self):
        # key -> (value, expiry time in seconds since the epoch)
        self._cache: Dict[str, Tuple[Any, float]] = {}
        self._default_ttl = 300  # 5 minutes

    def get(self, key: str) -> Optional[Any]:
        """Return the cached value for *key*, or None if absent or expired.

        Expired entries are deleted on access.
        """
        entry = self._cache.get(key)
        if entry is None:
            return None
        value, expiry = entry
        if time.time() < expiry:
            return value
        # Lazy eviction of the expired entry.
        del self._cache[key]
        return None

    def set(self, key: str, value: Any, ttl: Optional[int] = None):
        """Store *value* under *key*.

        TTL semantics:
          - None: use the default TTL (300 seconds)
          - > 0:  expire after *ttl* seconds
          - < 0:  never expire (cache forever) -- supports the module's
                  documented "-1 for forever" contract
          - 0:    do not cache
        """
        if ttl is None:
            ttl = self._default_ttl
        if ttl < 0:
            # Negative TTL means the entry never expires.
            self._cache[key] = (value, float('inf'))
        elif ttl > 0:
            self._cache[key] = (value, time.time() + ttl)

    def clear(self):
        """Drop all cached entries."""
        self._cache.clear()
|
|
35
|
+
|
|
36
|
+
# Global cache instance shared by every caller in this process.
_flag_cache = FeatureFlagCache()

# Sentinel used to distinguish "no default was provided" from an explicit
# default of None (callers may legitimately default a flag to None).
MISSING = object()
|
|
41
|
+
|
|
42
|
+
# Function overloads for type safety.
# NOTE(review): these stubs name the second parameter `default`/`defaults`,
# while the implementation names it `default_or_defaults` -- so calling with
# the keyword `default=` would raise TypeError at runtime. Confirm intended
# calling convention (positional works with both).
@overload
def get_feature_flag(
    flag_name: str,
    default: Any = ...,
    *,
    return_missing: Literal[False] = False,
    cache_ttl: Optional[int] = 300,
    api_key: Optional[str] = None,
    agent_id: Optional[str] = None,
) -> Any:
    """Get a single feature flag."""
    ...

@overload
def get_feature_flag(
    flag_name: str,
    default: Any = ...,
    *,
    return_missing: Literal[True],
    cache_ttl: Optional[int] = 300,
    api_key: Optional[str] = None,
    agent_id: Optional[str] = None,
) -> Tuple[Any, List[str]]:
    """Get a single feature flag with missing info."""
    ...

@overload
def get_feature_flag(
    flag_name: List[str],
    defaults: Optional[Dict[str, Any]] = None,
    *,
    return_missing: Literal[False] = False,
    cache_ttl: Optional[int] = 300,
    api_key: Optional[str] = None,
    agent_id: Optional[str] = None,
) -> Dict[str, Any]:
    """Get multiple feature flags."""
    ...

@overload
def get_feature_flag(
    flag_name: List[str],
    defaults: Optional[Dict[str, Any]] = None,
    *,
    return_missing: Literal[True],
    cache_ttl: Optional[int] = 300,
    api_key: Optional[str] = None,
    agent_id: Optional[str] = None,
) -> Tuple[Dict[str, Any], List[str]]:
    """Get multiple feature flags with missing info."""
    ...
|
|
94
|
+
|
|
95
|
+
def get_feature_flag(
    flag_name: Union[str, List[str]],
    default_or_defaults: Any = MISSING,
    *,
    return_missing: bool = False,
    cache_ttl: Optional[int] = 300,
    api_key: Optional[str] = None,
    agent_id: Optional[str] = None,
) -> Union[Any, Tuple[Any, List[str]], Dict[str, Any], Tuple[Dict[str, Any], List[str]]]:
    """
    Get feature flag(s) from backend. Raises FeatureFlagError on failure unless default provided.

    Args:
        flag_name: Single flag name (str) or list of flag names
        default_or_defaults:
            - If flag_name is str: default value for that flag (optional)
            - If flag_name is List[str]: dict of defaults {flag_name: default_value}
        cache_ttl: Cache time-to-live in seconds (0 to disable, -1 for forever)
        api_key: Optional API key
        agent_id: Optional agent ID

    Returns:
        - If flag_name is str: The flag value (or tuple with missing list if return_missing=True)
        - If flag_name is List[str]: Dict mapping flag_name -> value (or tuple with missing list if return_missing=True)

    Raises:
        FeatureFlagError: If fetch fails and no default provided
        APIKeyVerificationError: If credentials missing

    Examples:
        # Single flag with default
        retries = lai.get_feature_flag("max_retries", default=3)

        # Single flag without default (can raise)
        retries = lai.get_feature_flag("max_retries")

        # Multiple flags
        flags = lai.get_feature_flag(
            ["max_retries", "timeout"],
            defaults={"max_retries": 3}
        )
    """

    # Feature is deliberately disabled: this early return makes the whole
    # function a no-op that returns None. Everything below is DEAD CODE,
    # retained for when the feature is re-enabled.
    return # no op for now

    # --- unreachable from here down ---
    load_dotenv()

    # Determine if single or batch; normalize to a list of names.
    is_single = isinstance(flag_name, str)
    flag_names = [flag_name] if is_single else flag_name

    # Parse defaults into a {name: default} dict.
    if is_single:
        has_default = default_or_defaults is not MISSING
        defaults = {flag_name: default_or_defaults} if has_default else {}
    else:
        # NOTE(review): `in (None, MISSING)` compares by equality, not
        # identity; `is None` / `is MISSING` checks would be stricter.
        defaults = default_or_defaults if default_or_defaults not in (None, MISSING) else {}

    # Track flags that could not be resolved on the server.
    missing_flags = []

    # Check cache first; only cache-misses are fetched from the backend.
    uncached_flags = []
    cached_results = {}

    if cache_ttl != 0:
        for name in flag_names:
            # Cache key is scoped per agent (agent_id may still be None here
            # if only set later from the environment -- TODO confirm intended).
            cache_key = f"{agent_id}:{name}"
            cached_value = _flag_cache.get(cache_key)
            if cached_value is not None:
                cached_results[name] = cached_value
            else:
                uncached_flags.append(name)
    else:
        uncached_flags = flag_names

    # Fetch uncached flags if needed
    if uncached_flags:
        # Resolve credentials from arguments or environment.
        if api_key is None:
            api_key = os.getenv("LUCIDIC_API_KEY", None)
        if api_key is None:
            raise APIKeyVerificationError(
                "Make sure to either pass your API key or set the LUCIDIC_API_KEY environment variable."
            )

        if agent_id is None:
            agent_id = os.getenv("LUCIDIC_AGENT_ID", None)
        if agent_id is None:
            raise APIKeyVerificationError(
                "Lucidic agent ID not specified. Make sure to either pass your agent ID or set the LUCIDIC_AGENT_ID environment variable."
            )

        # Get (or lazily initialize / re-point) the singleton client.
        client = Client()
        if not getattr(client, 'initialized', False):
            client = Client(api_key=api_key, agent_id=agent_id)
        else:
            if api_key != client.api_key or agent_id != client.agent_id:
                client.set_api_key(api_key)
                client.agent_id = agent_id

        try:
            # Make one batch API call for all uncached flags.
            response = client.make_request(
                'getfeatureflags',
                'POST',
                {'flag_names': uncached_flags}
            )

            # Process response and update cache.
            for name in uncached_flags:
                if name in response['flags']:
                    if response['flags'][name]['found']:
                        value = response['flags'][name]['value']
                        cached_results[name] = value

                        # Cache the value. NOTE(review): ttl=None here falls
                        # back to the cache's 300 s default, so the documented
                        # "-1 for forever" actually caches for 5 minutes.
                        if cache_ttl != 0:
                            cache_key = f"{agent_id}:{name}"
                            _flag_cache.set(cache_key, value, ttl=cache_ttl if cache_ttl > 0 else None)
                    else:
                        # Flag not found on server. NOTE(review): such a name
                        # is appended to missing_flags again in the result
                        # loop below -- duplicate entries are possible.
                        missing_flags.append(name)
                        logger.warning(f"Feature flag '{name}' not found on server")

        except Exception as e:
            # Network/parse failure: fall back to defaults where available.
            logger.error(f"Failed to fetch feature flags: {e}")

            # Check if we have defaults for the flags that failed to fetch.
            for name in uncached_flags:
                if name not in cached_results:
                    if name in defaults:
                        cached_results[name] = defaults[name]
                    elif is_single and not return_missing:
                        # Single flag without default and not returning missing - raise error
                        raise FeatureFlagError(f"'{name}': {e}") from e

    # Build final result from cache hits, fetched values, then defaults.
    result = {}
    for name in flag_names:
        if name in cached_results:
            result[name] = cached_results[name]
        elif name in defaults:
            result[name] = defaults[name]
        else:
            # No value and no default.
            missing_flags.append(name)
            if is_single and not return_missing:
                raise FeatureFlagError(f"'{name}' not found and no default provided")
            else:
                # Batch mode (or return_missing): report None for this flag.
                result[name] = None

    # Return based on input type and return_missing flag.
    if return_missing:
        return (result[flag_names[0]] if is_single else result, missing_flags)
    else:
        return result[flag_names[0]] if is_single else result
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
# Typed convenience functions
def get_bool_flag(flag_name: str, default: Optional[bool] = None, **kwargs) -> bool:
    """
    Get a boolean feature flag with type validation.

    Args:
        flag_name: Name of the flag to fetch.
        default: Fallback used on fetch failure or when the value is not a bool.
        **kwargs: Forwarded to get_feature_flag (e.g. cache_ttl, api_key).

    Raises:
        FeatureFlagError: If fetch fails and no default provided
        TypeError: If flag value is not a boolean
    """
    # Feature is disabled: the function is currently a no-op returning None.
    return  # no op for now

    # Dead code retained for when the feature is re-enabled.
    # FIX: pass the default positionally -- get_feature_flag's implementation
    # parameter is named `default_or_defaults`, so the former keyword call
    # `default=` would raise TypeError (unexpected keyword argument).
    value = get_feature_flag(flag_name, default if default is not None else MISSING, **kwargs)
    if not isinstance(value, bool):
        if default is not None:
            logger.warning(f"Feature flag '{flag_name}' is not a boolean, using default")
            return default
        raise TypeError(f"Feature flag '{flag_name}' expected boolean, got {type(value).__name__}")
    return value
|
|
273
|
+
|
|
274
|
+
|
|
275
|
+
def get_int_flag(flag_name: str, default: Optional[int] = None, **kwargs) -> int:
    """
    Get an integer feature flag with type validation.

    Args:
        flag_name: Name of the flag to fetch.
        default: Fallback used on fetch failure or when the value is not an int.
        **kwargs: Forwarded to get_feature_flag (e.g. cache_ttl, api_key).

    Raises:
        FeatureFlagError: If fetch fails and no default provided
        TypeError: If flag value is not an integer
    """
    # Feature is disabled: the function is currently a no-op returning None.
    return  # no op for now

    # Dead code retained for when the feature is re-enabled.
    # FIX: default is passed positionally because the implementation
    # parameter is `default_or_defaults`; `default=` would raise TypeError.
    value = get_feature_flag(flag_name, default if default is not None else MISSING, **kwargs)
    if not isinstance(value, int) or isinstance(value, bool):  # bool is subclass of int
        if default is not None:
            logger.warning(f"Feature flag '{flag_name}' is not an integer, using default")
            return default
        raise TypeError(f"Feature flag '{flag_name}' expected integer, got {type(value).__name__}")
    return value
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
def get_float_flag(flag_name: str, default: Optional[float] = None, **kwargs) -> float:
    """
    Get a float feature flag with type validation.

    Args:
        flag_name: Name of the flag to fetch.
        default: Fallback used on fetch failure or when the value is not numeric.
        **kwargs: Forwarded to get_feature_flag (e.g. cache_ttl, api_key).

    Raises:
        FeatureFlagError: If fetch fails and no default provided
        TypeError: If flag value is not a float
    """
    # Feature is disabled: the function is currently a no-op returning None.
    return  # no op for now

    # Dead code retained for when the feature is re-enabled.
    # FIX: default is passed positionally because the implementation
    # parameter is `default_or_defaults`; `default=` would raise TypeError.
    value = get_feature_flag(flag_name, default if default is not None else MISSING, **kwargs)
    # Accept ints and promote them; reject bool (a subclass of int).
    if not isinstance(value, (int, float)) or isinstance(value, bool):
        if default is not None:
            logger.warning(f"Feature flag '{flag_name}' is not a float, using default")
            return default
        raise TypeError(f"Feature flag '{flag_name}' expected float, got {type(value).__name__}")
    return float(value)
|
|
309
|
+
|
|
310
|
+
|
|
311
|
+
def get_string_flag(flag_name: str, default: Optional[str] = None, **kwargs) -> str:
    """
    Get a string feature flag with type validation.

    Args:
        flag_name: Name of the flag to fetch.
        default: Fallback used on fetch failure or when the value is not a str.
        **kwargs: Forwarded to get_feature_flag (e.g. cache_ttl, api_key).

    Raises:
        FeatureFlagError: If fetch fails and no default provided
        TypeError: If flag value is not a string
    """
    # Feature is disabled: the function is currently a no-op returning None.
    return  # no op for now

    # Dead code retained for when the feature is re-enabled.
    # FIX: default is passed positionally because the implementation
    # parameter is `default_or_defaults`; `default=` would raise TypeError.
    value = get_feature_flag(flag_name, default if default is not None else MISSING, **kwargs)
    if not isinstance(value, str):
        if default is not None:
            logger.warning(f"Feature flag '{flag_name}' is not a string, using default")
            return default
        raise TypeError(f"Feature flag '{flag_name}' expected string, got {type(value).__name__}")
    return value
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
def get_json_flag(flag_name: str, default: Optional[dict] = None, **kwargs) -> dict:
    """
    Get a JSON object feature flag.

    Note: unlike the other typed helpers, no isinstance validation is
    performed on the fetched value -- it is returned as-is.

    Args:
        flag_name: Name of the flag to fetch.
        default: Fallback used on fetch failure.
        **kwargs: Forwarded to get_feature_flag (e.g. cache_ttl, api_key).

    Raises:
        FeatureFlagError: If fetch fails and no default provided
    """
    # Feature is disabled: the function is currently a no-op returning None.
    return  # no op for now

    # Dead code retained for when the feature is re-enabled.
    # FIX: default is passed positionally because the implementation
    # parameter is `default_or_defaults`; `default=` would raise TypeError.
    value = get_feature_flag(flag_name, default if default is not None else MISSING, **kwargs)
    return value
|
|
339
|
+
|
|
340
|
+
|
|
341
|
+
def clear_feature_flag_cache():
    """Empty the module-level feature flag cache and log the action."""
    _flag_cache.clear()
    logger.debug("Feature flag cache cleared")
|
lucidicai/session.py
CHANGED
|
@@ -1,16 +1,7 @@
|
|
|
1
|
-
import
|
|
2
|
-
import io
|
|
3
|
-
import logging
|
|
4
|
-
from typing import List, Optional
|
|
5
|
-
|
|
6
|
-
from PIL import Image
|
|
1
|
+
from typing import Optional
|
|
7
2
|
|
|
8
3
|
from .errors import InvalidOperationError, LucidicNotInitializedError
|
|
9
|
-
from .image_upload import get_presigned_url, upload_image_to_s3
|
|
10
|
-
from .step import Step
|
|
11
|
-
from .event import Event
|
|
12
4
|
|
|
13
|
-
logger = logging.getLogger("Lucidic")
|
|
14
5
|
|
|
15
6
|
class Session:
|
|
16
7
|
def __init__(
|
|
@@ -21,9 +12,7 @@ class Session:
|
|
|
21
12
|
):
|
|
22
13
|
self.agent_id = agent_id
|
|
23
14
|
self.session_id = session_id
|
|
24
|
-
self.
|
|
25
|
-
self._active_step: Optional[str] = None # Step ID, not Step object
|
|
26
|
-
self.event_history = dict()
|
|
15
|
+
self.event_history = [] # List[Event]
|
|
27
16
|
self.latest_event = None
|
|
28
17
|
self.is_finished = False
|
|
29
18
|
self.is_successful = None
|
|
@@ -32,13 +21,6 @@ class Session:
|
|
|
32
21
|
self.session_eval_reason = None
|
|
33
22
|
self.has_gif = None
|
|
34
23
|
|
|
35
|
-
@property
|
|
36
|
-
def active_step(self) -> Optional[Step]:
|
|
37
|
-
"""Get the active step object"""
|
|
38
|
-
if self._active_step and self._active_step in self.step_history:
|
|
39
|
-
return self.step_history[self._active_step]
|
|
40
|
-
return None
|
|
41
|
-
|
|
42
24
|
def update_session(
|
|
43
25
|
self,
|
|
44
26
|
**kwargs
|
|
@@ -54,60 +36,16 @@ class Session:
|
|
|
54
36
|
"session_eval_reason": Client().mask(kwargs.get("session_eval_reason", None)),
|
|
55
37
|
"tags": kwargs.get("tags", None)
|
|
56
38
|
}
|
|
57
|
-
|
|
58
|
-
# auto end any unfinished steps
|
|
59
|
-
if kwargs.get("is_finished", None) is True:
|
|
60
|
-
for step_id, step in self.step_history.items():
|
|
61
|
-
if not step.is_finished:
|
|
62
|
-
self.update_step(step_id=step_id, is_finished=True)
|
|
63
|
-
|
|
64
39
|
Client().make_request('updatesession', 'PUT', request_data)
|
|
65
40
|
|
|
66
|
-
def
|
|
41
|
+
def create_event(self, type: str = "generic", **kwargs) -> str:
|
|
42
|
+
"""Proxy to client.create_event bound to this session."""
|
|
67
43
|
if not self.session_id:
|
|
68
44
|
raise LucidicNotInitializedError()
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
def update_step(self, **kwargs) -> None:
|
|
75
|
-
if 'step_id' in kwargs and kwargs['step_id'] is not None:
|
|
76
|
-
if kwargs['step_id'] not in self.step_history:
|
|
77
|
-
raise InvalidOperationError("Step ID not found in session history")
|
|
78
|
-
self.step_history[kwargs['step_id']].update_step(**kwargs)
|
|
79
|
-
else:
|
|
80
|
-
if not self._active_step:
|
|
81
|
-
raise InvalidOperationError("No active step to update")
|
|
82
|
-
self.step_history[self._active_step].update_step(**kwargs)
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
def create_event(self, **kwargs):
|
|
86
|
-
# Get step_id from kwargs or active step
|
|
87
|
-
if 'step_id' in kwargs and kwargs['step_id'] is not None:
|
|
88
|
-
step_id = kwargs['step_id']
|
|
89
|
-
elif self._active_step:
|
|
90
|
-
step_id = self._active_step
|
|
91
|
-
else:
|
|
92
|
-
step_id = None
|
|
93
|
-
kwargs.pop('step_id', None)
|
|
94
|
-
event = Event(
|
|
95
|
-
session_id=self.session_id,
|
|
96
|
-
step_id=step_id,
|
|
97
|
-
**kwargs
|
|
98
|
-
)
|
|
99
|
-
self.event_history[event.event_id] = event
|
|
100
|
-
self._active_event = event
|
|
101
|
-
return event.event_id
|
|
102
|
-
|
|
103
|
-
def update_event(self, **kwargs):
|
|
104
|
-
if 'event_id' in kwargs and kwargs['event_id'] is not None:
|
|
105
|
-
if kwargs['event_id'] not in self.event_history:
|
|
106
|
-
raise InvalidOperationError("Event ID not found in session history")
|
|
107
|
-
self.event_history[kwargs['event_id']].update_event(**kwargs)
|
|
108
|
-
else:
|
|
109
|
-
if not self._active_event:
|
|
110
|
-
raise InvalidOperationError("No active event to update")
|
|
111
|
-
self._active_event.update_event(**kwargs)
|
|
45
|
+
from .client import Client
|
|
46
|
+
kwargs = dict(kwargs)
|
|
47
|
+
kwargs['session_id'] = self.session_id
|
|
48
|
+
event_id = Client().create_event(type=type, **kwargs)
|
|
49
|
+
return event_id
|
|
112
50
|
|
|
113
51
|
|
lucidicai/singleton.py
CHANGED
|
@@ -1,28 +1,31 @@
|
|
|
1
|
+
import threading
|
|
1
2
|
from .errors import LucidicNotInitializedError
|
|
2
3
|
|
|
3
4
|
lai_inst = {}
|
|
5
|
+
_singleton_lock = threading.Lock()
|
|
4
6
|
|
|
5
7
|
def singleton(class_):
|
|
6
8
|
def getinstance(*args, **kwargs):
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
9
|
+
# Thread-safe singleton pattern
|
|
10
|
+
with _singleton_lock:
|
|
11
|
+
inst = lai_inst.get(class_)
|
|
12
|
+
|
|
13
|
+
# on first access -> no instance yet
|
|
14
|
+
if inst is None:
|
|
15
|
+
# no args/kwargs -> return a NullClient for Client
|
|
16
|
+
if class_.__name__ == 'Client' and not args and not kwargs:
|
|
17
|
+
inst = NullClient()
|
|
18
|
+
else:
|
|
19
|
+
inst = class_(*args, **kwargs)
|
|
20
|
+
lai_inst[class_] = inst
|
|
21
|
+
return inst
|
|
22
|
+
|
|
23
|
+
# existing instance present
|
|
24
|
+
# if NullClient and now real init args are passed -> upgrade it
|
|
25
|
+
if isinstance(inst, NullClient) and (args or kwargs):
|
|
16
26
|
inst = class_(*args, **kwargs)
|
|
17
|
-
|
|
27
|
+
lai_inst[class_] = inst
|
|
18
28
|
return inst
|
|
19
|
-
|
|
20
|
-
# existing instance present
|
|
21
|
-
# if NullClient and now real init args are passed -> upgrade it
|
|
22
|
-
if isinstance(inst, NullClient) and (args or kwargs):
|
|
23
|
-
inst = class_(*args, **kwargs)
|
|
24
|
-
lai_inst[class_] = inst
|
|
25
|
-
return inst
|
|
26
29
|
|
|
27
30
|
return getinstance
|
|
28
31
|
|
lucidicai/streaming.py
CHANGED
|
@@ -21,42 +21,10 @@ class StreamingResponseWrapper:
|
|
|
21
21
|
self.accumulated_content = ""
|
|
22
22
|
self.usage = None
|
|
23
23
|
|
|
24
|
-
#
|
|
25
|
-
self._create_initial_event()
|
|
24
|
+
# We no longer create an initial event; emit a single immutable event on finalize
|
|
26
25
|
|
|
27
26
|
def _create_initial_event(self):
|
|
28
|
-
|
|
29
|
-
try:
|
|
30
|
-
# Check if event_id was already created
|
|
31
|
-
if '_event_id' in self.kwargs:
|
|
32
|
-
self.event_id = self.kwargs['_event_id']
|
|
33
|
-
logger.info(f"[Streaming] Using existing event ID: {self.event_id}")
|
|
34
|
-
return
|
|
35
|
-
|
|
36
|
-
if Client().session:
|
|
37
|
-
description, images = self._format_messages(self.kwargs.get('messages', ''))
|
|
38
|
-
|
|
39
|
-
event_data = {
|
|
40
|
-
'action': description,
|
|
41
|
-
'cost': 0,
|
|
42
|
-
'provider': 'OpenAI',
|
|
43
|
-
'model': self.kwargs.get('model', 'unknown'),
|
|
44
|
-
'tool': 'streaming',
|
|
45
|
-
'raw_request': json.dumps({
|
|
46
|
-
'model': self.kwargs.get('model'),
|
|
47
|
-
'messages': self._serialize_messages(self.kwargs.get('messages', [])),
|
|
48
|
-
'stream': True,
|
|
49
|
-
**{k: v for k, v in self.kwargs.items() if k not in ['messages', 'model', 'stream', '_event_id']}
|
|
50
|
-
})
|
|
51
|
-
}
|
|
52
|
-
|
|
53
|
-
if images:
|
|
54
|
-
event_data['screenshots'] = images
|
|
55
|
-
|
|
56
|
-
self.event_id = Client().session.create_event(**event_data)
|
|
57
|
-
logger.debug(f"[Streaming] Created new streaming event with ID: {self.event_id}")
|
|
58
|
-
except Exception as e:
|
|
59
|
-
logger.error(f"[Streaming] Error creating initial streaming event: {str(e)}")
|
|
27
|
+
return
|
|
60
28
|
|
|
61
29
|
def _format_messages(self, messages):
|
|
62
30
|
"""Format messages for description and extract images"""
|
|
@@ -195,10 +163,6 @@ class StreamingResponseWrapper:
|
|
|
195
163
|
try:
|
|
196
164
|
logger.info(f"[Streaming] Finalizing event {self.event_id}, accumulated content length: {len(self.accumulated_content)}")
|
|
197
165
|
|
|
198
|
-
if not self.event_id:
|
|
199
|
-
logger.warning("[Streaming] No event_id to finalize")
|
|
200
|
-
return
|
|
201
|
-
|
|
202
166
|
if not self.session:
|
|
203
167
|
# Try to get session from client
|
|
204
168
|
try:
|
|
@@ -229,18 +193,19 @@ class StreamingResponseWrapper:
|
|
|
229
193
|
model = self.kwargs.get('model', 'unknown')
|
|
230
194
|
cost = self._calculate_cost(model, prompt_tokens, completion_tokens)
|
|
231
195
|
|
|
232
|
-
#
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
'
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
196
|
+
# Create single immutable event at end
|
|
197
|
+
result_text = self.accumulated_content if self.accumulated_content else "Stream completed (no content received)"
|
|
198
|
+
Client().create_event(
|
|
199
|
+
type="llm_generation",
|
|
200
|
+
model=model,
|
|
201
|
+
messages=self.kwargs.get('messages', []),
|
|
202
|
+
output=result_text,
|
|
203
|
+
input_tokens=int(prompt_tokens),
|
|
204
|
+
output_tokens=int(completion_tokens),
|
|
205
|
+
cost=cost,
|
|
206
|
+
duration=duration,
|
|
207
|
+
)
|
|
208
|
+
logger.info(f"[Streaming] Emitted immutable streaming event")
|
|
244
209
|
|
|
245
210
|
except Exception as e:
|
|
246
211
|
logger.error(f"[Streaming] Error finalizing event {self.event_id}: {str(e)}")
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Context Capture Processor for OpenTelemetry spans.
|
|
3
|
+
|
|
4
|
+
This processor captures Lucidic context (session_id, parent_event_id) at span creation time
|
|
5
|
+
and stores it in span attributes. This ensures context is preserved even when spans are
|
|
6
|
+
processed asynchronously in different threads/contexts.
|
|
7
|
+
|
|
8
|
+
This fixes the nesting issue for ALL providers (OpenAI, Anthropic, LangChain, etc.)
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import logging
|
|
12
|
+
from typing import Optional
|
|
13
|
+
from opentelemetry.sdk.trace import SpanProcessor, ReadableSpan
|
|
14
|
+
from opentelemetry.trace import Span
|
|
15
|
+
from opentelemetry import context as otel_context
|
|
16
|
+
|
|
17
|
+
logger = logging.getLogger("Lucidic")
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class ContextCaptureProcessor(SpanProcessor):
    """Captures Lucidic context at span creation and stores in attributes.

    When a span starts, the current session id and parent event id are read
    from Lucidic's context variables and written to span attributes so they
    survive asynchronous/off-thread span processing.
    """

    @staticmethod
    def _safe_ctx_get(ctx_var):
        # Best-effort read of a context variable; None on any failure.
        try:
            return ctx_var.get(None)
        except Exception:
            return None

    def on_start(self, span: Span, parent_context: Optional[otel_context.Context] = None) -> None:
        """Called when a span is started - capture context here."""
        try:
            # Deferred import: importing at module load would be circular.
            from lucidicai.context import current_session_id, current_parent_event_id

            session_id = self._safe_ctx_get(current_session_id)
            parent_event_id = self._safe_ctx_get(current_parent_event_id)

            # Stash whatever context is present in span attributes.
            if session_id:
                span.set_attribute("lucidic.session_id", session_id)

            if parent_event_id:
                span.set_attribute("lucidic.parent_event_id", parent_event_id)
                logger.debug(f"[ContextCapture] Captured parent_event_id {parent_event_id[:8]}... for span {span.name}")

        except Exception as e:
            # Never fail span creation due to context capture.
            logger.debug(f"[ContextCapture] Failed to capture context: {e}")

    def on_end(self, span: ReadableSpan) -> None:
        """Called when a span ends - no action needed."""
        pass

    def shutdown(self) -> None:
        """Shutdown the processor."""
        pass

    def force_flush(self, timeout_millis: int = 30000) -> bool:
        """Force flush - no buffering in this processor."""
        return True
|