lucidicai 1.3.2__py3-none-any.whl → 1.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lucidicai/__init__.py CHANGED
@@ -2,6 +2,9 @@ import atexit
 import logging
 import os
 import signal
+import sys
+import traceback
+import threading
 from typing import List, Literal, Optional
 
 from .client import Client
@@ -25,8 +28,35 @@ from .telemetry.otel_init import LucidicTelemetry
 
 # Import decorators
 from .decorators import step, event
+from .context import (
+    set_active_session,
+    bind_session,
+    bind_session_async,
+    clear_active_session,
+    current_session_id,
+    session,
+    session_async,
+    run_session,
+    run_in_session,
+)
 
-ProviderType = Literal["openai", "anthropic", "langchain", "pydantic_ai", "openai_agents", "litellm"]
+ProviderType = Literal[
+    "openai",
+    "anthropic",
+    "langchain",
+    "pydantic_ai",
+    "openai_agents",
+    "litellm",
+    "bedrock",
+    "aws_bedrock",
+    "amazon_bedrock",
+    "google",
+    "google_generativeai",
+    "vertexai",
+    "vertex_ai",
+    "cohere",
+    "groq",
+]
 
 # Configure logging
 logger = logging.getLogger("Lucidic")
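
The new ProviderType values include several aliases for the same backend ("bedrock"/"aws_bedrock"/"amazon_bedrock", "vertexai"/"vertex_ai"), which _setup_providers later maps to a single handler. A minimal sketch of how these literals would typically be passed at init time; the providers and session_name keyword arguments are assumptions, since init's full signature is not shown in this diff:

```python
import lucidicai as lai

# Hypothetical usage: any alias of a backend should register the same handler,
# because _setup_providers maps "bedrock", "aws_bedrock" and "amazon_bedrock"
# to OTelBedrockHandler (see the _setup_providers hunk later in this diff).
lai.init(session_name="bedrock-demo", providers=["amazon_bedrock", "groq"])
```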
@@ -38,6 +68,137 @@ if not logger.handlers:
     logger.setLevel(logging.INFO)
 
 
+# Crash/exit capture configuration
+MAX_ERROR_DESCRIPTION_LENGTH = 16384
+_crash_handlers_installed = False
+_original_sys_excepthook = None
+_original_threading_excepthook = None
+_shutdown_lock = threading.Lock()
+_is_shutting_down = False
+
+
+def _mask_and_truncate(text: Optional[str]) -> Optional[str]:
+    """Apply masking and truncate to a safe length. Best effort; never raises."""
+    if text is None:
+        return text
+    try:
+        masked = Client().mask(text)
+    except Exception:
+        masked = text
+    if masked is None:
+        return masked
+    return masked[:MAX_ERROR_DESCRIPTION_LENGTH]
+
+
+def _post_fatal_event(exit_code: int, description: str, extra: Optional[dict] = None) -> None:
+    """Best-effort creation of a final Lucidic event on fatal paths.
+
+    - Idempotent using a process-wide shutdown flag to avoid duplicates when
+      multiple hooks fire (signal + excepthook).
+    - Swallows all exceptions to avoid interfering with shutdown.
+    """
+    global _is_shutting_down
+    with _shutdown_lock:
+        if _is_shutting_down:
+            return
+        _is_shutting_down = True
+    try:
+        client = Client()
+        session = getattr(client, 'session', None)
+        if not session or getattr(session, 'is_finished', False):
+            return
+        arguments = {"exit_code": exit_code}
+        if extra:
+            try:
+                arguments.update(extra)
+            except Exception:
+                pass
+
+        event_id = session.create_event(
+            description=_mask_and_truncate(description),
+            result=f"process exited with code {exit_code}",
+            function_name="__process_exit__",
+            arguments=arguments,
+        )
+        session.update_event(event_id=event_id, is_finished=True)
+    except Exception:
+        # Never raise during shutdown
+        pass
+
+
+def _install_crash_handlers() -> None:
+    """Install global uncaught exception handlers (idempotent)."""
+    global _crash_handlers_installed, _original_sys_excepthook, _original_threading_excepthook
+    if _crash_handlers_installed:
+        return
+
+    _original_sys_excepthook = sys.excepthook
+
+    def _sys_hook(exc_type, exc, tb):
+        try:
+            trace_str = ''.join(traceback.format_exception(exc_type, exc, tb))
+        except Exception:
+            trace_str = f"Uncaught exception: {getattr(exc_type, '__name__', str(exc_type))}: {exc}"
+
+        # Emit final event and end the session as unsuccessful
+        _post_fatal_event(1, trace_str, {
+            "exception_type": getattr(exc_type, "__name__", str(exc_type)),
+            "exception_message": str(exc),
+            "thread_name": threading.current_thread().name,
+        })
+        try:
+            # Prevent auto_end double work
+            client = Client()
+            try:
+                client.auto_end = False
+            except Exception:
+                pass
+            # End session explicitly as unsuccessful
+            end_session()
+        except Exception:
+            pass
+        # Best-effort force flush and shutdown telemetry
+        try:
+            telemetry = LucidicTelemetry()
+            if telemetry.is_initialized():
+                try:
+                    telemetry.force_flush()
+                except Exception:
+                    pass
+                try:
+                    telemetry.uninstrument_all()
+                except Exception:
+                    pass
+        except Exception:
+            pass
+        # Chain to original to preserve default printing/behavior
+        try:
+            _original_sys_excepthook(exc_type, exc, tb)
+        except Exception:
+            # Avoid recursion/errors in fatal path
+            pass
+
+    sys.excepthook = _sys_hook
+
+    # For Python 3.8+, only treat main-thread exceptions as fatal (process-exiting)
+    if hasattr(threading, 'excepthook'):
+        _original_threading_excepthook = threading.excepthook
+
+        def _thread_hook(args):
+            try:
+                if args.thread is threading.main_thread():
+                    _sys_hook(args.exc_type, args.exc_value, args.exc_traceback)
+            except Exception:
+                pass
+            try:
+                _original_threading_excepthook(args)
+            except Exception:
+                pass
+
+        threading.excepthook = _thread_hook
+
+    _crash_handlers_installed = True
+
 def _setup_providers(client: Client, providers: List[ProviderType]) -> None:
     """Set up providers for the client, avoiding duplication
 
@@ -81,6 +242,26 @@ def _setup_providers(client: Client, providers: List[ProviderType]) -> None:
         elif provider == "litellm":
             client.set_provider(OTelLiteLLMHandler())
             setup_providers.add("litellm")
+        elif provider in ("bedrock", "aws_bedrock", "amazon_bedrock"):
+            from .telemetry.otel_handlers import OTelBedrockHandler
+            client.set_provider(OTelBedrockHandler())
+            setup_providers.add("bedrock")
+        elif provider in ("google", "google_generativeai"):
+            from .telemetry.otel_handlers import OTelGoogleGenerativeAIHandler
+            client.set_provider(OTelGoogleGenerativeAIHandler())
+            setup_providers.add("google")
+        elif provider in ("vertexai", "vertex_ai"):
+            from .telemetry.otel_handlers import OTelVertexAIHandler
+            client.set_provider(OTelVertexAIHandler())
+            setup_providers.add("vertexai")
+        elif provider == "cohere":
+            from .telemetry.otel_handlers import OTelCohereHandler
+            client.set_provider(OTelCohereHandler())
+            setup_providers.add("cohere")
+        elif provider == "groq":
+            from .telemetry.otel_handlers import OTelGroqHandler
+            client.set_provider(OTelGroqHandler())
+            setup_providers.add("groq")
 
 __all__ = [
     'Client',
@@ -105,6 +286,14 @@ __all__ = [
     'InvalidOperationError',
     'step',
     'event',
+    'set_active_session',
+    'bind_session',
+    'bind_session_async',
+    'clear_active_session',
+    'session',
+    'session_async',
+    'run_session',
+    'run_in_session',
 ]
 
 
@@ -122,6 +311,7 @@ def init(
     tags: Optional[list] = None,
     masking_function = None,
     auto_end: Optional[bool] = True,
+    capture_uncaught: Optional[bool] = True,
 ) -> str:
     """
     Initialize the Lucidic client.
@@ -189,6 +379,17 @@
 
     # Set the auto_end flag on the client
     client.auto_end = auto_end
+    # Bind this session id to the current execution context for async-safety
+    try:
+        set_active_session(real_session_id)
+    except Exception:
+        pass
+    # Install crash handlers unless explicitly disabled
+    try:
+        if capture_uncaught:
+            _install_crash_handlers()
+    except Exception:
+        pass
 
     logger.info("Session initialized successfully")
     return real_session_id
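
Taken together with the capture_uncaught parameter added above, init() now binds the new session to the current execution context and, by default, installs the sys.excepthook/threading.excepthook wrappers. A hedged sketch of opting out; the session_name keyword is an assumption:

```python
import lucidicai as lai

# Leave the interpreter's default excepthooks untouched for this process.
session_id = lai.init(session_name="batch-job", capture_uncaught=False)

# With the default capture_uncaught=True, an uncaught main-thread exception
# produces a final "__process_exit__" event (exit_code=1, masked traceback),
# the session is ended as unsuccessful, and the hooks then chain to the
# original excepthook so normal traceback printing is preserved.
```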
@@ -232,6 +433,11 @@ def continue_session(
     client.auto_end = auto_end
 
     logger.info(f"Session {session_id} continuing...")
+    # Bind this session id to the current execution context for async-safety
+    try:
+        set_active_session(session_id)
+    except Exception:
+        pass
     return session_id  # For consistency
 
 
@@ -252,10 +458,20 @@ def update_session(
         is_successful: Whether the session was successful.
         is_successful_reason: Session success reason.
     """
+    # Prefer context-bound session over global active session
     client = Client()
-    if not client.session:
+    target_sid = None
+    try:
+        target_sid = current_session_id.get(None)
+    except Exception:
+        target_sid = None
+    if not target_sid and client.session:
+        target_sid = client.session.session_id
+    if not target_sid:
         return
-    client.session.update_session(**locals())
+    # Use ephemeral session facade to avoid mutating global state
+    session = client.session if (client.session and client.session.session_id == target_sid) else Session(agent_id=client.agent_id, session_id=target_sid)
+    session.update_session(**locals())
 
 
 def end_session(
@@ -274,17 +490,31 @@ def end_session(
         is_successful_reason: Session success reason.
     """
     client = Client()
-    if not client.session:
+    # Prefer context-bound session id
+    target_sid = None
+    try:
+        target_sid = current_session_id.get(None)
+    except Exception:
+        target_sid = None
+    if not target_sid and client.session:
+        target_sid = client.session.session_id
+    if not target_sid:
         return
-
-    # Wait for any pending LiteLLM callbacks before ending session
-    for provider in client.providers:
-        if hasattr(provider, '_callback') and hasattr(provider._callback, 'wait_for_pending_callbacks'):
-            logger.info("Waiting for LiteLLM callbacks to complete before ending session...")
-            provider._callback.wait_for_pending_callbacks(timeout=5.0)
-
-    client.session.update_session(is_finished=True, **locals())
-    client.clear()
+
+    # If ending the globally active session, keep existing cleanup behavior
+    if client.session and client.session.session_id == target_sid:
+        # Wait for any pending LiteLLM callbacks before ending session
+        for provider in client.providers:
+            if hasattr(provider, '_callback') and hasattr(provider._callback, 'wait_for_pending_callbacks'):
+                logger.info("Waiting for LiteLLM callbacks to complete before ending session...")
+                provider._callback.wait_for_pending_callbacks(timeout=5.0)
+        client.session.update_session(is_finished=True, **locals())
+        client.clear()
+        return
+
+    # Otherwise, end the specified session id without clearing global state
+    temp = Session(agent_id=client.agent_id, session_id=target_sid)
+    temp.update_session(is_finished=True, **locals())
 
 
 def reset_sdk() -> None:
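
Because end_session() now prefers the context-bound id, a session other than the globally active one can be finished by binding it first; the ephemeral Session facade handles the update without clearing global state. A hedged usage sketch (the session id below is a placeholder):

```python
import lucidicai as lai

worker_session_id = "00000000-0000-0000-0000-000000000000"  # placeholder

# end_session() resolves its target from current_session_id; when that differs
# from client.session it finishes the session via a temporary Session facade
# and leaves the global client state untouched.
with lai.bind_session(worker_session_id):
    lai.end_session(is_successful=True, is_successful_reason="worker finished")
```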
@@ -330,6 +560,20 @@ def _auto_end_session():
 
 def _signal_handler(signum, frame):
     """Handle interruption signals"""
+    # Best-effort final event for signal exits
+    try:
+        try:
+            name = signal.Signals(signum).name
+        except Exception:
+            name = str(signum)
+        try:
+            stack_str = ''.join(traceback.format_stack(frame)) if frame else ''
+        except Exception:
+            stack_str = ''
+        desc = _mask_and_truncate(f"Received signal {name}\n{stack_str}")
+        _post_fatal_event(128 + signum, desc, {"signal": name, "signum": signum})
+    except Exception:
+        pass
     _auto_end_session()
     _cleanup_telemetry()
     # Re-raise the signal for default handling
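
The signal path records exit_code = 128 + signum, matching the usual shell convention; a quick standard-library illustration of the codes _post_fatal_event would report:

```python
import signal

for sig in (signal.SIGINT, signal.SIGTERM):
    print(f"{sig.name}: exit_code={128 + sig.value}")  # SIGINT: 130, SIGTERM: 143
```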
lucidicai/client.py CHANGED
@@ -69,6 +69,10 @@ class Client:
 
     def set_provider(self, provider: BaseProvider) -> None:
         """Set the LLM provider to track"""
+        # Avoid duplicate provider registration of the same class
+        for existing in self.providers:
+            if type(existing) is type(provider):
+                return
        self.providers.append(provider)
        provider.override()
 
@@ -134,6 +138,16 @@ class Client:
         self.initialized = True
         return self.session.session_id
 
+    def create_event_for_session(self, session_id: str, **kwargs) -> str:
+        """Create an event for a specific session id without mutating global session.
+
+        This avoids cross-thread races by not switching the active session on
+        the singleton client. It constructs an ephemeral Session facade to send
+        requests under the provided session id.
+        """
+        temp_session = Session(agent_id=self.agent_id, session_id=session_id)
+        return temp_session.create_event(**kwargs)
+
     def continue_session(self, session_id: str):
         if session_id in self.custom_session_id_translations:
             session_id = self.custom_session_id_translations[session_id]
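
A hedged sketch of calling the new facade directly, assuming the client has already been initialized via lai.init(); the keyword arguments mirror those used elsewhere in this diff (description, result, function_name, arguments) and the session id is a placeholder:

```python
from lucidicai.client import Client

event_id = Client().create_event_for_session(
    "11111111-2222-3333-4444-555555555555",  # placeholder session id
    description="manual check",
    result="ok",
)
```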
@@ -149,7 +163,8 @@
             agent_id=self.agent_id,
             session_id=real_session_id
         )
-        logger.info(f"Session {data.get('session_name', '')} continuing...")
+        import logging as _logging
+        _logging.getLogger('Lucidic').info(f"Session {data.get('session_name', '')} continuing...")
         return self.session.session_id
 
     def init_mass_sim(self, **kwargs) -> str:
lucidicai/context.py ADDED
@@ -0,0 +1,119 @@
+"""Async-safe context helpers for session (and step, extensible).
+
+This module exposes context variables and helpers to bind a Lucidic
+session to the current execution context (threads/async tasks), so
+OpenTelemetry spans can be deterministically attributed to the correct
+session under concurrency.
+"""
+
+from contextlib import contextmanager, asynccontextmanager
+import contextvars
+from typing import Optional, Iterator, AsyncIterator, Callable, Any, Dict
+import logging
+import os
+
+
+# Context variable for the active Lucidic session id
+current_session_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar(
+    "lucidic.session_id", default=None
+)
+
+
+def set_active_session(session_id: Optional[str]) -> None:
+    """Bind the given session id to the current execution context."""
+    current_session_id.set(session_id)
+
+
+def clear_active_session() -> None:
+    """Clear any active session binding in the current execution context."""
+    current_session_id.set(None)
+
+
+@contextmanager
+def bind_session(session_id: str) -> Iterator[None]:
+    """Context manager to temporarily bind an active session id."""
+    token = current_session_id.set(session_id)
+    try:
+        yield
+    finally:
+        current_session_id.reset(token)
+
+
+@asynccontextmanager
+async def bind_session_async(session_id: str) -> AsyncIterator[None]:
+    """Async context manager to temporarily bind an active session id."""
+    token = current_session_id.set(session_id)
+    try:
+        yield
+    finally:
+        current_session_id.reset(token)
+
+
+@contextmanager
+def session(**init_params) -> Iterator[None]:
+    """All-in-one context manager: init → bind → yield → clear → end.
+
+    Notes:
+    - Ignores any provided auto_end parameter and ends the session on context exit.
+    - If LUCIDIC_DEBUG is true, logs a warning about ignoring auto_end.
+    """
+    # Lazy import to avoid circular imports
+    import lucidicai as lai  # type: ignore
+
+    # Force auto_end to False inside a context manager to control explicit end
+    user_auto_end = init_params.get('auto_end', None)
+    init_params = dict(init_params)
+    init_params['auto_end'] = False
+
+    if os.getenv('LUCIDIC_DEBUG', 'False') == 'True' and user_auto_end is not None:
+        logging.getLogger('Lucidic').warning('session(...) ignores auto_end and will end the session at context exit')
+
+    session_id = lai.init(**init_params)
+    token = current_session_id.set(session_id)
+    try:
+        yield
+    finally:
+        current_session_id.reset(token)
+        try:
+            lai.end_session()
+        except Exception:
+            # Avoid masking the original exception from the with-block
+            pass
+
+
+@asynccontextmanager
+async def session_async(**init_params) -> AsyncIterator[None]:
+    """Async counterpart of session(...)."""
+    import lucidicai as lai  # type: ignore
+
+    user_auto_end = init_params.get('auto_end', None)
+    init_params = dict(init_params)
+    init_params['auto_end'] = False
+
+    if os.getenv('LUCIDIC_DEBUG', 'False') == 'True' and user_auto_end is not None:
+        logging.getLogger('Lucidic').warning('session_async(...) ignores auto_end and will end the session at context exit')
+
+    session_id = lai.init(**init_params)
+    token = current_session_id.set(session_id)
+    try:
+        yield
+    finally:
+        current_session_id.reset(token)
+        try:
+            lai.end_session()
+        except Exception:
+            pass
+
+
+def run_session(fn: Callable[..., Any], *fn_args: Any, init_params: Optional[Dict[str, Any]] = None, **fn_kwargs: Any) -> Any:
+    """Run a callable within a full Lucidic session lifecycle context."""
+    with session(**(init_params or {})):
+        return fn(*fn_args, **fn_kwargs)
+
+
+def run_in_session(session_id: str, fn: Callable[..., Any], *fn_args: Any, **fn_kwargs: Any) -> Any:
+    """Run a callable with a bound session id. Does not end the session."""
+    with bind_session(session_id):
+        return fn(*fn_args, **fn_kwargs)
+
+
lucidicai/model_pricing.py CHANGED
@@ -3,6 +3,12 @@ import logging
 logger = logging.getLogger("Lucidic")
 
 MODEL_PRICING = {
+
+    # OpenAI GPT-5 Series (Verified 2025)
+    "gpt-5": {"input": 10.0, "output": 10.0},
+    "gpt-5-mini": {"input": 0.250, "output": 2.0},
+    "gpt-5-nano": {"input": 0.05, "output": 0.4},
+
     # OpenAI GPT-4o Series (Verified 2025)
     "gpt-4o": {"input": 2.5, "output": 10.0},
     "gpt-4o-mini": {"input": 0.15, "output": 0.6},
@@ -190,6 +196,7 @@ PROVIDER_AVERAGES = {
     "together": {"input": 0.15, "output": 0.15},  # Together AI average
     "perplexity": {"input": 0.4, "output": 1.5},  # Perplexity average
     "grok": {"input": 2.4, "output": 12},  # Grok average
+    "groq": {"input": 0.3, "output": 0.6},  # Groq average (placeholder)
 }
 
 def get_provider_from_model(model: str) -> str:
@@ -218,6 +225,8 @@ def get_provider_from_model(model: str) -> str:
         return "perplexity"
     elif any(grok in model_lower for grok in ["grok", "xAI"]):
         return "grok"
+    elif "groq" in model_lower:
+        return "groq"
     else:
         return "unknown"
 
@@ -228,6 +237,8 @@ def normalize_model_name(model: str) -> str:
     model_lower = model.lower()
     # Remove provider prefixes (generalizable pattern: any_provider/)
     model_lower = re.sub(r'^[^/]+/', '', model_lower)
+    # Strip Google/Vertex prefixes
+    model_lower = model_lower.replace('publishers/google/models/', '').replace('models/', '')
 
     # Strip date suffixes (20240229, 20241022, etc.) but preserve model versions like o1-mini, o3-mini
     # Pattern: remove -YYYYMMDD or -YYYY-MM-DD at the end
@@ -8,6 +8,7 @@ from opentelemetry.trace import StatusCode
 from opentelemetry.semconv_ai import SpanAttributes
 
 from lucidicai.client import Client
+from lucidicai.context import current_session_id
 from lucidicai.model_pricing import calculate_cost
 from lucidicai.image_upload import extract_base64_images
 
@@ -28,9 +29,6 @@
         """Export spans by converting them to Lucidic events"""
         try:
             client = Client()
-            if not client.session:
-                logger.debug("No active session, skipping span export")
-                return SpanExportResult.SUCCESS
 
             for span in spans:
                 self._process_span(span, client)
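
With the early return removed, export no longer requires a globally active session; attribution falls back through the order shown in the next hunk (explicit lucidic.session_id span attribute, then the context-bound session, then client.session). A hedged sketch of keeping spans from a worker thread attributed to the right session; run_llm_calls and the session id are placeholders:

```python
import threading
import lucidicai as lai

def run_llm_calls() -> None:
    pass  # placeholder for instrumented application code

def worker(session_id: str) -> None:
    # Bind the session inside the thread so current_session_id resolves here,
    # since contextvars do not propagate into new threads automatically.
    with lai.bind_session(session_id):
        run_llm_calls()

t = threading.Thread(target=worker, args=("00000000-0000-0000-0000-000000000000",))
t.start()
t.join()
```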
@@ -100,6 +98,19 @@
                 attributes.get(SpanAttributes.LLM_REQUEST_MODEL) or \
                 attributes.get('gen_ai.request.model') or 'unknown'
 
+            # Resolve target session id for this span
+            target_session_id = attributes.get('lucidic.session_id')
+            if not target_session_id:
+                try:
+                    target_session_id = current_session_id.get(None)
+                except Exception:
+                    target_session_id = None
+            if not target_session_id:
+                if getattr(client, 'session', None) and getattr(client.session, 'session_id', None):
+                    target_session_id = client.session.session_id
+            if not target_session_id:
+                return None
+
             # Create event
             event_kwargs = {
                 'description': description,
@@ -115,7 +126,7 @@
             if step_id:
                 event_kwargs['step_id'] = step_id
 
-            return client.session.create_event(**event_kwargs)
+            return client.create_event_for_session(target_session_id, **event_kwargs)
 
         except Exception as e:
             logger.error(f"Failed to create event from span: {e}")
@@ -143,6 +154,7 @@
             if cost is not None:
                 update_kwargs['cost_added'] = cost
 
+            # Route update to the same session; event_id is globally unique so server resolves it
             client.session.update_event(**update_kwargs)
 
         except Exception as e: