lucidicai 1.3.1__py3-none-any.whl → 1.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lucidicai/__init__.py CHANGED
@@ -2,6 +2,9 @@ import atexit
 import logging
 import os
 import signal
+import sys
+import traceback
+import threading
 from typing import List, Literal, Optional

 from .client import Client
@@ -25,8 +28,35 @@ from .telemetry.otel_init import LucidicTelemetry

 # Import decorators
 from .decorators import step, event
+from .context import (
+    set_active_session,
+    bind_session,
+    bind_session_async,
+    clear_active_session,
+    current_session_id,
+    session,
+    session_async,
+    run_session,
+    run_in_session,
+)

-ProviderType = Literal["openai", "anthropic", "langchain", "pydantic_ai", "openai_agents", "litellm"]
+ProviderType = Literal[
+    "openai",
+    "anthropic",
+    "langchain",
+    "pydantic_ai",
+    "openai_agents",
+    "litellm",
+    "bedrock",
+    "aws_bedrock",
+    "amazon_bedrock",
+    "google",
+    "google_generativeai",
+    "vertexai",
+    "vertex_ai",
+    "cohere",
+    "groq",
+]

 # Configure logging
 logger = logging.getLogger("Lucidic")
@@ -38,6 +68,137 @@ if not logger.handlers:
     logger.setLevel(logging.INFO)


+# Crash/exit capture configuration
+MAX_ERROR_DESCRIPTION_LENGTH = 16384
+_crash_handlers_installed = False
+_original_sys_excepthook = None
+_original_threading_excepthook = None
+_shutdown_lock = threading.Lock()
+_is_shutting_down = False
+
+
+def _mask_and_truncate(text: Optional[str]) -> Optional[str]:
+    """Apply masking and truncate to a safe length. Best effort; never raises."""
+    if text is None:
+        return text
+    try:
+        masked = Client().mask(text)
+    except Exception:
+        masked = text
+    if masked is None:
+        return masked
+    return masked[:MAX_ERROR_DESCRIPTION_LENGTH]
+
+
+def _post_fatal_event(exit_code: int, description: str, extra: Optional[dict] = None) -> None:
+    """Best-effort creation of a final Lucidic event on fatal paths.
+
+    - Idempotent using a process-wide shutdown flag to avoid duplicates when
+      multiple hooks fire (signal + excepthook).
+    - Swallows all exceptions to avoid interfering with shutdown.
+    """
+    global _is_shutting_down
+    with _shutdown_lock:
+        if _is_shutting_down:
+            return
+        _is_shutting_down = True
+    try:
+        client = Client()
+        session = getattr(client, 'session', None)
+        if not session or getattr(session, 'is_finished', False):
+            return
+        arguments = {"exit_code": exit_code}
+        if extra:
+            try:
+                arguments.update(extra)
+            except Exception:
+                pass
+
+        event_id = session.create_event(
+            description=_mask_and_truncate(description),
+            result=f"process exited with code {exit_code}",
+            function_name="__process_exit__",
+            arguments=arguments,
+        )
+        session.update_event(event_id=event_id, is_finished=True)
+    except Exception:
+        # Never raise during shutdown
+        pass
+
+
+def _install_crash_handlers() -> None:
+    """Install global uncaught exception handlers (idempotent)."""
+    global _crash_handlers_installed, _original_sys_excepthook, _original_threading_excepthook
+    if _crash_handlers_installed:
+        return
+
+    _original_sys_excepthook = sys.excepthook
+
+    def _sys_hook(exc_type, exc, tb):
+        try:
+            trace_str = ''.join(traceback.format_exception(exc_type, exc, tb))
+        except Exception:
+            trace_str = f"Uncaught exception: {getattr(exc_type, '__name__', str(exc_type))}: {exc}"
+
+        # Emit final event and end the session as unsuccessful
+        _post_fatal_event(1, trace_str, {
+            "exception_type": getattr(exc_type, "__name__", str(exc_type)),
+            "exception_message": str(exc),
+            "thread_name": threading.current_thread().name,
+        })
+        try:
+            # Prevent auto_end double work
+            client = Client()
+            try:
+                client.auto_end = False
+            except Exception:
+                pass
+            # End session explicitly as unsuccessful
+            end_session()
+        except Exception:
+            pass
+        # Best-effort force flush and shutdown telemetry
+        try:
+            telemetry = LucidicTelemetry()
+            if telemetry.is_initialized():
+                try:
+                    telemetry.force_flush()
+                except Exception:
+                    pass
+                try:
+                    telemetry.uninstrument_all()
+                except Exception:
+                    pass
+        except Exception:
+            pass
+        # Chain to original to preserve default printing/behavior
+        try:
+            _original_sys_excepthook(exc_type, exc, tb)
+        except Exception:
+            # Avoid recursion/errors in fatal path
+            pass
+
+    sys.excepthook = _sys_hook
+
+    # For Python 3.8+, only treat main-thread exceptions as fatal (process-exiting)
+    if hasattr(threading, 'excepthook'):
+        _original_threading_excepthook = threading.excepthook
+
+        def _thread_hook(args):
+            try:
+                if args.thread is threading.main_thread():
+                    _sys_hook(args.exc_type, args.exc_value, args.exc_traceback)
+            except Exception:
+                pass
+            try:
+                _original_threading_excepthook(args)
+            except Exception:
+                pass
+
+        threading.excepthook = _thread_hook
+
+    _crash_handlers_installed = True
+
 def _setup_providers(client: Client, providers: List[ProviderType]) -> None:
     """Set up providers for the client, avoiding duplication

@@ -81,6 +242,26 @@ def _setup_providers(client: Client, providers: List[ProviderType]) -> None:
         elif provider == "litellm":
             client.set_provider(OTelLiteLLMHandler())
             setup_providers.add("litellm")
+        elif provider in ("bedrock", "aws_bedrock", "amazon_bedrock"):
+            from .telemetry.otel_handlers import OTelBedrockHandler
+            client.set_provider(OTelBedrockHandler())
+            setup_providers.add("bedrock")
+        elif provider in ("google", "google_generativeai"):
+            from .telemetry.otel_handlers import OTelGoogleGenerativeAIHandler
+            client.set_provider(OTelGoogleGenerativeAIHandler())
+            setup_providers.add("google")
+        elif provider in ("vertexai", "vertex_ai"):
+            from .telemetry.otel_handlers import OTelVertexAIHandler
+            client.set_provider(OTelVertexAIHandler())
+            setup_providers.add("vertexai")
+        elif provider == "cohere":
+            from .telemetry.otel_handlers import OTelCohereHandler
+            client.set_provider(OTelCohereHandler())
+            setup_providers.add("cohere")
+        elif provider == "groq":
+            from .telemetry.otel_handlers import OTelGroqHandler
+            client.set_provider(OTelGroqHandler())
+            setup_providers.add("groq")

 __all__ = [
     'Client',
@@ -105,6 +286,14 @@ __all__ = [
     'InvalidOperationError',
     'step',
     'event',
+    'set_active_session',
+    'bind_session',
+    'bind_session_async',
+    'clear_active_session',
+    'session',
+    'session_async',
+    'run_session',
+    'run_in_session',
 ]


@@ -117,10 +306,12 @@ def init(
     providers: Optional[List[ProviderType]] = [],
     production_monitoring: Optional[bool] = False,
     mass_sim_id: Optional[str] = None,
+    experiment_id: Optional[str] = None,
     rubrics: Optional[list] = None,
     tags: Optional[list] = None,
     masking_function = None,
     auto_end: Optional[bool] = True,
+    capture_uncaught: Optional[bool] = True,
 ) -> str:
     """
     Initialize the Lucidic client.
@@ -133,6 +324,7 @@ def init(
         task: Task description.
         providers: List of provider types ("openai", "anthropic", "langchain", "pydantic_ai").
         mass_sim_id: Optional mass simulation ID, if session is to be part of a mass simulation.
+        experiment_id: Optional experiment ID, if session is to be part of an experiment.
         rubrics: Optional rubrics for evaluation, list of strings.
         tags: Optional tags for the session, list of strings.
         masking_function: Optional function to mask sensitive data.
@@ -180,12 +372,24 @@ def init(
         tags=tags,
         production_monitoring=production_monitoring,
         session_id=session_id,
+        experiment_id=experiment_id,
     )
     if masking_function:
         client.masking_function = masking_function

     # Set the auto_end flag on the client
     client.auto_end = auto_end
+    # Bind this session id to the current execution context for async-safety
+    try:
+        set_active_session(real_session_id)
+    except Exception:
+        pass
+    # Install crash handlers unless explicitly disabled
+    try:
+        if capture_uncaught:
+            _install_crash_handlers()
+    except Exception:
+        pass

     logger.info("Session initialized successfully")
     return real_session_id
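Illustrative usage sketch of the reworked init() path (assumptions: the package is imported as `lai`, `session_name` is an existing init parameter not shown in this diff, the experiment id is hypothetical, and API credentials come from the environment):

    import lucidicai as lai

    session_id = lai.init(
        session_name="bedrock-eval",        # assumed existing parameter
        providers=["bedrock", "groq"],      # aliases newly accepted by ProviderType
        experiment_id="exp_123",            # hypothetical experiment id
        capture_uncaught=True,              # default; installs the crash handlers above
    )

    raise RuntimeError("boom")  # if left uncaught, a final "__process_exit__" event is
                                # recorded, the session is ended, and the original
                                # sys.excepthook still prints the traceback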
@@ -229,6 +433,11 @@ def continue_session(
     client.auto_end = auto_end

     logger.info(f"Session {session_id} continuing...")
+    # Bind this session id to the current execution context for async-safety
+    try:
+        set_active_session(session_id)
+    except Exception:
+        pass
     return session_id # For consistency


@@ -249,10 +458,20 @@ def update_session(
         is_successful: Whether the session was successful.
         is_successful_reason: Session success reason.
     """
+    # Prefer context-bound session over global active session
     client = Client()
-    if not client.session:
+    target_sid = None
+    try:
+        target_sid = current_session_id.get(None)
+    except Exception:
+        target_sid = None
+    if not target_sid and client.session:
+        target_sid = client.session.session_id
+    if not target_sid:
         return
-    client.session.update_session(**locals())
+    # Use ephemeral session facade to avoid mutating global state
+    session = client.session if (client.session and client.session.session_id == target_sid) else Session(agent_id=client.agent_id, session_id=target_sid)
+    session.update_session(**locals())


 def end_session(
@@ -271,17 +490,31 @@ def end_session(
         is_successful_reason: Session success reason.
     """
     client = Client()
-    if not client.session:
+    # Prefer context-bound session id
+    target_sid = None
+    try:
+        target_sid = current_session_id.get(None)
+    except Exception:
+        target_sid = None
+    if not target_sid and client.session:
+        target_sid = client.session.session_id
+    if not target_sid:
         return
-
-    # Wait for any pending LiteLLM callbacks before ending session
-    for provider in client.providers:
-        if hasattr(provider, '_callback') and hasattr(provider._callback, 'wait_for_pending_callbacks'):
-            logger.info("Waiting for LiteLLM callbacks to complete before ending session...")
-            provider._callback.wait_for_pending_callbacks(timeout=5.0)
-
-    client.session.update_session(is_finished=True, **locals())
-    client.clear()
+
+    # If ending the globally active session, keep existing cleanup behavior
+    if client.session and client.session.session_id == target_sid:
+        # Wait for any pending LiteLLM callbacks before ending session
+        for provider in client.providers:
+            if hasattr(provider, '_callback') and hasattr(provider._callback, 'wait_for_pending_callbacks'):
+                logger.info("Waiting for LiteLLM callbacks to complete before ending session...")
+                provider._callback.wait_for_pending_callbacks(timeout=5.0)
+        client.session.update_session(is_finished=True, **locals())
+        client.clear()
+        return
+
+    # Otherwise, end the specified session id without clearing global state
+    temp = Session(agent_id=client.agent_id, session_id=target_sid)
+    temp.update_session(is_finished=True, **locals())


 def reset_sdk() -> None:
@@ -327,6 +560,20 @@ def _auto_end_session():

 def _signal_handler(signum, frame):
     """Handle interruption signals"""
+    # Best-effort final event for signal exits
+    try:
+        try:
+            name = signal.Signals(signum).name
+        except Exception:
+            name = str(signum)
+        try:
+            stack_str = ''.join(traceback.format_stack(frame)) if frame else ''
+        except Exception:
+            stack_str = ''
+        desc = _mask_and_truncate(f"Received signal {name}\n{stack_str}")
+        _post_fatal_event(128 + signum, desc, {"signal": name, "signum": signum})
+    except Exception:
+        pass
     _auto_end_session()
     _cleanup_telemetry()
     # Re-raise the signal for default handling
@@ -487,7 +734,9 @@ def create_event(
     result: Optional[str] = None,
     cost_added: Optional[float] = None,
     model: Optional[str] = None,
-    screenshots: Optional[List[str]] = None
+    screenshots: Optional[List[str]] = None,
+    function_name: Optional[str] = None,
+    arguments: Optional[dict] = None,
 ) -> str:
     """
     Create a new event in the current step. Current step must not be finished.
@@ -498,6 +747,8 @@ def create_event(
         cost_added: Cost added by the event.
         model: Model used for the event.
         screenshots: List of screenshots encoded in base64.
+        function_name: Name of the function that created the event.
+        arguments: Arguments of the function that created the event.
     """

     client = Client()
@@ -512,7 +763,9 @@ def update_event(
     result: Optional[str] = None,
     cost_added: Optional[float] = None,
     model: Optional[str] = None,
-    screenshots: Optional[List[str]] = None
+    screenshots: Optional[List[str]] = None,
+    function_name: Optional[str] = None,
+    arguments: Optional[dict] = None,
 ) -> None:
     """
     Update the event with the given ID in the current step.
@@ -524,6 +777,8 @@ def update_event(
         cost_added: Cost added by the event.
         model: Model used for the event.
         screenshots: List of screenshots encoded in base64.
+        function_name: Name of the function that created the event.
+        arguments: Arguments of the function that created the event.
     """
     client = Client()
     if not client.session:
@@ -537,7 +792,9 @@ def end_event(
     result: Optional[str] = None,
     cost_added: Optional[float] = None,
     model: Optional[str] = None,
-    screenshots: Optional[List[str]] = None
+    screenshots: Optional[List[str]] = None,
+    function_name: Optional[str] = None,
+    arguments: Optional[dict] = None,
 ) -> None:
     """
     End the latest event in the current step.
@@ -548,6 +805,9 @@ def end_event(
         result: Result of the event.
         cost_added: Cost added by the event.
         model: Model used for the event.
+        screenshots: List of screenshots encoded in base64.
+        function_name: Name of the function that created the event.
+        arguments: Arguments of the function that created the event.
     """
     client = Client()
     if not client.session:
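A short sketch of the extended event API (assumptions: a session and, per the docstring above, an unfinished step are already active; the function name and arguments are hypothetical):

    import lucidicai as lai

    event_id = lai.create_event(
        description="tool call",
        function_name="search_docs",                   # hypothetical function name
        arguments={"query": "rate limits", "k": 3},    # JSON-serializable arguments
    )
    lai.end_event(result="3 documents returned")       # finishes the latest event in the step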
lucidicai/client.py CHANGED
@@ -69,6 +69,10 @@ class Client:

     def set_provider(self, provider: BaseProvider) -> None:
         """Set the LLM provider to track"""
+        # Avoid duplicate provider registration of the same class
+        for existing in self.providers:
+            if type(existing) is type(provider):
+                return
         self.providers.append(provider)
         provider.override()

@@ -85,6 +89,7 @@
         tags: Optional[list] = None,
         production_monitoring: Optional[bool] = False,
         session_id: Optional[str] = None,
+        experiment_id: Optional[str] = None,
     ) -> None:
         if session_id:
             # Check if it's a known session ID, maybe custom and maybe real
@@ -107,6 +112,7 @@
             "session_name": session_name,
             "task": task,
             "mass_sim_id": mass_sim_id,
+            "experiment_id": experiment_id,
             "rubrics": rubrics,
             "tags": tags,
             "session_id": session_id
@@ -124,6 +130,7 @@
             session_id=real_session_id,
             session_name=session_name,
             mass_sim_id=mass_sim_id,
+            experiment_id=experiment_id,
             task=task,
             rubrics=rubrics,
             tags=tags,
@@ -131,6 +138,16 @@
         self.initialized = True
         return self.session.session_id

+    def create_event_for_session(self, session_id: str, **kwargs) -> str:
+        """Create an event for a specific session id without mutating global session.
+
+        This avoids cross-thread races by not switching the active session on
+        the singleton client. It constructs an ephemeral Session facade to send
+        requests under the provided session id.
+        """
+        temp_session = Session(agent_id=self.agent_id, session_id=session_id)
+        return temp_session.create_event(**kwargs)
+
     def continue_session(self, session_id: str):
         if session_id in self.custom_session_id_translations:
             session_id = self.custom_session_id_translations[session_id]
@@ -146,7 +163,8 @@
             agent_id=self.agent_id,
             session_id=real_session_id
         )
-        logger.info(f"Session {data.get('session_name', '')} continuing...")
+        import logging as _logging
+        _logging.getLogger('Lucidic').info(f"Session {data.get('session_name', '')} continuing...")
         return self.session.session_id

     def init_mass_sim(self, **kwargs) -> str:
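A sketch of routing an event to a specific session without touching the globally active one, via the new facade method (the session id and description below are hypothetical):

    from lucidicai.client import Client

    client = Client()   # singleton
    client.create_event_for_session(
        "previously-created-session-id",            # hypothetical id
        description="background worker finished",
    )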
lucidicai/context.py ADDED
@@ -0,0 +1,119 @@
+"""Async-safe context helpers for session (and step, extensible).
+
+This module exposes context variables and helpers to bind a Lucidic
+session to the current execution context (threads/async tasks), so
+OpenTelemetry spans can be deterministically attributed to the correct
+session under concurrency.
+"""
+
+from contextlib import contextmanager, asynccontextmanager
+import contextvars
+from typing import Optional, Iterator, AsyncIterator, Callable, Any, Dict
+import logging
+import os
+
+
+# Context variable for the active Lucidic session id
+current_session_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar(
+    "lucidic.session_id", default=None
+)
+
+
+def set_active_session(session_id: Optional[str]) -> None:
+    """Bind the given session id to the current execution context."""
+    current_session_id.set(session_id)
+
+
+def clear_active_session() -> None:
+    """Clear any active session binding in the current execution context."""
+    current_session_id.set(None)
+
+
+@contextmanager
+def bind_session(session_id: str) -> Iterator[None]:
+    """Context manager to temporarily bind an active session id."""
+    token = current_session_id.set(session_id)
+    try:
+        yield
+    finally:
+        current_session_id.reset(token)
+
+
+@asynccontextmanager
+async def bind_session_async(session_id: str) -> AsyncIterator[None]:
+    """Async context manager to temporarily bind an active session id."""
+    token = current_session_id.set(session_id)
+    try:
+        yield
+    finally:
+        current_session_id.reset(token)
+
+
+@contextmanager
+def session(**init_params) -> Iterator[None]:
+    """All-in-one context manager: init → bind → yield → clear → end.
+
+    Notes:
+    - Ignores any provided auto_end parameter and ends the session on context exit.
+    - If LUCIDIC_DEBUG is true, logs a warning about ignoring auto_end.
+    """
+    # Lazy import to avoid circular imports
+    import lucidicai as lai  # type: ignore
+
+    # Force auto_end to False inside a context manager to control explicit end
+    user_auto_end = init_params.get('auto_end', None)
+    init_params = dict(init_params)
+    init_params['auto_end'] = False
+
+    if os.getenv('LUCIDIC_DEBUG', 'False') == 'True' and user_auto_end is not None:
+        logging.getLogger('Lucidic').warning('session(...) ignores auto_end and will end the session at context exit')
+
+    session_id = lai.init(**init_params)
+    token = current_session_id.set(session_id)
+    try:
+        yield
+    finally:
+        current_session_id.reset(token)
+        try:
+            lai.end_session()
+        except Exception:
+            # Avoid masking the original exception from the with-block
+            pass
+
+
+@asynccontextmanager
+async def session_async(**init_params) -> AsyncIterator[None]:
+    """Async counterpart of session(...)."""
+    import lucidicai as lai  # type: ignore
+
+    user_auto_end = init_params.get('auto_end', None)
+    init_params = dict(init_params)
+    init_params['auto_end'] = False
+
+    if os.getenv('LUCIDIC_DEBUG', 'False') == 'True' and user_auto_end is not None:
+        logging.getLogger('Lucidic').warning('session_async(...) ignores auto_end and will end the session at context exit')
+
+    session_id = lai.init(**init_params)
+    token = current_session_id.set(session_id)
+    try:
+        yield
+    finally:
+        current_session_id.reset(token)
+        try:
+            lai.end_session()
+        except Exception:
+            pass
+
+
+def run_session(fn: Callable[..., Any], *fn_args: Any, init_params: Optional[Dict[str, Any]] = None, **fn_kwargs: Any) -> Any:
+    """Run a callable within a full Lucidic session lifecycle context."""
+    with session(**(init_params or {})):
+        return fn(*fn_args, **fn_kwargs)
+
+
+def run_in_session(session_id: str, fn: Callable[..., Any], *fn_args: Any, **fn_kwargs: Any) -> Any:
+    """Run a callable with a bound session id. Does not end the session."""
+    with bind_session(session_id):
+        return fn(*fn_args, **fn_kwargs)
+
+
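A hedged usage sketch of these context helpers as re-exported from the package (the `session_name` init parameter, the worker functions, and the existing session id are assumptions):

    import lucidicai as lai

    def run_item():          # hypothetical workload
        ...

    def run_followup():      # hypothetical workload
        ...

    existing_session_id = "a-previously-created-session-id"   # hypothetical

    # Full lifecycle: init -> bind -> run -> end, even if the block raises
    with lai.session(session_name="batch-item-1"):
        run_item()

    # Temporarily bind an existing session id without ending it
    with lai.bind_session(existing_session_id):
        run_followup()

    # Equivalent callable-based forms
    lai.run_session(run_item, init_params={"session_name": "batch-item-2"})
    lai.run_in_session(existing_session_id, run_followup)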
lucidicai/decorators.py CHANGED
@@ -5,6 +5,7 @@ import inspect
 import json
 import logging
 from typing import Any, Callable, Optional, TypeVar, Union
+from collections.abc import Iterable

 from .client import Client
 from .errors import LucidicNotInitializedError
@@ -219,27 +220,44 @@ def event(

             # Build event description from inputs if not provided
             event_desc = description
+            function_name = func.__name__
+
+            # Get function signature
+            sig = inspect.signature(func)
+            bound_args = sig.bind(*args, **kwargs)
+            bound_args.apply_defaults()
+
+            def serialize(value):
+                if isinstance(value, str):
+                    return value
+                if isinstance(value, int):
+                    return value
+                if isinstance(value, float):
+                    return value
+                if isinstance(value, bool):
+                    return value
+                if isinstance(value, dict):
+                    return {k: serialize(v) for k, v in value.items()}
+                if isinstance(value, Iterable):
+                    return [serialize(v) for v in value]
+                return str(value)
+
+            # Construct JSONable object of args
+            args_dict = {
+                param_name: serialize(param_value) # Recursive - maybe change later
+                for param_name, param_value in bound_args.arguments.items()
+            }
+
             if not event_desc:
-                # Get function signature
-                sig = inspect.signature(func)
-                bound_args = sig.bind(*args, **kwargs)
-                bound_args.apply_defaults()
-
-                # Create string representation of inputs
-                input_parts = []
-                for param_name, param_value in bound_args.arguments.items():
-                    try:
-                        input_parts.append(f"{param_name}={repr(param_value)}")
-                    except Exception:
-                        input_parts.append(f"{param_name}=<{type(param_value).__name__}>")
-
-                event_desc = f"{func.__name__}({', '.join(input_parts)})"
+                event_desc = f"Function {function_name}({json.dumps(args_dict)})"

             # Create the event
             event_id = create_event(
                 description=event_desc,
                 model=model,
-                cost_added=cost_added
+                cost_added=cost_added,
+                function_name=function_name,
+                arguments=args_dict,
             )
             tok = _current_event.set(event_id)
             try:
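A sketch of what the reworked decorator path implies for a decorated function (assuming the decorator is applied factory-style, as its keyword parameters suggest, and that a session and step are already active):

    import lucidicai as lai

    @lai.event()
    def add(a: int, b: int) -> int:
        return a + b

    add(2, 3)
    # The created event carries function_name="add" and arguments={"a": 2, "b": 3};
    # with no explicit description it defaults to 'Function add({"a": 2, "b": 3})'.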
lucidicai/event.py CHANGED
@@ -48,7 +48,9 @@ class Event:
             "cost_added": kwargs.get("cost_added", None),
             "model": kwargs.get("model", None),
             "nscreenshots": len(self.screenshots) + num_new_screenshots,
-            "duration": kwargs.get("duration", None)
+            "duration": kwargs.get("duration", None),
+            "function_name": kwargs.get("function_name", None),
+            "arguments": kwargs.get("arguments", None),
         }

     def _upload_screenshots(self, **kwargs) -> None: