lucidicai 1.3.5__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lucidicai/client.py CHANGED
@@ -1,22 +1,28 @@
  import os
  import time
+ import threading
  from datetime import datetime, timezone
- from typing import Optional, Tuple
+ from typing import Optional, Tuple, Dict, Any

  import requests
  import logging
+ import json
  from requests.adapters import HTTPAdapter, Retry
  from urllib3.util import Retry


  from .errors import APIKeyVerificationError, InvalidOperationError, LucidicNotInitializedError
- from .telemetry.base_provider import BaseProvider
  from .session import Session
  from .singleton import singleton, clear_singletons
  from .lru import LRUCache
+ from .event import Event
+ from .event_queue import EventQueue
+ import uuid

  NETWORK_RETRIES = 3

+ logger = logging.getLogger("Lucidic")
+

  @singleton
  class Client:
@@ -25,16 +31,16 @@ class Client:
  api_key: str,
  agent_id: str,
  ):
- self.base_url = "https://analytics.lucidic.ai/api" if not (os.getenv("LUCIDIC_DEBUG", 'False') == 'True') else "http://localhost:8000/api"
+ self.base_url = "https://backend.lucidic.ai/api" if not (os.getenv("LUCIDIC_DEBUG", 'False') == 'True') else "http://localhost:8000/api"
  self.initialized = False
  self.session = None
  self.previous_sessions = LRUCache(500) # For LRU cache of previously initialized sessions
  self.custom_session_id_translations = LRUCache(500) # For translations of custom session IDs to real session IDs
- self.providers = []
  self.api_key = api_key
  self.agent_id = agent_id
  self.masking_function = None
  self.auto_end = False # Default to False until explicitly set during init
+ self._shutdown = False # Flag to prevent requests after shutdown
  self.request_session = requests.Session()
  retry_cfg = Retry(
  total=3, # 3 attempts in total
@@ -46,6 +52,19 @@ class Client:
  self.request_session.mount("https://", adapter)
  self.set_api_key(api_key)
  self.prompts = dict()
+ # Initialize event queue (non-blocking event delivery)
+ self._event_queue = EventQueue(self)
+
+ # Track telemetry state to prevent re-initialization
+ # These are process-wide singletons for telemetry
+ self._telemetry_lock = threading.Lock() # Prevent race conditions
+ self._tracer_provider = None
+ self._instrumentors = {} # Dict to track which providers are instrumented
+ self._telemetry_initialized = False
+
+ # Track active sessions to prevent premature EventQueue shutdown
+ self._active_sessions_lock = threading.Lock()
+ self._active_sessions = set() # Set of active session IDs

  def set_api_key(self, api_key: str):
  self.api_key = api_key
@@ -56,34 +75,23 @@ class Client:
  raise APIKeyVerificationError("Invalid API Key")

  def clear(self):
- self.undo_overrides()
+ # Clean up singleton state
  clear_singletons()
  self.initialized = False
  self.session = None
- self.providers = []
  del self

  def verify_api_key(self, base_url: str, api_key: str) -> Tuple[str, str]:
  data = self.make_request('verifyapikey', 'GET', {}) # TODO: Verify against agent ID provided
  return data["project"], data["project_id"]

- def set_provider(self, provider: BaseProvider) -> None:
- """Set the LLM provider to track"""
- # Avoid duplicate provider registration of the same class
- for existing in self.providers:
- if type(existing) is type(provider):
- return
- self.providers.append(provider)
- provider.override()
-
- def undo_overrides(self):
- for provider in self.providers:
- provider.undo_override()
+ def set_provider(self, provider) -> None:
+ """Deprecated: manual provider overrides removed (no-op)."""
+ return

  def init_session(
  self,
  session_name: str,
- mass_sim_id: Optional[str] = None,
  task: Optional[str] = None,
  rubrics: Optional[list] = None,
  tags: Optional[list] = None,
@@ -111,7 +119,6 @@ class Client:
  "agent_id": self.agent_id,
  "session_name": session_name,
  "task": task,
- "mass_sim_id": mass_sim_id,
  "experiment_id": experiment_id,
  "rubrics": rubrics,
  "tags": tags,
@@ -129,47 +136,47 @@ class Client:
  agent_id=self.agent_id,
  session_id=real_session_id,
  session_name=session_name,
- mass_sim_id=mass_sim_id,
  experiment_id=experiment_id,
  task=task,
  rubrics=rubrics,
  tags=tags,
  )
+
+ # Track this as an active session
+ with self._active_sessions_lock:
+ self._active_sessions.add(real_session_id)
+ if logger.isEnabledFor(logging.DEBUG):
+ logger.debug(f"[Client] Added active session {real_session_id[:8]}..., total: {len(self._active_sessions)}")
+
  self.initialized = True
  return self.session.session_id

+ def mark_session_inactive(self, session_id: str) -> None:
+ """Mark a session as inactive. Used when ending a session."""
+ with self._active_sessions_lock:
+ if session_id in self._active_sessions:
+ self._active_sessions.discard(session_id)
+ if logger.isEnabledFor(logging.DEBUG):
+ logger.debug(f"[Client] Removed active session {session_id[:8]}..., remaining: {len(self._active_sessions)}")
+
+ def has_active_sessions(self) -> bool:
+ """Check if there are any active sessions."""
+ with self._active_sessions_lock:
+ return len(self._active_sessions) > 0
+
  def create_event_for_session(self, session_id: str, **kwargs) -> str:
- """Create an event for a specific session id without mutating global session.
+ """Create an event for a specific session id (new typed model).

- This avoids cross-thread races by not switching the active session on
- the singleton client. It constructs an ephemeral Session facade to send
- requests under the provided session id.
+ This avoids mutating the global session and directly uses the new
+ event API. Prefer passing typed fields and a 'type' argument.
  """
- temp_session = Session(agent_id=self.agent_id, session_id=session_id)
- return temp_session.create_event(**kwargs)
-
- def continue_session(self, session_id: str):
- if session_id in self.custom_session_id_translations:
- session_id = self.custom_session_id_translations[session_id]
- if self.session and self.session.session_id == session_id:
- return self.session.session_id
- if self.session:
- self.previous_sessions[self.session.session_id] = self.session
- data = self.make_request('continuesession', 'POST', {"session_id": session_id})
- real_session_id = data["session_id"]
- if session_id != real_session_id:
- self.custom_session_id_translations[session_id] = real_session_id
- self.session = Session(
- agent_id=self.agent_id,
- session_id=real_session_id
- )
- import logging as _logging
- _logging.getLogger('Lucidic').info(f"Session {data.get('session_name', '')} continuing...")
- return self.session.session_id
+ kwargs = dict(kwargs)
+ kwargs['session_id'] = session_id
+ return self.create_event(**kwargs)

- def init_mass_sim(self, **kwargs) -> str:
+ def create_experiment(self, **kwargs) -> str:
  kwargs['agent_id'] = self.agent_id
- return self.make_request('initmasssim', 'POST', kwargs)['mass_sim_id']
+ return self.make_request('createexperiment', 'POST', kwargs)['experiment_id']

  def get_prompt(self, prompt_name, cache_ttl, label) -> str:
  current_time = time.time()
@@ -194,6 +201,13 @@ class Client:
  return prompt

  def make_request(self, endpoint, method, data):
+ # Check if client is shutting down
+ if self._shutdown:
+ logger.warning(f"[HTTP] Attempted request after shutdown: {endpoint}")
+ return {}
+
+ data = {k: v for k, v in data.items() if v is not None}
+
  http_methods = {
  "GET": lambda data: self.request_session.get(f"{self.base_url}/{endpoint}", params=data),
  "POST": lambda data: self.request_session.post(f"{self.base_url}/{endpoint}", json=data),
@@ -201,7 +215,14 @@ class Client:
  "DELETE": lambda data: self.request_session.delete(f"{self.base_url}/{endpoint}", params=data),
  } # TODO: make into enum
  data['current_time'] = datetime.now().astimezone(timezone.utc).isoformat()
+ # Debug: print final payload about to be sent
+ try:
+ dbg = json.dumps({"endpoint": endpoint, "method": method, "body": data}, ensure_ascii=False)
+ logger.debug(f"[HTTP] Sending request: {dbg}")
+ except Exception:
+ logger.debug(f"[HTTP] Sending request to {endpoint} {method}")
  func = http_methods[method]
+ response = None
  for _ in range(NETWORK_RETRIES):
  try:
  response = func(data)
@@ -222,6 +243,150 @@ class Client:
  raise InvalidOperationError(f"Request to Lucidic AI Backend failed: {e.response.text}")
  return response.json()

+ # ==== New Typed Event Model Helpers ====
+ def _build_payload(self, type: str, kwargs: Dict[str, Any]) -> Dict[str, Any]:
+ """Build type-specific payload and place unrecognized keys in misc."""
+ # Remove non-payload top-level fields from kwargs copy
+ non_payload_fields = [
+ 'parent_event_id', 'tags', 'metadata', 'occurred_at', 'duration', 'session_id',
+ 'event_id'
+ ]
+ for field in non_payload_fields:
+ if field in kwargs:
+ kwargs.pop(field, None)
+
+ if type == "llm_generation":
+ return self._build_llm_payload(kwargs)
+ elif type == "function_call":
+ return self._build_function_payload(kwargs)
+ elif type == "error_traceback":
+ return self._build_error_payload(kwargs)
+ else:
+ return self._build_generic_payload(kwargs)
+
+ def _build_llm_payload(self, kwargs: Dict[str, Any]) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {
+ "request": {},
+ "response": {},
+ "usage": {},
+ "status": "ok",
+ "misc": {}
+ }
+ # Request fields
+ for field in ["provider", "model", "messages", "params"]:
+ if field in kwargs:
+ payload["request"][field] = kwargs.pop(field)
+ # Response fields
+ for field in ["output", "messages", "tool_calls", "thinking", "raw"]:
+ if field in kwargs:
+ payload["response"][field] = kwargs.pop(field)
+ # Usage fields
+ for field in ["input_tokens", "output_tokens", "cache", "cost"]:
+ if field in kwargs:
+ payload["usage"][field] = kwargs.pop(field)
+ # Status / error
+ if 'status' in kwargs:
+ payload['status'] = kwargs.pop('status')
+ if 'error' in kwargs:
+ payload['error'] = kwargs.pop('error')
+ payload["misc"] = kwargs
+ return payload
+
+ def _build_function_payload(self, kwargs: Dict[str, Any]) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {
+ "function_name": kwargs.pop("function_name", "unknown"),
+ "arguments": kwargs.pop("arguments", {}),
+ "return_value": kwargs.pop("return_value", None),
+ "misc": kwargs
+ }
+ return payload
+
+ def _build_error_payload(self, kwargs: Dict[str, Any]) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {
+ "error": kwargs.pop("error", ""),
+ "traceback": kwargs.pop("traceback", ""),
+ "misc": kwargs
+ }
+ return payload
+
+ def _build_generic_payload(self, kwargs: Dict[str, Any]) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {
+ "details": kwargs.pop("details", kwargs.pop("description", "")),
+ "misc": kwargs
+ }
+ return payload
+
+ def create_event(self, type: str = "generic", **kwargs) -> str:
+ """Create a typed event (non-blocking) and return client-side UUID.
+
+ - Generates and returns client_event_id immediately
+ - Enqueues the full event for background processing via EventQueue
+ - Supports parent nesting via client-side parent_event_id
+ - Handles client-side blob thresholding in the queue
+ """
+ # Resolve session_id: explicit -> context -> current session
+ session_id = kwargs.pop('session_id', None)
+ if not session_id:
+ try:
+ from .context import current_session_id
+ session_id = current_session_id.get(None)
+ except Exception:
+ session_id = None
+ if not session_id and self.session:
+ session_id = self.session.session_id
+ if not session_id:
+ raise InvalidOperationError("No active session for event creation")
+
+ # Parent event id from kwargs or parent context (client-side)
+ parent_event_id = kwargs.get('parent_event_id')
+ if not parent_event_id:
+ try:
+ from .context import current_parent_event_id
+ parent_event_id = current_parent_event_id.get(None)
+ except Exception:
+ parent_event_id = None
+
+ # Build payload (typed)
+ payload = self._build_payload(type, dict(kwargs))
+
+ # Occurred-at
+ from datetime import datetime as _dt
+ _occ = kwargs.get("occurred_at")
+ if isinstance(_occ, str):
+ occurred_at_str = _occ
+ elif isinstance(_occ, _dt):
+ if _occ.tzinfo is None:
+ local_tz = _dt.now().astimezone().tzinfo
+ occurred_at_str = _occ.replace(tzinfo=local_tz).isoformat()
+ else:
+ occurred_at_str = _occ.isoformat()
+ else:
+ occurred_at_str = _dt.now().astimezone().isoformat()
+
+ # Client-side UUIDs
+ client_event_id = kwargs.get('event_id') or str(uuid.uuid4())
+
+ # Build request body with client ids
+ event_request: Dict[str, Any] = {
+ "session_id": session_id,
+ "client_event_id": client_event_id,
+ "client_parent_event_id": parent_event_id,
+ "type": type,
+ "occurred_at": occurred_at_str,
+ "duration": kwargs.get("duration"),
+ "tags": kwargs.get("tags", []),
+ "metadata": kwargs.get("metadata", {}),
+ "payload": payload,
+ }
+
+ # Queue for background processing and return immediately
+ self._event_queue.queue_event(event_request)
+ return client_event_id
+
+ def update_event(self, event_id: str, type: Optional[str] = None, **kwargs) -> str:
+ """Deprecated: events are immutable in the new model."""
+ raise InvalidOperationError("update_event is no longer supported. Events are immutable.")
+
  def mask(self, data):
  if not self.masking_function:
  return data
@@ -232,4 +397,114 @@ class Client:
  except Exception as e:
  logger = logging.getLogger('Lucidic')
  logger.error(f"Error in custom masking function: {repr(e)}")
- return "<Error in custom masking function, this is a fully-masked placeholder>"
+ return "<Error in custom masking function, this is a fully-masked placeholder>"
+
+ def initialize_telemetry(self, providers: list) -> bool:
+ """
+ Initialize telemetry with the given providers.
+ This is a true singleton - only the first call creates the TracerProvider.
+ Subsequent calls only add new instrumentors if needed.
+
+ Args:
+ providers: List of provider names to instrument
+
+ Returns:
+ True if telemetry was successfully initialized or already initialized
+ """
+ with self._telemetry_lock:
+ try:
+ # Create TracerProvider only once per process
+ if self._tracer_provider is None:
+ logger.debug("[Telemetry] Creating TracerProvider (first initialization)")
+
+ from opentelemetry import trace
+ from opentelemetry.sdk.trace import TracerProvider
+ from opentelemetry.sdk.trace.export import BatchSpanProcessor
+ from opentelemetry.sdk.resources import Resource
+
+ resource = Resource.create({
+ "service.name": "lucidic-ai",
+ "service.version": "1.0.0",
+ "lucidic.agent_id": self.agent_id,
+ })
+
+ # Create provider with shutdown_on_exit=False for our control
+ self._tracer_provider = TracerProvider(resource=resource, shutdown_on_exit=False)
+
+ # Add context capture processor FIRST
+ from .telemetry.context_capture_processor import ContextCaptureProcessor
+ context_processor = ContextCaptureProcessor()
+ self._tracer_provider.add_span_processor(context_processor)
+
+ # Add exporter processor for sending spans to Lucidic
+ from .telemetry.lucidic_exporter import LucidicSpanExporter
+ exporter = LucidicSpanExporter()
+ # Configure for faster export: 100ms interval instead of default 5000ms
+ # This matches the TypeScript SDK's flush interval pattern
+ export_processor = BatchSpanProcessor(
+ exporter,
+ schedule_delay_millis=100, # Export every 100ms
+ max_export_batch_size=512, # Reasonable batch size
+ max_queue_size=2048 # Larger queue for burst handling
+ )
+ self._tracer_provider.add_span_processor(export_processor)
+
+ # Set as global provider (only happens once)
+ try:
+ trace.set_tracer_provider(self._tracer_provider)
+ logger.debug("[Telemetry] Set global TracerProvider")
+ except Exception as e:
+ # This is OK - might already be set
+ logger.debug(f"[Telemetry] Global provider already set: {e}")
+
+ self._telemetry_initialized = True
+
+ # Now instrument the requested providers (can happen multiple times)
+ if providers:
+ from .telemetry.telemetry_init import instrument_providers
+ new_instrumentors = instrument_providers(providers, self._tracer_provider, self._instrumentors)
+ # Update our tracking dict
+ self._instrumentors.update(new_instrumentors)
+ logger.debug(f"[Telemetry] Instrumented providers: {list(new_instrumentors.keys())}")
+
+ return True
+
+ except Exception as e:
+ logger.error(f"[Telemetry] Failed to initialize: {e}")
+ return False
+
+ def flush_telemetry(self, timeout_seconds: float = 2.0) -> bool:
+ """
+ Flush all OpenTelemetry spans to ensure they're exported.
+
+ This method blocks until all buffered spans in the TracerProvider
+ are exported or the timeout is reached. Critical for ensuring
+ LLM generation events are not lost during shutdown.
+
+ Handles both active and shutdown TracerProviders gracefully.
+
+ Args:
+ timeout_seconds: Maximum time to wait for flush completion
+
+ Returns:
+ True if flush succeeded, False if timeout occurred
+ """
+ try:
+ if self._tracer_provider:
+ # Check if provider is already shutdown
+ if hasattr(self._tracer_provider, '_shutdown') and self._tracer_provider._shutdown:
+ logger.debug("[Telemetry] TracerProvider already shutdown, skipping flush")
+ return True
+
+ # Convert seconds to milliseconds for OpenTelemetry
+ timeout_millis = int(timeout_seconds * 1000)
+ success = self._tracer_provider.force_flush(timeout_millis)
+ if success:
+ logger.debug(f"[Telemetry] Successfully flushed spans (timeout={timeout_seconds}s)")
+ else:
+ logger.warning(f"[Telemetry] Flush timed out after {timeout_seconds}s")
+ return success
+ return True # No provider = nothing to flush = success
+ except Exception as e:
+ logger.error(f"[Telemetry] Failed to flush spans: {e}")
+ return False
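
The client.py changes above replace the provider-override and mass-sim APIs with a typed, queue-backed event model: create_event builds a type-specific payload, hands it to the EventQueue, and returns a client-side UUID immediately. A minimal usage sketch of that model, based only on methods shown in this diff (direct Client construction and the credential values are illustrative assumptions; the SDK's public init helper may wrap this):

    from lucidicai.client import Client

    client = Client(api_key="lk-...", agent_id="agent-123")  # hypothetical credentials
    client.init_session(session_name="demo-session")

    # Typed event: recognized kwargs fill the payload; the rest land in "misc"
    event_id = client.create_event(
        type="function_call",
        function_name="search_docs",
        arguments={"query": "quarterly report"},
        return_value={"hits": 3},
    )  # returns the client-side UUID without waiting for the backend

    # Events are immutable in 2.x; update_event now raises InvalidOperationError
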
lucidicai/constants.py CHANGED
@@ -1,33 +1,6 @@
- """Constants used throughout the Lucidic SDK"""
+ """Constants used throughout the Lucidic SDK (steps removed)."""

- # Step states
- class StepState:
- """Constants for step states"""
- RUNNING = "Running: {agent_name}"
- FINISHED = "Finished: {agent_name}"
- HANDOFF = "Handoff: {agent_name}"
- TRANSFERRED = "Transferred to {agent_name}"
- ERROR = "Error in {agent_name}"
-
- # Step actions
- class StepAction:
- """Constants for step actions"""
- EXECUTE = "Execute {agent_name}"
- TRANSFER = "Transfer from {from_agent}"
- HANDOFF = "Handoff from {from_agent}"
- DELIVERED = "{agent_name} finished processing"
- FAILED = "Agent execution failed"
-
- # Step goals
- class StepGoal:
- """Constants for step goals"""
- PROCESS_REQUEST = "Process request"
- CONTINUE_PROCESSING = "Continue processing"
- CONTINUE_WITH = "Continue with {agent_name}"
- PROCESSING_FINISHED = "Processing finished"
- ERROR = "Error: {error}"
-
- # Event descriptions
+ # Event descriptions (generic)
  class EventDescription:
  """Constants for event descriptions"""
  TOOL_CALL = "Tool call: {tool_name}"
@@ -48,12 +21,9 @@ class LogMessage:
  """Constants for log messages"""
  SESSION_INIT = "Session initialized successfully"
  SESSION_CONTINUE = "Session {session_id} continuing..."
- INSTRUMENTATION_ENABLED = "OpenAI Agents SDK instrumentation enabled"
- INSTRUMENTATION_DISABLED = "OpenAI Agents SDK instrumentation disabled"
- NO_ACTIVE_SESSION = "No active session for agent tracking"
+ INSTRUMENTATION_ENABLED = "Instrumentation enabled"
+ INSTRUMENTATION_DISABLED = "Instrumentation disabled"
+ NO_ACTIVE_SESSION = "No active session for tracking"
  HANDLER_INTERCEPTED = "Intercepted {method} call"
- AGENT_RUNNING = "Running agent '{agent_name}' with prompt: {prompt}"
- AGENT_COMPLETED = "Agent completed successfully"
- STEP_CREATED = "Created step: {step_id}"
- STEP_ENDED = "Step ended: {step_id}"
- HANDOFF_DETECTED = "Handoff chain detected: {chain}"
+ AGENT_RUNNING = "Running agent '{agent_name}'"
+ AGENT_COMPLETED = "Agent completed successfully"
lucidicai/context.py CHANGED
@@ -19,6 +19,12 @@ current_session_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextV
  )


+ # NEW: Context variable for parent event nesting
+ current_parent_event_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar(
+ "lucidic.parent_event_id", default=None
+ )
+
+
  def set_active_session(session_id: Optional[str]) -> None:
  """Bind the given session id to the current execution context."""
  current_session_id.set(session_id)
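
This new contextvar is what Client.create_event consults when no explicit parent_event_id is passed, so nested events pick up their parent implicitly. A short sketch of that flow, assuming the event_context helper added in the next hunk is importable from lucidicai.context and using hypothetical credentials:

    from lucidicai.client import Client
    from lucidicai.context import event_context

    client = Client(api_key="lk-...", agent_id="agent-123")  # hypothetical credentials
    client.init_session(session_name="demo-session")

    parent_id = client.create_event(type="generic", details="tool pipeline")

    # Events created inside the block are sent with client_parent_event_id = parent_id
    with event_context(parent_id):
        client.create_event(type="function_call", function_name="fetch_page", arguments={})
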
@@ -49,6 +55,25 @@ async def bind_session_async(session_id: str) -> AsyncIterator[None]:
  current_session_id.reset(token)


+ # NEW: Parent event context managers
+ @contextmanager
+ def event_context(event_id: str) -> Iterator[None]:
+ token = current_parent_event_id.set(event_id)
+ try:
+ yield
+ finally:
+ current_parent_event_id.reset(token)
+
+
+ @asynccontextmanager
+ async def event_context_async(event_id: str) -> AsyncIterator[None]:
+ token = current_parent_event_id.set(event_id)
+ try:
+ yield
+ finally:
+ current_parent_event_id.reset(token)
+
+
  @contextmanager
  def session(**init_params) -> Iterator[None]:
  """All-in-one context manager: init → bind → yield → clear → end.