lucidicai 1.3.5__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,336 @@
+ import os
+ import logging
+ import time
+ from typing import Union, List, Dict, Any, Optional, overload, Tuple, Literal
+ from dotenv import load_dotenv
+
+ from .client import Client
+ from .errors import APIKeyVerificationError, FeatureFlagError
+
+ logger = logging.getLogger("Lucidic")
+
+ # Cache implementation
+ class FeatureFlagCache:
+     def __init__(self):
+         self._cache: Dict[str, tuple[Any, float]] = {}
+         self._default_ttl = 300 # 5 minutes
+
+     def get(self, key: str) -> Optional[Any]:
+         if key in self._cache:
+             value, expiry = self._cache[key]
+             if time.time() < expiry:
+                 return value
+             else:
+                 del self._cache[key]
+         return None
+
+     def set(self, key: str, value: Any, ttl: int = None):
+         if ttl is None:
+             ttl = self._default_ttl
+         if ttl > 0:
+             self._cache[key] = (value, time.time() + ttl)
+
+     def clear(self):
+         self._cache.clear()
+
+ # Global cache instance
+ _flag_cache = FeatureFlagCache()
+
+ # Sentinel value to distinguish None from missing
+ MISSING = object()
+
+ # Function overloads for type safety
+ @overload
+ def get_feature_flag(
+     flag_name: str,
+     default: Any = ...,
+     *,
+     return_missing: Literal[False] = False,
+     cache_ttl: Optional[int] = 300,
+     api_key: Optional[str] = None,
+     agent_id: Optional[str] = None,
+ ) -> Any:
+     """Get a single feature flag."""
+     ...
+
+ @overload
+ def get_feature_flag(
+     flag_name: str,
+     default: Any = ...,
+     *,
+     return_missing: Literal[True],
+     cache_ttl: Optional[int] = 300,
+     api_key: Optional[str] = None,
+     agent_id: Optional[str] = None,
+ ) -> Tuple[Any, List[str]]:
+     """Get a single feature flag with missing info."""
+     ...
+
+ @overload
+ def get_feature_flag(
+     flag_name: List[str],
+     defaults: Optional[Dict[str, Any]] = None,
+     *,
+     return_missing: Literal[False] = False,
+     cache_ttl: Optional[int] = 300,
+     api_key: Optional[str] = None,
+     agent_id: Optional[str] = None,
+ ) -> Dict[str, Any]:
+     """Get multiple feature flags."""
+     ...
+
+ @overload
+ def get_feature_flag(
+     flag_name: List[str],
+     defaults: Optional[Dict[str, Any]] = None,
+     *,
+     return_missing: Literal[True],
+     cache_ttl: Optional[int] = 300,
+     api_key: Optional[str] = None,
+     agent_id: Optional[str] = None,
+ ) -> Tuple[Dict[str, Any], List[str]]:
+     """Get multiple feature flags with missing info."""
+     ...
+
+ def get_feature_flag(
+     flag_name: Union[str, List[str]],
+     default_or_defaults: Any = MISSING,
+     *,
+     return_missing: bool = False,
+     cache_ttl: Optional[int] = 300,
+     api_key: Optional[str] = None,
+     agent_id: Optional[str] = None,
+ ) -> Union[Any, Tuple[Any, List[str]], Dict[str, Any], Tuple[Dict[str, Any], List[str]]]:
+     """
+     Get feature flag(s) from backend. Raises FeatureFlagError on failure unless default provided.
+
+     Args:
+         flag_name: Single flag name (str) or list of flag names
+         default_or_defaults:
+             - If flag_name is str: default value for that flag (optional)
+             - If flag_name is List[str]: dict of defaults {flag_name: default_value}
+         cache_ttl: Cache time-to-live in seconds (0 to disable, -1 for forever)
+         api_key: Optional API key
+         agent_id: Optional agent ID
+
+     Returns:
+         - If flag_name is str: The flag value (or tuple with missing list if return_missing=True)
+         - If flag_name is List[str]: Dict mapping flag_name -> value (or tuple with missing list if return_missing=True)
+
+     Raises:
+         FeatureFlagError: If fetch fails and no default provided
+         APIKeyVerificationError: If credentials missing
+
+     Examples:
+         # Single flag with default
+         retries = lai.get_feature_flag("max_retries", default=3)
+
+         # Single flag without default (can raise)
+         retries = lai.get_feature_flag("max_retries")
+
+         # Multiple flags
+         flags = lai.get_feature_flag(
+             ["max_retries", "timeout"],
+             defaults={"max_retries": 3}
+         )
+     """
+     load_dotenv()
+
+     # Determine if single or batch
+     is_single = isinstance(flag_name, str)
+     flag_names = [flag_name] if is_single else flag_name
+
+     # Parse defaults
+     if is_single:
+         has_default = default_or_defaults is not MISSING
+         defaults = {flag_name: default_or_defaults} if has_default else {}
+     else:
+         defaults = default_or_defaults if default_or_defaults not in (None, MISSING) else {}
+
+     # Track missing flags
+     missing_flags = []
+
+     # Check cache first
+     uncached_flags = []
+     cached_results = {}
+
+     if cache_ttl != 0:
+         for name in flag_names:
+             cache_key = f"{agent_id}:{name}"
+             cached_value = _flag_cache.get(cache_key)
+             if cached_value is not None:
+                 cached_results[name] = cached_value
+             else:
+                 uncached_flags.append(name)
+     else:
+         uncached_flags = flag_names
+
+     # Fetch uncached flags if needed
+     if uncached_flags:
+         # Get credentials
+         if api_key is None:
+             api_key = os.getenv("LUCIDIC_API_KEY", None)
+             if api_key is None:
+                 raise APIKeyVerificationError(
+                     "Make sure to either pass your API key or set the LUCIDIC_API_KEY environment variable."
+                 )
+
+         if agent_id is None:
+             agent_id = os.getenv("LUCIDIC_AGENT_ID", None)
+             if agent_id is None:
+                 raise APIKeyVerificationError(
+                     "Lucidic agent ID not specified. Make sure to either pass your agent ID or set the LUCIDIC_AGENT_ID environment variable."
+                 )
+
+         # Get client
+         client = Client()
+         if not getattr(client, 'initialized', False):
+             client = Client(api_key=api_key, agent_id=agent_id)
+         else:
+             if api_key != client.api_key or agent_id != client.agent_id:
+                 client.set_api_key(api_key)
+                 client.agent_id = agent_id
+
+         try:
+             # Make batch API call
+             response = client.make_request(
+                 'getfeatureflags',
+                 'POST',
+                 {'flag_names': uncached_flags}
+             )
+
+             # Process response and update cache
+             for name in uncached_flags:
+                 if name in response['flags']:
+                     if response['flags'][name]['found']:
+                         value = response['flags'][name]['value']
+                         cached_results[name] = value
+
+                         # Cache the value
+                         if cache_ttl != 0:
+                             cache_key = f"{agent_id}:{name}"
+                             _flag_cache.set(cache_key, value, ttl=cache_ttl if cache_ttl > 0 else None)
+                     else:
+                         # Flag not found on server
+                         missing_flags.append(name)
+                         logger.warning(f"Feature flag '{name}' not found on server")
+
+         except Exception as e:
+             # Log the error
+             logger.error(f"Failed to fetch feature flags: {e}")
+
+             # Check if we have defaults for missing flags
+             for name in uncached_flags:
+                 if name not in cached_results:
+                     if name in defaults:
+                         cached_results[name] = defaults[name]
+                     elif is_single and not return_missing:
+                         # Single flag without default and not returning missing - raise error
+                         raise FeatureFlagError(f"'{name}': {e}") from e
+
+     # Build final result
+     result = {}
+     for name in flag_names:
+         if name in cached_results:
+             result[name] = cached_results[name]
+         elif name in defaults:
+             result[name] = defaults[name]
+         else:
+             # No value and no default
+             missing_flags.append(name)
+             if is_single and not return_missing:
+                 raise FeatureFlagError(f"'{name}' not found and no default provided")
+             else:
+                 result[name] = None
+
+     # Return based on input type and return_missing flag
+     if return_missing:
+         return (result[flag_names[0]] if is_single else result, missing_flags)
+     else:
+         return result[flag_names[0]] if is_single else result
+
+
+ # Typed convenience functions
+ def get_bool_flag(flag_name: str, default: Optional[bool] = None, **kwargs) -> bool:
+     """
+     Get a boolean feature flag with type validation.
+
+     Raises:
+         FeatureFlagError: If fetch fails and no default provided
+         TypeError: If flag value is not a boolean
+     """
+     value = get_feature_flag(flag_name, default=default if default is not None else MISSING, **kwargs)
+     if not isinstance(value, bool):
+         if default is not None:
+             logger.warning(f"Feature flag '{flag_name}' is not a boolean, using default")
+             return default
+         raise TypeError(f"Feature flag '{flag_name}' expected boolean, got {type(value).__name__}")
+     return value
+
+
+ def get_int_flag(flag_name: str, default: Optional[int] = None, **kwargs) -> int:
+     """
+     Get an integer feature flag with type validation.
+
+     Raises:
+         FeatureFlagError: If fetch fails and no default provided
+         TypeError: If flag value is not an integer
+     """
+     value = get_feature_flag(flag_name, default=default if default is not None else MISSING, **kwargs)
+     if not isinstance(value, int) or isinstance(value, bool): # bool is subclass of int
+         if default is not None:
+             logger.warning(f"Feature flag '{flag_name}' is not an integer, using default")
+             return default
+         raise TypeError(f"Feature flag '{flag_name}' expected integer, got {type(value).__name__}")
+     return value
+
+
+ def get_float_flag(flag_name: str, default: Optional[float] = None, **kwargs) -> float:
+     """
+     Get a float feature flag with type validation.
+
+     Raises:
+         FeatureFlagError: If fetch fails and no default provided
+         TypeError: If flag value is not a float
+     """
+     value = get_feature_flag(flag_name, default=default if default is not None else MISSING, **kwargs)
+     if not isinstance(value, (int, float)) or isinstance(value, bool):
+         if default is not None:
+             logger.warning(f"Feature flag '{flag_name}' is not a float, using default")
+             return default
+         raise TypeError(f"Feature flag '{flag_name}' expected float, got {type(value).__name__}")
+     return float(value)
+
+
+ def get_string_flag(flag_name: str, default: Optional[str] = None, **kwargs) -> str:
+     """
+     Get a string feature flag with type validation.
+
+     Raises:
+         FeatureFlagError: If fetch fails and no default provided
+         TypeError: If flag value is not a string
+     """
+     value = get_feature_flag(flag_name, default=default if default is not None else MISSING, **kwargs)
+     if not isinstance(value, str):
+         if default is not None:
+             logger.warning(f"Feature flag '{flag_name}' is not a string, using default")
+             return default
+         raise TypeError(f"Feature flag '{flag_name}' expected string, got {type(value).__name__}")
+     return value
+
+
+ def get_json_flag(flag_name: str, default: Optional[dict] = None, **kwargs) -> dict:
+     """
+     Get a JSON object feature flag.
+
+     Raises:
+         FeatureFlagError: If fetch fails and no default provided
+     """
+     value = get_feature_flag(flag_name, default=default if default is not None else MISSING, **kwargs)
+     return value
+
+
+ def clear_feature_flag_cache():
+     """Clear the feature flag cache."""
+     _flag_cache.clear()
+     logger.debug("Feature flag cache cleared")
lucidicai/session.py CHANGED
@@ -1,16 +1,7 @@
- import base64
- import io
- import logging
- from typing import List, Optional
-
- from PIL import Image
+ from typing import Optional

  from .errors import InvalidOperationError, LucidicNotInitializedError
- from .image_upload import get_presigned_url, upload_image_to_s3
- from .step import Step
- from .event import Event

- logger = logging.getLogger("Lucidic")

  class Session:
      def __init__(
@@ -21,9 +12,7 @@ class Session:
      ):
          self.agent_id = agent_id
          self.session_id = session_id
-         self.step_history = dict()
-         self._active_step: Optional[str] = None # Step ID, not Step object
-         self.event_history = dict()
+         self.event_history = [] # List[Event]
          self.latest_event = None
          self.is_finished = False
          self.is_successful = None
@@ -32,13 +21,6 @@ class Session:
          self.session_eval_reason = None
          self.has_gif = None

-     @property
-     def active_step(self) -> Optional[Step]:
-         """Get the active step object"""
-         if self._active_step and self._active_step in self.step_history:
-             return self.step_history[self._active_step]
-         return None
-
      def update_session(
          self,
          **kwargs
@@ -54,60 +36,16 @@ class Session:
              "session_eval_reason": Client().mask(kwargs.get("session_eval_reason", None)),
              "tags": kwargs.get("tags", None)
          }
-
-         # auto end any unfinished steps
-         if kwargs.get("is_finished", None) is True:
-             for step_id, step in self.step_history.items():
-                 if not step.is_finished:
-                     self.update_step(step_id=step_id, is_finished=True)
-
          Client().make_request('updatesession', 'PUT', request_data)

-     def create_step(self, **kwargs) -> str:
+     def create_event(self, type: str = "generic", **kwargs) -> str:
+         """Proxy to client.create_event bound to this session."""
          if not self.session_id:
              raise LucidicNotInitializedError()
-         step = Step(session_id=self.session_id, **kwargs)
-         self.step_history[step.step_id] = step
-         self._active_step = step.step_id
-         return step.step_id
-
-     def update_step(self, **kwargs) -> None:
-         if 'step_id' in kwargs and kwargs['step_id'] is not None:
-             if kwargs['step_id'] not in self.step_history:
-                 raise InvalidOperationError("Step ID not found in session history")
-             self.step_history[kwargs['step_id']].update_step(**kwargs)
-         else:
-             if not self._active_step:
-                 raise InvalidOperationError("No active step to update")
-             self.step_history[self._active_step].update_step(**kwargs)
-
-
-     def create_event(self, **kwargs):
-         # Get step_id from kwargs or active step
-         if 'step_id' in kwargs and kwargs['step_id'] is not None:
-             step_id = kwargs['step_id']
-         elif self._active_step:
-             step_id = self._active_step
-         else:
-             step_id = None
-         kwargs.pop('step_id', None)
-         event = Event(
-             session_id=self.session_id,
-             step_id=step_id,
-             **kwargs
-         )
-         self.event_history[event.event_id] = event
-         self._active_event = event
-         return event.event_id
-
-     def update_event(self, **kwargs):
-         if 'event_id' in kwargs and kwargs['event_id'] is not None:
-             if kwargs['event_id'] not in self.event_history:
-                 raise InvalidOperationError("Event ID not found in session history")
-             self.event_history[kwargs['event_id']].update_event(**kwargs)
-         else:
-             if not self._active_event:
-                 raise InvalidOperationError("No active event to update")
-             self._active_event.update_event(**kwargs)
+         from .client import Client
+         kwargs = dict(kwargs)
+         kwargs['session_id'] = self.session_id
+         event_id = Client().create_event(type=type, **kwargs)
+         return event_id


lucidicai/singleton.py CHANGED
@@ -1,28 +1,31 @@
+ import threading
  from .errors import LucidicNotInitializedError

  lai_inst = {}
+ _singleton_lock = threading.Lock()

  def singleton(class_):
      def getinstance(*args, **kwargs):
-
-         inst = lai_inst.get(class_)
-
-         # on first access -> no instance yet
-         if inst is None:
-             # no args/kwargs -> return a NullClient for Client
-             if class_.__name__ == 'Client' and not args and not kwargs:
-                 inst = NullClient()
-             else:
+         # Thread-safe singleton pattern
+         with _singleton_lock:
+             inst = lai_inst.get(class_)
+
+             # on first access -> no instance yet
+             if inst is None:
+                 # no args/kwargs -> return a NullClient for Client
+                 if class_.__name__ == 'Client' and not args and not kwargs:
+                     inst = NullClient()
+                 else:
+                     inst = class_(*args, **kwargs)
+                 lai_inst[class_] = inst
+                 return inst
+
+             # existing instance present
+             # if NullClient and now real init args are passed -> upgrade it
+             if isinstance(inst, NullClient) and (args or kwargs):
                  inst = class_(*args, **kwargs)
-             lai_inst[class_] = inst
+                 lai_inst[class_] = inst
              return inst
-
-         # existing instance present
-         # if NullClient and now real init args are passed -> upgrade it
-         if isinstance(inst, NullClient) and (args or kwargs):
-             inst = class_(*args, **kwargs)
-             lai_inst[class_] = inst
-             return inst

      return getinstance

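For context, a sketch of the calling pattern the thread-safe decorator above supports, based only on the logic shown (how `Client` is actually constructed elsewhere in the package may differ):

client = Client()                                    # no init args yet -> NullClient placeholder
client = Client(api_key="...", agent_id="...")       # first real args upgrade the placeholder to a real Client
client = Client()                                    # later bare calls return the same initialized instance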
lucidicai/streaming.py CHANGED
@@ -21,42 +21,10 @@ class StreamingResponseWrapper:
          self.accumulated_content = ""
          self.usage = None

-         # Create initial event
-         self._create_initial_event()
+         # We no longer create an initial event; emit a single immutable event on finalize

      def _create_initial_event(self):
-         """Create the initial event for streaming response"""
-         try:
-             # Check if event_id was already created
-             if '_event_id' in self.kwargs:
-                 self.event_id = self.kwargs['_event_id']
-                 logger.info(f"[Streaming] Using existing event ID: {self.event_id}")
-                 return
-
-             if Client().session:
-                 description, images = self._format_messages(self.kwargs.get('messages', ''))
-
-                 event_data = {
-                     'action': description,
-                     'cost': 0,
-                     'provider': 'OpenAI',
-                     'model': self.kwargs.get('model', 'unknown'),
-                     'tool': 'streaming',
-                     'raw_request': json.dumps({
-                         'model': self.kwargs.get('model'),
-                         'messages': self._serialize_messages(self.kwargs.get('messages', [])),
-                         'stream': True,
-                         **{k: v for k, v in self.kwargs.items() if k not in ['messages', 'model', 'stream', '_event_id']}
-                     })
-                 }
-
-                 if images:
-                     event_data['screenshots'] = images
-
-                 self.event_id = Client().session.create_event(**event_data)
-                 logger.debug(f"[Streaming] Created new streaming event with ID: {self.event_id}")
-         except Exception as e:
-             logger.error(f"[Streaming] Error creating initial streaming event: {str(e)}")
+         return

      def _format_messages(self, messages):
          """Format messages for description and extract images"""
@@ -195,10 +163,6 @@ class StreamingResponseWrapper:
          try:
              logger.info(f"[Streaming] Finalizing event {self.event_id}, accumulated content length: {len(self.accumulated_content)}")

-             if not self.event_id:
-                 logger.warning("[Streaming] No event_id to finalize")
-                 return
-
              if not self.session:
                  # Try to get session from client
                  try:
@@ -229,18 +193,19 @@ class StreamingResponseWrapper:
              model = self.kwargs.get('model', 'unknown')
              cost = self._calculate_cost(model, prompt_tokens, completion_tokens)

-             # Update event with safe defaults
-             update_data = {
-                 'result': self.accumulated_content if self.accumulated_content else "Stream completed (no content received)",
-                 'cost_added': cost if cost else 0.0,
-                 'is_finished': True,
-                 'model': model
-             }
-
-             update_data['event_id'] = self.event_id
-             logger.info(f"[Streaming] Updating event with: result='{update_data.get('result', '')[:50]}...', cost_added={update_data.get('cost_added', 0)}, is_finished=True")
-             self.session.update_event(**update_data)
-             logger.info(f"[Streaming] Successfully finalized streaming event {self.event_id}")
+             # Create single immutable event at end
+             result_text = self.accumulated_content if self.accumulated_content else "Stream completed (no content received)"
+             Client().create_event(
+                 type="llm_generation",
+                 model=model,
+                 messages=self.kwargs.get('messages', []),
+                 output=result_text,
+                 input_tokens=int(prompt_tokens),
+                 output_tokens=int(completion_tokens),
+                 cost=cost,
+                 duration=duration,
+             )
+             logger.info(f"[Streaming] Emitted immutable streaming event")

          except Exception as e:
              logger.error(f"[Streaming] Error finalizing event {self.event_id}: {str(e)}")
@@ -0,0 +1,65 @@
+ """
+ Context Capture Processor for OpenTelemetry spans.
+
+ This processor captures Lucidic context (session_id, parent_event_id) at span creation time
+ and stores it in span attributes. This ensures context is preserved even when spans are
+ processed asynchronously in different threads/contexts.
+
+ This fixes the nesting issue for ALL providers (OpenAI, Anthropic, LangChain, etc.)
+ """
+
+ import logging
+ from typing import Optional
+ from opentelemetry.sdk.trace import SpanProcessor, ReadableSpan
+ from opentelemetry.trace import Span
+ from opentelemetry import context as otel_context
+
+ logger = logging.getLogger("Lucidic")
+
+
+ class ContextCaptureProcessor(SpanProcessor):
+     """Captures Lucidic context at span creation and stores in attributes."""
+
+     def on_start(self, span: Span, parent_context: Optional[otel_context.Context] = None) -> None:
+         """Called when a span is started - capture context here."""
+         try:
+             # Import here to avoid circular imports
+             from lucidicai.context import current_session_id, current_parent_event_id
+
+             # Capture session ID from context
+             session_id = None
+             try:
+                 session_id = current_session_id.get(None)
+             except Exception:
+                 pass
+
+             # Capture parent event ID from context
+             parent_event_id = None
+             try:
+                 parent_event_id = current_parent_event_id.get(None)
+             except Exception:
+                 pass
+
+             # Store in span attributes for later retrieval
+             if session_id:
+                 span.set_attribute("lucidic.session_id", session_id)
+
+             if parent_event_id:
+                 span.set_attribute("lucidic.parent_event_id", parent_event_id)
+                 logger.debug(f"[ContextCapture] Captured parent_event_id {parent_event_id[:8]}... for span {span.name}")
+
+         except Exception as e:
+             # Never fail span creation due to context capture
+             logger.debug(f"[ContextCapture] Failed to capture context: {e}")
+
+     def on_end(self, span: ReadableSpan) -> None:
+         """Called when a span ends - no action needed."""
+         pass
+
+     def shutdown(self) -> None:
+         """Shutdown the processor."""
+         pass
+
+     def force_flush(self, timeout_millis: int = 30000) -> bool:
+         """Force flush - no buffering in this processor."""
+         return True
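The processor above only captures and tags spans; how it is wired into tracing is not part of this diff. A minimal registration sketch with the standard OpenTelemetry SDK follows (the import path for `ContextCaptureProcessor` is assumed, since the new file's name is not shown here; in practice the SDK presumably installs it during its own telemetry setup):

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider

from lucidicai.context_capture import ContextCaptureProcessor  # module path assumed, not shown in this diff

provider = TracerProvider()
# on_start stamps each span with lucidic.session_id / lucidic.parent_event_id attributes
provider.add_span_processor(ContextCaptureProcessor())
trace.set_tracer_provider(provider)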