lucidicai-1.2.15-py3-none-any.whl → lucidicai-1.2.17-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. lucidicai/__init__.py +111 -21
  2. lucidicai/client.py +22 -5
  3. lucidicai/decorators.py +357 -0
  4. lucidicai/event.py +2 -2
  5. lucidicai/image_upload.py +24 -1
  6. lucidicai/providers/anthropic_handler.py +0 -7
  7. lucidicai/providers/image_storage.py +45 -0
  8. lucidicai/providers/langchain.py +0 -78
  9. lucidicai/providers/lucidic_exporter.py +259 -0
  10. lucidicai/providers/lucidic_span_processor.py +648 -0
  11. lucidicai/providers/openai_agents_instrumentor.py +307 -0
  12. lucidicai/providers/openai_handler.py +1 -56
  13. lucidicai/providers/otel_handlers.py +266 -0
  14. lucidicai/providers/otel_init.py +197 -0
  15. lucidicai/providers/otel_provider.py +168 -0
  16. lucidicai/providers/pydantic_ai_handler.py +2 -19
  17. lucidicai/providers/text_storage.py +53 -0
  18. lucidicai/providers/universal_image_interceptor.py +276 -0
  19. lucidicai/session.py +17 -4
  20. lucidicai/step.py +4 -4
  21. lucidicai/streaming.py +2 -3
  22. lucidicai/telemetry/__init__.py +0 -0
  23. lucidicai/telemetry/base_provider.py +21 -0
  24. lucidicai/telemetry/lucidic_exporter.py +259 -0
  25. lucidicai/telemetry/lucidic_span_processor.py +665 -0
  26. lucidicai/telemetry/openai_agents_instrumentor.py +306 -0
  27. lucidicai/telemetry/opentelemetry_converter.py +436 -0
  28. lucidicai/telemetry/otel_handlers.py +266 -0
  29. lucidicai/telemetry/otel_init.py +197 -0
  30. lucidicai/telemetry/otel_provider.py +168 -0
  31. lucidicai/telemetry/pydantic_ai_handler.py +600 -0
  32. lucidicai/telemetry/utils/__init__.py +0 -0
  33. lucidicai/telemetry/utils/image_storage.py +45 -0
  34. lucidicai/telemetry/utils/text_storage.py +53 -0
  35. lucidicai/telemetry/utils/universal_image_interceptor.py +276 -0
  36. {lucidicai-1.2.15.dist-info → lucidicai-1.2.17.dist-info}/METADATA +1 -1
  37. lucidicai-1.2.17.dist-info/RECORD +49 -0
  38. lucidicai-1.2.15.dist-info/RECORD +0 -25
  39. {lucidicai-1.2.15.dist-info → lucidicai-1.2.17.dist-info}/WHEEL +0 -0
  40. {lucidicai-1.2.15.dist-info → lucidicai-1.2.17.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,197 @@
+ """OpenTelemetry initialization and configuration for Lucidic"""
+ import logging
+ from typing import List, Optional
+
+ from opentelemetry import trace
+ from opentelemetry.sdk.trace import TracerProvider
+ from opentelemetry.sdk.resources import Resource
+ from opentelemetry.instrumentation.openai import OpenAIInstrumentor
+ from opentelemetry.instrumentation.anthropic import AnthropicInstrumentor
+ from opentelemetry.instrumentation.langchain import LangchainInstrumentor
+
+ from .lucidic_span_processor import LucidicSpanProcessor
+ from .otel_provider import OpenTelemetryProvider
+ from lucidicai.client import Client
+
+ logger = logging.getLogger("Lucidic")
+
+
+ class LucidicTelemetry:
+     """Manages OpenTelemetry initialization for Lucidic"""
+
+     _instance = None
+     _initialized = False
+
+     def __new__(cls):
+         if cls._instance is None:
+             cls._instance = super().__new__(cls)
+         return cls._instance
+
+     def __init__(self):
+         if not self._initialized:
+             self.tracer_provider = None
+             self.span_processor = None
+             self.instrumentors = {}
+             self.provider = OpenTelemetryProvider()
+             self._initialized = True
+
+     def initialize(self, agent_id: str, service_name: str = "lucidic-ai") -> None:
+         """Initialize OpenTelemetry with Lucidic configuration"""
+         if self.tracer_provider:
+             logger.debug("OpenTelemetry already initialized")
+             return
+
+         try:
+             # Create resource
+             resource = Resource.create({
+                 "service.name": service_name,
+                 "service.version": "1.0.0",
+                 "lucidic.agent_id": agent_id,
+             })
+
+             # Create tracer provider
+             self.tracer_provider = TracerProvider(resource=resource)
+
+             # Add our custom span processor for real-time event handling
+             self.span_processor = LucidicSpanProcessor()
+             self.tracer_provider.add_span_processor(self.span_processor)
+
+             # Set as global provider
+             trace.set_tracer_provider(self.tracer_provider)
+
+             logger.info("[LucidicTelemetry] OpenTelemetry initialized")
+
+         except Exception as e:
+             logger.error(f"Failed to initialize OpenTelemetry: {e}")
+             raise
+
+     def instrument_providers(self, providers: List[str]) -> None:
+         """Instrument specified providers"""
+         for provider in providers:
+             try:
+                 if provider == "openai" and provider not in self.instrumentors:
+                     self._instrument_openai()
+                 elif provider == "anthropic" and provider not in self.instrumentors:
+                     self._instrument_anthropic()
+                 elif provider == "langchain" and provider not in self.instrumentors:
+                     self._instrument_langchain()
+                 elif provider == "pydantic_ai":
+                     # Custom instrumentation needed
+                     logger.info(f"[LucidicTelemetry] Pydantic AI will use manual instrumentation")
+                 elif provider == "openai_agents":
+                     # OpenAI Agents uses the same OpenAI instrumentation
+                     self._instrument_openai_agents()
+             except Exception as e:
+                 logger.error(f"Failed to instrument {provider}: {e}")
+
+     def _instrument_openai(self) -> None:
+         """Instrument OpenAI"""
+         try:
+             # Get client for masking function
+             client = Client()
+
+             # Configure instrumentation
+             instrumentor = OpenAIInstrumentor()
+
+             # Create a custom callback for getting attributes
+             def get_custom_attributes():
+                 attrs = {}
+
+                 # Add step context if available
+                 if client.session and client.session.active_step:
+                     attrs["lucidic.step_id"] = client.session.active_step.step_id
+
+                 return attrs
+
+             instrumentor.instrument(
+                 tracer_provider=self.tracer_provider,
+                 enrich_token_usage=True,
+                 exception_logger=lambda e: logger.error(f"OpenAI error: {e}"),
+                 get_common_metrics_attributes=get_custom_attributes,
+                 enable_trace_context_propagation=True,
+                 use_legacy_attributes=True  # Force legacy attributes mode for now
+             )
+
+             self.instrumentors["openai"] = instrumentor
+             logger.info("[LucidicTelemetry] Instrumented OpenAI")
+
+         except Exception as e:
+             logger.error(f"Failed to instrument OpenAI: {e}")
+             raise
+
+     def _instrument_anthropic(self) -> None:
+         """Instrument Anthropic"""
+         try:
+             instrumentor = AnthropicInstrumentor()
+
+             # Get client for context
+             client = Client()
+
+             def get_custom_attributes():
+                 attrs = {}
+                 if client.session and client.session.active_step:
+                     attrs["lucidic.step_id"] = client.session.active_step.step_id
+                 return attrs
+
+             instrumentor.instrument(
+                 tracer_provider=self.tracer_provider,
+                 exception_logger=lambda e: logger.error(f"Anthropic error: {e}"),
+                 get_common_metrics_attributes=get_custom_attributes,
+                 use_legacy_attributes=True  # Force legacy attributes mode
+             )
+
+             self.instrumentors["anthropic"] = instrumentor
+             logger.info("[LucidicTelemetry] Instrumented Anthropic")
+
+         except Exception as e:
+             logger.error(f"Failed to instrument Anthropic: {e}")
+             raise
+
+     def _instrument_langchain(self) -> None:
+         """Instrument LangChain"""
+         try:
+             instrumentor = LangchainInstrumentor()
+             instrumentor.instrument(tracer_provider=self.tracer_provider)
+
+             self.instrumentors["langchain"] = instrumentor
+             logger.info("[LucidicTelemetry] Instrumented LangChain")
+
+         except Exception as e:
+             logger.error(f"Failed to instrument LangChain: {e}")
+             raise
+
+     def _instrument_openai_agents(self) -> None:
+         """Instrument OpenAI Agents SDK"""
+         try:
+             from .openai_agents_instrumentor import OpenAIAgentsInstrumentor
+
+             instrumentor = OpenAIAgentsInstrumentor(tracer_provider=self.tracer_provider)
+             instrumentor.instrument()
+
+             self.instrumentors["openai_agents"] = instrumentor
+             logger.info("[LucidicTelemetry] Instrumented OpenAI Agents SDK")
+
+         except Exception as e:
+             logger.error(f"Failed to instrument OpenAI Agents SDK: {e}")
+             raise
+
+     def uninstrument_all(self) -> None:
+         """Uninstrument all providers"""
+         for name, instrumentor in self.instrumentors.items():
+             try:
+                 instrumentor.uninstrument()
+                 logger.info(f"[LucidicTelemetry] Uninstrumented {name}")
+             except Exception as e:
+                 logger.error(f"Failed to uninstrument {name}: {e}")
+
+         self.instrumentors.clear()
+
+         # Shutdown tracer provider
+         if self.tracer_provider:
+             self.tracer_provider.shutdown()
+             self.tracer_provider = None
+             self.span_processor = None
+
+     def is_initialized(self) -> bool:
+         """Check if telemetry is initialized"""
+         return self.tracer_provider is not None
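For context, a minimal sketch of how this new singleton might be driven from setup code. The import path is an assumption (the wheel ships the same module under both lucidicai/providers/ and lucidicai/telemetry/); the method names come from the hunk above.

    # Hypothetical wiring, not part of the diff: shows the intended call order.
    from lucidicai.telemetry.otel_init import LucidicTelemetry  # path assumed; providers/otel_init.py has a copy

    telemetry = LucidicTelemetry()              # singleton: repeated construction returns the same instance
    telemetry.initialize(agent_id="agent-123")  # sets up TracerProvider + LucidicSpanProcessor once
    telemetry.instrument_providers(["openai", "anthropic", "langchain"])

    # ... run LLM calls; spans are handled in real time by LucidicSpanProcessor ...

    telemetry.uninstrument_all()                # removes instrumentation and shuts the tracer provider down
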
@@ -0,0 +1,168 @@
+ """OpenTelemetry-based provider implementation"""
+ import logging
+ from typing import Optional, List, Dict, Any
+ from contextlib import contextmanager
+
+ from opentelemetry import trace, context
+ from opentelemetry.trace import Tracer, Span
+ from opentelemetry.sdk.trace import TracerProvider
+ from opentelemetry.sdk.trace.export import BatchSpanProcessor
+ from opentelemetry.sdk.resources import Resource
+ from opentelemetry.instrumentation.openai import OpenAIInstrumentor
+ from opentelemetry.instrumentation.anthropic import AnthropicInstrumentor
+ from opentelemetry.instrumentation.langchain import LangchainInstrumentor
+ from opentelemetry.semconv_ai import SpanAttributes
+
+ from .lucidic_exporter import LucidicSpanExporter
+ from .base_providers import BaseProvider
+ from lucidicai.client import Client
+
+ logger = logging.getLogger("Lucidic")
+
+
+ class OpenTelemetryProvider(BaseProvider):
+     """Provider that uses OpenTelemetry instrumentations instead of monkey-patching"""
+
+     def __init__(self):
+         super().__init__()
+         self._provider_name = "OpenTelemetry"
+         self.tracer_provider = None
+         self.tracer = None
+         self.instrumentors = {}
+         self._active_spans = {}
+
+     def initialize_telemetry(self, service_name: str = "lucidic-ai", agent_id: str = None) -> None:
+         """Initialize OpenTelemetry with Lucidic exporter"""
+         # Create resource with service info
+         resource = Resource.create({
+             "service.name": service_name,
+             "service.version": "1.0.0",
+             "lucidic.agent_id": agent_id or ""
+         })
+
+         # Create tracer provider
+         self.tracer_provider = TracerProvider(resource=resource)
+
+         # Add our custom exporter
+         lucidic_exporter = LucidicSpanExporter()
+         span_processor = BatchSpanProcessor(lucidic_exporter)
+         self.tracer_provider.add_span_processor(span_processor)
+
+         # Set as global provider
+         trace.set_tracer_provider(self.tracer_provider)
+
+         # Get tracer
+         self.tracer = trace.get_tracer(__name__)
+
+     def handle_response(self, response, kwargs, session: Optional = None):
+         """Handle responses - not needed with OTEL approach"""
+         return response
+
+     def override(self):
+         """Initialize OpenTelemetry instrumentations"""
+         try:
+             client = Client()
+
+             # Initialize telemetry if not already done
+             if not self.tracer_provider:
+                 self.initialize_telemetry(agent_id=client.agent_id)
+
+             # No actual override needed - instrumentations will be enabled separately
+             logger.info("[OpenTelemetry Provider] Initialized")
+
+         except Exception as e:
+             logger.error(f"Failed to initialize OpenTelemetry: {e}")
+             raise
+
+     def undo_override(self):
+         """Uninstrument all providers"""
+         for name, instrumentor in self.instrumentors.items():
+             try:
+                 instrumentor.uninstrument()
+                 logger.info(f"[OpenTelemetry Provider] Uninstrumented {name}")
+             except Exception as e:
+                 logger.error(f"Failed to uninstrument {name}: {e}")
+
+         self.instrumentors.clear()
+
+         # Shutdown tracer provider
+         if self.tracer_provider:
+             self.tracer_provider.shutdown()
+
+     def instrument_openai(self) -> None:
+         """Instrument OpenAI with OpenLLMetry"""
+         if "openai" not in self.instrumentors:
+             try:
+                 instrumentor = OpenAIInstrumentor()
+                 instrumentor.instrument(
+                     tracer_provider=self.tracer_provider,
+                     enrich_token_usage=True,
+                     exception_logger=lambda e: logger.error(f"OpenAI error: {e}")
+                 )
+                 self.instrumentors["openai"] = instrumentor
+                 logger.info("[OpenTelemetry Provider] Instrumented OpenAI")
+             except Exception as e:
+                 logger.error(f"Failed to instrument OpenAI: {e}")
+
+     def instrument_anthropic(self) -> None:
+         """Instrument Anthropic with OpenLLMetry"""
+         if "anthropic" not in self.instrumentors:
+             try:
+                 instrumentor = AnthropicInstrumentor()
+                 instrumentor.instrument(
+                     tracer_provider=self.tracer_provider,
+                     exception_logger=lambda e: logger.error(f"Anthropic error: {e}")
+                 )
+                 self.instrumentors["anthropic"] = instrumentor
+                 logger.info("[OpenTelemetry Provider] Instrumented Anthropic")
+             except Exception as e:
+                 logger.error(f"Failed to instrument Anthropic: {e}")
+
+     def instrument_langchain(self) -> None:
+         """Instrument LangChain with OpenLLMetry"""
+         if "langchain" not in self.instrumentors:
+             try:
+                 instrumentor = LangchainInstrumentor()
+                 instrumentor.instrument(tracer_provider=self.tracer_provider)
+                 self.instrumentors["langchain"] = instrumentor
+                 logger.info("[OpenTelemetry Provider] Instrumented LangChain")
+             except Exception as e:
+                 logger.error(f"Failed to instrument LangChain: {e}")
+
+     def instrument_pydantic_ai(self) -> None:
+         """Instrument Pydantic AI"""
+         # Note: OpenLLMetry doesn't have a Pydantic AI instrumentation yet
+         # We'll need to create custom instrumentation or use manual spans
+         logger.info("[OpenTelemetry Provider] Pydantic AI instrumentation not yet available in OpenLLMetry")
+
+     @contextmanager
+     def trace_step(self, step_id: str, state: str = None, action: str = None, goal: str = None):
+         """Context manager to associate spans with a specific step"""
+         span = self.tracer.start_span(
+             name=f"step.{step_id}",
+             attributes={
+                 "lucidic.step_id": step_id,
+                 "lucidic.step.state": state or "",
+                 "lucidic.step.action": action or "",
+                 "lucidic.step.goal": goal or ""
+             }
+         )
+
+         token = context.attach(trace.set_span_in_context(span))
+         try:
+             yield span
+         finally:
+             context.detach(token)
+             span.end()
+
+     def add_image_to_span(self, image_data: str, image_type: str = "screenshot") -> None:
+         """Add image data to current span"""
+         current_span = trace.get_current_span()
+         if current_span and current_span.is_recording():
+             current_span.set_attribute(f"lucidic.image.{image_type}", image_data)
+
+     def set_step_context(self, step_id: str) -> None:
+         """Set step ID in current span context"""
+         current_span = trace.get_current_span()
+         if current_span and current_span.is_recording():
+             current_span.set_attribute("lucidic.step_id", step_id)
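As a reading aid, a short sketch of how the trace_step context manager above could tie provider spans to a Lucidic step. Only trace_step, initialize_telemetry, instrument_openai, and add_image_to_span come from the hunk; the import path and argument values are assumptions.

    # Illustrative sketch, assuming the provider has been wired into the SDK elsewhere.
    from lucidicai.telemetry.otel_provider import OpenTelemetryProvider  # path assumed

    provider = OpenTelemetryProvider()
    provider.initialize_telemetry(agent_id="agent-123")  # Lucidic exporter + BatchSpanProcessor
    provider.instrument_openai()                         # enable OpenLLMetry's OpenAI instrumentation

    with provider.trace_step(step_id="step-42", goal="summarize the report"):
        # Instrumented LLM calls made in this block become children of the step span,
        # because trace_step attaches the span to the current OpenTelemetry context.
        provider.add_image_to_span("data:image/png;base64,<omitted>")  # optional multimodal attachment
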
@@ -1,7 +1,7 @@
  """Pydantic AI provider handler for the Lucidic API"""
  from typing import Any, Dict, Optional

- from .base_providers import BaseProvider
+ from .legacy.base_providers import BaseProvider
  from lucidicai.client import Client
  from lucidicai.model_pricing import calculate_cost
  from lucidicai.singleton import singleton
@@ -381,11 +381,6 @@ class PydanticAIHandler(BaseProvider):

      def _wrap_request(self, model_instance, messages, model_settings, model_request_parameters, original_method):
          """Wrap regular request method to track LLM calls"""
-         # Create event before API call
-         step = Client().session.active_step
-         if step is None:
-             return original_method(model_instance, messages, model_settings, model_request_parameters)
-
          description = self._format_messages(messages)
          event_id = Client().session.create_event(
              description=description,
@@ -412,13 +407,6 @@ class PydanticAIHandler(BaseProvider):

      def _wrap_request_stream_context_manager(self, model_instance, messages, model_settings, model_request_parameters, original_method):
          """Return an async context manager for streaming requests"""
-         # Create event before API call
-         event_id = None
-         step = Client().session.active_step
-
-         if step is None:
-             return original_method(model_instance, messages, model_settings, model_request_parameters)
-
          description = self._format_messages(messages)
          event_id = Client().session.create_event(
              description=description,
@@ -466,13 +454,8 @@ class PydanticAIHandler(BaseProvider):

      async def _wrap_request_stream(self, model_instance, messages, model_settings, model_request_parameters, original_method):
          """Wrap streaming request method"""
-         # Create event before API call
-         step = Client().session.active_step
-         if step is None:
-             return original_method(model_instance, messages, model_settings, model_request_parameters)
-
          description = self._format_messages(messages)
-         event = step.create_event(
+         event = Client().session.create_event(
              description=description,
              result="Streaming response..."
          )
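The net effect of these hunks: PydanticAIHandler no longer returns early when no step is active, and events are created on the session rather than on the step. A minimal sketch of the new call path follows; the session API is taken from the added lines, while the description string is illustrative.

    # Sketch of the behavior change, not SDK documentation.
    from lucidicai.client import Client

    # Before 1.2.17: the wrapper bailed out unless Client().session.active_step was set.
    # After 1.2.17: the event is always recorded on the session, step or no step.
    event_id = Client().session.create_event(
        description="formatted prompt messages",   # built by _format_messages in the handler
        result="Streaming response...",
    )
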
@@ -0,0 +1,53 @@
+ """Thread-local storage for text content from multimodal messages"""
+ import threading
+ from typing import List, Optional
+ import logging
+
+ logger = logging.getLogger("Lucidic")
+
+ # Thread-local storage for text content
+ _text_storage = threading.local()
+
+ def store_text(text: str, message_index: int = 0) -> None:
+     """Store text content for a message
+
+     Args:
+         text: The text content to store
+         message_index: The index of the message (default 0)
+     """
+     if not hasattr(_text_storage, 'texts'):
+         _text_storage.texts = {}
+
+     _text_storage.texts[message_index] = text
+     logger.debug(f"[TextStorage] Stored text for message {message_index}: {text[:50]}...")
+
+ def get_stored_text(message_index: int = 0) -> Optional[str]:
+     """Get stored text for a message
+
+     Args:
+         message_index: The index of the message (default 0)
+
+     Returns:
+         The stored text or None if not found
+     """
+     if not hasattr(_text_storage, 'texts'):
+         return None
+
+     return _text_storage.texts.get(message_index)
+
+ def get_all_stored_texts() -> dict:
+     """Get all stored texts
+
+     Returns:
+         Dictionary of message_index -> text
+     """
+     if not hasattr(_text_storage, 'texts'):
+         return {}
+
+     return _text_storage.texts.copy()
+
+ def clear_stored_texts() -> None:
+     """Clear all stored texts"""
+     if hasattr(_text_storage, 'texts'):
+         _text_storage.texts = {}
+ logger.debug("[TextStorage] Cleared all stored texts")