lucidicai 1.3.1__tar.gz → 1.3.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. {lucidicai-1.3.1 → lucidicai-1.3.5}/PKG-INFO +1 -1
  2. {lucidicai-1.3.1 → lucidicai-1.3.5}/README.md +232 -2
  3. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/__init__.py +276 -16
  4. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/client.py +19 -1
  5. lucidicai-1.3.5/lucidicai/context.py +119 -0
  6. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/decorators.py +33 -15
  7. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/event.py +3 -1
  8. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/model_pricing.py +11 -0
  9. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/session.py +1 -1
  10. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/lucidic_exporter.py +16 -4
  11. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/lucidic_span_processor.py +67 -49
  12. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/otel_handlers.py +207 -59
  13. lucidicai-1.3.5/lucidicai/telemetry/otel_init.py +312 -0
  14. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/otel_provider.py +15 -5
  15. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/utils/universal_image_interceptor.py +89 -0
  16. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai.egg-info/PKG-INFO +1 -1
  17. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai.egg-info/SOURCES.txt +1 -0
  18. {lucidicai-1.3.1 → lucidicai-1.3.5}/setup.py +1 -1
  19. lucidicai-1.3.1/lucidicai/telemetry/otel_init.py +0 -200
  20. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/constants.py +0 -0
  21. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/errors.py +0 -0
  22. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/image_upload.py +0 -0
  23. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/lru.py +0 -0
  24. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/singleton.py +0 -0
  25. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/step.py +0 -0
  26. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/streaming.py +0 -0
  27. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/__init__.py +0 -0
  28. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/base_provider.py +0 -0
  29. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/litellm_bridge.py +0 -0
  30. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/openai_agents_instrumentor.py +0 -0
  31. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/opentelemetry_converter.py +0 -0
  32. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/pydantic_ai_handler.py +0 -0
  33. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/utils/__init__.py +0 -0
  34. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/utils/image_storage.py +0 -0
  35. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai/telemetry/utils/text_storage.py +0 -0
  36. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai.egg-info/dependency_links.txt +0 -0
  37. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai.egg-info/requires.txt +0 -0
  38. {lucidicai-1.3.1 → lucidicai-1.3.5}/lucidicai.egg-info/top_level.txt +0 -0
  39. {lucidicai-1.3.1 → lucidicai-1.3.5}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lucidicai
- Version: 1.3.1
+ Version: 1.3.5
  Summary: Lucidic AI Python SDK
  Author: Andy Liang
  Author-email: andy@lucidic.ai
@@ -5,7 +5,7 @@ The official Python SDK for [Lucidic AI](https://lucidic.ai), providing comprehe
  ## Features
 
  - **Session & Step Tracking** - Track complex AI agent workflows with hierarchical session management
- - **Multi-Provider Support** - Automatic instrumentation for OpenAI, Anthropic, LangChain, and more
+ - **Multi-Provider Support** - Automatic instrumentation for OpenAI, Anthropic, LangChain, Google Generative AI (Gemini), Vertex AI, AWS Bedrock, Cohere, Groq, and more
  - **Real-time Analytics** - Monitor costs, performance, and behavior of your AI applications
  - **Data Privacy** - Built-in masking functions to protect sensitive information
  - **Screenshot Support** - Capture and analyze visual context in your AI workflows
@@ -49,6 +49,21 @@ lai.end_step()
  lai.end_session(is_successful=True)
  ```
 
+ ### Quick Start (context manager)
+
+ ```python
+ import lucidicai as lai
+ from openai import OpenAI
+
+ # All-in-one lifecycle: init → bind → run → auto-end at context exit
+ with lai.session(session_name="My AI Assistant", providers=["openai"]):
+     client = OpenAI()
+     response = client.chat.completions.create(
+         model="gpt-5",
+         messages=[{"role": "user", "content": "Hello, how are you?"}]
+     )
+ ```
+
  ## Configuration
 
  ### Environment Variables
@@ -67,7 +82,7 @@ lai.init(
      session_name="My Session", # Required: Name for this session
      api_key="...", # Optional: Override env var
      agent_id="...", # Optional: Override env var
-     providers=["openai", "anthropic"], # Optional: LLM providers to track
+     providers=["openai", "anthropic", "google", "vertexai", "bedrock", "cohere", "groq"], # Optional: LLM providers to track
      task="Process customer request", # Optional: High-level task description
      production_monitoring=False, # Optional: Production mode flag
      auto_end=True, # Optional: Auto-end session on exit (default: True)
@@ -100,6 +115,109 @@ lai.update_session(
  lai.end_session(is_successful=True, session_eval=0.9)
  ```
 
+ ### Session Context (async-safe)
+
+ Lucidic uses Python contextvars to bind a session to the current execution context (threads/async tasks). This guarantees spans from concurrent requests are attributed to the correct session.
+
+ There are three recommended patterns:
+
+ 1) Full lifecycle (auto-end on exit)
+
+ ```python
+ import lucidicai as lai
+ from openai import OpenAI
+
+ with lai.session(session_name="order-flow", providers=["openai"]):
+     OpenAI().chat.completions.create(
+         model="gpt-5",
+         messages=[{"role":"user","content":"Place order"}]
+     )
+ # Session automatically ends at context exit.
+ # Note: any auto_end argument is ignored inside session(...).
+ ```
+
+ Async variant:
+
+ ```python
+ import lucidicai as lai
+ from openai import AsyncOpenAI
+ import asyncio
+
+ async def main():
+     async with lai.session_async(session_name="async-flow", providers=["openai"]):
+         await AsyncOpenAI().chat.completions.create(
+             model="gpt-5",
+             messages=[{"role":"user","content":"Hello"}]
+         )
+
+ asyncio.run(main())
+ ```
+
+ 2) Bind-only (does NOT end the session)
+
+ ```python
+ import lucidicai as lai
+ from openai import OpenAI
+
+ sid = lai.init(session_name="request-123", providers=["openai"], auto_end=False)
+ with lai.bind_session(sid):
+     OpenAI().chat.completions.create(
+         model="gpt-5",
+         messages=[{"role":"user","content":"..."}]
+     )
+ # Session remains open. End explicitly when ready:
+ lai.end_session()
+ ```
+
+ Async variant:
+
+ ```python
+ sid = lai.init(session_name="request-async", providers=["openai"], auto_end=False)
+
+ async def run():
+     async with lai.bind_session_async(sid):
+         await AsyncOpenAI().chat.completions.create(
+             model="gpt-5",
+             messages=[{"role":"user","content":"..."}]
+         )
+
+ asyncio.run(run())
+ # End later
+ lai.end_session()
+ ```
+
+ 3) Fully manual
+
+ ```python
+ sid = lai.init(session_name="manual", providers=["openai"], auto_end=True)
+ lai.set_active_session(sid)
+ # ... your workflow ...
+ lai.clear_active_session()
+ # End now, or rely on auto_end at process exit
+ lai.end_session()
+ ```
+
+ Function wrappers are also provided:
+
+ ```python
+ def do_work():
+     from openai import OpenAI
+     return OpenAI().chat.completions.create(model="gpt-5", messages=[{"role":"user","content":"wrapped"}])
+
+ # Full lifecycle in one call
+ result = lai.run_session(do_work, init_params={"session_name":"wrapped","providers":["openai"]})
+
+ # Bind-only wrapper
+ sid = lai.init(session_name="bound-only", providers=["openai"], auto_end=False)
+ result = lai.run_in_session(sid, do_work)
+ lai.end_session()
+ ```
+
+ Notes:
+ - The context managers are safe for threads and asyncio tasks.
+ - `session(...)` always ends the session at context exit (ignores any provided auto_end).
+ - Existing single-threaded usage (plain `init` + provider calls) remains supported.
+
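The async-safety guarantee described in this new README section is easiest to see with genuinely concurrent sessions. Below is a minimal illustrative sketch, not part of the package README, that runs several sessions side by side with `session_async(...)`; the session names and prompts are placeholders, and it assumes several sessions may be open at once (which the bind-only pattern above implies).

```python
import asyncio

import lucidicai as lai
from openai import AsyncOpenAI

async def handle_request(name: str) -> None:
    # Each task opens its own session; the contextvars-based binding is local
    # to this task, so spans from concurrent requests stay in their own session.
    async with lai.session_async(session_name=name, providers=["openai"]):
        await AsyncOpenAI().chat.completions.create(
            model="gpt-5",
            messages=[{"role": "user", "content": f"work for {name}"}],
        )

async def main() -> None:
    # Three requests handled concurrently, each attributed to its own session.
    await asyncio.gather(*(handle_request(f"request-{i}") for i in range(3)))

asyncio.run(main())
```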
  ### Automatic Session Management (auto_end)
 
  By default, Lucidic automatically ends your session when your process exits, ensuring no data is lost. This feature is enabled by default but can be controlled:
@@ -118,6 +236,8 @@ The auto_end feature:
  - Prevents data loss from forgotten `end_session()` calls
  - Can be disabled for cases where you need explicit control
 
+ When using `session(...)` or `session_async(...)`, the session will end at context exit regardless of the `auto_end` setting. A debug warning is logged if `auto_end` is provided in that context.
+
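That interaction can be observed directly. The sketch below is illustrative only (not from the package README): it enables debug logging so the SDK's warning about a supplied `auto_end` becomes visible, and the session still ends when the block exits; the session name is a placeholder.

```python
import logging

import lucidicai as lai

# Surface the SDK's debug-level messages (the exact warning text is the SDK's own).
logging.basicConfig(level=logging.DEBUG)

# auto_end is redundant here: session(...) ends the session at context exit
# anyway, and per the note above a debug warning is logged because it was supplied.
with lai.session(session_name="auto-end-demo", providers=["openai"], auto_end=False):
    pass  # ... your workflow ...
```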
  ### Steps
  Steps break down complex workflows into discrete, trackable units.
 
@@ -199,6 +319,65 @@ llm = ChatOpenAI(model="gpt-4")
  response = llm.invoke([HumanMessage(content="Hello!")])
  ```
 
+ ### Google Generative AI (Gemini)
+ ```python
+ import google.generativeai as genai
+
+ lai.init(session_name="Gemini Example", providers=["google"]) # or "google_generativeai"
+ genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
+
+ model = genai.GenerativeModel("gemini-1.5-flash")
+ resp = model.generate_content("Write a haiku about clouds")
+ ```
+
+ ### Vertex AI
+ ```python
+ from google.cloud import aiplatform
+ from vertexai.generative_models import GenerativeModel
+
+ lai.init(session_name="Vertex Example", providers=["vertexai"]) # or "vertex_ai"
+ aiplatform.init(project=os.getenv("GCP_PROJECT"), location=os.getenv("GCP_REGION", "us-central1"))
+
+ model = GenerativeModel("gemini-1.5-flash")
+ resp = model.generate_content("Say hello")
+ ```
+
+ ### AWS Bedrock
+ ```python
+ import boto3
+
+ lai.init(session_name="Bedrock Example", providers=["bedrock"]) # or "aws_bedrock", "amazon_bedrock"
+ client = boto3.client("bedrock-runtime", region_name=os.getenv("AWS_REGION", "us-east-1"))
+
+ resp = client.invoke_model(
+     modelId=os.getenv("BEDROCK_MODEL_ID", "amazon.nova-lite-v1:0"),
+     body=b'{"inputText": "Hello from Bedrock"}',
+     contentType="application/json",
+     accept="application/json",
+ )
+ ```
+
+ ### Cohere
+ ```python
+ import cohere
+
+ lai.init(session_name="Cohere Example", providers=["cohere"])
+ co = cohere.ClientV2(api_key=os.getenv("COHERE_API_KEY"))
+ resp = co.chat(model="command-r", messages=[{"role":"user","content":"Hello"}])
+ ```
+
+ ### Groq
+ ```python
+ from groq import Groq
+
+ lai.init(session_name="Groq Example", providers=["groq"])
+ client = Groq(api_key=os.getenv("GROQ_API_KEY"))
+ resp = client.chat.completions.create(
+     model="llama-3.1-8b-instant",
+     messages=[{"role":"user","content":"Hello from Groq"}],
+ )
+ ```
+
  ## Advanced Features
 
  ### Decorators
@@ -399,6 +578,57 @@ except LucidicNotInitializedError:
      print("SDK not initialized - call lai.init() first")
  ```
 
+ ## Crash events on uncaught exceptions
+
+ When the SDK is initialized, Lucidic will capture uncaught exceptions and create a final crash event before the process exits. This is enabled by default and requires no additional configuration.
+
+ ### Behavior
+
+ - On an uncaught exception (main thread):
+   - A Lucidic event is created and linked to the active session.
+   - The event description contains the full Python traceback. If a `masking_function` was provided to `lai.init()`, it is applied; long descriptions are truncated to ~16K characters.
+   - The event result is set to: "process exited with code 1".
+   - The session is ended as unsuccessful with reason `uncaughtException` (independent of `auto_end`).
+   - The telemetry provider is best-effort flushed and shut down.
+   - Python’s default exit behavior is preserved (exit code 1 and default exception printing).
+
+ - On signals (`SIGINT`, `SIGTERM`):
+   - A final event is created with a description that includes the signal name and a best-effort stack snapshot.
+   - The event result is set to: `"process exited with code <128+signum>"` (e.g., 130 for SIGINT, 143 for SIGTERM).
+   - Existing auto-end and telemetry cleanup run, and default signal semantics are preserved.
+
+ ### Configuration
+
+ - Enabled by default after `lai.init(...)`. To opt out:
+
+ ```python
+ import lucidicai as lai
+
+ lai.init(
+     session_name="my-session",
+     capture_uncaught=False, # disables crash event capture
+ )
+ ```
+
+ This behavior is independent of `auto_end`; even when `auto_end` is `False`, the SDK will end the session as unsuccessful in this fatal path.
+
+ ### Caveats and lifecycle notes
+
+ - Multiple handlers and ordering:
+   - If other libraries register their own handlers, ordering can affect which path runs first. Lucidic guards against duplication, but if another handler exits the process earlier, the crash event may not complete.
+
+ - Main-thread semantics:
+   - Only uncaught exceptions on the main thread are treated as process-ending. Exceptions in worker threads do not exit the process by default and are not recorded as crash events by this mechanism.
+
+ - Best-effort transport:
+   - Network issues or abrupt termination (e.g., forced container kill, `os._exit`) can prevent event delivery despite best efforts.
+
+ - Exit semantics:
+   - We do not call `sys.exit(1)` from the handler; Python already exits with code 1 for uncaught exceptions, and default printing is preserved by chaining to the original `sys.excepthook`.
+
+ - Not intercepted:
+   - `SystemExit` raised explicitly (e.g., `sys.exit(...)`) and `os._exit(...)` calls are not treated as uncaught exceptions and will not produce a crash event.
+
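To make the hook chaining and the 128+signum exit-code convention described in this new README section concrete, here is a generic, illustrative sketch built only on Python's standard `sys.excepthook` and `signal` machinery. It is not the SDK's implementation; `record_crash_event` is a hypothetical stand-in for whatever reporting call an integration would make.

```python
import signal
import sys
import traceback

_original_excepthook = sys.excepthook

def record_crash_event(description: str, result: str) -> None:
    # Hypothetical reporting hook; a real integration would create an event
    # and end the session here instead of printing to stderr.
    print(f"[crash-event] result={result}\n{description}", file=sys.stderr)

def _excepthook(exc_type, exc, tb):
    # Uncaught exception on the main thread: Python will exit with code 1.
    description = "".join(traceback.format_exception(exc_type, exc, tb))
    record_crash_event(description, "process exited with code 1")
    _original_excepthook(exc_type, exc, tb)  # preserve default traceback printing

def _signal_handler(signum, frame):
    # Conventional exit code for a fatal signal is 128 + signal number,
    # e.g. 130 for SIGINT and 143 for SIGTERM.
    stack = "".join(traceback.format_stack(frame))
    record_crash_event(
        f"signal {signal.Signals(signum).name}\n{stack}",
        f"process exited with code {128 + signum}",
    )
    signal.signal(signum, signal.SIG_DFL)  # restore default semantics
    signal.raise_signal(signum)            # re-raise so the default applies

sys.excepthook = _excepthook
for _sig in (signal.SIGINT, signal.SIGTERM):
    signal.signal(_sig, _signal_handler)
```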
  ## Best Practices
 
  1. **Initialize Once**: Call `lai.init()` at the start of your application or workflow