observa-sdk 0.0.21 → 0.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -41,6 +41,8 @@ const observa = init({
41
41
 
42
42
  ## Quick Start
43
43
 
44
+ > **📊 Automatic Feedback Tracking**: When using `observeVercelAI()`, all responses automatically include feedback helpers (`result.observa.like()` and `result.observa.dislike()`). No additional setup needed - just add UI buttons! See the [Feedback](#collecting-user-feedback-likedislike) section below.
45
+
44
46
  ### Auto-Capture with OpenAI (Recommended)
45
47
 
46
48
  The easiest way to track LLM calls is using the `observeOpenAI()` wrapper - it automatically captures 90%+ of your LLM interactions:
@@ -118,7 +120,19 @@ const response = await wrappedAnthropic.messages.create({
118
120
 
119
121
  ### Auto-Capture with Vercel AI SDK
120
122
 
121
- Vercel AI SDK is a unified SDK that works with multiple providers:
123
+ Vercel AI SDK is a unified SDK that works with multiple providers (OpenAI, Anthropic, Google, etc.).
124
+
125
+ #### Installation
126
+
127
+ First, install the required packages:
128
+
129
+ ```bash
130
+ npm install observa-sdk ai @ai-sdk/openai @ai-sdk/anthropic
131
+ # or for other providers:
132
+ npm install @ai-sdk/google @ai-sdk/cohere
133
+ ```
134
+
135
+ #### Basic Example (Node.js/Server)
122
136
 
123
137
  ```typescript
124
138
  import { init } from "observa-sdk";
@@ -150,25 +164,134 @@ const stream = await ai.streamText({
150
164
  for await (const chunk of stream.textStream) {
151
165
  process.stdout.write(chunk);
152
166
  }
167
+
168
+ // ✅ Feedback helpers are automatically available on result objects!
169
+ // result.observa.like() and result.observa.dislike() are ready to use
153
170
  ```
154
171
 
155
- ### Legacy Manual Tracking
172
+ #### Next.js App Router Example
173
+
174
+ > **💡 Feedback Ready**: Responses from `ai.streamText()` and `ai.generateText()` automatically include feedback helpers on `result.observa`. See [Collecting User Feedback](#collecting-user-feedback-likedislike) section for UI examples.
156
175
 
157
- For more control, you can still use the manual `track()` method:
176
+ For Next.js applications, use the route handler pattern:
158
177
 
159
178
  ```typescript
160
- // Track AI interactions with simple wrapping
161
- const response = await observa.track({ query: "What is the weather?" }, () =>
162
- fetch("https://api.openai.com/v1/chat/completions", {
163
- method: "POST",
164
- headers: {
165
- /* ... */
179
+ // app/api/chat/route.ts
180
+ import { streamText, UIMessage, convertToModelMessages } from "ai";
181
+ import { init } from "observa-sdk";
182
+ import { openai } from "@ai-sdk/openai";
183
+
184
+ const observa = init({
185
+ apiKey: process.env.OBSERVA_API_KEY!,
186
+ apiUrl: process.env.OBSERVA_API_URL,
187
+ });
188
+
189
+ const ai = observa.observeVercelAI({ streamText }, {
190
+ name: "my-nextjs-app",
191
+ });
192
+
193
+ export async function POST(req: Request) {
194
+ const { messages }: { messages: UIMessage[] } = await req.json();
195
+
196
+ const result = await ai.streamText({
197
+ model: openai("gpt-4"),
198
+ messages: convertToModelMessages(messages),
199
+ });
200
+
201
+ // Return streaming response for Next.js
202
+ return result.toUIMessageStreamResponse();
203
+ }
204
+ ```
205
+
206
+ #### Client-Side with React (useChat Hook)
207
+
208
+ **Basic Example:**
209
+ ```typescript
210
+ // app/page.tsx
211
+ "use client";
212
+ import { useChat } from "@ai-sdk/react";
213
+
214
+ export default function Chat() {
215
+ const { messages, input, handleInputChange, handleSubmit } = useChat({
216
+ api: "/api/chat",
217
+ });
218
+
219
+ return (
220
+ <div>
221
+ {messages.map((message) => (
222
+ <div key={message.id}>{message.content}</div>
223
+ ))}
224
+ <form onSubmit={handleSubmit}>
225
+ <input value={input} onChange={handleInputChange} />
226
+ <button type="submit">Send</button>
227
+ </form>
228
+ </div>
229
+ );
230
+ }
231
+ ```
232
+
233
+ **With Feedback Buttons:**
234
+
235
+ To add like/dislike feedback, you'll need to expose the `traceId` and `spanId` from your API route and use `observa.trackFeedback()`. See the [Feedback](#collecting-user-feedback-likedislike) section for complete examples.
236
+
237
+ #### With Tools/Function Calling
238
+
239
+ Observa automatically tracks tool calls:
240
+
241
+ ```typescript
242
+ import { z } from "zod";
243
+
244
+ const result = await ai.streamText({
245
+ model: openai("gpt-4"),
246
+ messages: [...],
247
+ tools: {
248
+ getWeather: {
249
+ description: "Get the weather for a location",
250
+ parameters: z.object({
251
+ location: z.string(),
252
+ }),
253
+ execute: async ({ location }) => {
254
+ // Tool implementation - automatically tracked by Observa
255
+ return { temperature: 72, condition: "sunny" };
256
+ },
166
257
  },
167
- body: JSON.stringify({
168
- /* ... */
169
- }),
170
- })
171
- );
258
+ },
259
+ });
260
+ ```
261
+
262
+ #### Model Format Options
263
+
264
+ Vercel AI SDK supports two model formats:
265
+
266
+ 1. **Provider function** (recommended):
267
+ ```typescript
268
+ import { openai } from "@ai-sdk/openai";
269
+ import { anthropic } from "@ai-sdk/anthropic";
270
+
271
+ model: openai("gpt-4")
272
+ model: anthropic("claude-3-opus-20240229")
273
+ ```
274
+
275
+ 2. **String format** (for AI Gateway):
276
+ ```typescript
277
+ model: "openai/gpt-4"
278
+ model: "anthropic/claude-3-opus-20240229"
279
+ ```
280
+
281
+ #### Error Handling
282
+
283
+ Errors are automatically tracked:
284
+
285
+ ```typescript
286
+ try {
287
+ const result = await ai.generateText({
288
+ model: openai("gpt-4"),
289
+ prompt: "Hello!",
290
+ });
291
+ } catch (error) {
292
+ // Error is automatically tracked in Observa
293
+ console.error("LLM call failed:", error);
294
+ }
172
295
  ```
173
296
 
174
297
  ### Manual Tracking (Advanced)
@@ -191,6 +314,280 @@ const spanId = observa.trackLLMCall({
191
314
 
192
315
  See the [API Reference](#api-reference) section for all available methods.
193
316
 
317
+ ## Collecting User Feedback (Like/Dislike)
318
+
319
+ **Feedback helpers are automatically attached** to response objects when using `observeVercelAI()`. No additional setup needed - just add UI buttons!
320
+
321
+ ### Automatic Feedback Helpers
322
+
323
+ When you use `observa.observeVercelAI()`, all response objects automatically include feedback helpers:
324
+
325
+ ```typescript
326
+ const result = await ai.generateText({
327
+ model: openai('gpt-4'),
328
+ prompt: 'What is the capital of France?',
329
+ });
330
+
331
+ // Feedback helpers are automatically available on result.observa
332
+ result.observa.like(); // User liked the response
333
+ result.observa.dislike({ comment: "Wrong answer" }); // User disliked with comment
334
+
335
+ // All helpers have traceId and parentSpanId already bound - no manual linking needed!
336
+ ```
337
+
338
+ ### Server-Side Example (Node.js/API Route)
339
+
340
+ In your API route or server handler:
341
+
342
+ ```typescript
343
+ // app/api/chat/route.ts
344
+ import { generateText } from "ai";
345
+ import { init } from "observa-sdk";
346
+ import { openai } from "@ai-sdk/openai";
347
+
348
+ const observa = init({
349
+ apiKey: process.env.OBSERVA_API_KEY!,
350
+ });
351
+
352
+ const ai = observa.observeVercelAI({ generateText });
353
+
354
+ export async function POST(req: Request) {
355
+ const { prompt } = await req.json();
356
+
357
+ const result = await ai.generateText({
358
+ model: openai('gpt-4'),
359
+ prompt,
360
+ });
361
+
362
+ // Return both the text and observa metadata for frontend feedback
363
+ return Response.json({
364
+ text: result.text,
365
+ // Expose observa metadata so frontend can submit feedback
366
+ observa: {
367
+ traceId: result.observa.traceId,
368
+ spanId: result.observa.spanId,
369
+ // Frontend can use these to call observa.trackFeedback()
370
+ },
371
+ });
372
+ }
373
+ ```
374
+
375
+ ### Frontend Example (React)
376
+
377
+ In your React component, collect feedback from users:
378
+
379
+ ```typescript
380
+ // app/page.tsx
381
+ "use client";
382
+ import { useState } from "react";
383
+ import { init } from "observa-sdk";
384
+
385
+ const observa = init({
386
+ apiKey: process.env.NEXT_PUBLIC_OBSERVA_API_KEY!, // ⚠️ NEXT_PUBLIC_ vars are bundled into the browser — only use a key that is safe to expose client-side
387
+ });
388
+
389
+ export default function Chat() {
390
+ const [responses, setResponses] = useState<Array<{
391
+ id: string;
392
+ text: string;
393
+ observa: { traceId: string | null; spanId: string };
394
+ }>>([]);
395
+
396
+ async function handleSubmit(prompt: string) {
397
+ const response = await fetch('/api/chat', {
398
+ method: 'POST',
399
+ body: JSON.stringify({ prompt }),
400
+ });
401
+ const data = await response.json();
402
+
403
+ setResponses(prev => [...prev, {
404
+ id: Date.now().toString(),
405
+ text: data.text,
406
+ observa: data.observa,
407
+ }]);
408
+ }
409
+
410
+ function handleFeedback(responseId: string, type: 'like' | 'dislike', comment?: string) {
411
+ const response = responses.find(r => r.id === responseId);
412
+ if (!response?.observa.traceId) return;
413
+
414
+ // Use trackFeedback with traceId and parentSpanId from the response
415
+ observa.trackFeedback({
416
+ type,
417
+ traceId: response.observa.traceId,
418
+ parentSpanId: response.observa.spanId,
419
+ comment,
420
+ });
421
+ }
422
+
423
+ return (
424
+ <div>
425
+ {responses.map((response) => (
426
+ <div key={response.id}>
427
+ <p>{response.text}</p>
428
+ <div>
429
+ <button onClick={() => handleFeedback(response.id, 'like')}>
430
+ 👍 Like
431
+ </button>
432
+ <button onClick={() => handleFeedback(response.id, 'dislike')}>
433
+ 👎 Dislike
434
+ </button>
435
+ </div>
436
+ </div>
437
+ ))}
438
+ </div>
439
+ );
440
+ }
441
+ ```
442
+
443
+ ### Convenience Methods
444
+
445
+ The SDK provides convenience methods for a simpler API:
446
+
447
+ ```typescript
448
+ // Direct SDK usage
449
+ observa.like({
450
+ traceId: "trace-123",
451
+ parentSpanId: "span-456",
452
+ userId: "user-789",
453
+ });
454
+
455
+ observa.dislike({
456
+ traceId: "trace-123",
457
+ parentSpanId: "span-456",
458
+ comment: "Incorrect information",
459
+ userId: "user-789",
460
+ });
461
+ ```
462
+
463
+ ### Complete Next.js Example with Feedback
464
+
465
+ Here's a complete working example:
466
+
467
+ **Backend (`app/api/chat/route.ts`):**
468
+ ```typescript
469
+ import { generateText } from "ai";
470
+ import { init } from "observa-sdk";
471
+ import { openai } from "@ai-sdk/openai";
472
+
473
+ const observa = init({
474
+ apiKey: process.env.OBSERVA_API_KEY!,
475
+ });
476
+
477
+ const ai = observa.observeVercelAI({ generateText });
478
+
479
+ export async function POST(req: Request) {
480
+ const { prompt } = await req.json();
481
+
482
+ const result = await ai.generateText({
483
+ model: openai('gpt-4'),
484
+ prompt,
485
+ });
486
+
487
+ return Response.json({
488
+ text: result.text,
489
+ observa: {
490
+ traceId: result.observa.traceId,
491
+ spanId: result.observa.spanId,
492
+ },
493
+ });
494
+ }
495
+ ```
496
+
497
+ **Frontend (`app/page.tsx`):**
498
+ ```typescript
499
+ "use client";
500
+ import { useState } from "react";
501
+ import { init } from "observa-sdk";
502
+
503
+ const observa = init({
504
+ apiKey: process.env.NEXT_PUBLIC_OBSERVA_API_KEY!, // ⚠️ NEXT_PUBLIC_ vars are bundled into the browser — only use a key that is safe to expose client-side
505
+ });
506
+
507
+ export default function Chat() {
508
+ const [messages, setMessages] = useState<Array<{
509
+ id: string;
510
+ text: string;
511
+ observa?: { traceId: string | null; spanId: string };
512
+ }>>([]);
513
+ const [input, setInput] = useState("");
514
+
515
+ async function handleSubmit(e: React.FormEvent) {
516
+ e.preventDefault();
517
+ const prompt = input;
518
+ setInput("");
519
+
520
+ const response = await fetch('/api/chat', {
521
+ method: 'POST',
522
+ headers: { 'Content-Type': 'application/json' },
523
+ body: JSON.stringify({ prompt }),
524
+ });
525
+ const data = await response.json();
526
+
527
+ setMessages(prev => [...prev, {
528
+ id: Date.now().toString(),
529
+ text: data.text,
530
+ observa: data.observa,
531
+ }]);
532
+ }
533
+
534
+ function handleFeedback(messageId: string, type: 'like' | 'dislike', comment?: string) {
535
+ const message = messages.find(m => m.id === messageId);
536
+ if (!message?.observa?.traceId) return;
537
+
538
+ if (type === 'like') {
539
+ observa.like({
540
+ traceId: message.observa.traceId,
541
+ parentSpanId: message.observa.spanId,
542
+ });
543
+ } else {
544
+ observa.dislike({
545
+ traceId: message.observa.traceId,
546
+ parentSpanId: message.observa.spanId,
547
+ comment,
548
+ });
549
+ }
550
+ }
551
+
552
+ return (
553
+ <div>
554
+ {messages.map((message) => (
555
+ <div key={message.id}>
556
+ <p>{message.text}</p>
557
+ {message.observa && (
558
+ <div>
559
+ <button onClick={() => handleFeedback(message.id, 'like')}>
560
+ 👍 Like
561
+ </button>
562
+ <button onClick={() => handleFeedback(message.id, 'dislike')}>
563
+ 👎 Dislike
564
+ </button>
565
+ </div>
566
+ )}
567
+ </div>
568
+ ))}
569
+ <form onSubmit={handleSubmit}>
570
+ <input
571
+ value={input}
572
+ onChange={(e) => setInput(e.target.value)}
573
+ placeholder="Ask a question..."
574
+ />
575
+ <button type="submit">Send</button>
576
+ </form>
577
+ </div>
578
+ );
579
+ }
580
+ ```
581
+
582
+ **Key Points:**
583
+ - ✅ Feedback helpers are automatically attached to response objects
584
+ - ✅ `traceId` and `parentSpanId` are already bound - no manual linking needed
585
+ - ✅ Expose `result.observa.traceId` and `result.observa.spanId` to your frontend
586
+ - ✅ Use `observa.like()` or `observa.dislike()` on the frontend with the trace/span IDs
587
+ - ✅ Feedback appears automatically in your Observa dashboard
588
+
589
+ For more advanced feedback options (ratings, corrections, etc.), see the [Feedback API Reference](#observatrackfeedbackoptions) section.
590
+
194
591
  ## Multi-Tenant Architecture
195
592
 
196
593
  Observa SDK uses a **multi-tenant shared runtime architecture** for optimal cost, scalability, and operational simplicity.
@@ -565,44 +962,6 @@ Track errors with structured classification.
565
962
  - `errorCategory`: Error category
566
963
  - `errorCode`: Error code
567
964
 
568
- ### `observa.track(event, action)`
569
-
570
- Track an AI interaction (legacy method, still supported).
571
-
572
- **Parameters**:
573
-
574
- - `event.query` (required): The user query/prompt
575
- - `event.context` (optional): Additional context
576
- - `event.model` (optional): Model identifier
577
- - `event.metadata` (optional): Custom metadata
578
- - `action`: Function that returns a `Promise<Response>` (typically a fetch call)
579
-
580
- **Returns**: `Promise<Response>` (the original response, unmodified)
581
-
582
- **Example**:
583
-
584
- ```typescript
585
- const response = await observa.track(
586
- {
587
- query: "What is machine learning?",
588
- model: "gpt-4",
589
- metadata: { userId: "123" },
590
- },
591
- () =>
592
- fetch("https://api.openai.com/v1/chat/completions", {
593
- method: "POST",
594
- headers: {
595
- Authorization: `Bearer ${openaiKey}`,
596
- "Content-Type": "application/json",
597
- },
598
- body: JSON.stringify({
599
- model: "gpt-4",
600
- messages: [{ role: "user", content: "What is machine learning?" }],
601
- }),
602
- })
603
- );
604
- ```
605
-
606
965
  ### `observa.trackFeedback(options)`
607
966
 
608
967
  Track user feedback (likes, dislikes, ratings, corrections) for AI interactions.
@@ -630,6 +989,25 @@ Track user feedback (likes, dislikes, ratings, corrections) for AI interactions.
630
989
 
631
990
  #### Basic Like/Dislike Feedback
632
991
 
992
+ **Using convenience methods (recommended):**
993
+
994
+ ```typescript
995
+ // User clicks "like" button after receiving AI response
996
+ const feedbackSpanId = observa.like({
997
+ conversationId: "conv-123",
998
+ userId: "user-456",
999
+ });
1000
+
1001
+ // User clicks "dislike" button
1002
+ observa.dislike({
1003
+ comment: "The answer was incorrect",
1004
+ conversationId: "conv-123",
1005
+ userId: "user-456",
1006
+ });
1007
+ ```
1008
+
1009
+ **Or using trackFeedback directly:**
1010
+
633
1011
  ```typescript
634
1012
  // User clicks "like" button after receiving AI response
635
1013
  const feedbackSpanId = observa.trackFeedback({
@@ -649,6 +1027,92 @@ observa.trackFeedback({
649
1027
  });
650
1028
  ```
651
1029
 
1030
+ #### Feedback with Vercel AI SDK (Automatic Helpers)
1031
+
1032
+ **Feedback helpers are automatically attached to response objects:**
1033
+
1034
+ ```typescript
1035
+ import { generateText, streamText } from 'ai';
1036
+ import { openai } from '@ai-sdk/openai';
1037
+
1038
+ const ai = observa.observeVercelAI({ generateText, streamText });
1039
+
1040
+ // Generate a response
1041
+ const result = await ai.generateText({
1042
+ model: openai('gpt-4'),
1043
+ prompt: 'What is the capital of France?',
1044
+ });
1045
+
1046
+ // Feedback helpers are automatically available on result.observa
1047
+ // No traceId or parentSpanId needed - automatically linked!
1048
+
1049
+ // In your UI component - simple like/dislike buttons
1050
+ <button onClick={() => result.observa.like()}>
1051
+ 👍 Like
1052
+ </button>
1053
+
1054
+ <button onClick={() => result.observa.dislike({ comment: "Wrong answer" })}>
1055
+ 👎 Dislike
1056
+ </button>
1057
+
1058
+ // Or with more context
1059
+ result.observa.like({
1060
+ userId: currentUser.id,
1061
+ conversationId: currentConversation.id,
1062
+ });
1063
+
1064
+ result.observa.dislike({
1065
+ comment: "The capital is Paris, not Lyon",
1066
+ userId: currentUser.id,
1067
+ });
1068
+ ```
1069
+
1070
+ #### React Component Example
1071
+
1072
+ ```typescript
1073
+ import { useState } from 'react';
1074
+ import { useChat } from '@ai-sdk/react';
1075
+
1076
+ function ChatWithFeedback() {
1077
+ const { messages, append, isLoading } = useChat({
1078
+ api: '/api/chat', // Your API route that uses observa.observeVercelAI
1079
+ });
1080
+
1081
+ const handleFeedback = async (messageId: string, type: 'like' | 'dislike') => {
1082
+ // Get the message from your API response
1083
+ // (traceId and spanId are automatically included in response if using instrumentation)
1084
+
1085
+ // If using server actions or custom API:
1086
+ await fetch('/api/feedback', {
1087
+ method: 'POST',
1088
+ body: JSON.stringify({ messageId, type }),
1089
+ });
1090
+ };
1091
+
1092
+ return (
1093
+ <div>
1094
+ {messages.map((message) => (
1095
+ <div key={message.id}>
1096
+ <div>{message.content}</div>
1097
+ {message.role === 'assistant' && (
1098
+ <div>
1099
+ <button onClick={() => handleFeedback(message.id, 'like')}>
1100
+ 👍 Like
1101
+ </button>
1102
+ <button onClick={() => handleFeedback(message.id, 'dislike')}>
1103
+ 👎 Dislike
1104
+ </button>
1105
+ </div>
1106
+ )}
1107
+ </div>
1108
+ ))}
1109
+ </div>
1110
+ );
1111
+ }
1112
+ ```
1113
+
1114
+ **Note**: If you're using `observa.observeVercelAI()`, feedback helpers (`result.observa.like()` and `result.observa.dislike()`) are automatically attached to response objects. The `traceId` and `parentSpanId` are already bound, so you don't need to manage them manually - just call the methods directly!
1115
+
652
1116
  #### Rating Feedback (1-5 Scale)
653
1117
 
654
1118
  ```typescript
@@ -699,7 +1163,14 @@ const llmSpanId = observa.trackLLMCall({
699
1163
  // ... other LLM call data
700
1164
  });
701
1165
 
702
- // Link feedback directly to the LLM call span
1166
+ // Link feedback directly to the LLM call span using convenience method
1167
+ observa.like({
1168
+ parentSpanId: llmSpanId, // Attach feedback to the specific LLM call
1169
+ conversationId: "conv-123",
1170
+ userId: "user-456",
1171
+ });
1172
+
1173
+ // Or using trackFeedback directly
703
1174
  observa.trackFeedback({
704
1175
  type: "like",
705
1176
  parentSpanId: llmSpanId, // Attach feedback to the specific LLM call