raindrop-ai 0.0.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,549 @@
1
+ import { Attachment } from '@dawn/schemas/ingest';
2
+ import { Span } from '@opentelemetry/api';
3
+
4
+ /**
5
+ * Interface for tracking events.
6
+ *
7
+ * @param eventId - An optional event ID for the event.
8
+ * @param event - The name of the event.
9
+ * @param properties - An optional record of properties for the event.
10
+ * @param timestamp - An optional timestamp for the event.
11
+ * @param userId - The ID of the user. This is a required field.
12
+ * @param anonymousId - An optional anonymous ID for the user (not currently declared on this interface — verify against the published schema).
13
+ */
14
+ interface TrackEvent {
15
+ eventId?: string;
16
+ event: string;
17
+ properties?: Record<string, any>;
18
+ timestamp?: string;
19
+ userId: string;
20
+ }
21
+ type BasicSignal = {
22
+ eventId: string;
23
+ name: "thumbs_up" | "thumbs_down" | string;
24
+ timestamp?: string;
25
+ properties?: {
26
+ [key: string]: any;
27
+ };
28
+ attachmentId?: string;
29
+ };
30
+ type DefaultSignal = BasicSignal & {
31
+ type?: "default";
32
+ };
33
+ type FeedbackSignal = BasicSignal & {
34
+ type: "feedback";
35
+ comment?: string;
36
+ };
37
+ type EditSignal = BasicSignal & {
38
+ type: "edit";
39
+ after?: string;
40
+ };
41
+ type SignalEvent = DefaultSignal | FeedbackSignal | EditSignal;
42
+ /**
43
+ * Interface for identifying events.
44
+ *
45
+ * @param userId - The ID of the user. This is a required field.
46
+ * @param traits - An optional object of traits for the user.
47
+ * @param anonymousId - An optional anonymous ID for the user.
48
+ * TODO: ensure at least one out of userId or anonymousId is present
49
+ */
50
+ interface IdentifyEvent {
51
+ userId: string;
52
+ traits?: object;
53
+ }
54
+ /**
55
+ * Type definition for AI tracking events.
56
+ * In addition to the properties of a TrackEvent, an AiTrackEvent may have a 'model' property.
57
+ * It must have at least one of 'input' or 'output' property.
58
+ *
59
+ * @property model - An optional model property for the event
60
+ * @property input - An optional input property for the event, required if output is not provided.
61
+ * @property output - An optional output property for the event, required if input is not provided.
62
+ */
63
+ type AiTrackEvent = Omit<TrackEvent, "type"> & {
64
+ model?: string;
65
+ convoId?: string;
66
+ attachments?: Attachment[];
67
+ } & ({
68
+ input: string;
69
+ output?: string;
70
+ } | {
71
+ input?: string;
72
+ output: string;
73
+ });
74
+ /**
75
+ * Type definition for partial AI tracking events used with trackAiPartial.
76
+ * Requires an eventId to correlate partial updates. All other fields are optional.
77
+ */
78
+ type PartialAiTrackEvent = Partial<AiTrackEvent> & {
79
+ eventId: string;
80
+ isPending?: boolean;
81
+ };
82
+ type TraceContextOutput = Omit<PartialAiTrackEvent, "eventId"> & {
83
+ output: string;
84
+ eventId?: string;
85
+ };
86
+ /**
87
+ * Configuration for a traced workflow.
88
+ *
89
+ * Workflows represent higher-level operations that might contain multiple
90
+ * tasks. They help organize traces into logical units of work.
91
+ *
92
+ * @property name - Name of the workflow for identification in traces
93
+ * @property inputParameters - Optional array of input parameters for the workflow
94
+ * @property properties - Optional key-value pairs for additional metadata
95
+ */
96
+ interface WorkflowParams {
97
+ name: string;
98
+ inputParameters?: unknown[];
99
+ properties?: Record<string, string>;
100
+ }
101
+ /**
102
+ * Configuration for a traced task.
103
+ *
104
+ * Tasks represent individual operations within a workflow, such as an LLM call,
105
+ * a tool invocation, a retrieval operation, or internal processing.
106
+ *
107
+ * @property name - Name of the task for identification in traces
108
+ * @property kind - Category of the task (llm, tool, retrival, or internal; note "retrival" is the exact literal the API expects)
109
+ * @property properties - Optional key-value pairs for additional metadata
110
+ * @property inputParameters - Optional array of input parameters for the task
111
+ */
112
+ interface TaskParams {
113
+ name: string;
114
+ kind: "llm" | "tool" | "retrival" | "internal";
115
+ properties?: Record<string, string>;
116
+ inputParameters?: unknown[];
117
+ }
118
+ /**
119
+ * Represents an active tracing interaction for tracking and analyzing AI interactions.
120
+ *
121
+ * An Interaction is the core entity for tracing and instrumenting AI operations,
122
+ * allowing you to organize your code into workflows and tasks while capturing
123
+ * important metadata and contextual information. Use the methods on this interface
124
+ * to record your AI interaction's lifecycle from input to output, and to add
125
+ * structured context about the operation.
126
+ *
127
+ * Interactions are typically created using the `tracing.begin()` method and ended
128
+ * with the `finish()` method. Between these calls, you can add properties, attachments,
129
+ * and nested traced operations.
130
+ *
131
+ * @example
132
+ * // Basic usage pattern
133
+ * const interaction = tracing.begin({
134
+ * userId: "user-123",
135
+ * convoId: "conversation-456"
136
+ * });
137
+ *
138
+ * // Set the user's input
139
+ * interaction.setInput("Tell me a joke about AI");
140
+ *
141
+ * // Add context properties
142
+ * interaction.setProperty("intent", "entertainment");
143
+ *
144
+ * // Run a traced workflow
145
+ * const result = await interaction.withWorkflow("joke_generation", async () => {
146
+ * // Run a traced task for the LLM call
147
+ * return await interaction.withTask({
148
+ * name: "llm_completion",
149
+ * kind: "llm"
150
+ * }, async () => {
151
+ * const completion = await openai.chat.completions.create({
152
+ * model: "gpt-4",
153
+ * messages: [{ role: "user", content: "Tell me a joke about AI" }]
154
+ * });
155
+ * return completion.choices[0].message.content;
156
+ * });
157
+ * });
158
+ *
159
+ * // End the interaction with the result
160
+ * interaction.finish({ output: result });
161
+ */
162
+ type Interaction = {
163
+ /**
164
+ * Creates a traced workflow that executes the provided function.
165
+ *
166
+ * @template T The return type of the function
167
+ * @param params Either a WorkflowParams object or a string representing the workflow name
168
+ * @param fn The function to execute within the traced workflow
169
+ * @param thisArg Optional. The value to use as `this` when executing the function.
170
+ * Useful when passing methods from classes that require their class instance context.
171
+ * @param args Additional arguments to pass to the function
172
+ * @returns A promise that resolves to the function's return value
173
+ *
174
+ * @example
175
+ * // Content generation workflow with business context
176
+ * await interaction.withWorkflow({
177
+ * name: "joke_generator_workflow",
178
+ * properties: {
179
+ * content_type: "humor",
180
+ * audience: "developers",
181
+ * distribution_channel: "slack",
182
+ * priority: "normal"
183
+ * }
184
+ * }, async () => {
185
+ * const joke = await createJoke();
186
+ * const translation = await translateToPirate(joke);
187
+ * const signature = await generateSignature(translation);
188
+ * return translation + "\n\n" + signature;
189
+ * });
190
+ *
191
+ * @example
192
+ * // Using thisArg to preserve class context in LLM service
193
+ * class AIServiceClient {
194
+ * private apiKey: string;
195
+ * private chatHistory: Message[];
196
+ *
197
+ * async generateResponse(prompt: string) {
198
+ * // Method that uses this.apiKey and this.chatHistory
199
+ * const completion = await openai.chat.completions.create({
200
+ * model: "gpt-4",
201
+ * messages: [...this.chatHistory, { role: "user", content: prompt }],
202
+ * api_key: this.apiKey
203
+ * });
204
+ * return completion.choices[0].message.content;
205
+ * }
206
+ *
207
+ * async processConversation(prompt: string, conversationId: string) {
208
+ * // Pass this as thisArg to preserve context
209
+ * return await interaction.withWorkflow(
210
+ * {
211
+ * name: "conversation_flow",
212
+ * properties: {
213
+ * request_source: "mobile_app",
214
+ * conversation_stage: "follow_up",
215
+ * user_subscription: "premium",
216
+ * locale: "en-US"
217
+ * }
218
+ * },
219
+ * this.generateResponse,
220
+ * this, // thisArg to access this.apiKey and this.chatHistory
221
+ * prompt
222
+ * );
223
+ * }
224
+ * }
225
+ */
226
+ withWorkflow<T>(params: WorkflowParams | string, fn: (...args: any[]) => Promise<T> | T, thisArg?: any, ...args: any[]): Promise<T>;
227
+ /**
228
+ * Creates a traced task that executes the provided function.
229
+ *
230
+ * @template T The return type of the function
231
+ * @param params Either a TaskParams object or a string representing the task name
232
+ * @param fn The function to execute within the traced task
233
+ * @param thisArg Optional. The value to use as `this` when executing the function.
234
+ * Useful when passing methods from classes that require their class instance context.
235
+ * @param args Additional arguments to pass to the function
236
+ * @returns A promise that resolves to the function's return value
237
+ *
238
+ * @example
239
+ * // LLM task with business metadata
240
+ * await interaction.withTask({
241
+ * name: "joke_creation",
242
+ * kind: "llm",
243
+ * properties: {
244
+ * request_source: "web",
245
+ * feature_flag: "beta_jokes",
246
+ * experiment_group: "creative_boost",
247
+ * user_segment: "developer"
248
+ * }
249
+ * }, async () => {
250
+ * const completion = await openai.chat.completions.create({
251
+ * model: "gpt-3.5-turbo",
252
+ * temperature: 0.7,
253
+ * max_tokens: 2048,
254
+ * messages: [
255
+ * { role: "system", content: "You are a helpful assistant." },
256
+ * { role: "user", content: "I can't log into my account." }
257
+ * ]
258
+ * });
259
+ * return completion.choices[0].message.content;
260
+ * });
261
+ *
262
+ * @example
263
+ * // Using thisArg with an embedding service
264
+ * class EmbeddingService {
265
+ * private embedModel: string;
266
+ * private dimension: number;
267
+ *
268
+ * async embedText(text: string) {
269
+ * // Uses this.embedModel and this.dimension
270
+ * const embedding = await openai.embeddings.create({
271
+ * model: this.embedModel,
272
+ * input: text
273
+ * });
274
+ * return embedding.data[0].embedding.slice(0, this.dimension);
275
+ * }
276
+ *
277
+ * async getEmbedding(document: string, docId: string) {
278
+ * return interaction.withTask(
279
+ * {
280
+ * name: "text_embedding",
281
+ * kind: "llm",
282
+ * properties: {
283
+ * document_type: "knowledge_base",
284
+ * doc_id: docId,
285
+ * language: "en",
286
+ * content_source: "internal_wiki"
287
+ * }
288
+ * },
289
+ * this.embedText,
290
+ * this, // thisArg preserves access to this.embedModel and this.dimension
291
+ * document // Text to be embedded
292
+ * );
293
+ * }
294
+ * }
295
+ */
296
+ withTask<T>(params: TaskParams | string, fn: (...args: any[]) => Promise<T> | T, thisArg?: any, ...args: any[]): Promise<T>;
297
+ /**
298
+ * Creates a task span that can be used to manually record a task.
299
+ *
300
+ * @param params TaskParams object with task configuration
301
+ * @returns A span that can be used to record a task
302
+ */
303
+ createTask(params: TaskParams): Span;
304
+ /**
305
+ * Executes a function with additional context properties.
306
+ *
307
+ * @template T The return type of the function
308
+ * @param properties Object containing context properties to add
309
+ * @param fn The function to execute with additional context
310
+ * @param thisArg Optional. The value to use as `this` when executing the function.
311
+ * Useful when passing methods from classes that require their class instance context.
312
+ * @param args Additional arguments to pass to the function
313
+ * @returns The function's return value or a promise that resolves to it
314
+ *
315
+ * @example
316
+ * // Adding business context to LLM calls
317
+ * const result = interaction.withContext(
318
+ * {
319
+ * user_intent: "troubleshooting",
320
+ * product_area: "authentication",
321
+ * session_priority: "high",
322
+ * customer_tier: "enterprise"
323
+ * },
324
+ * async () => {
325
+ * const completion = await openai.chat.completions.create({
326
+ * model: "gpt-4",
327
+ * temperature: 0.7,
328
+ * max_tokens: 2048,
329
+ * messages: [
330
+ * { role: "system", content: "You are a helpful assistant." },
331
+ * { role: "user", content: "I can't log into my account." }
332
+ * ]
333
+ * });
334
+ * return completion.choices[0].message.content;
335
+ * }
336
+ * );
337
+ *
338
+ * @example
339
+ * // Using thisArg with semantic search application
340
+ * class SearchService {
341
+ * private vectorDB: VectorDatabase;
342
+ * private queryProcessor: QueryProcessor;
343
+ *
344
+ * async findRelevantDocuments(query: string) {
345
+ * // Uses this.vectorDB and this.queryProcessor
346
+ * const processedQuery = this.queryProcessor.enhance(query);
347
+ * return this.vectorDB.similaritySearch(processedQuery, 5);
348
+ * }
349
+ *
350
+ * async search(userQuery: string, userId: string) {
351
+ * return interaction.withContext(
352
+ * {
353
+ * search_type: "semantic",
354
+ * user_history: "returning_user",
355
+ * search_filter: "documentation",
356
+ * analytics_source: "help_center"
357
+ * },
358
+ * this.findRelevantDocuments,
359
+ * this, // thisArg preserves access to this.vectorDB and this.queryProcessor
360
+ * userQuery // Search query to process
361
+ * );
362
+ * }
363
+ * }
364
+ */
365
+ withContext<T>(properties: Record<string, string>, fn: (...args: any[]) => Promise<T> | T, thisArg?: any, ...args: any[]): T | Promise<T>;
366
+ /**
367
+ * Sets multiple properties on the interaction context.
368
+ *
369
+ * @param properties Object containing properties to set
370
+ */
371
+ setProperties(properties: Record<string, string>): void;
372
+ /**
373
+ * Sets a single property on the interaction context.
374
+ *
375
+ * @param key The property key
376
+ * @param value The property value
377
+ */
378
+ setProperty(key: string, value: string): void;
379
+ /**
380
+ * Adds attachments to the interaction.
381
+ *
382
+ * @param attachments The attachments to add
383
+ *
384
+ * @example
385
+ * // Adding various attachment types to an interaction
386
+ *
387
+ * // 1. Text attachment for context
388
+ * interaction.addAttachments([{
389
+ * type: "text",
390
+ * name: "Additional Info",
391
+ * value: "A very long document",
392
+ * role: "input",
393
+ * }]);
394
+ *
395
+ * // 2. Image attachment (output)
396
+ * interaction.addAttachments([{
397
+ * type: "image",
398
+ * value: "https://example.com/image.png",
399
+ * role: "output"
400
+ * }]);
401
+ *
402
+ * // 3. Iframe for embedded content
403
+ * interaction.addAttachments([{
404
+ * type: "iframe",
405
+ * name: "Generated UI",
406
+ * value: "https://newui.generated.com",
407
+ * role: "output",
408
+ * }]);
409
+ *
410
+ * // 4. Code snippet
411
+ * interaction.addAttachments([{
412
+ * type: "code",
413
+ * name: "Generated SQL Query",
414
+ * value: "SELECT * FROM users WHERE os_build = '17.1'",
415
+ * role: "output",
416
+ * language: "sql"
417
+ * }]);
418
+ *
419
+ * // All attachments are included when the interaction ends
420
+ * interaction.finish({ output: "The weather is sunny and warm." });
421
+ */
422
+ addAttachments(attachments: Attachment[]): void;
423
+ /**
424
+ * Sets the input for the interaction.
425
+ *
426
+ * @param input The input string
427
+ */
428
+ setInput(input: string): void;
429
+ /**
430
+ * Ends the interaction and sends analytics data.
431
+ *
432
+ * This method completes the interaction lifecycle by:
433
+ * 1. Setting the trace_id as a property (if available)
434
+ * 2. Sending analytics data including the event ID, conversation ID,
435
+ * user ID, input, output, attachments, and any custom properties
436
+ *
437
+ * @param resultEvent The result payload for this interaction; must include the final output string
438
+ *
439
+ * @example
440
+ * // Start and end an interaction
441
+ * const interaction = tracing.begin({
442
+ * userId: "user-123",
443
+ * convoId: "convo-456"
444
+ * });
445
+ *
446
+ * interaction.setInput("What can you help me with today?");
447
+ * // ... process the request ...
448
+ *
449
+ * // End the interaction with the response
450
+ * interaction.finish({ output: "I can help you with various tasks. Here are some examples..." });
451
+ */
452
+ finish(resultEvent: TraceContextOutput): void;
453
+ };
454
+
455
+ interface AnalyticsConfig {
456
+ writeKey: string;
457
+ bufferSize?: number;
458
+ bufferTimeout?: number;
459
+ debugLogs?: boolean;
460
+ endpoint?: string;
461
+ redactPii?: boolean;
462
+ }
463
+ declare const MAX_INGEST_SIZE_BYTES: number;
464
+ declare class Raindrop {
465
+ private writeKey;
466
+ private apiUrl;
467
+ private buffer;
468
+ private bufferSize;
469
+ private bufferTimeout;
470
+ private flushTimer;
471
+ private debugLogs;
472
+ private redactPii;
473
+ private context;
474
+ private _tracing;
475
+ private partialEventBuffer;
476
+ private partialEventTimeouts;
477
+ constructor(config: AnalyticsConfig);
478
+ private formatEndpoint;
479
+ /**
480
+ * Begins a new interaction.
481
+ *
482
+ * @param traceContext - The trace context for the interaction.
483
+ * @returns The interaction object.
484
+ */
485
+ begin(traceContext: PartialAiTrackEvent & {
486
+ event: string;
487
+ userId: string;
488
+ }): Interaction;
489
+ /**
490
+ * Resumes an existing interaction.
491
+ *
492
+ * @param eventId - The ID of the interaction to resume.
493
+ * @returns The interaction object.
494
+ */
495
+ resumeInteraction(eventId: string): Interaction;
496
+ /**
497
+ * Track AI events. In addition to normal event properties, you can provide an "input", "output", or "model" parameter.
498
+ * It takes an AiTrackEvent as input and sends it to the /track-ai endpoint of the dawn api.
499
+ *
500
+ * @param event - The AiTrackEvent (you must specify at least one of input/output properties)
501
+ * @returns A Promise that resolves when the event has been successfully sent.
502
+ *
503
+ * Example usage:
504
+ * ```typescript
505
+ * dawnAnalytics.trackAi({
506
+ * event: "chat", //name of the event
507
+ * model: "claude", //optional
508
+ * input: "what's up?", // input or output is required
509
+ * output: "not much human, how are you?", //input or output is required
510
+ * userId: "cn123456789",
511
+ * });
512
+ * ```
513
+ */
514
+ trackAi(event: AiTrackEvent | AiTrackEvent[]): string | Array<string | undefined> | undefined;
515
+ private isEnvWithLocalStorage;
516
+ setUserDetails(event: IdentifyEvent): void;
517
+ trackSignal(signal: SignalEvent | SignalEvent[]): void | void[];
518
+ private getSize;
519
+ private saveToBuffer;
520
+ private flush;
521
+ private sendBatchToApi;
522
+ private getContext;
523
+ private formatZodError;
524
+ /**
525
+ * Deeply merges properties of the source object into the target object.
526
+ * Modifies the target object in place. Handles nested plain objects.
527
+ */
528
+ private deepMergeObjects;
529
+ /**
530
+ * Internal method for tracking partial AI events. use .begin() to start an interaction instead.
531
+ *
532
+ * @param event - The PartialAiTrackEvent, requires eventId.
533
+ */
534
+ _trackAiPartial(event: PartialAiTrackEvent): void;
535
+ /**
536
+ * Flushes a single accumulated partial event by its ID.
537
+ * This is called internally by the timeout or by the close method.
538
+ * @param eventId - The ID of the partial event to flush.
539
+ */
540
+ private flushPartialEvent;
541
+ /**
542
+ * Sends a single prepared event object to the 'events/track_partial' endpoint.
543
+ * @param event - The event data conforming to ClientAiTrack schema.
544
+ */
545
+ private sendPartialEvent;
546
+ close(): Promise<void>;
547
+ }
548
+
549
+ export { type AiTrackEvent as A, MAX_INGEST_SIZE_BYTES as M, Raindrop as R };