@usetransactional/llm-node 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,289 @@
1
/**
 * LLM Ops Type Definitions
 *
 * Core types for the LLM Ops SDK.
 */
/** Lifecycle status of a trace; also reused for observations. */
declare enum TraceStatus {
    RUNNING = "RUNNING",
    COMPLETED = "COMPLETED",
    ERROR = "ERROR"
}
/** Kind of observation recorded under a trace. */
declare enum ObservationType {
    SPAN = "SPAN",
    GENERATION = "GENERATION",
    EVENT = "EVENT"
}
/** Severity level attached to an observation. */
declare enum ObservationLevel {
    DEBUG = "DEBUG",
    INFO = "INFO",
    WARNING = "WARNING",
    ERROR = "ERROR"
}
22
/** A persisted trace: one end-to-end unit of work plus aggregate stats. */
interface Trace {
    id: string;
    projectId: number;
    sessionId?: string;
    name: string;
    status: TraceStatus;
    input?: Record<string, unknown>;
    output?: Record<string, unknown>;
    metadata?: Record<string, unknown>;
    tags?: string[];
    userId?: string;
    /** ISO-8601 timestamp (the client emits Date#toISOString()). */
    startTime: string;
    /** ISO-8601 timestamp; unset while the trace is still running. */
    endTime?: string;
    totalTokens: number;
    totalCost: number;
    latencyMs?: number;
}
/** Parameters accepted by LlmOpsClient.trace(). */
interface CreateTraceParams {
    name: string;
    sessionId?: string;
    input?: Record<string, unknown>;
    metadata?: Record<string, unknown>;
    tags?: string[];
    userId?: string;
}
/** Fields that may be patched on an existing trace. */
interface UpdateTraceParams {
    status?: TraceStatus;
    output?: Record<string, unknown>;
    metadata?: Record<string, unknown>;
    endTime?: string;
}
/** A recorded observation (span, generation, or event) within a trace. */
interface Observation {
    id: string;
    traceId: string;
    parentObservationId?: string;
    type: ObservationType;
    name: string;
    status: TraceStatus;
    modelName?: string;
    input?: Record<string, unknown>;
    output?: Record<string, unknown>;
    promptTokens?: number;
    completionTokens?: number;
    totalTokens?: number;
    cost?: number;
    /** ISO-8601 timestamp (the client emits Date#toISOString()). */
    startTime: string;
    endTime?: string;
    latencyMs?: number;
    metadata?: Record<string, unknown>;
    level?: ObservationLevel;
}
/**
 * Parameters accepted by LlmOpsClient.observation().
 * `traceId` falls back to the ambient trace context when omitted.
 */
interface CreateObservationParams {
    traceId?: string;
    parentObservationId?: string;
    type: ObservationType;
    name: string;
    modelName?: string;
    input?: Record<string, unknown>;
    metadata?: Record<string, unknown>;
    level?: ObservationLevel;
}
/** Fields that may be patched on an existing observation. */
interface UpdateObservationParams {
    status?: TraceStatus;
    output?: Record<string, unknown>;
    promptTokens?: number;
    completionTokens?: number;
    metadata?: Record<string, unknown>;
    endTime?: string;
}
/** A session grouping related traces, with aggregate stats. */
interface Session {
    id: string;
    projectId: number;
    externalId?: string;
    userId?: string;
    metadata?: Record<string, unknown>;
    startTime: string;
    endTime?: string;
    traceCount: number;
    totalTokens: number;
    totalCost: number;
}
/** Create-or-update parameters for a session. */
interface UpsertSessionParams {
    id?: string;
    userId?: string;
    metadata?: Record<string, unknown>;
}
/** Payload for a batched ingest call. */
interface BatchIngestParams {
    traces?: CreateTraceParams[];
    observations?: CreateObservationParams[];
    sessions?: UpsertSessionParams[];
}
/** Result summary returned by a batched ingest call. */
interface BatchIngestResult {
    success: boolean;
    tracesCreated: number;
    observationsCreated: number;
    sessionsCreated: number;
    errors?: string[];
}
/** Client configuration. Provide either `dsn` or `publicKey` + `projectId`. */
interface LlmOpsConfig {
    /** DSN format: https://{publicKey}@api.transactional.dev/observability/{projectId} */
    dsn?: string;
    /** Public key (alternative to DSN) */
    publicKey?: string;
    /** Project ID (alternative to DSN) */
    projectId?: number;
    /** Base URL (alternative to DSN) */
    baseUrl?: string;
    /** Enable/disable tracing (default: true) */
    enabled?: boolean;
    /** Batch size before flushing (default: 100) */
    batchSize?: number;
    /** Flush interval in ms (default: 5000) */
    flushInterval?: number;
    /** Enable debug logging (default: false) */
    debug?: boolean;
}
/** Handle returned by LlmOpsClient.trace() for finishing the trace. */
interface TraceHandle {
    id: string;
    /** Mark the trace COMPLETED, optionally recording its output; clears the ambient context. */
    end: (params?: {
        output?: Record<string, unknown>;
    }) => Promise<void>;
    /** Mark the trace ERROR, recording message and stack in metadata; clears the ambient context. */
    error: (error: Error) => Promise<void>;
}
/** Handle returned by the observation helpers for finishing an observation. */
interface ObservationHandle {
    id: string;
    /** Mark the observation COMPLETED, optionally recording output and token counts. */
    end: (params?: {
        output?: Record<string, unknown>;
        promptTokens?: number;
        completionTokens?: number;
    }) => Promise<void>;
    /** Mark the observation ERROR, recording message and stack in metadata. */
    error: (error: Error) => Promise<void>;
}
154
+
155
/**
 * LLM Ops Client
 *
 * Main client for sending traces and observations to the LLM Ops API.
 */

/**
 * Set the ambient trace (and optionally observation) context used when
 * observations omit `traceId`.
 * NOTE(review): the implementation stores this in plain module-level
 * variables, not AsyncLocalStorage — concurrent async traces will
 * overwrite each other's context; confirm this is intended.
 */
declare function setTraceContext(traceId: string, observationId?: string): void;
/** Read the ambient trace/observation context (fields undefined when unset). */
declare function getTraceContext(): {
    traceId?: string;
    observationId?: string;
};
/** Clear the ambient trace context. */
declare function clearTraceContext(): void;
/**
 * Batching client: events are queued locally and POSTed to the ingest
 * endpoint when the batch size is reached, on a periodic timer, or on
 * an explicit flush()/shutdown().
 */
declare class LlmOpsClient {
    private config;
    private queue;
    private flushTimer?;
    private pendingFlush?;
    constructor(config: LlmOpsConfig);
    private parseConfig;
    private startFlushTimer;
    private log;
    private enqueue;
    /**
     * Create a new trace and make it the ambient trace context.
     */
    trace(params: CreateTraceParams): TraceHandle;
    /**
     * Update an existing trace (queued, not sent immediately).
     */
    updateTrace(traceId: string, params: UpdateTraceParams): Promise<void>;
    /**
     * Create a new observation (span, generation, or event).
     * Throws if no `traceId` is given and no ambient trace context exists.
     */
    observation(params: CreateObservationParams): ObservationHandle;
    /**
     * Create a generation observation (LLM call)
     */
    generation(params: Omit<CreateObservationParams, 'type'>): ObservationHandle;
    /**
     * Create a span observation
     */
    span(params: Omit<CreateObservationParams, 'type'>): ObservationHandle;
    /**
     * Create an event observation
     */
    event(params: Omit<CreateObservationParams, 'type'>): ObservationHandle;
    /**
     * Update an existing observation (queued, not sent immediately).
     */
    updateObservation(observationId: string, params: UpdateObservationParams): Promise<void>;
    /**
     * Flush queued events to the API
     */
    flush(): Promise<void>;
    private sendBatch;
    /**
     * Shutdown the client and flush remaining events
     */
    shutdown(): Promise<void>;
}
215
+
216
/**
 * Transactional LLM Ops SDK
 *
 * AI observability with cost tracking, trace analysis, and performance monitoring.
 *
 * @example
 * ```typescript
 * import { initLlmOps, getLlmOps } from '@usetransactional/llm-node';
 *
 * initLlmOps({
 *   dsn: process.env.TRANSACTIONAL_LLM_OPS_DSN!,
 * });
 *
 * const llmOps = getLlmOps();
 *
 * const trace = llmOps.trace({
 *   name: 'chat-completion',
 *   input: { prompt: 'Hello!' },
 * });
 *
 * const generation = llmOps.generation({
 *   name: 'gpt-4o',
 *   modelName: 'gpt-4o',
 * });
 *
 * await generation.end({
 *   output: { content: 'Hi there!' },
 *   promptTokens: 10,
 *   completionTokens: 5,
 * });
 *
 * await trace.end({ output: { response: 'Hi there!' } });
 * ```
 */

/**
 * Initialize the LLM Ops SDK
 *
 * Call this once at application startup before using any tracing functions.
 * Calling it again while initialized logs a warning and returns the
 * existing client instead of re-initializing.
 *
 * @param config - Configuration options including DSN
 * @returns The initialized client instance
 *
 * @example
 * ```typescript
 * initLlmOps({
 *   dsn: 'https://pk_...@api.transactional.dev/observability/42',
 * });
 * ```
 */
declare function initLlmOps(config: LlmOpsConfig): LlmOpsClient;
/**
 * Get the LLM Ops client instance
 *
 * @throws Error if SDK has not been initialized
 * @returns The client instance
 *
 * @example
 * ```typescript
 * const llmOps = getLlmOps();
 * const trace = llmOps.trace({ name: 'my-trace' });
 * ```
 */
declare function getLlmOps(): LlmOpsClient;
/**
 * Check if the SDK is initialized
 */
declare function isInitialized(): boolean;
/**
 * Reset the SDK (mainly for testing)
 *
 * Initiates a fire-and-forget shutdown of the current client (errors are
 * swallowed) and clears the singleton.
 */
declare function resetLlmOps(): void;

export { type BatchIngestParams, type BatchIngestResult, type CreateObservationParams, type CreateTraceParams, LlmOpsClient, type LlmOpsConfig, type Observation, type ObservationHandle, ObservationLevel, ObservationType, type Session, type Trace, type TraceHandle, TraceStatus, type UpdateObservationParams, type UpdateTraceParams, clearTraceContext, getLlmOps, getTraceContext, initLlmOps, isInitialized, resetLlmOps, setTraceContext };
package/dist/index.js ADDED
@@ -0,0 +1,355 @@
1
"use strict";
// ---- bundler-generated CommonJS interop helpers ----
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define lazy, enumerable getters on `target` for every entry of `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties from `from` onto `to` (skipping `except` and keys
// already present), preserving each property's enumerability via getters.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a module namespace object with an `__esModule` marker for interop.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
// Public export map; values are thunks so exports resolve lazily.
var index_exports = {};
__export(index_exports, {
  LlmOpsClient: () => LlmOpsClient,
  ObservationLevel: () => ObservationLevel,
  ObservationType: () => ObservationType,
  TraceStatus: () => TraceStatus,
  clearTraceContext: () => clearTraceContext,
  getLlmOps: () => getLlmOps,
  getTraceContext: () => getTraceContext,
  initLlmOps: () => initLlmOps,
  isInitialized: () => isInitialized,
  resetLlmOps: () => resetLlmOps,
  setTraceContext: () => setTraceContext
});
module.exports = __toCommonJS(index_exports);
36
+
37
+ // src/client.ts
38
+ var import_nanoid = require("nanoid");
39
// Ambient trace context, held in module scope.
// NOTE(review): plain module state (not AsyncLocalStorage) — concurrent
// async traces will overwrite each other's context; confirm intended.
var activeContext = { traceId: void 0, observationId: void 0 };
/** Set the active trace (and optionally observation) for subsequent calls. */
function setTraceContext(traceId, observationId) {
  activeContext = { traceId, observationId };
}
/** Snapshot the active context; fields are undefined when unset. */
function getTraceContext() {
  return { traceId: activeContext.traceId, observationId: activeContext.observationId };
}
/** Drop the active context entirely. */
function clearTraceContext() {
  activeContext = { traceId: void 0, observationId: void 0 };
}
52
var LlmOpsClient = class {
  /**
   * Batching client for the LLM Ops ingest API.
   * Requires either `dsn` or `publicKey` + `projectId`. Starts the
   * periodic flush timer unless `enabled` is false.
   */
  constructor(config) {
    this.queue = [];
    this.config = this.parseConfig(config);
    if (this.config.enabled) {
      this.startFlushTimer();
    }
  }
  /**
   * Normalize user config into a fully-populated internal config.
   * DSN shape: https://{publicKey}@host/observability/{projectId}
   * Throws on a malformed DSN or missing credentials.
   */
  parseConfig(config) {
    let publicKey = config.publicKey;
    let projectId = config.projectId;
    let baseUrl = config.baseUrl || "https://api.transactional.dev";
    if (config.dsn) {
      try {
        const url = new URL(config.dsn);
        publicKey = url.username;
        const pathParts = url.pathname.split("/").filter(Boolean);
        // Explicit radix 10: never rely on parseInt's prefix sniffing.
        projectId = parseInt(pathParts[pathParts.length - 1] || "0", 10);
        baseUrl = `${url.protocol}//${url.host}`;
      } catch {
        throw new Error(`Invalid DSN format: ${config.dsn}`);
      }
    }
    if (!publicKey || !projectId) {
      throw new Error("LlmOps requires either a DSN or publicKey + projectId");
    }
    return {
      publicKey,
      projectId,
      baseUrl,
      enabled: config.enabled ?? true,
      batchSize: config.batchSize ?? 100,
      flushInterval: config.flushInterval ?? 5e3,
      debug: config.debug ?? false
    };
  }
  // Periodic background flush; errors are surfaced only in debug mode.
  startFlushTimer() {
    this.flushTimer = setInterval(() => {
      this.flush().catch((err) => {
        if (this.config.debug) {
          console.error("[LlmOps] Flush error:", err);
        }
      });
    }, this.config.flushInterval);
  }
  // Debug-only logger.
  log(message, ...args) {
    if (this.config.debug) {
      console.log(`[LlmOps] ${message}`, ...args);
    }
  }
  // Queue one event; kick off an async flush when batchSize is reached.
  enqueue(item) {
    if (!this.config.enabled) return;
    this.queue.push(item);
    this.log("Enqueued:", item.type, item.id);
    if (this.queue.length >= this.config.batchSize) {
      this.flush().catch((err) => {
        if (this.config.debug) {
          console.error("[LlmOps] Flush error:", err);
        }
      });
    }
  }
  /**
   * Create a new trace and make it the ambient trace context.
   * Returns a handle whose end()/error() finalize the trace and clear
   * the ambient context.
   */
  trace(params) {
    const traceId = import_nanoid.nanoid();
    const startTime = new Date().toISOString();
    this.enqueue({
      type: "trace",
      id: traceId,
      projectId: this.config.projectId,
      ...params,
      status: "RUNNING",
      startTime
    });
    setTraceContext(traceId);
    return {
      id: traceId,
      end: async (endParams) => {
        await this.updateTrace(traceId, {
          status: "COMPLETED",
          output: endParams?.output,
          endTime: new Date().toISOString()
        });
        clearTraceContext();
      },
      error: async (error) => {
        await this.updateTrace(traceId, {
          status: "ERROR",
          metadata: { error: error.message, stack: error.stack },
          endTime: new Date().toISOString()
        });
        clearTraceContext();
      }
    };
  }
  /**
   * Update an existing trace (queued, not sent immediately).
   */
  async updateTrace(traceId, params) {
    this.enqueue({
      type: "trace",
      id: traceId,
      ...params
    });
  }
  /**
   * Create a new observation (span, generation, or event) under
   * `params.traceId` or, when omitted, the ambient trace context.
   * Throws when no trace context is available.
   */
  observation(params) {
    const observationId = import_nanoid.nanoid();
    const startTime = new Date().toISOString();
    const context = getTraceContext();
    const traceId = params.traceId || context.traceId;
    if (!traceId) {
      throw new Error("No trace context found. Create a trace first.");
    }
    // `type` clashes with the queue item discriminator, so rename it.
    const { type: observationType, ...restParams } = params;
    this.enqueue({
      type: "observation",
      id: observationId,
      traceId,
      parentObservationId: restParams.parentObservationId || context.observationId,
      ...restParams,
      observationType,
      status: "RUNNING",
      startTime
    });
    setTraceContext(traceId, observationId);
    return {
      id: observationId,
      end: async (endParams) => {
        await this.updateObservation(observationId, {
          status: "COMPLETED",
          output: endParams?.output,
          promptTokens: endParams?.promptTokens,
          completionTokens: endParams?.completionTokens,
          endTime: new Date().toISOString()
        });
        // Restore the parent observation as the ambient context.
        setTraceContext(traceId, params.parentObservationId || context.observationId);
      },
      error: async (error) => {
        await this.updateObservation(observationId, {
          status: "ERROR",
          metadata: { error: error.message, stack: error.stack },
          endTime: new Date().toISOString()
        });
        setTraceContext(traceId, params.parentObservationId || context.observationId);
      }
    };
  }
  /**
   * Create a generation observation (LLM call)
   */
  generation(params) {
    return this.observation({
      ...params,
      type: "GENERATION"
    });
  }
  /**
   * Create a span observation
   */
  span(params) {
    return this.observation({
      ...params,
      type: "SPAN"
    });
  }
  /**
   * Create an event observation
   */
  event(params) {
    return this.observation({
      ...params,
      type: "EVENT"
    });
  }
  /**
   * Update an existing observation (queued, not sent immediately).
   */
  async updateObservation(observationId, params) {
    this.enqueue({
      type: "observation",
      id: observationId,
      ...params
    });
  }
  /**
   * Flush queued events to the API.
   *
   * Fix: the previous version left `pendingFlush` set when sendBatch
   * rejected, so every later flush re-awaited the same rejected promise
   * and the client could never send again. It could also POST an empty
   * batch when two flushes raced past the initial length check.
   */
  async flush() {
    if (this.queue.length === 0) return;
    if (this.pendingFlush) {
      // Wait for the in-flight batch. Its rejection was already delivered
      // to the caller that started it, so swallow it here.
      try {
        await this.pendingFlush;
      } catch {
      }
    }
    const batch = this.queue.splice(0, this.config.batchSize);
    // A concurrent flush may have drained the queue while we waited.
    if (batch.length === 0) return;
    this.log("Flushing", batch.length, "items");
    this.pendingFlush = this.sendBatch(batch);
    try {
      await this.pendingFlush;
    } finally {
      // Always clear, even on failure, so the next flush can proceed.
      this.pendingFlush = void 0;
    }
  }
  // POST one batch; on failure re-queue the items and rethrow.
  // NOTE(review): re-queued items retry indefinitely — the queue can grow
  // without bound while the endpoint is down; consider a cap.
  async sendBatch(batch) {
    try {
      const response = await fetch(
        `${this.config.baseUrl}/observability/ingest/batch`,
        {
          method: "POST",
          headers: {
            "Authorization": `Bearer ${this.config.publicKey}`,
            "Content-Type": "application/json"
          },
          body: JSON.stringify({
            projectId: this.config.projectId,
            batch
          })
        }
      );
      if (!response.ok) {
        const text = await response.text();
        throw new Error(`Failed to send batch: ${response.status} ${text}`);
      }
      this.log("Batch sent successfully");
    } catch (error) {
      this.queue.unshift(...batch);
      throw error;
    }
  }
  /**
   * Shutdown the client and flush remaining events
   */
  async shutdown() {
    if (this.flushTimer) {
      clearInterval(this.flushTimer);
    }
    await this.flush();
    this.log("Shutdown complete");
  }
};
292
+
293
+ // src/types/index.ts
294
// Runtime values backing the declared string enums. Plain object literals
// are equivalent to TypeScript's string-enum emit here: string enums get
// no reverse (value -> key) mapping, and key order matches declaration
// order in both forms.
var TraceStatus = {
  RUNNING: "RUNNING",
  COMPLETED: "COMPLETED",
  ERROR: "ERROR"
};
var ObservationType = {
  SPAN: "SPAN",
  GENERATION: "GENERATION",
  EVENT: "EVENT"
};
var ObservationLevel = {
  DEBUG: "DEBUG",
  INFO: "INFO",
  WARNING: "WARNING",
  ERROR: "ERROR"
};
313
+
314
+ // src/index.ts
315
// Module-level singleton managed by initLlmOps / getLlmOps / resetLlmOps.
var defaultClient = null;
/**
 * Initialize the SDK once. A second call warns and returns the
 * already-initialized client instead of creating a new one.
 */
function initLlmOps(config) {
  if (defaultClient !== null) {
    console.warn("[LlmOps] SDK already initialized. Ignoring duplicate initialization.");
    return defaultClient;
  }
  defaultClient = new LlmOpsClient(config);
  return defaultClient;
}
/** Fetch the singleton client; throws if initLlmOps was never called. */
function getLlmOps() {
  if (defaultClient === null) {
    throw new Error(
      "LLM Ops SDK not initialized. Call initLlmOps() first."
    );
  }
  return defaultClient;
}
/** True once initLlmOps has succeeded and the client has not been reset. */
function isInitialized() {
  return defaultClient !== null;
}
/**
 * Tear down the singleton (mainly for testing). Shutdown is
 * fire-and-forget: its errors are deliberately swallowed.
 */
function resetLlmOps() {
  const client = defaultClient;
  if (client !== null) {
    client.shutdown().catch(() => {
    });
    defaultClient = null;
  }
}
342
// Annotate the CommonJS export names for ESM import in node:
// (dead code — the `0 &&` guard means this never executes; Node's
// cjs-module-lexer statically reads the object literal so `import { x }`
// from ESM can resolve these named exports)
0 && (module.exports = {
  LlmOpsClient,
  ObservationLevel,
  ObservationType,
  TraceStatus,
  clearTraceContext,
  getLlmOps,
  getTraceContext,
  initLlmOps,
  isInitialized,
  resetLlmOps,
  setTraceContext
});
package/dist/index.mjs ADDED
@@ -0,0 +1,26 @@
1
+ import {
2
+ LlmOpsClient,
3
+ ObservationLevel,
4
+ ObservationType,
5
+ TraceStatus,
6
+ clearTraceContext,
7
+ getLlmOps,
8
+ getTraceContext,
9
+ initLlmOps,
10
+ isInitialized,
11
+ resetLlmOps,
12
+ setTraceContext
13
+ } from "./chunk-IR6P3PV4.mjs";
14
+ export {
15
+ LlmOpsClient,
16
+ ObservationLevel,
17
+ ObservationType,
18
+ TraceStatus,
19
+ clearTraceContext,
20
+ getLlmOps,
21
+ getTraceContext,
22
+ initLlmOps,
23
+ isInitialized,
24
+ resetLlmOps,
25
+ setTraceContext
26
+ };