@llmtracer/sdk 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +138 -0
- package/dist/index.d.ts +118 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +558 -0
- package/dist/index.js.map +1 -0
- package/package.json +34 -0
package/README.md
ADDED
@@ -0,0 +1,138 @@
# @llmtracer/sdk

See where your AI budget goes. Lightweight LLM cost tracking SDK for OpenAI.

Wrap your OpenAI client in two lines and get automatic tracking of every API call -- tokens, latency, cost, and model usage -- with zero changes to your application code.

## Install

```bash
npm install @llmtracer/sdk
```

## Quickstart

```typescript
import { LLMTracer } from "@llmtracer/sdk";
import OpenAI from "openai";

const tracer = new LLMTracer({
  apiKey: process.env.LLMTRACER_KEY,
});

const openai = new OpenAI();

// 2 lines -- that's it
tracer.instrumentOpenAI(openai, {
  tags: { feature: "customer-support-bot", env: "production" },
});

// Every OpenAI call is now automatically tracked
const response = await openai.chat.completions.create({
  model: "gpt-4o",
  messages: [{ role: "user", content: "Hello" }],
});

// In serverless (Lambda, Cloud Functions), flush before returning
await tracer.flush();
```

## Serverless Usage

In environments like AWS Lambda or Google Cloud Functions, call `flush()` before your function returns to ensure all events are sent:

```typescript
export async function handler(event) {
  const response = await openai.chat.completions.create({ ... });
  await tracer.flush();
  return response;
}
```

## Agentic Workflow Tracking

Group related LLM calls into traces with named phases:

```typescript
await tracer.trace("user-request-123", async (t) => {
  await t.phase("planning", async () => {
    await openai.chat.completions.create({ ... });
  });

  await t.phase("execution", async () => {
    await openai.chat.completions.create({ ... });
  });
});
```

## Streaming Support

Streaming calls are instrumented automatically. Token counts are captured from the final chunk:

```typescript
const stream = await openai.chat.completions.create({
  model: "gpt-4o",
  messages: [{ role: "user", content: "Hello" }],
  stream: true,
});

for await (const chunk of stream) {
  // use chunk as normal
}
```
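
Under the hood (per `dist/index.js` in this package), the SDK shallow-clones your request params and injects `stream_options: { include_usage: true }` so the final chunk carries token usage. If you explicitly set `include_usage: false`, the SDK logs a debug warning and records zero tokens for that call. A minimal sketch of that opt-out case:

```typescript
// Opting out of usage reporting -- the call is still traced,
// but tokens_in/tokens_out will be recorded as 0 for it.
const stream = await openai.chat.completions.create({
  model: "gpt-4o",
  messages: [{ role: "user", content: "Hello" }],
  stream: true,
  stream_options: { include_usage: false },
});
```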

## Configuration

| Option | Type | Default | Description |
|---|---|---|---|
| `apiKey` | `string` | *required* | Your LLM Tracer API key (starts with `lt_`) |
| `endpoint` | `string` | Production URL | Ingestion endpoint URL |
| `maxBatchSize` | `number` | `50` | Max events per batch before auto-flush |
| `flushIntervalMs` | `number` | `10000` | Auto-flush interval in milliseconds |
| `maxQueueSize` | `number` | `10000` | Max events in queue before dropping oldest |
| `maxRetries` | `number` | `3` | Max retry attempts for failed flushes |
| `retryBaseMs` | `number` | `1000` | Base delay for exponential backoff, in milliseconds |
| `sampleRate` | `number` | `1.0` | Sampling rate (0.0-1.0). `1.0` captures everything |
| `capturePrompt` | `boolean` | `false` | Whether to capture full prompt content |
| `debug` | `boolean` | `false` | Enable debug logging to console |
| `onFlush` | `function` | `null` | Callback after each flush with stats |
| `onError` | `function` | `null` | Callback on transport errors |
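
For reference, here is a sketch of a tracer configured with several of these options (the values are illustrative, not recommendations):

```typescript
const tracer = new LLMTracer({
  apiKey: process.env.LLMTRACER_KEY, // starts with lt_
  maxBatchSize: 100,      // flush once 100 events are buffered...
  flushIntervalMs: 5000,  // ...or every 5 seconds, whichever comes first
  sampleRate: 0.25,       // keep roughly 1 in 4 traces/events
  capturePrompt: false,   // store only the prompt hash, not the content
  debug: true,            // log SDK activity to the console
});
```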

## API Reference

### `new LLMTracer(config)`

Create a new tracer instance. See [Configuration](#configuration) for options.

### `tracer.instrumentOpenAI(client, options?)`

Instrument an OpenAI client instance. All subsequent `chat.completions.create` calls (streaming and non-streaming) will be tracked automatically.

- `client` -- an OpenAI client instance
- `options.tags` -- key-value pairs attached to every event (e.g. `{ env: "production" }`)

### `tracer.flush(): Promise<void>`

Flush all buffered events to the backend. Call this in serverless environments before the function returns.

### `tracer.trace(traceId, fn): Promise<void>`

Track an agentic workflow. All LLM calls within the callback are grouped under the given `traceId`. Use `t.phase(name, fn)` inside the callback to label phases.

### `tracer.shutdown(): Promise<void>`

Flush remaining events and stop the auto-flush timer. Call this on graceful shutdown.
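
For example, in a long-running Node service you might hook this to a termination signal (a sketch; adapt to your runtime):

```typescript
process.on("SIGTERM", async () => {
  await tracer.shutdown(); // flush buffered events, stop the auto-flush timer
  process.exit(0);
});
```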

## Reliability

The SDK is designed to never interfere with your application:

- **Never throws** -- all internal errors are swallowed silently (enable `debug: true` for visibility)
- **Batching** -- events are queued and sent in configurable batches
- **Retry with backoff** -- failed flushes are retried with exponential backoff and jitter
- **Circuit breaker** -- after 5 consecutive failures, stops attempting for 60 seconds
- **Queue overflow** -- drops oldest events when the queue exceeds `maxQueueSize`
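
If you want visibility into that delivery pipeline, the `onFlush` and `onError` callbacks from the [Configuration](#configuration) table report per-flush stats and transport errors. A minimal sketch (the logging is just a placeholder for your own metrics):

```typescript
const tracer = new LLMTracer({
  apiKey: process.env.LLMTRACER_KEY,
  onFlush: (stats) => {
    // stats: { sent, accepted, rejected, dropped, last_error }
    console.log(`[flush] ${stats.accepted}/${stats.sent} accepted, ${stats.dropped} dropped`);
  },
  onError: (err) => {
    console.warn("[llmtracer] transport error:", err.message);
  },
});
```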

## License

MIT
package/dist/index.d.ts
ADDED
@@ -0,0 +1,118 @@
/** Configuration for LLMTracer SDK (Section 9.1 of spec) */
export interface LLMTracerConfig {
    /** Your LLM Tracer API key (starts with lt_) */
    apiKey: string;
    /** Ingestion endpoint URL */
    endpoint?: string;
    /** Max events per batch before auto-flush. Default: 50 */
    maxBatchSize?: number;
    /** Auto-flush interval in ms. Default: 10000 (10s) */
    flushIntervalMs?: number;
    /** Max events in queue before dropping oldest. Default: 10000 */
    maxQueueSize?: number;
    /** Max retry attempts for failed flushes. Default: 3 */
    maxRetries?: number;
    /** Base delay for exponential backoff in ms. Default: 1000 */
    retryBaseMs?: number;
    /** Sampling rate 0.0-1.0. Default: 1.0 (capture everything) */
    sampleRate?: number;
    /** Whether to capture full prompt content. Default: false */
    capturePrompt?: boolean;
    /** Enable debug logging. Default: false */
    debug?: boolean;
    /** Callback after each flush (Section 9.2) */
    onFlush?: (stats: FlushStats) => void;
    /** Callback on transport errors (Section 9.2) */
    onError?: (error: Error) => void;
}
/** Stats passed to onFlush callback */
export interface FlushStats {
    sent: number;
    accepted: number;
    rejected: number;
    dropped: number;
    last_error: Error | null;
}
/** Tags passed to instrumentOpenAI */
export interface InstrumentOptions {
    tags?: Record<string, string>;
}
/** Trace context for agentic workflow tracking (Section 3.3) */
interface TraceContext {
    traceId: string;
    currentPhase: string | null;
    sampled: boolean;
    phase: (name: string, fn: () => Promise<unknown>) => Promise<unknown>;
}
export declare class LLMTracer {
    private readonly apiKey;
    private readonly endpoint;
    private readonly maxBatchSize;
    private readonly flushIntervalMs;
    private readonly maxQueueSize;
    private readonly maxRetries;
    private readonly retryBaseMs;
    private readonly sampleRate;
    private readonly capturePrompt;
    private readonly debugMode;
    private readonly onFlushCallback;
    private readonly onErrorCallback;
    private queue;
    private droppedCount;
    private flushTimer;
    private circuitState;
    private consecutiveFailures;
    private circuitOpenedAt;
    private lastErrorSummary;
    private readonly circuitFailureThreshold;
    private readonly circuitCooldownMs;
    private activeTraceId;
    private activePhase;
    private activeTraceSampled;
    private flushing;
    constructor(config: LLMTracerConfig);
    /**
     * Wraps an OpenAI client to automatically capture usage events.
     * Supports both non-streaming and streaming calls.
     *
     * Usage:
     *   tracer.instrumentOpenAI(openai, { tags: { service: "api" } });
     */
    instrumentOpenAI(openai: any, options?: InstrumentOptions): void;
    /**
     * Flush all buffered events to the backend.
     * CRITICAL: Call this before returning in serverless environments.
     *
     * Usage:
     *   await tracer.flush();
     */
    flush(): Promise<void>;
    /**
     * Track an agentic workflow with phases.
     * All LLM calls within this trace are grouped together.
     * Sampling is per-trace (Section 11.1).
     *
     * Usage:
     *   await tracer.trace("user-request-123", async (t) => {
     *     await t.phase("planning", async () => {
     *       await openai.chat.completions.create(...);
     *     });
     *   });
     */
    trace(traceId: string, fn: (ctx: TraceContext) => Promise<unknown>): Promise<void>;
    /**
     * Flush remaining events and stop the auto-flush timer.
     */
    shutdown(): Promise<void>;
    private handleNonStreamingCall;
    private handleStreamingCall;
    private captureEvent;
    private sendBatch;
    private handleTransportError;
    private enforceQueueLimit;
    private handleInternalError;
    private log;
    private sleep;
}
export {};
//# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAIA,4DAA4D;AAC5D,MAAM,WAAW,eAAe;IAC9B,gDAAgD;IAChD,MAAM,EAAE,MAAM,CAAC;IACf,6BAA6B;IAC7B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,0DAA0D;IAC1D,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,sDAAsD;IACtD,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,iEAAiE;IACjE,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,wDAAwD;IACxD,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,8DAA8D;IAC9D,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,+DAA+D;IAC/D,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,6DAA6D;IAC7D,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,2CAA2C;IAC3C,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,8CAA8C;IAC9C,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,UAAU,KAAK,IAAI,CAAC;IACtC,iDAAiD;IACjD,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;CAClC;AAED,uCAAuC;AACvC,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,EAAE,KAAK,GAAG,IAAI,CAAC;CAC1B;AAED,sCAAsC;AACtC,MAAM,WAAW,iBAAiB;IAChC,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAC/B;AAuBD,gEAAgE;AAChE,UAAU,YAAY;IACpB,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,OAAO,EAAE,OAAO,CAAC;IACjB,KAAK,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,OAAO,CAAC,OAAO,CAAC,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC;CACvE;AAWD,qBAAa,SAAS;IACpB,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAS;IAChC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAS;IAClC,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAS;IACtC,OAAO,CAAC,QAAQ,CAAC,eAAe,CAAS;IACzC,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAS;IACtC,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAS;IACpC,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAS;IACrC,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAS;IACpC,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAU;IACxC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAU;IACpC,OAAO,CAAC,QAAQ,CAAC,eAAe,CAAuC;IACvE,OAAO,CAAC,QAAQ,CAAC,eAAe,CAAkC;IAGlE,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,YAAY,CAAa;IAGjC,OAAO,CAAC,UAAU,CAA+C;IAGjE,OAAO,CAAC,YAAY,CAA0B;IAC9C,OAAO,CAAC,mBAAmB,CAAa;IACxC,OAAO,CAAC,eAAe,CAAa;IACpC,OAAO,CAAC,gBAAgB,CAAc;IACtC,OAAO,CAAC,QAAQ,CAAC,uBAAuB,CAAK;IAC7C,OAAO,CAAC,QAAQ,CAAC,iBAAiB,CAAS;IAG3C,OAAO,CAAC,aAAa,CAAuB;IAC5C,OAAO,CAAC,WAAW,CAAuB;IAC1C,OAAO,CAAC,kBAAkB,CAAiB;IAG3C,OAAO,CAAC,QAAQ,CAAkB;gBAEtB,MAAM,EAAE,eAAe;IAsCnC;;;;;;OAMG;IACH,gBAAgB,CAAC,MAAM,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,iBAAiB,GAAG,IAAI;IAkDhE;;;;;;OAMG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IA8E5B;;;;;;;;;;;OAWG;IACG,KAAK,CACT,OAAO,EAAE,MAAM,EACf,EAAE,EAAE,CAAC,GAAG,EAAE,YAAY,KAAK,OAAO,CAAC,OAAO,CAAC,GAC1C,OAAO,CAAC,IAAI,CAAC;IAsChB;;OAEG;IACG,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;YAUjB,sBAAsB;YAkDtB,mBAAmB;IAkHjC,OAAO,CAAC,YAAY;YAyEN,SAAS;IAuGvB,OAAO,CAAC,oBAAoB;IAuB5B,OAAO,CAAC,iBAAiB;IAczB,OAAO,CAAC,mBAAmB;IAiB3B,OAAO,CAAC,GAAG;IAMX,OAAO,CAAC,KAAK;CAGd"}
package/dist/index.js
ADDED
@@ -0,0 +1,558 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.LLMTracer = void 0;
const crypto_1 = require("crypto");
// ─── SDK Version ───────────────────────────────────────────────────
const SDK_VERSION = "1.0.0";
const SDK_LANGUAGE = "typescript";
// ─── Main SDK Class ────────────────────────────────────────────────
class LLMTracer {
    constructor(config) {
        // Event queue
        this.queue = [];
        this.droppedCount = 0;
        // Flush timer
        this.flushTimer = null;
        // Circuit breaker (Section 6.3: 5 failures → open, 60s cooldown)
        this.circuitState = "closed";
        this.consecutiveFailures = 0;
        this.circuitOpenedAt = 0;
        this.lastErrorSummary = "";
        this.circuitFailureThreshold = 5;
        this.circuitCooldownMs = 60000;
        // Trace context for agentic workflows
        this.activeTraceId = null;
        this.activePhase = null;
        this.activeTraceSampled = true;
        // Flush lock to prevent concurrent flushes
        this.flushing = false;
        this.apiKey = config.apiKey;
        this.endpoint = config.endpoint || "https://v1events-agbjrxekeq-uc.a.run.app";
        this.maxBatchSize = config.maxBatchSize ?? 50;
        this.flushIntervalMs = config.flushIntervalMs ?? 10000;
        this.maxQueueSize = config.maxQueueSize ?? 10000;
        this.maxRetries = config.maxRetries ?? 3;
        this.retryBaseMs = config.retryBaseMs ?? 1000;
        this.sampleRate = config.sampleRate ?? 1.0;
        this.capturePrompt = config.capturePrompt ?? false;
        this.debugMode = config.debug ?? false;
        this.onFlushCallback = config.onFlush ?? null;
        this.onErrorCallback = config.onError ?? null;
        // Start auto-flush timer
        this.flushTimer = setInterval(() => {
            this.flush().catch(() => { });
        }, this.flushIntervalMs);
        // Unref so it doesn't keep the process alive
        if (this.flushTimer && typeof this.flushTimer.unref === "function") {
            this.flushTimer.unref();
        }
        // Best-effort process exit hooks (Section 6.1)
        try {
            process.on("beforeExit", () => {
                this.flush().catch(() => { });
            });
        }
        catch {
            // Not in Node.js environment — skip
        }
        this.log("LLMTracer initialized");
    }
    // ─── Public API: instrumentOpenAI (Section 3.1) ─────────────────
    /**
     * Wraps an OpenAI client to automatically capture usage events.
     * Supports both non-streaming and streaming calls.
     *
     * Usage:
     *   tracer.instrumentOpenAI(openai, { tags: { service: "api" } });
     */
    instrumentOpenAI(openai, options) {
        try {
            const defaultTags = options?.tags || {};
            const self = this;
            // Store original create method
            const originalCreate = openai.chat?.completions?.create?.bind(openai.chat.completions);
            if (!originalCreate) {
                this.log("Warning: Could not find openai.chat.completions.create");
                return;
            }
            // Replace with instrumented version
            openai.chat.completions.create = async function (params, requestOptions) {
                const startTime = Date.now();
                const isStreaming = params?.stream === true;
                if (isStreaming) {
                    return self.handleStreamingCall(originalCreate, params, requestOptions, startTime, defaultTags);
                }
                else {
                    return self.handleNonStreamingCall(originalCreate, params, requestOptions, startTime, defaultTags);
                }
            };
            this.log("OpenAI client instrumented");
        }
        catch (err) {
            // P0: Never break customer app
            this.handleInternalError("instrumentOpenAI", err);
        }
    }
    // ─── Public API: flush (Section 3.2) ─────────────────────────────
    /**
     * Flush all buffered events to the backend.
     * CRITICAL: Call this before returning in serverless environments.
     *
     * Usage:
     *   await tracer.flush();
     */
    async flush() {
        if (this.flushing || this.queue.length === 0) {
            return;
        }
        this.flushing = true;
        try {
            // Check circuit breaker
            if (this.circuitState === "open") {
                const elapsed = Date.now() - this.circuitOpenedAt;
                if (elapsed < this.circuitCooldownMs) {
                    this.log(`Circuit open, ${Math.round((this.circuitCooldownMs - elapsed) / 1000)}s until retry`);
                    return;
                }
                // Try half-open
                this.circuitState = "half_open";
                this.log("Circuit half-open, attempting flush");
            }
            // Take batch from queue (up to maxBatchSize)
            const batch = this.queue.splice(0, this.maxBatchSize);
            const droppedThisFlush = this.droppedCount;
            this.droppedCount = 0;
            let lastError = null;
            let accepted = 0;
            let rejected = 0;
            try {
                const result = await this.sendBatch(batch);
                accepted = result.accepted;
                rejected = result.rejected;
                // Success — reset circuit breaker
                this.consecutiveFailures = 0;
                if (this.circuitState === "half_open") {
                    this.circuitState = "closed";
                    this.log("Circuit closed");
                }
            }
            catch (err) {
                lastError = err instanceof Error ? err : new Error(String(err));
                this.handleTransportError(lastError);
                // Put events back in queue for retry
                this.queue.unshift(...batch);
                this.enforceQueueLimit();
            }
            // Invoke onFlush callback (Section 9.2)
            if (this.onFlushCallback) {
                try {
                    this.onFlushCallback({
                        sent: batch.length,
                        accepted,
                        rejected,
                        dropped: droppedThisFlush,
                        last_error: lastError,
                    });
                }
                catch {
                    // Never break on callback errors
                }
            }
        }
        finally {
            this.flushing = false;
        }
        // If there are more events queued, schedule another flush
        if (this.queue.length >= this.maxBatchSize) {
            // Use setImmediate/setTimeout to avoid stack overflow
            setTimeout(() => this.flush().catch(() => { }), 0);
        }
    }
    // ─── Public API: trace (Section 3.3 — Agentic Workflows) ────────
    /**
     * Track an agentic workflow with phases.
     * All LLM calls within this trace are grouped together.
     * Sampling is per-trace (Section 11.1).
     *
     * Usage:
     *   await tracer.trace("user-request-123", async (t) => {
     *     await t.phase("planning", async () => {
     *       await openai.chat.completions.create(...);
     *     });
     *   });
     */
    async trace(traceId, fn) {
        try {
            // Per-trace sampling (Section 11.1)
            const sampled = Math.random() < this.sampleRate;
            this.activeTraceId = traceId;
            this.activeTraceSampled = sampled;
            this.activePhase = null;
            const ctx = {
                traceId,
                currentPhase: null,
                sampled,
                phase: async (name, phaseFn) => {
                    this.activePhase = name;
                    ctx.currentPhase = name;
                    try {
                        await phaseFn();
                    }
                    finally {
                        this.activePhase = null;
                        ctx.currentPhase = null;
                    }
                },
            };
            await fn(ctx);
        }
        catch (err) {
            // Re-throw user errors — we only swallow our own errors
            throw err;
        }
        finally {
            this.activeTraceId = null;
            this.activePhase = null;
            this.activeTraceSampled = true;
        }
    }
    // ─── Public API: shutdown ────────────────────────────────────────
    /**
     * Flush remaining events and stop the auto-flush timer.
     */
    async shutdown() {
        if (this.flushTimer) {
            clearInterval(this.flushTimer);
            this.flushTimer = null;
        }
        await this.flush();
    }
    // ─── Non-Streaming Handler ───────────────────────────────────────
    async handleNonStreamingCall(originalCreate, params, requestOptions, startTime, defaultTags) {
        let response;
        let success = true;
        let error = null;
        try {
            response = await originalCreate(params, requestOptions);
        }
        catch (err) {
            success = false;
            error = err;
        }
        // Capture event asynchronously (P0: no awaits on hot path)
        try {
            const latencyMs = Date.now() - startTime;
            const model = response?.model || params?.model || "unknown";
            const tokensIn = response?.usage?.prompt_tokens ?? 0;
            const tokensOut = response?.usage?.completion_tokens ?? 0;
            this.captureEvent({
                provider: "openai",
                model,
                tokens_in: tokensIn,
                tokens_out: tokensOut,
                latency_ms: latencyMs,
                success,
                messages: params?.messages,
                tags: defaultTags,
            });
        }
        catch (err) {
            // P0: Never break customer app
            this.handleInternalError("handleNonStreamingCall", err);
        }
        // Re-throw the original error if the API call failed
        if (!success) {
            throw error;
        }
        return response;
    }
    // ─── Streaming Handler (Section 7) ───────────────────────────────
    async handleStreamingCall(originalCreate, params, requestOptions, startTime, defaultTags) {
        try {
            // Shallow-clone to avoid mutating user's params (Section 7.3)
            const clonedParams = { ...params };
            // Inject stream_options for accurate token counts (Section 7.3)
            if (!clonedParams.stream_options) {
                clonedParams.stream_options = { include_usage: true };
            }
            else if (clonedParams.stream_options.include_usage === false) {
                // Debug warning when user explicitly disables usage
                this.log("Warning: stream_options.include_usage is false — token counts will be 0");
            }
            const stream = await originalCreate(clonedParams, requestOptions);
            // Wrap the async iterator to capture the final usage event
            const self = this;
            const model = params?.model || "unknown";
            const messages = params?.messages;
            let tokensIn = 0;
            let tokensOut = 0;
            let streamSuccess = true;
            let streamModel = model;
            const originalIterator = stream[Symbol.asyncIterator].bind(stream);
            stream[Symbol.asyncIterator] = function () {
                const iterator = originalIterator();
                return {
                    async next() {
                        try {
                            const result = await iterator.next();
                            if (!result.done) {
                                const chunk = result.value;
                                // Capture usage from the final chunk (Section 7.1)
                                if (chunk.usage) {
                                    tokensIn = chunk.usage.prompt_tokens ?? tokensIn;
                                    tokensOut = chunk.usage.completion_tokens ?? tokensOut;
                                }
                                // Capture model from chunk if available
                                if (chunk.model) {
                                    streamModel = chunk.model;
                                }
                            }
                            if (result.done) {
                                // Stream complete — capture one event (Section 7.1)
                                const latencyMs = Date.now() - startTime;
                                self.captureEvent({
                                    provider: "openai",
                                    model: streamModel,
                                    tokens_in: tokensIn,
                                    tokens_out: tokensOut,
                                    latency_ms: latencyMs,
                                    success: streamSuccess,
                                    messages,
                                    tags: defaultTags,
                                });
                            }
                            return result;
                        }
                        catch (err) {
                            // Stream error (Section 7.4)
                            streamSuccess = false;
                            const latencyMs = Date.now() - startTime;
                            self.captureEvent({
                                provider: "openai",
                                model: streamModel,
                                tokens_in: tokensIn,
                                tokens_out: tokensOut,
                                latency_ms: latencyMs,
                                success: false,
                                messages,
                                tags: defaultTags,
                            });
                            throw err; // Re-throw to customer code
                        }
                    },
                    return: iterator.return?.bind(iterator),
                    throw: iterator.throw?.bind(iterator),
                };
            };
            return stream;
        }
        catch (err) {
            // Stream setup failed
            const latencyMs = Date.now() - startTime;
            this.captureEvent({
                provider: "openai",
                model: params?.model || "unknown",
                tokens_in: 0,
                tokens_out: 0,
                latency_ms: latencyMs,
                success: false,
                messages: params?.messages,
                tags: defaultTags,
            });
            throw err; // Re-throw to customer code
        }
    }
    // ─── Event Capture ───────────────────────────────────────────────
    captureEvent(data) {
        try {
            // Per-trace sampling (Section 11.1)
            if (this.activeTraceId && !this.activeTraceSampled) {
                return;
            }
            // Per-event sampling (Section 11.2)
            if (!this.activeTraceId && Math.random() >= this.sampleRate) {
                return;
            }
            // Compute prompt_hash (Section 10.1)
            let promptHash = null;
            let promptContent = null;
            if (data.messages) {
                const messagesJson = JSON.stringify(data.messages);
                promptHash = (0, crypto_1.createHash)("sha256").update(messagesJson).digest("hex");
                if (this.capturePrompt) {
                    promptContent = messagesJson;
                }
            }
            const event = {
                event_id: `evt_${(0, crypto_1.randomUUID)()}`,
                timestamp: new Date().toISOString(),
                provider: data.provider,
                model: data.model,
                tokens_in: data.tokens_in,
                tokens_out: data.tokens_out,
                latency_ms: data.latency_ms,
                success: data.success,
                trace_id: this.activeTraceId,
                span_id: null,
                parent_span_id: null,
                phase: this.activePhase,
                prompt_hash: promptHash,
                prompt_content: promptContent,
                tags: { ...data.tags },
                sdk_version: SDK_VERSION,
                sdk_language: SDK_LANGUAGE,
            };
            this.queue.push(event);
            this.enforceQueueLimit();
            this.log(`Event captured: ${event.model} (${event.tokens_in}in/${event.tokens_out}out)`);
            // Auto-flush if batch is full
            if (this.queue.length >= this.maxBatchSize) {
                this.flush().catch(() => { });
            }
        }
        catch (err) {
            // P0: Never break customer app
            this.handleInternalError("captureEvent", err);
        }
    }
    // ─── Transport (Section 5 & 6) ──────────────────────────────────
    async sendBatch(events) {
        const idempotencyKey = `idem_${(0, crypto_1.randomUUID)()}`;
        const body = JSON.stringify({
            schema_version: "1.0",
            events,
        });
        let lastError = null;
        for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
            try {
                if (attempt > 0) {
                    // Exponential backoff with jitter (Section 6.2)
                    const delay = this.retryBaseMs * Math.pow(2, attempt - 1) * (0.5 + Math.random() * 0.5);
                    await this.sleep(delay);
                }
                const response = await fetch(this.endpoint, {
                    method: "POST",
                    headers: {
                        "Content-Type": "application/json",
                        Authorization: `Bearer ${this.apiKey}`,
                        "X-Idempotency-Key": idempotencyKey,
                    },
                    body,
                });
                // Success
                if (response.status === 202) {
                    const data = (await response.json());
                    this.log(`Flush success: ${data.accepted} accepted, ${data.rejected} rejected`);
                    return { accepted: data.accepted ?? 0, rejected: data.rejected ?? 0 };
                }
                // Read response body for error details
                let responseBody = "";
                try {
                    responseBody = await response.text();
                }
                catch {
                    // Ignore body read failures
                }
                // Rate limited or tier exceeded (Section 5.5, 5.6)
                if (response.status === 429) {
                    const retryAfter = response.headers.get("Retry-After");
                    this.log(`Flush failed: 429 Rate Limited - retry after ${retryAfter}s`);
                    lastError = new Error(`Rate limited (429)`);
                    this.lastErrorSummary = "429 Rate Limited";
                    continue;
                }
                // Auth error — don't retry
                if (response.status === 401) {
                    this.log(`Flush failed: 401 Unauthorized - ${responseBody}`);
                    this.log("Hint: API key may be invalid. Check your apiKey value.");
                    lastError = new Error("Unauthorized — check your API key");
                    this.lastErrorSummary = "401 Unauthorized";
                    break;
                }
                // Bad request — don't retry
                if (response.status === 400) {
                    this.log(`Flush failed: 400 Bad Request - ${responseBody}`);
                    this.log("Hint: Request rejected. SDK version may be outdated.");
                    lastError = new Error(`Bad request (400): ${responseBody}`);
                    this.lastErrorSummary = "400 Bad Request";
                    break;
                }
                // Server error — retry
                if (response.status >= 500) {
                    this.log(`Flush failed: ${response.status} Server Error - ${responseBody}`);
                    lastError = new Error(`Server error (${response.status})`);
                    this.lastErrorSummary = `${response.status} Server Error`;
                    continue;
                }
                // Other client error — don't retry
                this.log(`Flush failed: ${response.status} - ${responseBody}`);
                lastError = new Error(`HTTP ${response.status}: ${responseBody}`);
                this.lastErrorSummary = `${response.status}`;
                break;
            }
            catch (err) {
                // Network error — retry
                lastError = err instanceof Error ? err : new Error(String(err));
                this.log(`Flush failed: Network error - ${lastError.message}`);
                this.log("Hint: Check your internet connection.");
                this.lastErrorSummary = "Network error";
                continue;
            }
        }
        throw lastError || new Error("Unknown transport error");
    }
    // ─── Circuit Breaker (Section 6.3) ──────────────────────────────
    handleTransportError(error) {
        this.consecutiveFailures++;
        if (this.consecutiveFailures >= this.circuitFailureThreshold) {
            this.circuitState = "open";
            this.circuitOpenedAt = Date.now();
            this.log(`Circuit opened after ${this.consecutiveFailures} failures (last error: ${this.lastErrorSummary}). Will retry in ${this.circuitCooldownMs / 1000}s.`);
        }
        // Invoke onError callback (Section 9.2)
        if (this.onErrorCallback) {
            try {
                this.onErrorCallback(error);
            }
            catch {
                // Never break on callback errors
            }
        }
    }
    // ─── Queue Management (Section 6.4) ─────────────────────────────
    enforceQueueLimit() {
        if (this.queue.length > this.maxQueueSize) {
            const excess = this.queue.length - this.maxQueueSize;
            this.queue.splice(0, excess); // Drop oldest (FIFO)
            this.droppedCount += excess;
            if (this.debugMode) {
                console.warn(`[LLMTracer] Dropped ${excess} events (queue full)`);
            }
        }
    }
    // ─── Helpers ─────────────────────────────────────────────────────
    handleInternalError(context, err) {
        // P0: Never break customer app — swallow and log
        if (this.debugMode) {
            console.warn(`[LLMTracer] Internal error in ${context}:`, err);
        }
        if (this.onErrorCallback) {
            try {
                this.onErrorCallback(err instanceof Error ? err : new Error(String(err)));
            }
            catch {
                // Even callback errors are swallowed
            }
        }
    }
    log(message) {
        if (this.debugMode) {
            console.log(`[LLMTracer] ${message}`);
        }
    }
    sleep(ms) {
        return new Promise((resolve) => setTimeout(resolve, ms));
    }
}
exports.LLMTracer = LLMTracer;
//# sourceMappingURL=index.js.map
package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,mCAAgD;AA2EhD,sEAAsE;AACtE,MAAM,WAAW,GAAG,OAAO,CAAC;AAC5B,MAAM,YAAY,GAAG,YAAY,CAAC;AAKlC,sEAAsE;AAEtE,MAAa,SAAS;IAqCpB,YAAY,MAAuB;QAvBnC,cAAc;QACN,UAAK,GAAkB,EAAE,CAAC;QAC1B,iBAAY,GAAW,CAAC,CAAC;QAEjC,cAAc;QACN,eAAU,GAA0C,IAAI,CAAC;QAEjE,iEAAiE;QACzD,iBAAY,GAAiB,QAAQ,CAAC;QACtC,wBAAmB,GAAW,CAAC,CAAC;QAChC,oBAAe,GAAW,CAAC,CAAC;QAC5B,qBAAgB,GAAW,EAAE,CAAC;QACrB,4BAAuB,GAAG,CAAC,CAAC;QAC5B,sBAAiB,GAAG,KAAK,CAAC;QAE3C,sCAAsC;QAC9B,kBAAa,GAAkB,IAAI,CAAC;QACpC,gBAAW,GAAkB,IAAI,CAAC;QAClC,uBAAkB,GAAY,IAAI,CAAC;QAE3C,2CAA2C;QACnC,aAAQ,GAAY,KAAK,CAAC;QAGhC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;QAC5B,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,IAAI,0CAA0C,CAAC;QAC9E,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,YAAY,IAAI,EAAE,CAAC;QAC9C,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,eAAe,IAAI,KAAK,CAAC;QACvD,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,YAAY,IAAI,KAAK,CAAC;QACjD,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,IAAI,CAAC,CAAC;QACzC,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC;QAC9C,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,IAAI,GAAG,CAAC;QAC3C,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,KAAK,CAAC;QACnD,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,IAAI,KAAK,CAAC;QACvC,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,OAAO,IAAI,IAAI,CAAC;QAC9C,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,OAAO,IAAI,IAAI,CAAC;QAE9C,yBAAyB;QACzB,IAAI,CAAC,UAAU,GAAG,WAAW,CAAC,GAAG,EAAE;YACjC,IAAI,CAAC,KAAK,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;QAC/B,CAAC,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;QAEzB,6CAA6C;QAC7C,IAAI,IAAI,CAAC,UAAU,IAAI,OAAO,IAAI,CAAC,UAAU,CAAC,KAAK,KAAK,UAAU,EAAE,CAAC;YACnE,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;QAC1B,CAAC;QAED,+CAA+C;QAC/C,IAAI,CAAC;YACH,OAAO,CAAC,EAAE,CAAC,YAAY,EAAE,GAAG,EAAE;gBAC5B,IAAI,CAAC,KAAK,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;YAC/B,CAAC,CAAC,CAAC;QACL,CAAC;QAAC,MAAM,CAAC;YACP,oCAAoC;QACtC,CAAC;QAED,IAAI,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAC;IACpC,CAAC;IAED,mEAAmE;IAEnE;;;;;;OAMG;IACH,gBAAgB,CAAC,MAAW,EAAE,OAA2B;QACvD,IAAI,CAAC;YACH,MAAM,WAAW,GAAG,OAAO,EAAE,IAAI,IAAI,EAAE,CAAC;YACxC,MAAM,IAAI,GAAG,IAAI,CAAC;YAElB,+BAA+B;YAC/B,MAAM,cAAc,GAClB,MAAM,CAAC,IAAI,EAAE,WAAW,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;YAElE,IAAI,CAAC,cAAc,EAAE,CAAC;gBACpB,IAAI,CAAC,GAAG,CAAC,wDAAwD,CAAC,CAAC;gBACnE,OAAO;YACT,CAAC;YAED,oCAAoC;YACpC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,GAAG,KAAK,WACpC,MAAW,EACX,cAAoB;gBAEpB,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;gBAC7B,MAAM,WAAW,GAAG,MAAM,EAAE,MAAM,KAAK,IAAI,CAAC;gBAE5C,IAAI,WAAW,EAAE,CAAC;oBAChB,OAAO,IAAI,CAAC,mBAAmB,CAC7B,cAAc,EACd,MAAM,EACN,cAAc,EACd,SAAS,EACT,WAAW,CACZ,CAAC;gBACJ,CAAC;qBAAM,CAAC;oBACN,OAAO,IAAI,CAAC,sBAAsB,CAChC,cAAc,EACd,MAAM,EACN,cAAc,EACd,SAAS,EACT,WAAW,CACZ,CAAC;gBACJ,CAAC;YACH,CAAC,CAAC;YAEF,IAAI,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAC;QACzC,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,+BAA+B;YAC/B,IAAI,CAAC,mBAAmB,CAAC,kBAAkB,EAAE,GAAG,CAAC,CAAC;QACpD,CAAC;IACH,CAAC;IAED,oEAAoE;IAEpE;;;;;;OAMG;IACH,KAAK,CAAC,KAAK;QACT,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC7C,OAAO;QACT,CAAC;QAED,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;QAErB,IAAI,CAAC;YACH,wBAAwB;YACxB,IAAI,IAAI,CAAC,YAAY,KAAK,MAAM,EAAE,CAAC;gBACjC,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,eAAe,CAAC;gBAClD,IAAI,OAAO,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAC;oBACrC,IAAI,CAAC,GAAG,CACN,iBAAiB,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,iBAAiB,GAAG,OAAO,CAAC,GAAG,IAAI,CAAC,eAAe,CACtF,CAAC;oBACF,OAAO;gBACT,CAAC;gBACD,gBAAgB;gBAChB,IAAI,CAAC,YAAY,GAAG,WAAW,CAAC;gBAChC,IAAI,CAAC,GAAG,CAAC,qCAAqC,CAAC,CAAC;YAClD,CAAC;YAED,6CAA6C;YAC7C,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,M
AAM,CAAC,CAAC,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,YAAY,CAAC;YAC3C,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;YAEtB,IAAI,SAAS,GAAiB,IAAI,CAAC;YACnC,IAAI,QAAQ,GAAG,CAAC,CAAC;YACjB,IAAI,QAAQ,GAAG,CAAC,CAAC;YAEjB,IAAI,CAAC;gBACH,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;gBAC3C,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;gBAC3B,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;gBAE3B,kCAAkC;gBAClC,IAAI,CAAC,mBAAmB,GAAG,CAAC,CAAC;gBAC7B,IAAI,IAAI,CAAC,YAAY,KAAK,WAAW,EAAE,CAAC;oBACtC,IAAI,CAAC,YAAY,GAAG,QAAQ,CAAC;oBAC7B,IAAI,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC;gBAC7B,CAAC;YACH,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,SAAS,GAAG,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;gBAChE,IAAI,CAAC,oBAAoB,CAAC,SAAS,CAAC,CAAC;gBAErC,qCAAqC;gBACrC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,KAAK,CAAC,CAAC;gBAC7B,IAAI,CAAC,iBAAiB,EAAE,CAAC;YAC3B,CAAC;YAED,wCAAwC;YACxC,IAAI,IAAI,CAAC,eAAe,EAAE,CAAC;gBACzB,IAAI,CAAC;oBACH,IAAI,CAAC,eAAe,CAAC;wBACnB,IAAI,EAAE,KAAK,CAAC,MAAM;wBAClB,QAAQ;wBACR,QAAQ;wBACR,OAAO,EAAE,gBAAgB;wBACzB,UAAU,EAAE,SAAS;qBACtB,CAAC,CAAC;gBACL,CAAC;gBAAC,MAAM,CAAC;oBACP,iCAAiC;gBACnC,CAAC;YACH,CAAC;QACH,CAAC;gBAAS,CAAC;YACT,IAAI,CAAC,QAAQ,GAAG,KAAK,CAAC;QACxB,CAAC;QAED,0DAA0D;QAC1D,IAAI,IAAI,CAAC,KAAK,CAAC,MAAM,IAAI,IAAI,CAAC,YAAY,EAAE,CAAC;YAC3C,sDAAsD;YACtD,UAAU,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QACpD,CAAC;IACH,CAAC;IAED,mEAAmE;IAEnE;;;;;;;;;;;OAWG;IACH,KAAK,CAAC,KAAK,CACT,OAAe,EACf,EAA2C;QAE3C,IAAI,CAAC;YACH,oCAAoC;YACpC,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC,UAAU,CAAC;YAEhD,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC;YAC7B,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC;YAClC,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;YAExB,MAAM,GAAG,GAAiB;gBACxB,OAAO;gBACP,YAAY,EAAE,IAAI;gBAClB,OAAO;gBACP,KAAK,EAAE,KAAK,EAAE,IAAY,EAAE,OAA+B,EAAE,EAAE;oBAC7D,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;oBACxB,GAAG,CAAC,YAAY,GAAG,IAAI,CAAC;oBACxB,IAAI,CAAC;wBACH,MAAM,OAAO,EAAE,CAAC;oBAClB,CAAC;4BAAS,CAAC;wBACT,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;wBACxB,GAAG,CAAC,YAAY,GAAG,IAAI,CAAC;oBAC1B,CAAC;gBACH,CAAC;aACF,CAAC;YAEF,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC;QAChB,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,wDAAwD;YACxD,MAAM,GAAG,CAAC;QACZ,CAAC;gBAAS,CAAC;YACT,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC;YAC1B,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;YACxB,IAAI,CAAC,kBAAkB,GAAG,IAAI,CAAC;QACjC,CAAC;IACH,CAAC;IAED,oEAAoE;IAEpE;;OAEG;IACH,KAAK,CAAC,QAAQ;QACZ,IAAI,IAAI,CAAC,UAAU,EAAE,CAAC;YACpB,aAAa,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC/B,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;QACzB,CAAC;QACD,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;IACrB,CAAC;IAED,oEAAoE;IAE5D,KAAK,CAAC,sBAAsB,CAClC,cAAwB,EACxB,MAAW,EACX,cAAmB,EACnB,SAAiB,EACjB,WAAmC;QAEnC,IAAI,QAAa,CAAC;QAClB,IAAI,OAAO,GAAG,IAAI,CAAC;QACnB,IAAI,KAAK,GAAQ,IAAI,CAAC;QAEtB,IAAI,CAAC;YACH,QAAQ,GAAG,MAAM,cAAc,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC;QAC1D,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,OAAO,GAAG,KAAK,CAAC;YAChB,KAAK,GAAG,GAAG,CAAC;QACd,CAAC;QAED,2DAA2D;QAC3D,IAAI,CAAC;YACH,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;YACzC,MAAM,KAAK,GAAG,QAAQ,EAAE,KAAK,IAAI,MAAM,EAAE,KAAK,IAAI,SAAS,CAAC;YAC5D,MAAM,QAAQ,GAAG,QAAQ,EAAE,KAAK,EAAE,aAAa,IAAI,CAAC,CAAC;YACrD,MAAM,SAAS,GAAG,QAAQ,EAAE,KAAK,EAAE,iBAAiB,IAAI,CAAC,CAAC;YAE1D,IAAI,CAAC,YAAY,CAAC;gBAChB,QAAQ,EAAE,QAAQ;gBAClB,KAAK;gBACL,SAAS,EAAE,QAAQ;gBACnB,UAAU,EAAE,SAAS;gBACrB,UAAU,EAAE,SAAS;gBACrB,OAAO;gBACP,QAAQ,EAAE,MAAM,EAAE,QAAQ;gBAC1B,IAAI,EAAE,WAAW;aAClB,CAAC,CAAC;QACL,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,+BAA+B;YAC/B,IAAI,CAAC,mBAAmB,CAAC,wBAAwB,EAAE,GAAG,CAAC,CAAC;QAC1D,CAAC;QAED,qDAAqD;QACrD,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,MAAM,KAAK,CAAC;QACd,CAAC;QAE
D,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,oEAAoE;IAE5D,KAAK,CAAC,mBAAmB,CAC/B,cAAwB,EACxB,MAAW,EACX,cAAmB,EACnB,SAAiB,EACjB,WAAmC;QAEnC,IAAI,CAAC;YACH,8DAA8D;YAC9D,MAAM,YAAY,GAAG,EAAE,GAAG,MAAM,EAAE,CAAC;YAEnC,gEAAgE;YAChE,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,CAAC;gBACjC,YAAY,CAAC,cAAc,GAAG,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC;YACxD,CAAC;iBAAM,IAAI,YAAY,CAAC,cAAc,CAAC,aAAa,KAAK,KAAK,EAAE,CAAC;gBAC/D,oDAAoD;gBACpD,IAAI,CAAC,GAAG,CACN,yEAAyE,CAC1E,CAAC;YACJ,CAAC;YAED,MAAM,MAAM,GAAG,MAAM,cAAc,CAAC,YAAY,EAAE,cAAc,CAAC,CAAC;YAElE,2DAA2D;YAC3D,MAAM,IAAI,GAAG,IAAI,CAAC;YAClB,MAAM,KAAK,GAAG,MAAM,EAAE,KAAK,IAAI,SAAS,CAAC;YACzC,MAAM,QAAQ,GAAG,MAAM,EAAE,QAAQ,CAAC;YAClC,IAAI,QAAQ,GAAG,CAAC,CAAC;YACjB,IAAI,SAAS,GAAG,CAAC,CAAC;YAClB,IAAI,aAAa,GAAG,IAAI,CAAC;YACzB,IAAI,WAAW,GAAG,KAAK,CAAC;YAExB,MAAM,gBAAgB,GAAG,MAAM,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAEnE,MAAM,CAAC,MAAM,CAAC,aAAa,CAAC,GAAG;gBAC7B,MAAM,QAAQ,GAAG,gBAAgB,EAAE,CAAC;gBACpC,OAAO;oBACL,KAAK,CAAC,IAAI;wBACR,IAAI,CAAC;4BACH,MAAM,MAAM,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;4BAErC,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC;gCACjB,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;gCAE3B,mDAAmD;gCACnD,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;oCAChB,QAAQ,GAAG,KAAK,CAAC,KAAK,CAAC,aAAa,IAAI,QAAQ,CAAC;oCACjD,SAAS,GAAG,KAAK,CAAC,KAAK,CAAC,iBAAiB,IAAI,SAAS,CAAC;gCACzD,CAAC;gCAED,wCAAwC;gCACxC,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;oCAChB,WAAW,GAAG,KAAK,CAAC,KAAK,CAAC;gCAC5B,CAAC;4BACH,CAAC;4BAED,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC;gCAChB,oDAAoD;gCACpD,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;gCACzC,IAAI,CAAC,YAAY,CAAC;oCAChB,QAAQ,EAAE,QAAQ;oCAClB,KAAK,EAAE,WAAW;oCAClB,SAAS,EAAE,QAAQ;oCACnB,UAAU,EAAE,SAAS;oCACrB,UAAU,EAAE,SAAS;oCACrB,OAAO,EAAE,aAAa;oCACtB,QAAQ;oCACR,IAAI,EAAE,WAAW;iCAClB,CAAC,CAAC;4BACL,CAAC;4BAED,OAAO,MAAM,CAAC;wBAChB,CAAC;wBAAC,OAAO,GAAG,EAAE,CAAC;4BACb,6BAA6B;4BAC7B,aAAa,GAAG,KAAK,CAAC;4BACtB,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;4BACzC,IAAI,CAAC,YAAY,CAAC;gCAChB,QAAQ,EAAE,QAAQ;gCAClB,KAAK,EAAE,WAAW;gCAClB,SAAS,EAAE,QAAQ;gCACnB,UAAU,EAAE,SAAS;gCACrB,UAAU,EAAE,SAAS;gCACrB,OAAO,EAAE,KAAK;gCACd,QAAQ;gCACR,IAAI,EAAE,WAAW;6BAClB,CAAC,CAAC;4BACH,MAAM,GAAG,CAAC,CAAC,4BAA4B;wBACzC,CAAC;oBACH,CAAC;oBACD,MAAM,EAAE,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,CAAC;oBACvC,KAAK,EAAE,QAAQ,CAAC,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC;iBACtC,CAAC;YACJ,CAAC,CAAC;YAEF,OAAO,MAAM,CAAC;QAChB,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,sBAAsB;YACtB,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;YACzC,IAAI,CAAC,YAAY,CAAC;gBAChB,QAAQ,EAAE,QAAQ;gBAClB,KAAK,EAAE,MAAM,EAAE,KAAK,IAAI,SAAS;gBACjC,SAAS,EAAE,CAAC;gBACZ,UAAU,EAAE,CAAC;gBACb,UAAU,EAAE,SAAS;gBACrB,OAAO,EAAE,KAAK;gBACd,QAAQ,EAAE,MAAM,EAAE,QAAQ;gBAC1B,IAAI,EAAE,WAAW;aAClB,CAAC,CAAC;YACH,MAAM,GAAG,CAAC,CAAC,4BAA4B;QACzC,CAAC;IACH,CAAC;IAED,oEAAoE;IAE5D,YAAY,CAAC,IASpB;QACC,IAAI,CAAC;YACH,oCAAoC;YACpC,IAAI,IAAI,CAAC,aAAa,IAAI,CAAC,IAAI,CAAC,kBAAkB,EAAE,CAAC;gBACnD,OAAO;YACT,CAAC;YAED,oCAAoC;YACpC,IAAI,CAAC,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,MAAM,EAAE,IAAI,IAAI,CAAC,UAAU,EAAE,CAAC;gBAC5D,OAAO;YACT,CAAC;YAED,qCAAqC;YACrC,IAAI,UAAU,GAAkB,IAAI,CAAC;YACrC,IAAI,aAAa,GAAkB,IAAI,CAAC;YAExC,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;gBAClB,MAAM,YAAY,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;gBACnD,UAAU,GAAG,IAAA,mBAAU,EAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;gBAErE,IAAI,IAAI,CAAC,aAAa,EAAE,CAAC;oBACvB,aAAa,GAAG,YAAY,CAAC;gBAC/B,CAAC;YACH,CAAC;YAED,MAAM,KAAK,GAAgB;gBACzB,QAAQ,EAAE,OAAO,IAAA,mBAAU,GAAE,EAAE;gBAC/B,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;gBACnC,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,SAAS,EAAE,IAAI,CAAC,SAAS;gBACzB,UAAU,
EAAE,IAAI,CAAC,UAAU;gBAC3B,UAAU,EAAE,IAAI,CAAC,UAAU;gBAC3B,OAAO,EAAE,IAAI,CAAC,OAAO;gBACrB,QAAQ,EAAE,IAAI,CAAC,aAAa;gBAC5B,OAAO,EAAE,IAAI;gBACb,cAAc,EAAE,IAAI;gBACpB,KAAK,EAAE,IAAI,CAAC,WAAW;gBACvB,WAAW,EAAE,UAAU;gBACvB,cAAc,EAAE,aAAa;gBAC7B,IAAI,EAAE,EAAE,GAAG,IAAI,CAAC,IAAI,EAAE;gBACtB,WAAW,EAAE,WAAW;gBACxB,YAAY,EAAE,YAAY;aAC3B,CAAC;YAEF,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;YACvB,IAAI,CAAC,iBAAiB,EAAE,CAAC;YAEzB,IAAI,CAAC,GAAG,CACN,mBAAmB,KAAK,CAAC,KAAK,KAAK,KAAK,CAAC,SAAS,MAAM,KAAK,CAAC,UAAU,MAAM,CAC/E,CAAC;YAEF,8BAA8B;YAC9B,IAAI,IAAI,CAAC,KAAK,CAAC,MAAM,IAAI,IAAI,CAAC,YAAY,EAAE,CAAC;gBAC3C,IAAI,CAAC,KAAK,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;YAC/B,CAAC;QACH,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,+BAA+B;YAC/B,IAAI,CAAC,mBAAmB,CAAC,cAAc,EAAE,GAAG,CAAC,CAAC;QAChD,CAAC;IACH,CAAC;IAED,mEAAmE;IAE3D,KAAK,CAAC,SAAS,CACrB,MAAqB;QAErB,MAAM,cAAc,GAAG,QAAQ,IAAA,mBAAU,GAAE,EAAE,CAAC;QAE9C,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;YAC1B,cAAc,EAAE,KAAK;YACrB,MAAM;SACP,CAAC,CAAC;QAEH,IAAI,SAAS,GAAiB,IAAI,CAAC;QAEnC,KAAK,IAAI,OAAO,GAAG,CAAC,EAAE,OAAO,IAAI,IAAI,CAAC,UAAU,EAAE,OAAO,EAAE,EAAE,CAAC;YAC5D,IAAI,CAAC;gBACH,IAAI,OAAO,GAAG,CAAC,EAAE,CAAC;oBAChB,gDAAgD;oBAChD,MAAM,KAAK,GACT,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,GAAG,IAAI,CAAC,MAAM,EAAE,GAAG,GAAG,CAAC,CAAC;oBAC5E,MAAM,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;gBAC1B,CAAC;gBAED,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE;oBAC1C,MAAM,EAAE,MAAM;oBACd,OAAO,EAAE;wBACP,cAAc,EAAE,kBAAkB;wBAClC,aAAa,EAAE,UAAU,IAAI,CAAC,MAAM,EAAE;wBACtC,mBAAmB,EAAE,cAAc;qBACpC;oBACD,IAAI;iBACL,CAAC,CAAC;gBAEH,UAAU;gBACV,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE,CAAC;oBAC5B,MAAM,IAAI,GAAG,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAQ,CAAC;oBAC5C,IAAI,CAAC,GAAG,CACN,kBAAkB,IAAI,CAAC,QAAQ,cAAc,IAAI,CAAC,QAAQ,WAAW,CACtE,CAAC;oBACF,OAAO,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,IAAI,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,IAAI,CAAC,EAAE,CAAC;gBACxE,CAAC;gBAED,uCAAuC;gBACvC,IAAI,YAAY,GAAG,EAAE,CAAC;gBACtB,IAAI,CAAC;oBACH,YAAY,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;gBACvC,CAAC;gBAAC,MAAM,CAAC;oBACP,4BAA4B;gBAC9B,CAAC;gBAED,mDAAmD;gBACnD,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE,CAAC;oBAC5B,MAAM,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;oBACvD,IAAI,CAAC,GAAG,CAAC,gDAAgD,UAAU,GAAG,CAAC,CAAC;oBACxE,SAAS,GAAG,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAC;oBAC5C,IAAI,CAAC,gBAAgB,GAAG,kBAAkB,CAAC;oBAC3C,SAAS;gBACX,CAAC;gBAED,2BAA2B;gBAC3B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE,CAAC;oBAC5B,IAAI,CAAC,GAAG,CAAC,oCAAoC,YAAY,EAAE,CAAC,CAAC;oBAC7D,IAAI,CAAC,GAAG,CAAC,wDAAwD,CAAC,CAAC;oBACnE,SAAS,GAAG,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAC;oBAC3D,IAAI,CAAC,gBAAgB,GAAG,kBAAkB,CAAC;oBAC3C,MAAM;gBACR,CAAC;gBAED,4BAA4B;gBAC5B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE,CAAC;oBAC5B,IAAI,CAAC,GAAG,CAAC,mCAAmC,YAAY,EAAE,CAAC,CAAC;oBAC5D,IAAI,CAAC,GAAG,CAAC,sDAAsD,CAAC,CAAC;oBACjE,SAAS,GAAG,IAAI,KAAK,CAAC,sBAAsB,YAAY,EAAE,CAAC,CAAC;oBAC5D,IAAI,CAAC,gBAAgB,GAAG,iBAAiB,CAAC;oBAC1C,MAAM;gBACR,CAAC;gBAED,uBAAuB;gBACvB,IAAI,QAAQ,CAAC,MAAM,IAAI,GAAG,EAAE,CAAC;oBAC3B,IAAI,CAAC,GAAG,CAAC,iBAAiB,QAAQ,CAAC,MAAM,mBAAmB,YAAY,EAAE,CAAC,CAAC;oBAC5E,SAAS,GAAG,IAAI,KAAK,CAAC,iBAAiB,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC;oBAC3D,IAAI,CAAC,gBAAgB,GAAG,GAAG,QAAQ,CAAC,MAAM,eAAe,CAAC;oBAC1D,SAAS;gBACX,CAAC;gBAED,mCAAmC;gBACnC,IAAI,CAAC,GAAG,CAAC,iBAAiB,QAAQ,CAAC,MAAM,MAAM,YAAY,EAAE,CAAC,CAAC;gBAC/D,SAAS,GAAG,IAAI,KAAK,CAAC,QAAQ,QAAQ,CAAC,MAAM,KAAK,YAAY,EAAE,CAAC,CAAC;gBAClE,IAAI,CAAC,gBAAgB,GAAG,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC;gBAC7C,MAAM;YACR,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,wBAAwB;gBACxB,SAAS,GAAG,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CA
AC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;gBAChE,IAAI,CAAC,GAAG,CAAC,iCAAiC,SAAS,CAAC,OAAO,EAAE,CAAC,CAAC;gBAC/D,IAAI,CAAC,GAAG,CAAC,uCAAuC,CAAC,CAAC;gBAClD,IAAI,CAAC,gBAAgB,GAAG,eAAe,CAAC;gBACxC,SAAS;YACX,CAAC;QACH,CAAC;QAED,MAAM,SAAS,IAAI,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC1D,CAAC;IAED,mEAAmE;IAE3D,oBAAoB,CAAC,KAAY;QACvC,IAAI,CAAC,mBAAmB,EAAE,CAAC;QAE3B,IAAI,IAAI,CAAC,mBAAmB,IAAI,IAAI,CAAC,uBAAuB,EAAE,CAAC;YAC7D,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC;YAC3B,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;YAClC,IAAI,CAAC,GAAG,CACN,wBAAwB,IAAI,CAAC,mBAAmB,0BAA0B,IAAI,CAAC,gBAAgB,oBAAoB,IAAI,CAAC,iBAAiB,GAAG,IAAI,IAAI,CACrJ,CAAC;QACJ,CAAC;QAED,wCAAwC;QACxC,IAAI,IAAI,CAAC,eAAe,EAAE,CAAC;YACzB,IAAI,CAAC;gBACH,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC;YAC9B,CAAC;YAAC,MAAM,CAAC;gBACP,iCAAiC;YACnC,CAAC;QACH,CAAC;IACH,CAAC;IAED,mEAAmE;IAE3D,iBAAiB;QACvB,IAAI,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC;YAC1C,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC;YACrD,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,qBAAqB;YACnD,IAAI,CAAC,YAAY,IAAI,MAAM,CAAC;YAE5B,IAAI,IAAI,CAAC,SAAS,EAAE,CAAC;gBACnB,OAAO,CAAC,IAAI,CAAC,uBAAuB,MAAM,sBAAsB,CAAC,CAAC;YACpE,CAAC;QACH,CAAC;IACH,CAAC;IAED,oEAAoE;IAE5D,mBAAmB,CAAC,OAAe,EAAE,GAAY;QACvD,iDAAiD;QACjD,IAAI,IAAI,CAAC,SAAS,EAAE,CAAC;YACnB,OAAO,CAAC,IAAI,CAAC,iCAAiC,OAAO,GAAG,EAAE,GAAG,CAAC,CAAC;QACjE,CAAC;QAED,IAAI,IAAI,CAAC,eAAe,EAAE,CAAC;YACzB,IAAI,CAAC;gBACH,IAAI,CAAC,eAAe,CAClB,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CACpD,CAAC;YACJ,CAAC;YAAC,MAAM,CAAC;gBACP,qCAAqC;YACvC,CAAC;QACH,CAAC;IACH,CAAC;IAEO,GAAG,CAAC,OAAe;QACzB,IAAI,IAAI,CAAC,SAAS,EAAE,CAAC;YACnB,OAAO,CAAC,GAAG,CAAC,eAAe,OAAO,EAAE,CAAC,CAAC;QACxC,CAAC;IACH,CAAC;IAEO,KAAK,CAAC,EAAU;QACtB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAAC;IAC3D,CAAC;CACF;AA9qBD,8BA8qBC"}
package/package.json
ADDED
@@ -0,0 +1,34 @@
{
  "name": "@llmtracer/sdk",
  "version": "1.0.0",
  "description": "See where your AI budget goes. Lightweight LLM cost tracking SDK.",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "files": ["dist"],
  "scripts": {
    "build": "tsc",
    "test": "node test/test-with-openai.js"
  },
  "keywords": ["llm", "openai", "cost", "tracking", "observability"],
  "author": "LLM Tracer <hello@llmtracer.com>",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://github.com/llmtracer/sdk.git"
  },
  "homepage": "https://github.com/llmtracer/sdk#readme",
  "bugs": {
    "url": "https://github.com/llmtracer/sdk/issues"
  },
  "dependencies": {},
  "devDependencies": {
    "typescript": "^5.3.0",
    "openai": "^4.0.0"
  },
  "peerDependencies": {
    "openai": ">=4.0.0"
  },
  "engines": {
    "node": ">=18"
  }
}