@llmtap/shared 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +290 -0
- package/dist/index.d.ts +290 -0
- package/dist/index.js +319 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +277 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +43 -0
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
/** Represents a single LLM API call or operation */
|
|
2
|
+
/** Represents a single LLM API call or operation */
interface Span {
    /** Unique identifier for this span (16-char hex) */
    spanId: string;
    /** Groups related spans into a trace (32-char hex) */
    traceId: string;
    /** Links to parent span for agent workflows */
    parentSpanId?: string;
    /** Human-readable name, e.g. "chat gpt-4o" */
    name: string;
    /** Operation type: "chat", "embeddings", "text_completion" */
    operationName: string;
    /** Provider: "openai", "anthropic", "google", "deepseek", "groq", etc. */
    providerName: string;
    /* Timing in millisecond timestamps (the OTLP exporter converts ms to ns);
       endTime/duration stay unset while the span is still open. */
    startTime: number;
    endTime?: number;
    duration?: number;
    /* Model as requested vs. as reported back by the provider. */
    requestModel: string;
    responseModel?: string;
    /* Token usage and derived cost in USD (see ModelPricing: USD per 1M tokens). */
    inputTokens?: number;
    outputTokens?: number;
    totalTokens?: number;
    inputCost?: number;
    outputCost?: number;
    totalCost?: number;
    /* Sampling parameters captured from the request, when present. */
    temperature?: number;
    maxTokens?: number;
    topP?: number;
    /* Captured content — presumably omitted when captureContent is false; confirm. */
    inputMessages?: Message[];
    outputMessages?: Message[];
    toolCalls?: ToolCall[];
    /* Outcome; errorType/errorMessage presumably only set when status is "error". */
    status: "ok" | "error";
    errorType?: string;
    errorMessage?: string;
    /* Free-form correlation metadata. */
    tags?: Record<string, string>;
    sessionId?: string;
    userId?: string;
}
|
|
39
|
+
/** Input for creating a span (before it ends) */
|
|
40
|
+
/** Input for creating a span (before it ends) */
/* NOTE(review): field-for-field identical to Span in this build — presumably kept
   as a separate interface so the ingest payload can diverge from the stored
   shape later without a breaking change; confirm intent. See Span for field docs. */
interface SpanInput {
    spanId: string;
    traceId: string;
    parentSpanId?: string;
    name: string;
    operationName: string;
    providerName: string;
    startTime: number;
    endTime?: number;
    duration?: number;
    requestModel: string;
    responseModel?: string;
    inputTokens?: number;
    outputTokens?: number;
    totalTokens?: number;
    inputCost?: number;
    outputCost?: number;
    totalCost?: number;
    temperature?: number;
    maxTokens?: number;
    topP?: number;
    inputMessages?: Message[];
    outputMessages?: Message[];
    toolCalls?: ToolCall[];
    status: "ok" | "error";
    errorType?: string;
    errorMessage?: string;
    tags?: Record<string, string>;
    sessionId?: string;
    userId?: string;
}
|
|
71
|
+
/** A single chat message captured from an LLM request or response. */
interface Message {
    /** Originator of the message. */
    role: "system" | "user" | "assistant" | "tool";
    /** Message text; null — presumably when content was not captured or the
     * message carries only tool calls; confirm against SDK capture logic. */
    content: string | null;
    name?: string;
    /** For role "tool": id of the tool call being answered — presumably; confirm. */
    toolCallId?: string;
}
|
|
77
|
+
/** A tool/function invocation made by the model during a span. */
interface ToolCall {
    id: string;
    name: string;
    /** Raw argument payload as a string (JSON-encoded — presumably; confirm). */
    arguments: string;
    /** Tool output, when it was captured. */
    result?: string;
    /** Execution time — presumably milliseconds, matching Span.duration; confirm. */
    duration?: number;
}
|
|
84
|
+
/** Aggregated summary row for one trace (a group of related spans). */
interface Trace {
    traceId: string;
    name: string;
    startTime: number;
    /** Unset while the trace still has open spans — presumably; confirm. */
    endTime?: number;
    /** Presumably "error" if any span in the trace failed — confirm aggregation rule. */
    status: "ok" | "error";
    /* Aggregates over the trace's spans. */
    spanCount: number;
    totalTokens: number;
    /** USD (pricing tables are USD per 1M tokens). */
    totalCost: number;
    totalDuration?: number;
    sessionId?: string;
    metadata?: Record<string, string>;
}
|
|
97
|
+
/** A trace summary together with its full list of spans. */
interface TraceWithSpans extends Trace {
    spans: Span[];
}
|
|
100
|
+
/** Aggregate usage statistics for a reporting period. */
interface Stats {
    /** Period label — format defined by the collector; confirm (e.g. "24h"). */
    period: string;
    totalTraces: number;
    totalSpans: number;
    totalTokens: number;
    /** Total cost in USD (pricing tables are USD per 1M tokens). */
    totalCost: number;
    avgDuration: number;
    errorCount: number;
    /** Presumably errorCount over totalSpans — confirm the denominator. */
    errorRate: number;
    /* Breakdowns by provider and by model. */
    byProvider: ProviderStats[];
    byModel: ModelStats[];
}
|
|
112
|
+
/** Usage rollup for a single provider (see Stats.byProvider). */
interface ProviderStats {
    provider: string;
    spanCount: number;
    totalTokens: number;
    /** USD */
    totalCost: number;
    avgDuration: number;
}
|
|
119
|
+
/** Usage rollup for a single model within a provider (see Stats.byModel). */
interface ModelStats {
    model: string;
    provider: string;
    spanCount: number;
    totalTokens: number;
    /** USD */
    totalCost: number;
    avgDuration: number;
}
|
|
127
|
+
/** One time-series bucket for cost/usage charts. */
interface CostOverTime {
    /** Bucket timestamp — presumably epoch milliseconds; confirm with collector. */
    timestamp: number;
    /** Cost accrued in this bucket (USD). */
    cost: number;
    tokens: number;
    /** Number of spans recorded in this bucket. */
    spans: number;
}
|
|
133
|
+
/** Configuration for the LLMTap SDK */
|
|
134
|
+
interface LLMTapConfig {
|
|
135
|
+
/** Collector URL (default: http://localhost:4781) */
|
|
136
|
+
collectorUrl?: string;
|
|
137
|
+
/** Whether to capture message content (default: true) */
|
|
138
|
+
captureContent?: boolean;
|
|
139
|
+
/** Maximum spans to buffer when collector is offline (default: 1000) */
|
|
140
|
+
maxBufferSize?: number;
|
|
141
|
+
/** Custom tags to add to all spans */
|
|
142
|
+
defaultTags?: Record<string, string>;
|
|
143
|
+
/** Session ID to group traces */
|
|
144
|
+
sessionId?: string;
|
|
145
|
+
/** Whether SDK is enabled (default: true) */
|
|
146
|
+
enabled?: boolean;
|
|
147
|
+
/** Enable debug logging to console (default: false) */
|
|
148
|
+
debug?: boolean;
|
|
149
|
+
/** Error handler called when span transport fails (default: silent) */
|
|
150
|
+
onError?: (error: Error, context: {
|
|
151
|
+
spanCount: number;
|
|
152
|
+
retryable: boolean;
|
|
153
|
+
}) => void;
|
|
154
|
+
}
|
|
155
|
+
/** Options for the wrap() function */
|
|
156
|
+
interface WrapOptions {
|
|
157
|
+
/** Custom tags for spans from this client */
|
|
158
|
+
tags?: Record<string, string>;
|
|
159
|
+
/** Override provider name detection */
|
|
160
|
+
provider?: string;
|
|
161
|
+
}
|
|
162
|
+
/** Model pricing: cost per 1 million tokens in USD */
|
|
163
|
+
interface ModelPricing {
|
|
164
|
+
provider: string;
|
|
165
|
+
model: string;
|
|
166
|
+
inputCostPer1M: number;
|
|
167
|
+
outputCostPer1M: number;
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
/** Default TCP port of the local collector. */
declare const DEFAULT_COLLECTOR_PORT = 4781;
/** Must stay in sync with DEFAULT_COLLECTOR_PORT. */
declare const DEFAULT_COLLECTOR_URL = "http://localhost:4781";
/** Default cap on spans buffered while the collector is unreachable. */
declare const DEFAULT_MAX_BUFFER_SIZE = 1000;
/** Directory name for local storage — presumably under the user's home; confirm. */
declare const DB_DIR_NAME = ".llmtap";
/** SQLite-style data file name inside DB_DIR_NAME — confirm engine. */
declare const DB_FILE_NAME = "data.db";
/** Package version; keep in sync with package.json. */
declare const VERSION = "0.1.0";
/** HTTP route table shared by the SDK transport and the collector API. */
declare const ROUTES: {
    readonly INGEST_SPANS: "/v1/spans";
    readonly LIST_TRACES: "/v1/traces";
    readonly GET_TRACE_SPANS: "/v1/traces/:traceId/spans";
    readonly GET_STATS: "/v1/stats";
    readonly GET_SESSIONS: "/v1/sessions";
    readonly GET_DB_INFO: "/v1/db-info";
    readonly SSE_STREAM: "/v1/stream";
};
|
|
185
|
+
|
|
186
|
+
/**
|
|
187
|
+
* Set pricing for a single model. Overrides built-in pricing.
|
|
188
|
+
*/
|
|
189
|
+
declare function setPricing(provider: string, model: string, inputCostPer1M: number, outputCostPer1M: number): void;
|
|
190
|
+
/**
|
|
191
|
+
* Set pricing for multiple models at once.
|
|
192
|
+
*/
|
|
193
|
+
declare function setPricingBulk(entries: ModelPricing[]): void;
|
|
194
|
+
/**
|
|
195
|
+
* Load pricing from a remote JSON URL.
|
|
196
|
+
* Expects an array of ModelPricing objects.
|
|
197
|
+
* Pass a fetch-compatible function (globalThis.fetch or node-fetch).
|
|
198
|
+
*/
|
|
199
|
+
declare function loadPricingFromURL(url: string, fetchFn: (url: string) => Promise<{
|
|
200
|
+
ok: boolean;
|
|
201
|
+
status: number;
|
|
202
|
+
json: () => Promise<unknown>;
|
|
203
|
+
}>): Promise<void>;
|
|
204
|
+
/**
|
|
205
|
+
* Remove all runtime pricing overrides.
|
|
206
|
+
*/
|
|
207
|
+
declare function clearPricingOverrides(): void;
|
|
208
|
+
/**
|
|
209
|
+
* Get all pricing entries (overrides merged on top of built-in).
|
|
210
|
+
*/
|
|
211
|
+
declare function getAllPricing(): ModelPricing[];
|
|
212
|
+
/**
|
|
213
|
+
* Pricing data for popular LLM models.
|
|
214
|
+
* Costs are per 1 million tokens in USD.
|
|
215
|
+
* Last updated: 2025-01
|
|
216
|
+
*/
|
|
217
|
+
declare const MODEL_PRICING: ModelPricing[];
|
|
218
|
+
/**
|
|
219
|
+
* Look up pricing for a model. Returns null if model is not found.
|
|
220
|
+
* Tries exact match first, then prefix match for versioned models.
|
|
221
|
+
*/
|
|
222
|
+
declare function getModelPricing(provider: string, model: string): ModelPricing | null;
|
|
223
|
+
/**
|
|
224
|
+
* Calculate cost for a given number of tokens
|
|
225
|
+
*/
|
|
226
|
+
declare function calculateCost(provider: string, model: string, inputTokens: number, outputTokens: number): {
|
|
227
|
+
inputCost: number;
|
|
228
|
+
outputCost: number;
|
|
229
|
+
totalCost: number;
|
|
230
|
+
};
|
|
231
|
+
|
|
232
|
+
/**
|
|
233
|
+
* Convert LLMTap spans to OpenTelemetry OTLP JSON format.
|
|
234
|
+
*
|
|
235
|
+
* Follows the OTLP/HTTP JSON specification and the
|
|
236
|
+
* OpenTelemetry GenAI Semantic Conventions for LLM spans.
|
|
237
|
+
*
|
|
238
|
+
* @see https://opentelemetry.io/docs/specs/otlp/
|
|
239
|
+
* @see https://opentelemetry.io/docs/specs/semconv/gen-ai/
|
|
240
|
+
*/
|
|
241
|
+
/** OTLP AnyValue-style attribute; exactly one value field should be set. */
interface OtlpAttribute {
    key: string;
    value: {
        stringValue?: string;
        /** 64-bit integers are serialized as decimal strings in OTLP JSON. */
        intValue?: string;
        doubleValue?: number;
        boolValue?: boolean;
    };
}
/** Timestamped event attached to a span (nanoseconds as a decimal string). */
interface OtlpEvent {
    name: string;
    timeUnixNano: string;
    attributes: OtlpAttribute[];
}
/** A single span in OTLP JSON form; ids are hex strings, times are
 * nanoseconds-since-epoch encoded as decimal strings. */
interface OtlpSpan {
    traceId: string;
    spanId: string;
    parentSpanId?: string;
    name: string;
    /** SpanKind enum value per the OTLP protobuf definition. */
    kind: number;
    startTimeUnixNano: string;
    endTimeUnixNano: string;
    attributes: OtlpAttribute[];
    events: OtlpEvent[];
    status: {
        /** StatusCode enum value per the OTLP protobuf definition. */
        code: number;
        message?: string;
    };
}
/** Spans grouped under one resource + instrumentation scope, per the OTLP data model. */
interface OtlpResourceSpans {
    resource: {
        attributes: OtlpAttribute[];
    };
    scopeSpans: Array<{
        scope: {
            name: string;
            version: string;
        };
        spans: OtlpSpan[];
    }>;
}
/** Top-level body of an OTLP/HTTP JSON trace export request. */
interface OtlpExportPayload {
    resourceSpans: OtlpResourceSpans[];
}
|
|
285
|
+
/**
|
|
286
|
+
* Convert an array of LLMTap spans to OTLP JSON export format.
|
|
287
|
+
*/
|
|
288
|
+
declare function spansToOtlp(spans: Span[], serviceName?: string): OtlpExportPayload;
|
|
289
|
+
|
|
290
|
+
export { type CostOverTime, DB_DIR_NAME, DB_FILE_NAME, DEFAULT_COLLECTOR_PORT, DEFAULT_COLLECTOR_URL, DEFAULT_MAX_BUFFER_SIZE, type LLMTapConfig, MODEL_PRICING, type Message, type ModelPricing, type ModelStats, type OtlpExportPayload, type ProviderStats, ROUTES, type Span, type SpanInput, type Stats, type ToolCall, type Trace, type TraceWithSpans, VERSION, type WrapOptions, calculateCost, clearPricingOverrides, getAllPricing, getModelPricing, loadPricingFromURL, setPricing, setPricingBulk, spansToOtlp };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
/** Represents a single LLM API call or operation */
|
|
2
|
+
interface Span {
|
|
3
|
+
/** Unique identifier for this span (16-char hex) */
|
|
4
|
+
spanId: string;
|
|
5
|
+
/** Groups related spans into a trace (32-char hex) */
|
|
6
|
+
traceId: string;
|
|
7
|
+
/** Links to parent span for agent workflows */
|
|
8
|
+
parentSpanId?: string;
|
|
9
|
+
/** Human-readable name, e.g. "chat gpt-4o" */
|
|
10
|
+
name: string;
|
|
11
|
+
/** Operation type: "chat", "embeddings", "text_completion" */
|
|
12
|
+
operationName: string;
|
|
13
|
+
/** Provider: "openai", "anthropic", "google", "deepseek", "groq", etc. */
|
|
14
|
+
providerName: string;
|
|
15
|
+
startTime: number;
|
|
16
|
+
endTime?: number;
|
|
17
|
+
duration?: number;
|
|
18
|
+
requestModel: string;
|
|
19
|
+
responseModel?: string;
|
|
20
|
+
inputTokens?: number;
|
|
21
|
+
outputTokens?: number;
|
|
22
|
+
totalTokens?: number;
|
|
23
|
+
inputCost?: number;
|
|
24
|
+
outputCost?: number;
|
|
25
|
+
totalCost?: number;
|
|
26
|
+
temperature?: number;
|
|
27
|
+
maxTokens?: number;
|
|
28
|
+
topP?: number;
|
|
29
|
+
inputMessages?: Message[];
|
|
30
|
+
outputMessages?: Message[];
|
|
31
|
+
toolCalls?: ToolCall[];
|
|
32
|
+
status: "ok" | "error";
|
|
33
|
+
errorType?: string;
|
|
34
|
+
errorMessage?: string;
|
|
35
|
+
tags?: Record<string, string>;
|
|
36
|
+
sessionId?: string;
|
|
37
|
+
userId?: string;
|
|
38
|
+
}
|
|
39
|
+
/** Input for creating a span (before it ends) */
|
|
40
|
+
interface SpanInput {
|
|
41
|
+
spanId: string;
|
|
42
|
+
traceId: string;
|
|
43
|
+
parentSpanId?: string;
|
|
44
|
+
name: string;
|
|
45
|
+
operationName: string;
|
|
46
|
+
providerName: string;
|
|
47
|
+
startTime: number;
|
|
48
|
+
endTime?: number;
|
|
49
|
+
duration?: number;
|
|
50
|
+
requestModel: string;
|
|
51
|
+
responseModel?: string;
|
|
52
|
+
inputTokens?: number;
|
|
53
|
+
outputTokens?: number;
|
|
54
|
+
totalTokens?: number;
|
|
55
|
+
inputCost?: number;
|
|
56
|
+
outputCost?: number;
|
|
57
|
+
totalCost?: number;
|
|
58
|
+
temperature?: number;
|
|
59
|
+
maxTokens?: number;
|
|
60
|
+
topP?: number;
|
|
61
|
+
inputMessages?: Message[];
|
|
62
|
+
outputMessages?: Message[];
|
|
63
|
+
toolCalls?: ToolCall[];
|
|
64
|
+
status: "ok" | "error";
|
|
65
|
+
errorType?: string;
|
|
66
|
+
errorMessage?: string;
|
|
67
|
+
tags?: Record<string, string>;
|
|
68
|
+
sessionId?: string;
|
|
69
|
+
userId?: string;
|
|
70
|
+
}
|
|
71
|
+
/** A single chat message captured from an LLM request or response. */
interface Message {
    /** Originator of the message. */
    role: "system" | "user" | "assistant" | "tool";
    /** Message text; null — presumably when content was not captured or the
     * message carries only tool calls; confirm against SDK capture logic. */
    content: string | null;
    name?: string;
    /** For role "tool": id of the tool call being answered — presumably; confirm. */
    toolCallId?: string;
}
|
|
77
|
+
/** A tool/function invocation made by the model during a span. */
interface ToolCall {
    id: string;
    name: string;
    /** Raw argument payload as a string (JSON-encoded — presumably; confirm). */
    arguments: string;
    /** Tool output, when it was captured. */
    result?: string;
    /** Execution time — presumably milliseconds, matching Span.duration; confirm. */
    duration?: number;
}
|
|
84
|
+
/** Aggregated summary row for one trace (a group of related spans). */
interface Trace {
    traceId: string;
    name: string;
    startTime: number;
    /** Unset while the trace still has open spans — presumably; confirm. */
    endTime?: number;
    /** Presumably "error" if any span in the trace failed — confirm aggregation rule. */
    status: "ok" | "error";
    /* Aggregates over the trace's spans. */
    spanCount: number;
    totalTokens: number;
    /** USD (pricing tables are USD per 1M tokens). */
    totalCost: number;
    totalDuration?: number;
    sessionId?: string;
    metadata?: Record<string, string>;
}
|
|
97
|
+
interface TraceWithSpans extends Trace {
|
|
98
|
+
spans: Span[];
|
|
99
|
+
}
|
|
100
|
+
/** Aggregate usage statistics for a reporting period. */
interface Stats {
    /** Period label — format defined by the collector; confirm (e.g. "24h"). */
    period: string;
    totalTraces: number;
    totalSpans: number;
    totalTokens: number;
    /** Total cost in USD (pricing tables are USD per 1M tokens). */
    totalCost: number;
    avgDuration: number;
    errorCount: number;
    /** Presumably errorCount over totalSpans — confirm the denominator. */
    errorRate: number;
    /* Breakdowns by provider and by model. */
    byProvider: ProviderStats[];
    byModel: ModelStats[];
}
|
|
112
|
+
interface ProviderStats {
|
|
113
|
+
provider: string;
|
|
114
|
+
spanCount: number;
|
|
115
|
+
totalTokens: number;
|
|
116
|
+
totalCost: number;
|
|
117
|
+
avgDuration: number;
|
|
118
|
+
}
|
|
119
|
+
interface ModelStats {
|
|
120
|
+
model: string;
|
|
121
|
+
provider: string;
|
|
122
|
+
spanCount: number;
|
|
123
|
+
totalTokens: number;
|
|
124
|
+
totalCost: number;
|
|
125
|
+
avgDuration: number;
|
|
126
|
+
}
|
|
127
|
+
/** One time-series bucket for cost/usage charts. */
interface CostOverTime {
    /** Bucket timestamp — presumably epoch milliseconds; confirm with collector. */
    timestamp: number;
    /** Cost accrued in this bucket (USD). */
    cost: number;
    tokens: number;
    /** Number of spans recorded in this bucket. */
    spans: number;
}
|
|
133
|
+
/** Configuration for the LLMTap SDK */
|
|
134
|
+
interface LLMTapConfig {
|
|
135
|
+
/** Collector URL (default: http://localhost:4781) */
|
|
136
|
+
collectorUrl?: string;
|
|
137
|
+
/** Whether to capture message content (default: true) */
|
|
138
|
+
captureContent?: boolean;
|
|
139
|
+
/** Maximum spans to buffer when collector is offline (default: 1000) */
|
|
140
|
+
maxBufferSize?: number;
|
|
141
|
+
/** Custom tags to add to all spans */
|
|
142
|
+
defaultTags?: Record<string, string>;
|
|
143
|
+
/** Session ID to group traces */
|
|
144
|
+
sessionId?: string;
|
|
145
|
+
/** Whether SDK is enabled (default: true) */
|
|
146
|
+
enabled?: boolean;
|
|
147
|
+
/** Enable debug logging to console (default: false) */
|
|
148
|
+
debug?: boolean;
|
|
149
|
+
/** Error handler called when span transport fails (default: silent) */
|
|
150
|
+
onError?: (error: Error, context: {
|
|
151
|
+
spanCount: number;
|
|
152
|
+
retryable: boolean;
|
|
153
|
+
}) => void;
|
|
154
|
+
}
|
|
155
|
+
/** Options for the wrap() function */
|
|
156
|
+
interface WrapOptions {
|
|
157
|
+
/** Custom tags for spans from this client */
|
|
158
|
+
tags?: Record<string, string>;
|
|
159
|
+
/** Override provider name detection */
|
|
160
|
+
provider?: string;
|
|
161
|
+
}
|
|
162
|
+
/** Model pricing: cost per 1 million tokens in USD */
|
|
163
|
+
interface ModelPricing {
|
|
164
|
+
provider: string;
|
|
165
|
+
model: string;
|
|
166
|
+
inputCostPer1M: number;
|
|
167
|
+
outputCostPer1M: number;
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
declare const DEFAULT_COLLECTOR_PORT = 4781;
|
|
171
|
+
declare const DEFAULT_COLLECTOR_URL = "http://localhost:4781";
|
|
172
|
+
declare const DEFAULT_MAX_BUFFER_SIZE = 1000;
|
|
173
|
+
declare const DB_DIR_NAME = ".llmtap";
|
|
174
|
+
declare const DB_FILE_NAME = "data.db";
|
|
175
|
+
declare const VERSION = "0.1.0";
|
|
176
|
+
declare const ROUTES: {
|
|
177
|
+
readonly INGEST_SPANS: "/v1/spans";
|
|
178
|
+
readonly LIST_TRACES: "/v1/traces";
|
|
179
|
+
readonly GET_TRACE_SPANS: "/v1/traces/:traceId/spans";
|
|
180
|
+
readonly GET_STATS: "/v1/stats";
|
|
181
|
+
readonly GET_SESSIONS: "/v1/sessions";
|
|
182
|
+
readonly GET_DB_INFO: "/v1/db-info";
|
|
183
|
+
readonly SSE_STREAM: "/v1/stream";
|
|
184
|
+
};
|
|
185
|
+
|
|
186
|
+
/**
|
|
187
|
+
* Set pricing for a single model. Overrides built-in pricing.
|
|
188
|
+
*/
|
|
189
|
+
declare function setPricing(provider: string, model: string, inputCostPer1M: number, outputCostPer1M: number): void;
|
|
190
|
+
/**
|
|
191
|
+
* Set pricing for multiple models at once.
|
|
192
|
+
*/
|
|
193
|
+
declare function setPricingBulk(entries: ModelPricing[]): void;
|
|
194
|
+
/**
|
|
195
|
+
* Load pricing from a remote JSON URL.
|
|
196
|
+
* Expects an array of ModelPricing objects.
|
|
197
|
+
* Pass a fetch-compatible function (globalThis.fetch or node-fetch).
|
|
198
|
+
*/
|
|
199
|
+
declare function loadPricingFromURL(url: string, fetchFn: (url: string) => Promise<{
|
|
200
|
+
ok: boolean;
|
|
201
|
+
status: number;
|
|
202
|
+
json: () => Promise<unknown>;
|
|
203
|
+
}>): Promise<void>;
|
|
204
|
+
/**
|
|
205
|
+
* Remove all runtime pricing overrides.
|
|
206
|
+
*/
|
|
207
|
+
declare function clearPricingOverrides(): void;
|
|
208
|
+
/**
|
|
209
|
+
* Get all pricing entries (overrides merged on top of built-in).
|
|
210
|
+
*/
|
|
211
|
+
declare function getAllPricing(): ModelPricing[];
|
|
212
|
+
/**
|
|
213
|
+
* Pricing data for popular LLM models.
|
|
214
|
+
* Costs are per 1 million tokens in USD.
|
|
215
|
+
* Last updated: 2025-01
|
|
216
|
+
*/
|
|
217
|
+
declare const MODEL_PRICING: ModelPricing[];
|
|
218
|
+
/**
|
|
219
|
+
* Look up pricing for a model. Returns null if model is not found.
|
|
220
|
+
* Tries exact match first, then prefix match for versioned models.
|
|
221
|
+
*/
|
|
222
|
+
declare function getModelPricing(provider: string, model: string): ModelPricing | null;
|
|
223
|
+
/**
|
|
224
|
+
* Calculate cost for a given number of tokens
|
|
225
|
+
*/
|
|
226
|
+
declare function calculateCost(provider: string, model: string, inputTokens: number, outputTokens: number): {
|
|
227
|
+
inputCost: number;
|
|
228
|
+
outputCost: number;
|
|
229
|
+
totalCost: number;
|
|
230
|
+
};
|
|
231
|
+
|
|
232
|
+
/**
|
|
233
|
+
* Convert LLMTap spans to OpenTelemetry OTLP JSON format.
|
|
234
|
+
*
|
|
235
|
+
* Follows the OTLP/HTTP JSON specification and the
|
|
236
|
+
* OpenTelemetry GenAI Semantic Conventions for LLM spans.
|
|
237
|
+
*
|
|
238
|
+
* @see https://opentelemetry.io/docs/specs/otlp/
|
|
239
|
+
* @see https://opentelemetry.io/docs/specs/semconv/gen-ai/
|
|
240
|
+
*/
|
|
241
|
+
interface OtlpAttribute {
|
|
242
|
+
key: string;
|
|
243
|
+
value: {
|
|
244
|
+
stringValue?: string;
|
|
245
|
+
intValue?: string;
|
|
246
|
+
doubleValue?: number;
|
|
247
|
+
boolValue?: boolean;
|
|
248
|
+
};
|
|
249
|
+
}
|
|
250
|
+
interface OtlpEvent {
|
|
251
|
+
name: string;
|
|
252
|
+
timeUnixNano: string;
|
|
253
|
+
attributes: OtlpAttribute[];
|
|
254
|
+
}
|
|
255
|
+
interface OtlpSpan {
|
|
256
|
+
traceId: string;
|
|
257
|
+
spanId: string;
|
|
258
|
+
parentSpanId?: string;
|
|
259
|
+
name: string;
|
|
260
|
+
kind: number;
|
|
261
|
+
startTimeUnixNano: string;
|
|
262
|
+
endTimeUnixNano: string;
|
|
263
|
+
attributes: OtlpAttribute[];
|
|
264
|
+
events: OtlpEvent[];
|
|
265
|
+
status: {
|
|
266
|
+
code: number;
|
|
267
|
+
message?: string;
|
|
268
|
+
};
|
|
269
|
+
}
|
|
270
|
+
interface OtlpResourceSpans {
|
|
271
|
+
resource: {
|
|
272
|
+
attributes: OtlpAttribute[];
|
|
273
|
+
};
|
|
274
|
+
scopeSpans: Array<{
|
|
275
|
+
scope: {
|
|
276
|
+
name: string;
|
|
277
|
+
version: string;
|
|
278
|
+
};
|
|
279
|
+
spans: OtlpSpan[];
|
|
280
|
+
}>;
|
|
281
|
+
}
|
|
282
|
+
interface OtlpExportPayload {
|
|
283
|
+
resourceSpans: OtlpResourceSpans[];
|
|
284
|
+
}
|
|
285
|
+
/**
|
|
286
|
+
* Convert an array of LLMTap spans to OTLP JSON export format.
|
|
287
|
+
*/
|
|
288
|
+
declare function spansToOtlp(spans: Span[], serviceName?: string): OtlpExportPayload;
|
|
289
|
+
|
|
290
|
+
export { type CostOverTime, DB_DIR_NAME, DB_FILE_NAME, DEFAULT_COLLECTOR_PORT, DEFAULT_COLLECTOR_URL, DEFAULT_MAX_BUFFER_SIZE, type LLMTapConfig, MODEL_PRICING, type Message, type ModelPricing, type ModelStats, type OtlpExportPayload, type ProviderStats, ROUTES, type Span, type SpanInput, type Stats, type ToolCall, type Trace, type TraceWithSpans, VERSION, type WrapOptions, calculateCost, clearPricingOverrides, getAllPricing, getModelPricing, loadPricingFromURL, setPricing, setPricingBulk, spansToOtlp };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,319 @@
|
|
|
1
|
+
"use strict";
// ---- esbuild CommonJS interop helpers (generated bundler output) ----
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define a lazy, enumerable getter on `target` for every entry in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy enumerable own properties from `from` onto `to` as getters, skipping
// `except` and keys already present, preserving each source's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Produce the final CJS exports object, tagged __esModule for ESM interop.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/index.ts
// Public CommonJS export surface. Each entry is exposed through a lazy getter
// (see __export), so it resolves the hoisted declarations defined below.
var index_exports = {};
__export(index_exports, {
  DB_DIR_NAME: () => DB_DIR_NAME,
  DB_FILE_NAME: () => DB_FILE_NAME,
  DEFAULT_COLLECTOR_PORT: () => DEFAULT_COLLECTOR_PORT,
  DEFAULT_COLLECTOR_URL: () => DEFAULT_COLLECTOR_URL,
  DEFAULT_MAX_BUFFER_SIZE: () => DEFAULT_MAX_BUFFER_SIZE,
  MODEL_PRICING: () => MODEL_PRICING,
  ROUTES: () => ROUTES,
  VERSION: () => VERSION,
  calculateCost: () => calculateCost,
  clearPricingOverrides: () => clearPricingOverrides,
  getAllPricing: () => getAllPricing,
  getModelPricing: () => getModelPricing,
  loadPricingFromURL: () => loadPricingFromURL,
  setPricing: () => setPricing,
  setPricingBulk: () => setPricingBulk,
  spansToOtlp: () => spansToOtlp
});
module.exports = __toCommonJS(index_exports);
|
|
41
|
+
|
|
42
|
+
// src/constants.ts
// Shared defaults for the LLMTap SDK and the local collector.
var DEFAULT_COLLECTOR_PORT = 4781;
// Derived from the port constant so the two can never drift apart.
var DEFAULT_COLLECTOR_URL = "http://localhost:" + DEFAULT_COLLECTOR_PORT;
// Cap on spans buffered client-side while the collector is unreachable.
var DEFAULT_MAX_BUFFER_SIZE = 1000;
var DB_DIR_NAME = ".llmtap";
var DB_FILE_NAME = "data.db";
var VERSION = "0.1.0";
// Route table shared by the SDK transport and the collector's HTTP API.
var ROUTES = {
  INGEST_SPANS: "/v1/spans",
  LIST_TRACES: "/v1/traces",
  GET_TRACE_SPANS: "/v1/traces/:traceId/spans",
  GET_STATS: "/v1/stats",
  GET_SESSIONS: "/v1/sessions",
  GET_DB_INFO: "/v1/db-info",
  SSE_STREAM: "/v1/stream"
};
|
|
58
|
+
|
|
59
|
+
// src/pricing.ts
// Runtime pricing registry: a built-in table (MODEL_PRICING) plus runtime
// overrides that take precedence on lookup. All keys are lower-cased.

// Overrides registered via setPricing/setPricingBulk/loadPricingFromURL.
var pricingOverrides = [];
/**
 * Set pricing for a single model (USD per 1 million tokens). Overrides the
 * built-in table and replaces any previous override for the same model.
 */
function setPricing(provider, model, inputCostPer1M, outputCostPer1M) {
  const entry = {
    provider: provider.toLowerCase(),
    model: model.toLowerCase(),
    inputCostPer1M,
    outputCostPer1M
  };
  const existing = pricingOverrides.findIndex(
    (p) => p.provider === entry.provider && p.model === entry.model
  );
  if (existing >= 0) {
    pricingOverrides[existing] = entry;
  } else {
    pricingOverrides.push(entry);
  }
}
/** Set pricing for multiple models at once. */
function setPricingBulk(entries) {
  for (const entry of entries) {
    setPricing(entry.provider, entry.model, entry.inputCostPer1M, entry.outputCostPer1M);
  }
}
// True when `x` is structurally a ModelPricing object.
function isModelPricingEntry(x) {
  return typeof x === "object" && x !== null && typeof x.provider === "string" && typeof x.model === "string" && typeof x.inputCostPer1M === "number" && typeof x.outputCostPer1M === "number";
}
/**
 * Load pricing overrides from a remote JSON URL (array of ModelPricing).
 * Throws an Error on a non-2xx response, or on a payload that is not an
 * array of ModelPricing objects (previously a malformed payload surfaced
 * as an opaque TypeError deep inside setPricingBulk).
 */
async function loadPricingFromURL(url, fetchFn) {
  const res = await fetchFn(url);
  if (!res.ok) throw new Error(`Failed to load pricing from ${url}: HTTP ${res.status}`);
  const data = await res.json();
  if (!Array.isArray(data) || !data.every(isModelPricingEntry)) {
    throw new Error(`Failed to load pricing from ${url}: expected an array of ModelPricing objects`);
  }
  setPricingBulk(data);
}
/** Remove all runtime pricing overrides. */
function clearPricingOverrides() {
  pricingOverrides.length = 0;
}
/** All pricing entries, overrides merged on top of the built-in table. */
function getAllPricing() {
  const merged = /* @__PURE__ */ new Map();
  for (const entry of MODEL_PRICING) {
    merged.set(`${entry.provider}::${entry.model}`, entry);
  }
  for (const entry of pricingOverrides) {
    merged.set(`${entry.provider}::${entry.model}`, entry);
  }
  return [...merged.values()];
}
// Built-in pricing table, USD per 1M tokens. Last updated 2025-01; prices
// drift over time — refresh at runtime via setPricing/loadPricingFromURL.
var MODEL_PRICING = [
  // OpenAI
  { provider: "openai", model: "gpt-4o", inputCostPer1M: 2.5, outputCostPer1M: 10 },
  { provider: "openai", model: "gpt-4o-2024-11-20", inputCostPer1M: 2.5, outputCostPer1M: 10 },
  { provider: "openai", model: "gpt-4o-2024-08-06", inputCostPer1M: 2.5, outputCostPer1M: 10 },
  { provider: "openai", model: "gpt-4o-mini", inputCostPer1M: 0.15, outputCostPer1M: 0.6 },
  { provider: "openai", model: "gpt-4o-mini-2024-07-18", inputCostPer1M: 0.15, outputCostPer1M: 0.6 },
  { provider: "openai", model: "gpt-4-turbo", inputCostPer1M: 10, outputCostPer1M: 30 },
  { provider: "openai", model: "gpt-4", inputCostPer1M: 30, outputCostPer1M: 60 },
  { provider: "openai", model: "gpt-3.5-turbo", inputCostPer1M: 0.5, outputCostPer1M: 1.5 },
  { provider: "openai", model: "o1", inputCostPer1M: 15, outputCostPer1M: 60 },
  { provider: "openai", model: "o1-mini", inputCostPer1M: 3, outputCostPer1M: 12 },
  { provider: "openai", model: "o1-preview", inputCostPer1M: 15, outputCostPer1M: 60 },
  { provider: "openai", model: "o3-mini", inputCostPer1M: 1.1, outputCostPer1M: 4.4 },
  { provider: "openai", model: "o3", inputCostPer1M: 10, outputCostPer1M: 40 },
  { provider: "openai", model: "o4-mini", inputCostPer1M: 1.1, outputCostPer1M: 4.4 },
  // Anthropic
  { provider: "anthropic", model: "claude-sonnet-4-20250514", inputCostPer1M: 3, outputCostPer1M: 15 },
  { provider: "anthropic", model: "claude-3-5-sonnet-20241022", inputCostPer1M: 3, outputCostPer1M: 15 },
  { provider: "anthropic", model: "claude-3-5-sonnet-20240620", inputCostPer1M: 3, outputCostPer1M: 15 },
  { provider: "anthropic", model: "claude-3-5-haiku-20241022", inputCostPer1M: 0.8, outputCostPer1M: 4 },
  { provider: "anthropic", model: "claude-3-opus-20240229", inputCostPer1M: 15, outputCostPer1M: 75 },
  { provider: "anthropic", model: "claude-3-haiku-20240307", inputCostPer1M: 0.25, outputCostPer1M: 1.25 },
  { provider: "anthropic", model: "claude-opus-4", inputCostPer1M: 15, outputCostPer1M: 75 },
  { provider: "anthropic", model: "claude-sonnet-4", inputCostPer1M: 3, outputCostPer1M: 15 },
  // Google
  { provider: "google", model: "gemini-2.0-flash", inputCostPer1M: 0.1, outputCostPer1M: 0.4 },
  { provider: "google", model: "gemini-2.0-flash-lite", inputCostPer1M: 0.075, outputCostPer1M: 0.3 },
  { provider: "google", model: "gemini-1.5-pro", inputCostPer1M: 1.25, outputCostPer1M: 5 },
  { provider: "google", model: "gemini-1.5-flash", inputCostPer1M: 0.075, outputCostPer1M: 0.3 },
  { provider: "google", model: "gemini-2.5-pro", inputCostPer1M: 1.25, outputCostPer1M: 10 },
  { provider: "google", model: "gemini-2.5-flash", inputCostPer1M: 0.3, outputCostPer1M: 2.5 },
  // OpenAI-compatible providers
  { provider: "deepseek", model: "deepseek-chat", inputCostPer1M: 0.27, outputCostPer1M: 1.1 },
  { provider: "deepseek", model: "deepseek-v3", inputCostPer1M: 0.27, outputCostPer1M: 1.1 },
  { provider: "deepseek", model: "deepseek-reasoner", inputCostPer1M: 0.55, outputCostPer1M: 2.19 },
  { provider: "deepseek", model: "deepseek-r1", inputCostPer1M: 0.55, outputCostPer1M: 2.19 },
  { provider: "deepseek", model: "deepseek-coder-v2", inputCostPer1M: 0.14, outputCostPer1M: 0.28 },
  { provider: "groq", model: "llama-3.3-70b-versatile", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },
  { provider: "groq", model: "llama-3.1-8b-instant", inputCostPer1M: 0.05, outputCostPer1M: 0.08 },
  { provider: "groq", model: "llama-3.1-70b-versatile", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },
  { provider: "groq", model: "llama-3-8b-8192", inputCostPer1M: 0.05, outputCostPer1M: 0.08 },
  { provider: "groq", model: "llama-3-70b-8192", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },
  { provider: "groq", model: "gemma2-9b-it", inputCostPer1M: 0.2, outputCostPer1M: 0.2 },
  { provider: "groq", model: "mixtral-8x7b-32768", inputCostPer1M: 0.24, outputCostPer1M: 0.24 },
  { provider: "xai", model: "grok-2", inputCostPer1M: 2, outputCostPer1M: 10 },
  { provider: "xai", model: "grok-3", inputCostPer1M: 3, outputCostPer1M: 15 },
  // Ollama / local models (free)
  { provider: "ollama", model: "llama3", inputCostPer1M: 0, outputCostPer1M: 0 },
  { provider: "ollama", model: "llama3.1", inputCostPer1M: 0, outputCostPer1M: 0 },
  { provider: "ollama", model: "mistral", inputCostPer1M: 0, outputCostPer1M: 0 },
  { provider: "ollama", model: "codellama", inputCostPer1M: 0, outputCostPer1M: 0 },
  { provider: "ollama", model: "deepseek-r1", inputCostPer1M: 0, outputCostPer1M: 0 }
];
/**
 * Look up pricing for a model. Precedence: runtime override (exact match),
 * then built-in exact match, then longest-prefix built-in match (handles
 * dated/versioned model ids). Returns null when nothing matches.
 */
function getModelPricing(provider, model) {
  const normalized = model.toLowerCase();
  const providerNorm = provider.toLowerCase();
  const override = pricingOverrides.find(
    (p) => p.provider === providerNorm && p.model === normalized
  );
  if (override) return override;
  const providerPricing = MODEL_PRICING.filter(
    (pricing) => pricing.provider === providerNorm
  );
  const exact = providerPricing.find((pricing) => pricing.model === normalized);
  if (exact) return exact;
  // Longest prefix wins so "gpt-4o-2024-11-20" beats "gpt-4" etc.
  const prefix = providerPricing.filter((pricing) => normalized.startsWith(pricing.model)).sort((left, right) => right.model.length - left.model.length)[0];
  if (prefix) return prefix;
  return null;
}
/**
 * Compute USD costs for a token count. Unknown models cost 0 rather than
 * throwing, so cost tracking never breaks span ingestion.
 */
function calculateCost(provider, model, inputTokens, outputTokens) {
  const pricing = getModelPricing(provider, model);
  if (!pricing) {
    return { inputCost: 0, outputCost: 0, totalCost: 0 };
  }
  const inputCost = inputTokens / 1e6 * pricing.inputCostPer1M;
  const outputCost = outputTokens / 1e6 * pricing.outputCostPer1M;
  return {
    inputCost,
    outputCost,
    totalCost: inputCost + outputCost
  };
}
|
|
184
|
+
|
|
185
|
+
// src/otlp.ts
|
|
186
|
+
function strAttr(key, value) {
|
|
187
|
+
return { key, value: { stringValue: value } };
|
|
188
|
+
}
|
|
189
|
+
function intAttr(key, value) {
|
|
190
|
+
return { key, value: { intValue: String(value) } };
|
|
191
|
+
}
|
|
192
|
+
function floatAttr(key, value) {
|
|
193
|
+
return { key, value: { doubleValue: value } };
|
|
194
|
+
}
|
|
195
|
+
function msToNano(ms) {
|
|
196
|
+
return String(BigInt(ms) * BigInt(1e6));
|
|
197
|
+
}
|
|
198
|
+
function padHex(id, length) {
|
|
199
|
+
return id.padStart(length, "0").slice(0, length);
|
|
200
|
+
}
|
|
201
|
+
function convertSpanToOtlp(span) {
|
|
202
|
+
const attrs = [
|
|
203
|
+
// GenAI Semantic Conventions
|
|
204
|
+
strAttr("gen_ai.system", span.providerName),
|
|
205
|
+
strAttr("gen_ai.request.model", span.requestModel),
|
|
206
|
+
strAttr("gen_ai.operation.name", span.operationName)
|
|
207
|
+
];
|
|
208
|
+
if (span.responseModel) attrs.push(strAttr("gen_ai.response.model", span.responseModel));
|
|
209
|
+
if (span.inputTokens != null) attrs.push(intAttr("gen_ai.usage.input_tokens", span.inputTokens));
|
|
210
|
+
if (span.outputTokens != null) attrs.push(intAttr("gen_ai.usage.output_tokens", span.outputTokens));
|
|
211
|
+
if (span.totalTokens != null) attrs.push(intAttr("gen_ai.usage.total_tokens", span.totalTokens));
|
|
212
|
+
if (span.temperature != null) attrs.push(floatAttr("gen_ai.request.temperature", span.temperature));
|
|
213
|
+
if (span.maxTokens != null) attrs.push(intAttr("gen_ai.request.max_tokens", span.maxTokens));
|
|
214
|
+
if (span.topP != null) attrs.push(floatAttr("gen_ai.request.top_p", span.topP));
|
|
215
|
+
if (span.inputCost != null) attrs.push(floatAttr("llmtap.cost.input", span.inputCost));
|
|
216
|
+
if (span.outputCost != null) attrs.push(floatAttr("llmtap.cost.output", span.outputCost));
|
|
217
|
+
if (span.totalCost != null) attrs.push(floatAttr("llmtap.cost.total", span.totalCost));
|
|
218
|
+
if (span.sessionId) attrs.push(strAttr("session.id", span.sessionId));
|
|
219
|
+
if (span.userId) attrs.push(strAttr("enduser.id", span.userId));
|
|
220
|
+
if (span.errorType) attrs.push(strAttr("error.type", span.errorType));
|
|
221
|
+
if (span.tags) {
|
|
222
|
+
for (const [key, value] of Object.entries(span.tags)) {
|
|
223
|
+
attrs.push(strAttr(`llmtap.tag.${key}`, value));
|
|
224
|
+
}
|
|
225
|
+
}
|
|
226
|
+
const events = [];
|
|
227
|
+
if (span.inputMessages) {
|
|
228
|
+
for (const msg of span.inputMessages) {
|
|
229
|
+
events.push({
|
|
230
|
+
name: "gen_ai.content.prompt",
|
|
231
|
+
timeUnixNano: msToNano(span.startTime),
|
|
232
|
+
attributes: [
|
|
233
|
+
strAttr("gen_ai.prompt.role", msg.role),
|
|
234
|
+
strAttr("gen_ai.prompt.content", typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content) ?? "")
|
|
235
|
+
]
|
|
236
|
+
});
|
|
237
|
+
}
|
|
238
|
+
}
|
|
239
|
+
if (span.outputMessages) {
|
|
240
|
+
for (const msg of span.outputMessages) {
|
|
241
|
+
events.push({
|
|
242
|
+
name: "gen_ai.content.completion",
|
|
243
|
+
timeUnixNano: msToNano(span.endTime ?? span.startTime),
|
|
244
|
+
attributes: [
|
|
245
|
+
strAttr("gen_ai.completion.role", msg.role),
|
|
246
|
+
strAttr("gen_ai.completion.content", typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content) ?? "")
|
|
247
|
+
]
|
|
248
|
+
});
|
|
249
|
+
}
|
|
250
|
+
}
|
|
251
|
+
if (span.toolCalls) {
|
|
252
|
+
for (const tc of span.toolCalls) {
|
|
253
|
+
events.push({
|
|
254
|
+
name: "gen_ai.content.tool_call",
|
|
255
|
+
timeUnixNano: msToNano(span.endTime ?? span.startTime),
|
|
256
|
+
attributes: [
|
|
257
|
+
strAttr("gen_ai.tool_call.id", tc.id),
|
|
258
|
+
strAttr("gen_ai.tool_call.name", tc.name),
|
|
259
|
+
strAttr("gen_ai.tool_call.arguments", tc.arguments)
|
|
260
|
+
]
|
|
261
|
+
});
|
|
262
|
+
}
|
|
263
|
+
}
|
|
264
|
+
return {
|
|
265
|
+
traceId: padHex(span.traceId, 32),
|
|
266
|
+
spanId: padHex(span.spanId, 16),
|
|
267
|
+
parentSpanId: span.parentSpanId ? padHex(span.parentSpanId, 16) : void 0,
|
|
268
|
+
name: span.name,
|
|
269
|
+
kind: 3,
|
|
270
|
+
// SPAN_KIND_CLIENT
|
|
271
|
+
startTimeUnixNano: msToNano(span.startTime),
|
|
272
|
+
endTimeUnixNano: msToNano(span.endTime ?? span.startTime + (span.duration ?? 0)),
|
|
273
|
+
attributes: attrs,
|
|
274
|
+
events,
|
|
275
|
+
status: span.status === "error" ? { code: 2, message: span.errorMessage } : { code: 1 }
|
|
276
|
+
};
|
|
277
|
+
}
|
|
278
|
+
function spansToOtlp(spans, serviceName = "llmtap") {
|
|
279
|
+
return {
|
|
280
|
+
resourceSpans: [
|
|
281
|
+
{
|
|
282
|
+
resource: {
|
|
283
|
+
attributes: [
|
|
284
|
+
strAttr("service.name", serviceName),
|
|
285
|
+
strAttr("service.version", "0.1.0"),
|
|
286
|
+
strAttr("telemetry.sdk.name", "llmtap"),
|
|
287
|
+
strAttr("telemetry.sdk.language", "nodejs")
|
|
288
|
+
]
|
|
289
|
+
},
|
|
290
|
+
scopeSpans: [
|
|
291
|
+
{
|
|
292
|
+
scope: { name: "@llmtap/sdk", version: "0.1.0" },
|
|
293
|
+
spans: spans.map(convertSpanToOtlp)
|
|
294
|
+
}
|
|
295
|
+
]
|
|
296
|
+
}
|
|
297
|
+
]
|
|
298
|
+
};
|
|
299
|
+
}
|
|
300
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
301
|
+
0 && (module.exports = {
|
|
302
|
+
DB_DIR_NAME,
|
|
303
|
+
DB_FILE_NAME,
|
|
304
|
+
DEFAULT_COLLECTOR_PORT,
|
|
305
|
+
DEFAULT_COLLECTOR_URL,
|
|
306
|
+
DEFAULT_MAX_BUFFER_SIZE,
|
|
307
|
+
MODEL_PRICING,
|
|
308
|
+
ROUTES,
|
|
309
|
+
VERSION,
|
|
310
|
+
calculateCost,
|
|
311
|
+
clearPricingOverrides,
|
|
312
|
+
getAllPricing,
|
|
313
|
+
getModelPricing,
|
|
314
|
+
loadPricingFromURL,
|
|
315
|
+
setPricing,
|
|
316
|
+
setPricingBulk,
|
|
317
|
+
spansToOtlp
|
|
318
|
+
});
|
|
319
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/constants.ts","../src/pricing.ts","../src/otlp.ts"],"sourcesContent":["export * from \"./types.js\";\r\nexport * from \"./constants.js\";\r\nexport {\r\n MODEL_PRICING,\r\n getModelPricing,\r\n calculateCost,\r\n setPricing,\r\n setPricingBulk,\r\n loadPricingFromURL,\r\n clearPricingOverrides,\r\n getAllPricing,\r\n} from \"./pricing.js\";\r\nexport { spansToOtlp } from \"./otlp.js\";\r\nexport type { OtlpExportPayload } from \"./otlp.js\";\r\n","export const DEFAULT_COLLECTOR_PORT = 4781;\r\nexport const DEFAULT_COLLECTOR_URL = `http://localhost:${DEFAULT_COLLECTOR_PORT}`;\r\nexport const DEFAULT_MAX_BUFFER_SIZE = 1000;\r\nexport const DB_DIR_NAME = \".llmtap\";\r\nexport const DB_FILE_NAME = \"data.db\";\r\nexport const VERSION = \"0.1.0\";\r\n\r\n// API routes\r\nexport const ROUTES = {\r\n INGEST_SPANS: \"/v1/spans\",\r\n LIST_TRACES: \"/v1/traces\",\r\n GET_TRACE_SPANS: \"/v1/traces/:traceId/spans\",\r\n GET_STATS: \"/v1/stats\",\r\n GET_SESSIONS: \"/v1/sessions\",\r\n GET_DB_INFO: \"/v1/db-info\",\r\n SSE_STREAM: \"/v1/stream\",\r\n} as const;\r\n","import type { ModelPricing } from \"./types.js\";\n\n// Runtime pricing overrides set by the user.\nconst pricingOverrides: ModelPricing[] = [];\n\n/**\n * Set pricing for a single model. 
Overrides built-in pricing.\n */\nexport function setPricing(\n provider: string,\n model: string,\n inputCostPer1M: number,\n outputCostPer1M: number\n): void {\n const existing = pricingOverrides.findIndex(\n (p) => p.provider === provider.toLowerCase() && p.model === model.toLowerCase()\n );\n const entry: ModelPricing = {\n provider: provider.toLowerCase(),\n model: model.toLowerCase(),\n inputCostPer1M,\n outputCostPer1M,\n };\n if (existing >= 0) {\n pricingOverrides[existing] = entry;\n } else {\n pricingOverrides.push(entry);\n }\n}\n\n/**\n * Set pricing for multiple models at once.\n */\nexport function setPricingBulk(entries: ModelPricing[]): void {\n for (const entry of entries) {\n setPricing(entry.provider, entry.model, entry.inputCostPer1M, entry.outputCostPer1M);\n }\n}\n\n/**\n * Load pricing from a remote JSON URL.\n * Expects an array of ModelPricing objects.\n * Pass a fetch-compatible function (globalThis.fetch or node-fetch).\n */\nexport async function loadPricingFromURL(\n url: string,\n fetchFn: (url: string) => Promise<{ ok: boolean; status: number; json: () => Promise<unknown> }>\n): Promise<void> {\n const res = await fetchFn(url);\n if (!res.ok) throw new Error(`Failed to load pricing from ${url}: HTTP ${res.status}`);\n const data = (await res.json()) as ModelPricing[];\n setPricingBulk(data);\n}\n\n/**\n * Remove all runtime pricing overrides.\n */\nexport function clearPricingOverrides(): void {\n pricingOverrides.length = 0;\n}\n\n/**\n * Get all pricing entries (overrides merged on top of built-in).\n */\nexport function getAllPricing(): ModelPricing[] {\n const merged = new Map<string, ModelPricing>();\n for (const entry of MODEL_PRICING) {\n merged.set(`${entry.provider}::${entry.model}`, entry);\n }\n for (const entry of pricingOverrides) {\n merged.set(`${entry.provider}::${entry.model}`, entry);\n }\n return [...merged.values()];\n}\n\n/**\n * Pricing data for popular LLM models.\n * Costs are per 1 million tokens in USD.\n * 
Last updated: 2025-01\n */\nexport const MODEL_PRICING: ModelPricing[] = [\n // OpenAI\n { provider: \"openai\", model: \"gpt-4o\", inputCostPer1M: 2.5, outputCostPer1M: 10.0 },\n { provider: \"openai\", model: \"gpt-4o-2024-11-20\", inputCostPer1M: 2.5, outputCostPer1M: 10.0 },\n { provider: \"openai\", model: \"gpt-4o-2024-08-06\", inputCostPer1M: 2.5, outputCostPer1M: 10.0 },\n { provider: \"openai\", model: \"gpt-4o-mini\", inputCostPer1M: 0.15, outputCostPer1M: 0.6 },\n { provider: \"openai\", model: \"gpt-4o-mini-2024-07-18\", inputCostPer1M: 0.15, outputCostPer1M: 0.6 },\n { provider: \"openai\", model: \"gpt-4-turbo\", inputCostPer1M: 10.0, outputCostPer1M: 30.0 },\n { provider: \"openai\", model: \"gpt-4\", inputCostPer1M: 30.0, outputCostPer1M: 60.0 },\n { provider: \"openai\", model: \"gpt-3.5-turbo\", inputCostPer1M: 0.5, outputCostPer1M: 1.5 },\n { provider: \"openai\", model: \"o1\", inputCostPer1M: 15.0, outputCostPer1M: 60.0 },\n { provider: \"openai\", model: \"o1-mini\", inputCostPer1M: 3.0, outputCostPer1M: 12.0 },\n { provider: \"openai\", model: \"o1-preview\", inputCostPer1M: 15.0, outputCostPer1M: 60.0 },\n { provider: \"openai\", model: \"o3-mini\", inputCostPer1M: 1.1, outputCostPer1M: 4.4 },\n { provider: \"openai\", model: \"o3\", inputCostPer1M: 10.0, outputCostPer1M: 40.0 },\n { provider: \"openai\", model: \"o4-mini\", inputCostPer1M: 1.1, outputCostPer1M: 4.4 },\n\n // Anthropic\n { provider: \"anthropic\", model: \"claude-sonnet-4-20250514\", inputCostPer1M: 3.0, outputCostPer1M: 15.0 },\n { provider: \"anthropic\", model: \"claude-3-5-sonnet-20241022\", inputCostPer1M: 3.0, outputCostPer1M: 15.0 },\n { provider: \"anthropic\", model: \"claude-3-5-sonnet-20240620\", inputCostPer1M: 3.0, outputCostPer1M: 15.0 },\n { provider: \"anthropic\", model: \"claude-3-5-haiku-20241022\", inputCostPer1M: 0.8, outputCostPer1M: 4.0 },\n { provider: \"anthropic\", model: \"claude-3-opus-20240229\", inputCostPer1M: 15.0, outputCostPer1M: 75.0 },\n { 
provider: \"anthropic\", model: \"claude-3-haiku-20240307\", inputCostPer1M: 0.25, outputCostPer1M: 1.25 },\n { provider: \"anthropic\", model: \"claude-opus-4\", inputCostPer1M: 15.0, outputCostPer1M: 75.0 },\n { provider: \"anthropic\", model: \"claude-sonnet-4\", inputCostPer1M: 3.0, outputCostPer1M: 15.0 },\n\n // Google\n { provider: \"google\", model: \"gemini-2.0-flash\", inputCostPer1M: 0.1, outputCostPer1M: 0.4 },\n { provider: \"google\", model: \"gemini-2.0-flash-lite\", inputCostPer1M: 0.075, outputCostPer1M: 0.3 },\n { provider: \"google\", model: \"gemini-1.5-pro\", inputCostPer1M: 1.25, outputCostPer1M: 5.0 },\n { provider: \"google\", model: \"gemini-1.5-flash\", inputCostPer1M: 0.075, outputCostPer1M: 0.3 },\n { provider: \"google\", model: \"gemini-2.5-pro\", inputCostPer1M: 1.25, outputCostPer1M: 10.0 },\n { provider: \"google\", model: \"gemini-2.5-flash\", inputCostPer1M: 0.3, outputCostPer1M: 2.5 },\n\n // OpenAI-compatible providers\n { provider: \"deepseek\", model: \"deepseek-chat\", inputCostPer1M: 0.27, outputCostPer1M: 1.1 },\n { provider: \"deepseek\", model: \"deepseek-v3\", inputCostPer1M: 0.27, outputCostPer1M: 1.1 },\n { provider: \"deepseek\", model: \"deepseek-reasoner\", inputCostPer1M: 0.55, outputCostPer1M: 2.19 },\n { provider: \"deepseek\", model: \"deepseek-r1\", inputCostPer1M: 0.55, outputCostPer1M: 2.19 },\n { provider: \"deepseek\", model: \"deepseek-coder-v2\", inputCostPer1M: 0.14, outputCostPer1M: 0.28 },\n { provider: \"groq\", model: \"llama-3.3-70b-versatile\", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },\n { provider: \"groq\", model: \"llama-3.1-8b-instant\", inputCostPer1M: 0.05, outputCostPer1M: 0.08 },\n { provider: \"groq\", model: \"llama-3.1-70b-versatile\", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },\n { provider: \"groq\", model: \"llama-3-8b-8192\", inputCostPer1M: 0.05, outputCostPer1M: 0.08 },\n { provider: \"groq\", model: \"llama-3-70b-8192\", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },\n { 
provider: \"groq\", model: \"gemma2-9b-it\", inputCostPer1M: 0.20, outputCostPer1M: 0.20 },\n { provider: \"groq\", model: \"mixtral-8x7b-32768\", inputCostPer1M: 0.24, outputCostPer1M: 0.24 },\n { provider: \"xai\", model: \"grok-2\", inputCostPer1M: 2.0, outputCostPer1M: 10.0 },\n { provider: \"xai\", model: \"grok-3\", inputCostPer1M: 3.0, outputCostPer1M: 15.0 },\n\n // Ollama / local models (free)\n { provider: \"ollama\", model: \"llama3\", inputCostPer1M: 0, outputCostPer1M: 0 },\n { provider: \"ollama\", model: \"llama3.1\", inputCostPer1M: 0, outputCostPer1M: 0 },\n { provider: \"ollama\", model: \"mistral\", inputCostPer1M: 0, outputCostPer1M: 0 },\n { provider: \"ollama\", model: \"codellama\", inputCostPer1M: 0, outputCostPer1M: 0 },\n { provider: \"ollama\", model: \"deepseek-r1\", inputCostPer1M: 0, outputCostPer1M: 0 },\n];\n\n/**\n * Look up pricing for a model. Returns null if model is not found.\n * Tries exact match first, then prefix match for versioned models.\n */\nexport function getModelPricing(\n provider: string,\n model: string\n): ModelPricing | null {\n const normalized = model.toLowerCase();\n const providerNorm = provider.toLowerCase();\n\n // Check runtime overrides first (exact match only).\n const override = pricingOverrides.find(\n (p) => p.provider === providerNorm && p.model === normalized\n );\n if (override) return override;\n\n const providerPricing = MODEL_PRICING.filter(\n (pricing) => pricing.provider === providerNorm\n );\n\n // Exact match\n const exact = providerPricing.find((pricing) => pricing.model === normalized);\n if (exact) return exact;\n\n // Prefix match should prefer the most-specific model name first.\n const prefix = providerPricing\n .filter((pricing) => normalized.startsWith(pricing.model))\n .sort((left, right) => right.model.length - left.model.length)[0];\n if (prefix) return prefix;\n\n return null;\n}\n\n/**\n * Calculate cost for a given number of tokens\n */\nexport function calculateCost(\n 
provider: string,\n model: string,\n inputTokens: number,\n outputTokens: number\n): { inputCost: number; outputCost: number; totalCost: number } {\n const pricing = getModelPricing(provider, model);\n if (!pricing) {\n return { inputCost: 0, outputCost: 0, totalCost: 0 };\n }\n\n const inputCost = (inputTokens / 1_000_000) * pricing.inputCostPer1M;\n const outputCost = (outputTokens / 1_000_000) * pricing.outputCostPer1M;\n\n return {\n inputCost,\n outputCost,\n totalCost: inputCost + outputCost,\n };\n}\n","import type { Span } from \"./types.js\";\r\n\r\n/**\r\n * Convert LLMTap spans to OpenTelemetry OTLP JSON format.\r\n *\r\n * Follows the OTLP/HTTP JSON specification and the\r\n * OpenTelemetry GenAI Semantic Conventions for LLM spans.\r\n *\r\n * @see https://opentelemetry.io/docs/specs/otlp/\r\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/\r\n */\r\n\r\ninterface OtlpAttribute {\r\n key: string;\r\n value: { stringValue?: string; intValue?: string; doubleValue?: number; boolValue?: boolean };\r\n}\r\n\r\ninterface OtlpEvent {\r\n name: string;\r\n timeUnixNano: string;\r\n attributes: OtlpAttribute[];\r\n}\r\n\r\ninterface OtlpSpan {\r\n traceId: string;\r\n spanId: string;\r\n parentSpanId?: string;\r\n name: string;\r\n kind: number; // SPAN_KIND_CLIENT = 3\r\n startTimeUnixNano: string;\r\n endTimeUnixNano: string;\r\n attributes: OtlpAttribute[];\r\n events: OtlpEvent[];\r\n status: { code: number; message?: string }; // OK=1, ERROR=2\r\n}\r\n\r\ninterface OtlpResourceSpans {\r\n resource: {\r\n attributes: OtlpAttribute[];\r\n };\r\n scopeSpans: Array<{\r\n scope: { name: string; version: string };\r\n spans: OtlpSpan[];\r\n }>;\r\n}\r\n\r\nexport interface OtlpExportPayload {\r\n resourceSpans: OtlpResourceSpans[];\r\n}\r\n\r\nfunction strAttr(key: string, value: string): OtlpAttribute {\r\n return { key, value: { stringValue: value } };\r\n}\r\n\r\nfunction intAttr(key: string, value: number): OtlpAttribute {\r\n return { key, value: 
{ intValue: String(value) } };\r\n}\r\n\r\nfunction floatAttr(key: string, value: number): OtlpAttribute {\r\n return { key, value: { doubleValue: value } };\r\n}\r\n\r\nfunction msToNano(ms: number): string {\r\n return String(BigInt(ms) * BigInt(1_000_000));\r\n}\r\n\r\nfunction padHex(id: string, length: number): string {\r\n return id.padStart(length, \"0\").slice(0, length);\r\n}\r\n\r\nfunction convertSpanToOtlp(span: Span): OtlpSpan {\r\n const attrs: OtlpAttribute[] = [\r\n // GenAI Semantic Conventions\r\n strAttr(\"gen_ai.system\", span.providerName),\r\n strAttr(\"gen_ai.request.model\", span.requestModel),\r\n strAttr(\"gen_ai.operation.name\", span.operationName),\r\n ];\r\n\r\n if (span.responseModel) attrs.push(strAttr(\"gen_ai.response.model\", span.responseModel));\r\n if (span.inputTokens != null) attrs.push(intAttr(\"gen_ai.usage.input_tokens\", span.inputTokens));\r\n if (span.outputTokens != null) attrs.push(intAttr(\"gen_ai.usage.output_tokens\", span.outputTokens));\r\n if (span.totalTokens != null) attrs.push(intAttr(\"gen_ai.usage.total_tokens\", span.totalTokens));\r\n if (span.temperature != null) attrs.push(floatAttr(\"gen_ai.request.temperature\", span.temperature));\r\n if (span.maxTokens != null) attrs.push(intAttr(\"gen_ai.request.max_tokens\", span.maxTokens));\r\n if (span.topP != null) attrs.push(floatAttr(\"gen_ai.request.top_p\", span.topP));\r\n\r\n // LLMTap-specific attributes\r\n if (span.inputCost != null) attrs.push(floatAttr(\"llmtap.cost.input\", span.inputCost));\r\n if (span.outputCost != null) attrs.push(floatAttr(\"llmtap.cost.output\", span.outputCost));\r\n if (span.totalCost != null) attrs.push(floatAttr(\"llmtap.cost.total\", span.totalCost));\r\n if (span.sessionId) attrs.push(strAttr(\"session.id\", span.sessionId));\r\n if (span.userId) attrs.push(strAttr(\"enduser.id\", span.userId));\r\n if (span.errorType) attrs.push(strAttr(\"error.type\", span.errorType));\r\n\r\n // Custom tags -> attributes\r\n if 
(span.tags) {\r\n for (const [key, value] of Object.entries(span.tags)) {\r\n attrs.push(strAttr(`llmtap.tag.${key}`, value));\r\n }\r\n }\r\n\r\n // GenAI events for prompt/completion content\r\n const events: OtlpEvent[] = [];\r\n\r\n if (span.inputMessages) {\r\n for (const msg of span.inputMessages) {\r\n events.push({\r\n name: \"gen_ai.content.prompt\",\r\n timeUnixNano: msToNano(span.startTime),\r\n attributes: [\r\n strAttr(\"gen_ai.prompt.role\", msg.role),\r\n strAttr(\"gen_ai.prompt.content\", typeof msg.content === \"string\" ? msg.content : JSON.stringify(msg.content) ?? \"\"),\r\n ],\r\n });\r\n }\r\n }\r\n\r\n if (span.outputMessages) {\r\n for (const msg of span.outputMessages) {\r\n events.push({\r\n name: \"gen_ai.content.completion\",\r\n timeUnixNano: msToNano(span.endTime ?? span.startTime),\r\n attributes: [\r\n strAttr(\"gen_ai.completion.role\", msg.role),\r\n strAttr(\"gen_ai.completion.content\", typeof msg.content === \"string\" ? msg.content : JSON.stringify(msg.content) ?? \"\"),\r\n ],\r\n });\r\n }\r\n }\r\n\r\n if (span.toolCalls) {\r\n for (const tc of span.toolCalls) {\r\n events.push({\r\n name: \"gen_ai.content.tool_call\",\r\n timeUnixNano: msToNano(span.endTime ?? span.startTime),\r\n attributes: [\r\n strAttr(\"gen_ai.tool_call.id\", tc.id),\r\n strAttr(\"gen_ai.tool_call.name\", tc.name),\r\n strAttr(\"gen_ai.tool_call.arguments\", tc.arguments),\r\n ],\r\n });\r\n }\r\n }\r\n\r\n return {\r\n traceId: padHex(span.traceId, 32),\r\n spanId: padHex(span.spanId, 16),\r\n parentSpanId: span.parentSpanId ? padHex(span.parentSpanId, 16) : undefined,\r\n name: span.name,\r\n kind: 3, // SPAN_KIND_CLIENT\r\n startTimeUnixNano: msToNano(span.startTime),\r\n endTimeUnixNano: msToNano(span.endTime ?? span.startTime + (span.duration ?? 0)),\r\n attributes: attrs,\r\n events,\r\n status: span.status === \"error\"\r\n ? 
{ code: 2, message: span.errorMessage }\r\n : { code: 1 },\r\n };\r\n}\r\n\r\n/**\r\n * Convert an array of LLMTap spans to OTLP JSON export format.\r\n */\r\nexport function spansToOtlp(spans: Span[], serviceName = \"llmtap\"): OtlpExportPayload {\r\n return {\r\n resourceSpans: [\r\n {\r\n resource: {\r\n attributes: [\r\n strAttr(\"service.name\", serviceName),\r\n strAttr(\"service.version\", \"0.1.0\"),\r\n strAttr(\"telemetry.sdk.name\", \"llmtap\"),\r\n strAttr(\"telemetry.sdk.language\", \"nodejs\"),\r\n ],\r\n },\r\n scopeSpans: [\r\n {\r\n scope: { name: \"@llmtap/sdk\", version: \"0.1.0\" },\r\n spans: spans.map(convertSpanToOtlp),\r\n },\r\n ],\r\n },\r\n ],\r\n };\r\n}\r\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAO,IAAM,yBAAyB;AAC/B,IAAM,wBAAwB,oBAAoB,sBAAsB;AACxE,IAAM,0BAA0B;AAChC,IAAM,cAAc;AACpB,IAAM,eAAe;AACrB,IAAM,UAAU;AAGhB,IAAM,SAAS;AAAA,EACpB,cAAc;AAAA,EACd,aAAa;AAAA,EACb,iBAAiB;AAAA,EACjB,WAAW;AAAA,EACX,cAAc;AAAA,EACd,aAAa;AAAA,EACb,YAAY;AACd;;;ACbA,IAAM,mBAAmC,CAAC;AAKnC,SAAS,WACd,UACA,OACA,gBACA,iBACM;AACN,QAAM,WAAW,iBAAiB;AAAA,IAChC,CAAC,MAAM,EAAE,aAAa,SAAS,YAAY,KAAK,EAAE,UAAU,MAAM,YAAY;AAAA,EAChF;AACA,QAAM,QAAsB;AAAA,IAC1B,UAAU,SAAS,YAAY;AAAA,IAC/B,OAAO,MAAM,YAAY;AAAA,IACzB;AAAA,IACA;AAAA,EACF;AACA,MAAI,YAAY,GAAG;AACjB,qBAAiB,QAAQ,IAAI;AAAA,EAC/B,OAAO;AACL,qBAAiB,KAAK,KAAK;AAAA,EAC7B;AACF;AAKO,SAAS,eAAe,SAA+B;AAC5D,aAAW,SAAS,SAAS;AAC3B,eAAW,MAAM,UAAU,MAAM,OAAO,MAAM,gBAAgB,MAAM,eAAe;AAAA,EACrF;AACF;AAOA,eAAsB,mBACpB,KACA,SACe;AACf,QAAM,MAAM,MAAM,QAAQ,GAAG;AAC7B,MAAI,CAAC,IAAI,GAAI,OAAM,IAAI,MAAM,+BAA+B,GAAG,UAAU,IAAI,MAAM,EAAE;AACrF,QAAM,OAAQ,MAAM,IAAI,KAAK;AAC7B,iBAAe,IAAI;AACrB;AAKO,SAAS,wBAA8B;AAC5C,mBAAiB,SAAS;AAC5B;AAKO,SAAS,gBAAgC;AAC9C,QAAM,SAAS,oBAAI,IAA0B;AAC7C,aAAW,SAAS,eAAe;AACjC,WAAO,IAAI,GAAG,MAAM,QAAQ,KAAK,MAAM,KAAK,IAAI,KAAK;AAAA,EACvD;AACA,aAAW,SAAS,kBAAkB;AACpC,WAAO,IAAI,GAAG,MAAM,QAAQ,KAAK,MAAM,KAAK,IAAI,KAAK;AAAA,EACvD;AACA,SAA
O,CAAC,GAAG,OAAO,OAAO,CAAC;AAC5B;AAOO,IAAM,gBAAgC;AAAA;AAAA,EAE3C,EAAE,UAAU,UAAU,OAAO,UAAU,gBAAgB,KAAK,iBAAiB,GAAK;AAAA,EAClF,EAAE,UAAU,UAAU,OAAO,qBAAqB,gBAAgB,KAAK,iBAAiB,GAAK;AAAA,EAC7F,EAAE,UAAU,UAAU,OAAO,qBAAqB,gBAAgB,KAAK,iBAAiB,GAAK;AAAA,EAC7F,EAAE,UAAU,UAAU,OAAO,eAAe,gBAAgB,MAAM,iBAAiB,IAAI;AAAA,EACvF,EAAE,UAAU,UAAU,OAAO,0BAA0B,gBAAgB,MAAM,iBAAiB,IAAI;AAAA,EAClG,EAAE,UAAU,UAAU,OAAO,eAAe,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EACxF,EAAE,UAAU,UAAU,OAAO,SAAS,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EAClF,EAAE,UAAU,UAAU,OAAO,iBAAiB,gBAAgB,KAAK,iBAAiB,IAAI;AAAA,EACxF,EAAE,UAAU,UAAU,OAAO,MAAM,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EAC/E,EAAE,UAAU,UAAU,OAAO,WAAW,gBAAgB,GAAK,iBAAiB,GAAK;AAAA,EACnF,EAAE,UAAU,UAAU,OAAO,cAAc,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EACvF,EAAE,UAAU,UAAU,OAAO,WAAW,gBAAgB,KAAK,iBAAiB,IAAI;AAAA,EAClF,EAAE,UAAU,UAAU,OAAO,MAAM,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EAC/E,EAAE,UAAU,UAAU,OAAO,WAAW,gBAAgB,KAAK,iBAAiB,IAAI;AAAA;AAAA,EAGlF,EAAE,UAAU,aAAa,OAAO,4BAA4B,gBAAgB,GAAK,iBAAiB,GAAK;AAAA,EACvG,EAAE,UAAU,aAAa,OAAO,8BAA8B,gBAAgB,GAAK,iBAAiB,GAAK;AAAA,EACzG,EAAE,UAAU,aAAa,OAAO,8BAA8B,gBAAgB,GAAK,iBAAiB,GAAK;AAAA,EACzG,EAAE,UAAU,aAAa,OAAO,6BAA6B,gBAAgB,KAAK,iBAAiB,EAAI;AAAA,EACvG,EAAE,UAAU,aAAa,OAAO,0BAA0B,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EACtG,EAAE,UAAU,aAAa,OAAO,2BAA2B,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EACvG,EAAE,UAAU,aAAa,OAAO,iBAAiB,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EAC7F,EAAE,UAAU,aAAa,OAAO,mBAAmB,gBAAgB,GAAK,iBAAiB,GAAK;AAAA;AAAA,EAG9F,EAAE,UAAU,UAAU,OAAO,oBAAoB,gBAAgB,KAAK,iBAAiB,IAAI;AAAA,EAC3F,EAAE,UAAU,UAAU,OAAO,yBAAyB,gBAAgB,OAAO,iBAAiB,IAAI;AAAA,EAClG,EAAE,UAAU,UAAU,OAAO,kBAAkB,gBAAgB,MAAM,iBAAiB,EAAI;AAAA,EAC1F,EAAE,UAAU,UAAU,OAAO,oBAAoB,gBAAgB,OAAO,iBAAiB,IAAI;AAAA,EAC7F,EAAE,UAAU,UAAU,OAAO,kBAAkB,gBAAgB,MAAM,iBAAiB,GAAK;AAAA,EAC3F,EAAE,UAAU,UAAU,OAAO,oBAAoB,gBAAgB,KAAK,iBAAiB,IAAI;AAAA;AAAA,EAG3F,EAAE,UAAU,YAAY,OAAO,iBAAiB,gBAAgB,MAAM,iBAAiB,IAAI;AAAA,EAC3F,EAAE,UAAU,YAAY,OAAO,eAAe,gBAAgB,MAAM,iBAAiB,IAAI;AAAA,EACzF,EAAE,UAAU,YAAY,OAAO,qBAAqB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAChG,EAAE,UAAU,YAAY,OAAO
,eAAe,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAC1F,EAAE,UAAU,YAAY,OAAO,qBAAqB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAChG,EAAE,UAAU,QAAQ,OAAO,2BAA2B,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAClG,EAAE,UAAU,QAAQ,OAAO,wBAAwB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAC/F,EAAE,UAAU,QAAQ,OAAO,2BAA2B,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAClG,EAAE,UAAU,QAAQ,OAAO,mBAAmB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAC1F,EAAE,UAAU,QAAQ,OAAO,oBAAoB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAC3F,EAAE,UAAU,QAAQ,OAAO,gBAAgB,gBAAgB,KAAM,iBAAiB,IAAK;AAAA,EACvF,EAAE,UAAU,QAAQ,OAAO,sBAAsB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAC7F,EAAE,UAAU,OAAO,OAAO,UAAU,gBAAgB,GAAK,iBAAiB,GAAK;AAAA,EAC/E,EAAE,UAAU,OAAO,OAAO,UAAU,gBAAgB,GAAK,iBAAiB,GAAK;AAAA;AAAA,EAG/E,EAAE,UAAU,UAAU,OAAO,UAAU,gBAAgB,GAAG,iBAAiB,EAAE;AAAA,EAC7E,EAAE,UAAU,UAAU,OAAO,YAAY,gBAAgB,GAAG,iBAAiB,EAAE;AAAA,EAC/E,EAAE,UAAU,UAAU,OAAO,WAAW,gBAAgB,GAAG,iBAAiB,EAAE;AAAA,EAC9E,EAAE,UAAU,UAAU,OAAO,aAAa,gBAAgB,GAAG,iBAAiB,EAAE;AAAA,EAChF,EAAE,UAAU,UAAU,OAAO,eAAe,gBAAgB,GAAG,iBAAiB,EAAE;AACpF;AAMO,SAAS,gBACd,UACA,OACqB;AACrB,QAAM,aAAa,MAAM,YAAY;AACrC,QAAM,eAAe,SAAS,YAAY;AAG1C,QAAM,WAAW,iBAAiB;AAAA,IAChC,CAAC,MAAM,EAAE,aAAa,gBAAgB,EAAE,UAAU;AAAA,EACpD;AACA,MAAI,SAAU,QAAO;AAErB,QAAM,kBAAkB,cAAc;AAAA,IACpC,CAAC,YAAY,QAAQ,aAAa;AAAA,EACpC;AAGA,QAAM,QAAQ,gBAAgB,KAAK,CAAC,YAAY,QAAQ,UAAU,UAAU;AAC5E,MAAI,MAAO,QAAO;AAGlB,QAAM,SAAS,gBACZ,OAAO,CAAC,YAAY,WAAW,WAAW,QAAQ,KAAK,CAAC,EACxD,KAAK,CAAC,MAAM,UAAU,MAAM,MAAM,SAAS,KAAK,MAAM,MAAM,EAAE,CAAC;AAClE,MAAI,OAAQ,QAAO;AAEnB,SAAO;AACT;AAKO,SAAS,cACd,UACA,OACA,aACA,cAC8D;AAC9D,QAAM,UAAU,gBAAgB,UAAU,KAAK;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,WAAW,GAAG,YAAY,GAAG,WAAW,EAAE;AAAA,EACrD;AAEA,QAAM,YAAa,cAAc,MAAa,QAAQ;AACtD,QAAM,aAAc,eAAe,MAAa,QAAQ;AAExD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,WAAW,YAAY;AAAA,EACzB;AACF;;;ACjJA,SAAS,QAAQ,KAAa,OAA8B;AAC1D,SAAO,EAAE,KAAK,OAAO,EAAE,aAAa,MAAM,EAAE;AAC9C;AAEA,SAAS,QAAQ,KAAa,OAA8B;AAC1D,SAAO,EAAE,KAAK,OAAO,EAAE,UAAU,OAAO,KAAK,EAAE,EAAE;AACnD;AAEA,SAAS,UAAU,KAAa,OAA8B;AAC5D,SAAO,EAAE,KAAK,OAAO,EAAE,aAAa,MAAM,EAAE;AAC9C;AAEA,SAAS,SAAS,IAAoB;AACpC,SAAO,OAAO,O
AAO,EAAE,IAAI,OAAO,GAAS,CAAC;AAC9C;AAEA,SAAS,OAAO,IAAY,QAAwB;AAClD,SAAO,GAAG,SAAS,QAAQ,GAAG,EAAE,MAAM,GAAG,MAAM;AACjD;AAEA,SAAS,kBAAkB,MAAsB;AAC/C,QAAM,QAAyB;AAAA;AAAA,IAE7B,QAAQ,iBAAiB,KAAK,YAAY;AAAA,IAC1C,QAAQ,wBAAwB,KAAK,YAAY;AAAA,IACjD,QAAQ,yBAAyB,KAAK,aAAa;AAAA,EACrD;AAEA,MAAI,KAAK,cAAe,OAAM,KAAK,QAAQ,yBAAyB,KAAK,aAAa,CAAC;AACvF,MAAI,KAAK,eAAe,KAAM,OAAM,KAAK,QAAQ,6BAA6B,KAAK,WAAW,CAAC;AAC/F,MAAI,KAAK,gBAAgB,KAAM,OAAM,KAAK,QAAQ,8BAA8B,KAAK,YAAY,CAAC;AAClG,MAAI,KAAK,eAAe,KAAM,OAAM,KAAK,QAAQ,6BAA6B,KAAK,WAAW,CAAC;AAC/F,MAAI,KAAK,eAAe,KAAM,OAAM,KAAK,UAAU,8BAA8B,KAAK,WAAW,CAAC;AAClG,MAAI,KAAK,aAAa,KAAM,OAAM,KAAK,QAAQ,6BAA6B,KAAK,SAAS,CAAC;AAC3F,MAAI,KAAK,QAAQ,KAAM,OAAM,KAAK,UAAU,wBAAwB,KAAK,IAAI,CAAC;AAG9E,MAAI,KAAK,aAAa,KAAM,OAAM,KAAK,UAAU,qBAAqB,KAAK,SAAS,CAAC;AACrF,MAAI,KAAK,cAAc,KAAM,OAAM,KAAK,UAAU,sBAAsB,KAAK,UAAU,CAAC;AACxF,MAAI,KAAK,aAAa,KAAM,OAAM,KAAK,UAAU,qBAAqB,KAAK,SAAS,CAAC;AACrF,MAAI,KAAK,UAAW,OAAM,KAAK,QAAQ,cAAc,KAAK,SAAS,CAAC;AACpE,MAAI,KAAK,OAAQ,OAAM,KAAK,QAAQ,cAAc,KAAK,MAAM,CAAC;AAC9D,MAAI,KAAK,UAAW,OAAM,KAAK,QAAQ,cAAc,KAAK,SAAS,CAAC;AAGpE,MAAI,KAAK,MAAM;AACb,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,KAAK,IAAI,GAAG;AACpD,YAAM,KAAK,QAAQ,cAAc,GAAG,IAAI,KAAK,CAAC;AAAA,IAChD;AAAA,EACF;AAGA,QAAM,SAAsB,CAAC;AAE7B,MAAI,KAAK,eAAe;AACtB,eAAW,OAAO,KAAK,eAAe;AACpC,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,cAAc,SAAS,KAAK,SAAS;AAAA,QACrC,YAAY;AAAA,UACV,QAAQ,sBAAsB,IAAI,IAAI;AAAA,UACtC,QAAQ,yBAAyB,OAAO,IAAI,YAAY,WAAW,IAAI,UAAU,KAAK,UAAU,IAAI,OAAO,KAAK,EAAE;AAAA,QACpH;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,KAAK,gBAAgB;AACvB,eAAW,OAAO,KAAK,gBAAgB;AACrC,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,cAAc,SAAS,KAAK,WAAW,KAAK,SAAS;AAAA,QACrD,YAAY;AAAA,UACV,QAAQ,0BAA0B,IAAI,IAAI;AAAA,UAC1C,QAAQ,6BAA6B,OAAO,IAAI,YAAY,WAAW,IAAI,UAAU,KAAK,UAAU,IAAI,OAAO,KAAK,EAAE;AAAA,QACxH;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,KAAK,WAAW;AAClB,eAAW,MAAM,KAAK,WAAW;AAC/B,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,cAAc,SAAS,KAAK,WAAW,KAAK,SAAS;AAAA,QACrD,YAAY;AAAA,UACV,QAAQ,uBAAuB,GAAG,EAAE;AAAA,UACpC,QAAQ,yBAAy
B,GAAG,IAAI;AAAA,UACxC,QAAQ,8BAA8B,GAAG,SAAS;AAAA,QACpD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAS,OAAO,KAAK,SAAS,EAAE;AAAA,IAChC,QAAQ,OAAO,KAAK,QAAQ,EAAE;AAAA,IAC9B,cAAc,KAAK,eAAe,OAAO,KAAK,cAAc,EAAE,IAAI;AAAA,IAClE,MAAM,KAAK;AAAA,IACX,MAAM;AAAA;AAAA,IACN,mBAAmB,SAAS,KAAK,SAAS;AAAA,IAC1C,iBAAiB,SAAS,KAAK,WAAW,KAAK,aAAa,KAAK,YAAY,EAAE;AAAA,IAC/E,YAAY;AAAA,IACZ;AAAA,IACA,QAAQ,KAAK,WAAW,UACpB,EAAE,MAAM,GAAG,SAAS,KAAK,aAAa,IACtC,EAAE,MAAM,EAAE;AAAA,EAChB;AACF;AAKO,SAAS,YAAY,OAAe,cAAc,UAA6B;AACpF,SAAO;AAAA,IACL,eAAe;AAAA,MACb;AAAA,QACE,UAAU;AAAA,UACR,YAAY;AAAA,YACV,QAAQ,gBAAgB,WAAW;AAAA,YACnC,QAAQ,mBAAmB,OAAO;AAAA,YAClC,QAAQ,sBAAsB,QAAQ;AAAA,YACtC,QAAQ,0BAA0B,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA,YAAY;AAAA,UACV;AAAA,YACE,OAAO,EAAE,MAAM,eAAe,SAAS,QAAQ;AAAA,YAC/C,OAAO,MAAM,IAAI,iBAAiB;AAAA,UACpC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,277 @@
|
|
|
1
|
+
// src/constants.ts
|
|
2
|
+
var DEFAULT_COLLECTOR_PORT = 4781;
|
|
3
|
+
var DEFAULT_COLLECTOR_URL = `http://localhost:${DEFAULT_COLLECTOR_PORT}`;
|
|
4
|
+
var DEFAULT_MAX_BUFFER_SIZE = 1e3;
|
|
5
|
+
var DB_DIR_NAME = ".llmtap";
|
|
6
|
+
var DB_FILE_NAME = "data.db";
|
|
7
|
+
var VERSION = "0.1.0";
|
|
8
|
+
var ROUTES = {
|
|
9
|
+
INGEST_SPANS: "/v1/spans",
|
|
10
|
+
LIST_TRACES: "/v1/traces",
|
|
11
|
+
GET_TRACE_SPANS: "/v1/traces/:traceId/spans",
|
|
12
|
+
GET_STATS: "/v1/stats",
|
|
13
|
+
GET_SESSIONS: "/v1/sessions",
|
|
14
|
+
GET_DB_INFO: "/v1/db-info",
|
|
15
|
+
SSE_STREAM: "/v1/stream"
|
|
16
|
+
};
|
|
17
|
+
|
|
18
|
+
// src/pricing.ts
|
|
19
|
+
var pricingOverrides = [];
|
|
20
|
+
function setPricing(provider, model, inputCostPer1M, outputCostPer1M) {
|
|
21
|
+
const existing = pricingOverrides.findIndex(
|
|
22
|
+
(p) => p.provider === provider.toLowerCase() && p.model === model.toLowerCase()
|
|
23
|
+
);
|
|
24
|
+
const entry = {
|
|
25
|
+
provider: provider.toLowerCase(),
|
|
26
|
+
model: model.toLowerCase(),
|
|
27
|
+
inputCostPer1M,
|
|
28
|
+
outputCostPer1M
|
|
29
|
+
};
|
|
30
|
+
if (existing >= 0) {
|
|
31
|
+
pricingOverrides[existing] = entry;
|
|
32
|
+
} else {
|
|
33
|
+
pricingOverrides.push(entry);
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
function setPricingBulk(entries) {
|
|
37
|
+
for (const entry of entries) {
|
|
38
|
+
setPricing(entry.provider, entry.model, entry.inputCostPer1M, entry.outputCostPer1M);
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
async function loadPricingFromURL(url, fetchFn) {
|
|
42
|
+
const res = await fetchFn(url);
|
|
43
|
+
if (!res.ok) throw new Error(`Failed to load pricing from ${url}: HTTP ${res.status}`);
|
|
44
|
+
const data = await res.json();
|
|
45
|
+
setPricingBulk(data);
|
|
46
|
+
}
|
|
47
|
+
function clearPricingOverrides() {
|
|
48
|
+
pricingOverrides.length = 0;
|
|
49
|
+
}
|
|
50
|
+
function getAllPricing() {
|
|
51
|
+
const merged = /* @__PURE__ */ new Map();
|
|
52
|
+
for (const entry of MODEL_PRICING) {
|
|
53
|
+
merged.set(`${entry.provider}::${entry.model}`, entry);
|
|
54
|
+
}
|
|
55
|
+
for (const entry of pricingOverrides) {
|
|
56
|
+
merged.set(`${entry.provider}::${entry.model}`, entry);
|
|
57
|
+
}
|
|
58
|
+
return [...merged.values()];
|
|
59
|
+
}
|
|
60
|
+
var MODEL_PRICING = [
|
|
61
|
+
// OpenAI
|
|
62
|
+
{ provider: "openai", model: "gpt-4o", inputCostPer1M: 2.5, outputCostPer1M: 10 },
|
|
63
|
+
{ provider: "openai", model: "gpt-4o-2024-11-20", inputCostPer1M: 2.5, outputCostPer1M: 10 },
|
|
64
|
+
{ provider: "openai", model: "gpt-4o-2024-08-06", inputCostPer1M: 2.5, outputCostPer1M: 10 },
|
|
65
|
+
{ provider: "openai", model: "gpt-4o-mini", inputCostPer1M: 0.15, outputCostPer1M: 0.6 },
|
|
66
|
+
{ provider: "openai", model: "gpt-4o-mini-2024-07-18", inputCostPer1M: 0.15, outputCostPer1M: 0.6 },
|
|
67
|
+
{ provider: "openai", model: "gpt-4-turbo", inputCostPer1M: 10, outputCostPer1M: 30 },
|
|
68
|
+
{ provider: "openai", model: "gpt-4", inputCostPer1M: 30, outputCostPer1M: 60 },
|
|
69
|
+
{ provider: "openai", model: "gpt-3.5-turbo", inputCostPer1M: 0.5, outputCostPer1M: 1.5 },
|
|
70
|
+
{ provider: "openai", model: "o1", inputCostPer1M: 15, outputCostPer1M: 60 },
|
|
71
|
+
{ provider: "openai", model: "o1-mini", inputCostPer1M: 3, outputCostPer1M: 12 },
|
|
72
|
+
{ provider: "openai", model: "o1-preview", inputCostPer1M: 15, outputCostPer1M: 60 },
|
|
73
|
+
{ provider: "openai", model: "o3-mini", inputCostPer1M: 1.1, outputCostPer1M: 4.4 },
|
|
74
|
+
{ provider: "openai", model: "o3", inputCostPer1M: 10, outputCostPer1M: 40 },
|
|
75
|
+
{ provider: "openai", model: "o4-mini", inputCostPer1M: 1.1, outputCostPer1M: 4.4 },
|
|
76
|
+
// Anthropic
|
|
77
|
+
{ provider: "anthropic", model: "claude-sonnet-4-20250514", inputCostPer1M: 3, outputCostPer1M: 15 },
|
|
78
|
+
{ provider: "anthropic", model: "claude-3-5-sonnet-20241022", inputCostPer1M: 3, outputCostPer1M: 15 },
|
|
79
|
+
{ provider: "anthropic", model: "claude-3-5-sonnet-20240620", inputCostPer1M: 3, outputCostPer1M: 15 },
|
|
80
|
+
{ provider: "anthropic", model: "claude-3-5-haiku-20241022", inputCostPer1M: 0.8, outputCostPer1M: 4 },
|
|
81
|
+
{ provider: "anthropic", model: "claude-3-opus-20240229", inputCostPer1M: 15, outputCostPer1M: 75 },
|
|
82
|
+
{ provider: "anthropic", model: "claude-3-haiku-20240307", inputCostPer1M: 0.25, outputCostPer1M: 1.25 },
|
|
83
|
+
{ provider: "anthropic", model: "claude-opus-4", inputCostPer1M: 15, outputCostPer1M: 75 },
|
|
84
|
+
{ provider: "anthropic", model: "claude-sonnet-4", inputCostPer1M: 3, outputCostPer1M: 15 },
|
|
85
|
+
// Google
|
|
86
|
+
{ provider: "google", model: "gemini-2.0-flash", inputCostPer1M: 0.1, outputCostPer1M: 0.4 },
|
|
87
|
+
{ provider: "google", model: "gemini-2.0-flash-lite", inputCostPer1M: 0.075, outputCostPer1M: 0.3 },
|
|
88
|
+
{ provider: "google", model: "gemini-1.5-pro", inputCostPer1M: 1.25, outputCostPer1M: 5 },
|
|
89
|
+
{ provider: "google", model: "gemini-1.5-flash", inputCostPer1M: 0.075, outputCostPer1M: 0.3 },
|
|
90
|
+
{ provider: "google", model: "gemini-2.5-pro", inputCostPer1M: 1.25, outputCostPer1M: 10 },
|
|
91
|
+
{ provider: "google", model: "gemini-2.5-flash", inputCostPer1M: 0.3, outputCostPer1M: 2.5 },
|
|
92
|
+
// OpenAI-compatible providers
|
|
93
|
+
{ provider: "deepseek", model: "deepseek-chat", inputCostPer1M: 0.27, outputCostPer1M: 1.1 },
|
|
94
|
+
{ provider: "deepseek", model: "deepseek-v3", inputCostPer1M: 0.27, outputCostPer1M: 1.1 },
|
|
95
|
+
{ provider: "deepseek", model: "deepseek-reasoner", inputCostPer1M: 0.55, outputCostPer1M: 2.19 },
|
|
96
|
+
{ provider: "deepseek", model: "deepseek-r1", inputCostPer1M: 0.55, outputCostPer1M: 2.19 },
|
|
97
|
+
{ provider: "deepseek", model: "deepseek-coder-v2", inputCostPer1M: 0.14, outputCostPer1M: 0.28 },
|
|
98
|
+
{ provider: "groq", model: "llama-3.3-70b-versatile", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },
|
|
99
|
+
{ provider: "groq", model: "llama-3.1-8b-instant", inputCostPer1M: 0.05, outputCostPer1M: 0.08 },
|
|
100
|
+
{ provider: "groq", model: "llama-3.1-70b-versatile", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },
|
|
101
|
+
{ provider: "groq", model: "llama-3-8b-8192", inputCostPer1M: 0.05, outputCostPer1M: 0.08 },
|
|
102
|
+
{ provider: "groq", model: "llama-3-70b-8192", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },
|
|
103
|
+
{ provider: "groq", model: "gemma2-9b-it", inputCostPer1M: 0.2, outputCostPer1M: 0.2 },
|
|
104
|
+
{ provider: "groq", model: "mixtral-8x7b-32768", inputCostPer1M: 0.24, outputCostPer1M: 0.24 },
|
|
105
|
+
{ provider: "xai", model: "grok-2", inputCostPer1M: 2, outputCostPer1M: 10 },
|
|
106
|
+
{ provider: "xai", model: "grok-3", inputCostPer1M: 3, outputCostPer1M: 15 },
|
|
107
|
+
// Ollama / local models (free)
|
|
108
|
+
{ provider: "ollama", model: "llama3", inputCostPer1M: 0, outputCostPer1M: 0 },
|
|
109
|
+
{ provider: "ollama", model: "llama3.1", inputCostPer1M: 0, outputCostPer1M: 0 },
|
|
110
|
+
{ provider: "ollama", model: "mistral", inputCostPer1M: 0, outputCostPer1M: 0 },
|
|
111
|
+
{ provider: "ollama", model: "codellama", inputCostPer1M: 0, outputCostPer1M: 0 },
|
|
112
|
+
{ provider: "ollama", model: "deepseek-r1", inputCostPer1M: 0, outputCostPer1M: 0 }
|
|
113
|
+
];
|
|
114
|
+
function getModelPricing(provider, model) {
|
|
115
|
+
const normalized = model.toLowerCase();
|
|
116
|
+
const providerNorm = provider.toLowerCase();
|
|
117
|
+
const override = pricingOverrides.find(
|
|
118
|
+
(p) => p.provider === providerNorm && p.model === normalized
|
|
119
|
+
);
|
|
120
|
+
if (override) return override;
|
|
121
|
+
const providerPricing = MODEL_PRICING.filter(
|
|
122
|
+
(pricing) => pricing.provider === providerNorm
|
|
123
|
+
);
|
|
124
|
+
const exact = providerPricing.find((pricing) => pricing.model === normalized);
|
|
125
|
+
if (exact) return exact;
|
|
126
|
+
const prefix = providerPricing.filter((pricing) => normalized.startsWith(pricing.model)).sort((left, right) => right.model.length - left.model.length)[0];
|
|
127
|
+
if (prefix) return prefix;
|
|
128
|
+
return null;
|
|
129
|
+
}
|
|
130
|
+
function calculateCost(provider, model, inputTokens, outputTokens) {
|
|
131
|
+
const pricing = getModelPricing(provider, model);
|
|
132
|
+
if (!pricing) {
|
|
133
|
+
return { inputCost: 0, outputCost: 0, totalCost: 0 };
|
|
134
|
+
}
|
|
135
|
+
const inputCost = inputTokens / 1e6 * pricing.inputCostPer1M;
|
|
136
|
+
const outputCost = outputTokens / 1e6 * pricing.outputCostPer1M;
|
|
137
|
+
return {
|
|
138
|
+
inputCost,
|
|
139
|
+
outputCost,
|
|
140
|
+
totalCost: inputCost + outputCost
|
|
141
|
+
};
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
// src/otlp.ts
|
|
145
|
+
function strAttr(key, value) {
|
|
146
|
+
return { key, value: { stringValue: value } };
|
|
147
|
+
}
|
|
148
|
+
function intAttr(key, value) {
|
|
149
|
+
return { key, value: { intValue: String(value) } };
|
|
150
|
+
}
|
|
151
|
+
function floatAttr(key, value) {
|
|
152
|
+
return { key, value: { doubleValue: value } };
|
|
153
|
+
}
|
|
154
|
+
function msToNano(ms) {
|
|
155
|
+
return String(BigInt(ms) * BigInt(1e6));
|
|
156
|
+
}
|
|
157
|
+
function padHex(id, length) {
|
|
158
|
+
return id.padStart(length, "0").slice(0, length);
|
|
159
|
+
}
|
|
160
|
+
function convertSpanToOtlp(span) {
|
|
161
|
+
const attrs = [
|
|
162
|
+
// GenAI Semantic Conventions
|
|
163
|
+
strAttr("gen_ai.system", span.providerName),
|
|
164
|
+
strAttr("gen_ai.request.model", span.requestModel),
|
|
165
|
+
strAttr("gen_ai.operation.name", span.operationName)
|
|
166
|
+
];
|
|
167
|
+
if (span.responseModel) attrs.push(strAttr("gen_ai.response.model", span.responseModel));
|
|
168
|
+
if (span.inputTokens != null) attrs.push(intAttr("gen_ai.usage.input_tokens", span.inputTokens));
|
|
169
|
+
if (span.outputTokens != null) attrs.push(intAttr("gen_ai.usage.output_tokens", span.outputTokens));
|
|
170
|
+
if (span.totalTokens != null) attrs.push(intAttr("gen_ai.usage.total_tokens", span.totalTokens));
|
|
171
|
+
if (span.temperature != null) attrs.push(floatAttr("gen_ai.request.temperature", span.temperature));
|
|
172
|
+
if (span.maxTokens != null) attrs.push(intAttr("gen_ai.request.max_tokens", span.maxTokens));
|
|
173
|
+
if (span.topP != null) attrs.push(floatAttr("gen_ai.request.top_p", span.topP));
|
|
174
|
+
if (span.inputCost != null) attrs.push(floatAttr("llmtap.cost.input", span.inputCost));
|
|
175
|
+
if (span.outputCost != null) attrs.push(floatAttr("llmtap.cost.output", span.outputCost));
|
|
176
|
+
if (span.totalCost != null) attrs.push(floatAttr("llmtap.cost.total", span.totalCost));
|
|
177
|
+
if (span.sessionId) attrs.push(strAttr("session.id", span.sessionId));
|
|
178
|
+
if (span.userId) attrs.push(strAttr("enduser.id", span.userId));
|
|
179
|
+
if (span.errorType) attrs.push(strAttr("error.type", span.errorType));
|
|
180
|
+
if (span.tags) {
|
|
181
|
+
for (const [key, value] of Object.entries(span.tags)) {
|
|
182
|
+
attrs.push(strAttr(`llmtap.tag.${key}`, value));
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
const events = [];
|
|
186
|
+
if (span.inputMessages) {
|
|
187
|
+
for (const msg of span.inputMessages) {
|
|
188
|
+
events.push({
|
|
189
|
+
name: "gen_ai.content.prompt",
|
|
190
|
+
timeUnixNano: msToNano(span.startTime),
|
|
191
|
+
attributes: [
|
|
192
|
+
strAttr("gen_ai.prompt.role", msg.role),
|
|
193
|
+
strAttr("gen_ai.prompt.content", typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content) ?? "")
|
|
194
|
+
]
|
|
195
|
+
});
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
if (span.outputMessages) {
|
|
199
|
+
for (const msg of span.outputMessages) {
|
|
200
|
+
events.push({
|
|
201
|
+
name: "gen_ai.content.completion",
|
|
202
|
+
timeUnixNano: msToNano(span.endTime ?? span.startTime),
|
|
203
|
+
attributes: [
|
|
204
|
+
strAttr("gen_ai.completion.role", msg.role),
|
|
205
|
+
strAttr("gen_ai.completion.content", typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content) ?? "")
|
|
206
|
+
]
|
|
207
|
+
});
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
if (span.toolCalls) {
|
|
211
|
+
for (const tc of span.toolCalls) {
|
|
212
|
+
events.push({
|
|
213
|
+
name: "gen_ai.content.tool_call",
|
|
214
|
+
timeUnixNano: msToNano(span.endTime ?? span.startTime),
|
|
215
|
+
attributes: [
|
|
216
|
+
strAttr("gen_ai.tool_call.id", tc.id),
|
|
217
|
+
strAttr("gen_ai.tool_call.name", tc.name),
|
|
218
|
+
strAttr("gen_ai.tool_call.arguments", tc.arguments)
|
|
219
|
+
]
|
|
220
|
+
});
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
return {
|
|
224
|
+
traceId: padHex(span.traceId, 32),
|
|
225
|
+
spanId: padHex(span.spanId, 16),
|
|
226
|
+
parentSpanId: span.parentSpanId ? padHex(span.parentSpanId, 16) : void 0,
|
|
227
|
+
name: span.name,
|
|
228
|
+
kind: 3,
|
|
229
|
+
// SPAN_KIND_CLIENT
|
|
230
|
+
startTimeUnixNano: msToNano(span.startTime),
|
|
231
|
+
endTimeUnixNano: msToNano(span.endTime ?? span.startTime + (span.duration ?? 0)),
|
|
232
|
+
attributes: attrs,
|
|
233
|
+
events,
|
|
234
|
+
status: span.status === "error" ? { code: 2, message: span.errorMessage } : { code: 1 }
|
|
235
|
+
};
|
|
236
|
+
}
|
|
237
|
+
function spansToOtlp(spans, serviceName = "llmtap") {
|
|
238
|
+
return {
|
|
239
|
+
resourceSpans: [
|
|
240
|
+
{
|
|
241
|
+
resource: {
|
|
242
|
+
attributes: [
|
|
243
|
+
strAttr("service.name", serviceName),
|
|
244
|
+
strAttr("service.version", "0.1.0"),
|
|
245
|
+
strAttr("telemetry.sdk.name", "llmtap"),
|
|
246
|
+
strAttr("telemetry.sdk.language", "nodejs")
|
|
247
|
+
]
|
|
248
|
+
},
|
|
249
|
+
scopeSpans: [
|
|
250
|
+
{
|
|
251
|
+
scope: { name: "@llmtap/sdk", version: "0.1.0" },
|
|
252
|
+
spans: spans.map(convertSpanToOtlp)
|
|
253
|
+
}
|
|
254
|
+
]
|
|
255
|
+
}
|
|
256
|
+
]
|
|
257
|
+
};
|
|
258
|
+
}
|
|
259
|
+
export {
|
|
260
|
+
DB_DIR_NAME,
|
|
261
|
+
DB_FILE_NAME,
|
|
262
|
+
DEFAULT_COLLECTOR_PORT,
|
|
263
|
+
DEFAULT_COLLECTOR_URL,
|
|
264
|
+
DEFAULT_MAX_BUFFER_SIZE,
|
|
265
|
+
MODEL_PRICING,
|
|
266
|
+
ROUTES,
|
|
267
|
+
VERSION,
|
|
268
|
+
calculateCost,
|
|
269
|
+
clearPricingOverrides,
|
|
270
|
+
getAllPricing,
|
|
271
|
+
getModelPricing,
|
|
272
|
+
loadPricingFromURL,
|
|
273
|
+
setPricing,
|
|
274
|
+
setPricingBulk,
|
|
275
|
+
spansToOtlp
|
|
276
|
+
};
|
|
277
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/constants.ts","../src/pricing.ts","../src/otlp.ts"],"sourcesContent":["export const DEFAULT_COLLECTOR_PORT = 4781;\r\nexport const DEFAULT_COLLECTOR_URL = `http://localhost:${DEFAULT_COLLECTOR_PORT}`;\r\nexport const DEFAULT_MAX_BUFFER_SIZE = 1000;\r\nexport const DB_DIR_NAME = \".llmtap\";\r\nexport const DB_FILE_NAME = \"data.db\";\r\nexport const VERSION = \"0.1.0\";\r\n\r\n// API routes\r\nexport const ROUTES = {\r\n INGEST_SPANS: \"/v1/spans\",\r\n LIST_TRACES: \"/v1/traces\",\r\n GET_TRACE_SPANS: \"/v1/traces/:traceId/spans\",\r\n GET_STATS: \"/v1/stats\",\r\n GET_SESSIONS: \"/v1/sessions\",\r\n GET_DB_INFO: \"/v1/db-info\",\r\n SSE_STREAM: \"/v1/stream\",\r\n} as const;\r\n","import type { ModelPricing } from \"./types.js\";\n\n// Runtime pricing overrides set by the user.\nconst pricingOverrides: ModelPricing[] = [];\n\n/**\n * Set pricing for a single model. Overrides built-in pricing.\n */\nexport function setPricing(\n provider: string,\n model: string,\n inputCostPer1M: number,\n outputCostPer1M: number\n): void {\n const existing = pricingOverrides.findIndex(\n (p) => p.provider === provider.toLowerCase() && p.model === model.toLowerCase()\n );\n const entry: ModelPricing = {\n provider: provider.toLowerCase(),\n model: model.toLowerCase(),\n inputCostPer1M,\n outputCostPer1M,\n };\n if (existing >= 0) {\n pricingOverrides[existing] = entry;\n } else {\n pricingOverrides.push(entry);\n }\n}\n\n/**\n * Set pricing for multiple models at once.\n */\nexport function setPricingBulk(entries: ModelPricing[]): void {\n for (const entry of entries) {\n setPricing(entry.provider, entry.model, entry.inputCostPer1M, entry.outputCostPer1M);\n }\n}\n\n/**\n * Load pricing from a remote JSON URL.\n * Expects an array of ModelPricing objects.\n * Pass a fetch-compatible function (globalThis.fetch or node-fetch).\n */\nexport async function loadPricingFromURL(\n url: string,\n fetchFn: (url: string) => Promise<{ ok: boolean; status: 
number; json: () => Promise<unknown> }>\n): Promise<void> {\n const res = await fetchFn(url);\n if (!res.ok) throw new Error(`Failed to load pricing from ${url}: HTTP ${res.status}`);\n const data = (await res.json()) as ModelPricing[];\n setPricingBulk(data);\n}\n\n/**\n * Remove all runtime pricing overrides.\n */\nexport function clearPricingOverrides(): void {\n pricingOverrides.length = 0;\n}\n\n/**\n * Get all pricing entries (overrides merged on top of built-in).\n */\nexport function getAllPricing(): ModelPricing[] {\n const merged = new Map<string, ModelPricing>();\n for (const entry of MODEL_PRICING) {\n merged.set(`${entry.provider}::${entry.model}`, entry);\n }\n for (const entry of pricingOverrides) {\n merged.set(`${entry.provider}::${entry.model}`, entry);\n }\n return [...merged.values()];\n}\n\n/**\n * Pricing data for popular LLM models.\n * Costs are per 1 million tokens in USD.\n * Last updated: 2025-01\n */\nexport const MODEL_PRICING: ModelPricing[] = [\n // OpenAI\n { provider: \"openai\", model: \"gpt-4o\", inputCostPer1M: 2.5, outputCostPer1M: 10.0 },\n { provider: \"openai\", model: \"gpt-4o-2024-11-20\", inputCostPer1M: 2.5, outputCostPer1M: 10.0 },\n { provider: \"openai\", model: \"gpt-4o-2024-08-06\", inputCostPer1M: 2.5, outputCostPer1M: 10.0 },\n { provider: \"openai\", model: \"gpt-4o-mini\", inputCostPer1M: 0.15, outputCostPer1M: 0.6 },\n { provider: \"openai\", model: \"gpt-4o-mini-2024-07-18\", inputCostPer1M: 0.15, outputCostPer1M: 0.6 },\n { provider: \"openai\", model: \"gpt-4-turbo\", inputCostPer1M: 10.0, outputCostPer1M: 30.0 },\n { provider: \"openai\", model: \"gpt-4\", inputCostPer1M: 30.0, outputCostPer1M: 60.0 },\n { provider: \"openai\", model: \"gpt-3.5-turbo\", inputCostPer1M: 0.5, outputCostPer1M: 1.5 },\n { provider: \"openai\", model: \"o1\", inputCostPer1M: 15.0, outputCostPer1M: 60.0 },\n { provider: \"openai\", model: \"o1-mini\", inputCostPer1M: 3.0, outputCostPer1M: 12.0 },\n { provider: \"openai\", model: 
\"o1-preview\", inputCostPer1M: 15.0, outputCostPer1M: 60.0 },\n { provider: \"openai\", model: \"o3-mini\", inputCostPer1M: 1.1, outputCostPer1M: 4.4 },\n { provider: \"openai\", model: \"o3\", inputCostPer1M: 10.0, outputCostPer1M: 40.0 },\n { provider: \"openai\", model: \"o4-mini\", inputCostPer1M: 1.1, outputCostPer1M: 4.4 },\n\n // Anthropic\n { provider: \"anthropic\", model: \"claude-sonnet-4-20250514\", inputCostPer1M: 3.0, outputCostPer1M: 15.0 },\n { provider: \"anthropic\", model: \"claude-3-5-sonnet-20241022\", inputCostPer1M: 3.0, outputCostPer1M: 15.0 },\n { provider: \"anthropic\", model: \"claude-3-5-sonnet-20240620\", inputCostPer1M: 3.0, outputCostPer1M: 15.0 },\n { provider: \"anthropic\", model: \"claude-3-5-haiku-20241022\", inputCostPer1M: 0.8, outputCostPer1M: 4.0 },\n { provider: \"anthropic\", model: \"claude-3-opus-20240229\", inputCostPer1M: 15.0, outputCostPer1M: 75.0 },\n { provider: \"anthropic\", model: \"claude-3-haiku-20240307\", inputCostPer1M: 0.25, outputCostPer1M: 1.25 },\n { provider: \"anthropic\", model: \"claude-opus-4\", inputCostPer1M: 15.0, outputCostPer1M: 75.0 },\n { provider: \"anthropic\", model: \"claude-sonnet-4\", inputCostPer1M: 3.0, outputCostPer1M: 15.0 },\n\n // Google\n { provider: \"google\", model: \"gemini-2.0-flash\", inputCostPer1M: 0.1, outputCostPer1M: 0.4 },\n { provider: \"google\", model: \"gemini-2.0-flash-lite\", inputCostPer1M: 0.075, outputCostPer1M: 0.3 },\n { provider: \"google\", model: \"gemini-1.5-pro\", inputCostPer1M: 1.25, outputCostPer1M: 5.0 },\n { provider: \"google\", model: \"gemini-1.5-flash\", inputCostPer1M: 0.075, outputCostPer1M: 0.3 },\n { provider: \"google\", model: \"gemini-2.5-pro\", inputCostPer1M: 1.25, outputCostPer1M: 10.0 },\n { provider: \"google\", model: \"gemini-2.5-flash\", inputCostPer1M: 0.3, outputCostPer1M: 2.5 },\n\n // OpenAI-compatible providers\n { provider: \"deepseek\", model: \"deepseek-chat\", inputCostPer1M: 0.27, outputCostPer1M: 1.1 },\n { 
provider: \"deepseek\", model: \"deepseek-v3\", inputCostPer1M: 0.27, outputCostPer1M: 1.1 },\n { provider: \"deepseek\", model: \"deepseek-reasoner\", inputCostPer1M: 0.55, outputCostPer1M: 2.19 },\n { provider: \"deepseek\", model: \"deepseek-r1\", inputCostPer1M: 0.55, outputCostPer1M: 2.19 },\n { provider: \"deepseek\", model: \"deepseek-coder-v2\", inputCostPer1M: 0.14, outputCostPer1M: 0.28 },\n { provider: \"groq\", model: \"llama-3.3-70b-versatile\", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },\n { provider: \"groq\", model: \"llama-3.1-8b-instant\", inputCostPer1M: 0.05, outputCostPer1M: 0.08 },\n { provider: \"groq\", model: \"llama-3.1-70b-versatile\", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },\n { provider: \"groq\", model: \"llama-3-8b-8192\", inputCostPer1M: 0.05, outputCostPer1M: 0.08 },\n { provider: \"groq\", model: \"llama-3-70b-8192\", inputCostPer1M: 0.59, outputCostPer1M: 0.79 },\n { provider: \"groq\", model: \"gemma2-9b-it\", inputCostPer1M: 0.20, outputCostPer1M: 0.20 },\n { provider: \"groq\", model: \"mixtral-8x7b-32768\", inputCostPer1M: 0.24, outputCostPer1M: 0.24 },\n { provider: \"xai\", model: \"grok-2\", inputCostPer1M: 2.0, outputCostPer1M: 10.0 },\n { provider: \"xai\", model: \"grok-3\", inputCostPer1M: 3.0, outputCostPer1M: 15.0 },\n\n // Ollama / local models (free)\n { provider: \"ollama\", model: \"llama3\", inputCostPer1M: 0, outputCostPer1M: 0 },\n { provider: \"ollama\", model: \"llama3.1\", inputCostPer1M: 0, outputCostPer1M: 0 },\n { provider: \"ollama\", model: \"mistral\", inputCostPer1M: 0, outputCostPer1M: 0 },\n { provider: \"ollama\", model: \"codellama\", inputCostPer1M: 0, outputCostPer1M: 0 },\n { provider: \"ollama\", model: \"deepseek-r1\", inputCostPer1M: 0, outputCostPer1M: 0 },\n];\n\n/**\n * Look up pricing for a model. 
Returns null if model is not found.\n * Tries exact match first, then prefix match for versioned models.\n */\nexport function getModelPricing(\n provider: string,\n model: string\n): ModelPricing | null {\n const normalized = model.toLowerCase();\n const providerNorm = provider.toLowerCase();\n\n // Check runtime overrides first (exact match only).\n const override = pricingOverrides.find(\n (p) => p.provider === providerNorm && p.model === normalized\n );\n if (override) return override;\n\n const providerPricing = MODEL_PRICING.filter(\n (pricing) => pricing.provider === providerNorm\n );\n\n // Exact match\n const exact = providerPricing.find((pricing) => pricing.model === normalized);\n if (exact) return exact;\n\n // Prefix match should prefer the most-specific model name first.\n const prefix = providerPricing\n .filter((pricing) => normalized.startsWith(pricing.model))\n .sort((left, right) => right.model.length - left.model.length)[0];\n if (prefix) return prefix;\n\n return null;\n}\n\n/**\n * Calculate cost for a given number of tokens\n */\nexport function calculateCost(\n provider: string,\n model: string,\n inputTokens: number,\n outputTokens: number\n): { inputCost: number; outputCost: number; totalCost: number } {\n const pricing = getModelPricing(provider, model);\n if (!pricing) {\n return { inputCost: 0, outputCost: 0, totalCost: 0 };\n }\n\n const inputCost = (inputTokens / 1_000_000) * pricing.inputCostPer1M;\n const outputCost = (outputTokens / 1_000_000) * pricing.outputCostPer1M;\n\n return {\n inputCost,\n outputCost,\n totalCost: inputCost + outputCost,\n };\n}\n","import type { Span } from \"./types.js\";\r\n\r\n/**\r\n * Convert LLMTap spans to OpenTelemetry OTLP JSON format.\r\n *\r\n * Follows the OTLP/HTTP JSON specification and the\r\n * OpenTelemetry GenAI Semantic Conventions for LLM spans.\r\n *\r\n * @see https://opentelemetry.io/docs/specs/otlp/\r\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/\r\n 
*/\r\n\r\ninterface OtlpAttribute {\r\n key: string;\r\n value: { stringValue?: string; intValue?: string; doubleValue?: number; boolValue?: boolean };\r\n}\r\n\r\ninterface OtlpEvent {\r\n name: string;\r\n timeUnixNano: string;\r\n attributes: OtlpAttribute[];\r\n}\r\n\r\ninterface OtlpSpan {\r\n traceId: string;\r\n spanId: string;\r\n parentSpanId?: string;\r\n name: string;\r\n kind: number; // SPAN_KIND_CLIENT = 3\r\n startTimeUnixNano: string;\r\n endTimeUnixNano: string;\r\n attributes: OtlpAttribute[];\r\n events: OtlpEvent[];\r\n status: { code: number; message?: string }; // OK=1, ERROR=2\r\n}\r\n\r\ninterface OtlpResourceSpans {\r\n resource: {\r\n attributes: OtlpAttribute[];\r\n };\r\n scopeSpans: Array<{\r\n scope: { name: string; version: string };\r\n spans: OtlpSpan[];\r\n }>;\r\n}\r\n\r\nexport interface OtlpExportPayload {\r\n resourceSpans: OtlpResourceSpans[];\r\n}\r\n\r\nfunction strAttr(key: string, value: string): OtlpAttribute {\r\n return { key, value: { stringValue: value } };\r\n}\r\n\r\nfunction intAttr(key: string, value: number): OtlpAttribute {\r\n return { key, value: { intValue: String(value) } };\r\n}\r\n\r\nfunction floatAttr(key: string, value: number): OtlpAttribute {\r\n return { key, value: { doubleValue: value } };\r\n}\r\n\r\nfunction msToNano(ms: number): string {\r\n return String(BigInt(ms) * BigInt(1_000_000));\r\n}\r\n\r\nfunction padHex(id: string, length: number): string {\r\n return id.padStart(length, \"0\").slice(0, length);\r\n}\r\n\r\nfunction convertSpanToOtlp(span: Span): OtlpSpan {\r\n const attrs: OtlpAttribute[] = [\r\n // GenAI Semantic Conventions\r\n strAttr(\"gen_ai.system\", span.providerName),\r\n strAttr(\"gen_ai.request.model\", span.requestModel),\r\n strAttr(\"gen_ai.operation.name\", span.operationName),\r\n ];\r\n\r\n if (span.responseModel) attrs.push(strAttr(\"gen_ai.response.model\", span.responseModel));\r\n if (span.inputTokens != null) attrs.push(intAttr(\"gen_ai.usage.input_tokens\", 
span.inputTokens));\r\n if (span.outputTokens != null) attrs.push(intAttr(\"gen_ai.usage.output_tokens\", span.outputTokens));\r\n if (span.totalTokens != null) attrs.push(intAttr(\"gen_ai.usage.total_tokens\", span.totalTokens));\r\n if (span.temperature != null) attrs.push(floatAttr(\"gen_ai.request.temperature\", span.temperature));\r\n if (span.maxTokens != null) attrs.push(intAttr(\"gen_ai.request.max_tokens\", span.maxTokens));\r\n if (span.topP != null) attrs.push(floatAttr(\"gen_ai.request.top_p\", span.topP));\r\n\r\n // LLMTap-specific attributes\r\n if (span.inputCost != null) attrs.push(floatAttr(\"llmtap.cost.input\", span.inputCost));\r\n if (span.outputCost != null) attrs.push(floatAttr(\"llmtap.cost.output\", span.outputCost));\r\n if (span.totalCost != null) attrs.push(floatAttr(\"llmtap.cost.total\", span.totalCost));\r\n if (span.sessionId) attrs.push(strAttr(\"session.id\", span.sessionId));\r\n if (span.userId) attrs.push(strAttr(\"enduser.id\", span.userId));\r\n if (span.errorType) attrs.push(strAttr(\"error.type\", span.errorType));\r\n\r\n // Custom tags -> attributes\r\n if (span.tags) {\r\n for (const [key, value] of Object.entries(span.tags)) {\r\n attrs.push(strAttr(`llmtap.tag.${key}`, value));\r\n }\r\n }\r\n\r\n // GenAI events for prompt/completion content\r\n const events: OtlpEvent[] = [];\r\n\r\n if (span.inputMessages) {\r\n for (const msg of span.inputMessages) {\r\n events.push({\r\n name: \"gen_ai.content.prompt\",\r\n timeUnixNano: msToNano(span.startTime),\r\n attributes: [\r\n strAttr(\"gen_ai.prompt.role\", msg.role),\r\n strAttr(\"gen_ai.prompt.content\", typeof msg.content === \"string\" ? msg.content : JSON.stringify(msg.content) ?? \"\"),\r\n ],\r\n });\r\n }\r\n }\r\n\r\n if (span.outputMessages) {\r\n for (const msg of span.outputMessages) {\r\n events.push({\r\n name: \"gen_ai.content.completion\",\r\n timeUnixNano: msToNano(span.endTime ?? 
span.startTime),\r\n attributes: [\r\n strAttr(\"gen_ai.completion.role\", msg.role),\r\n strAttr(\"gen_ai.completion.content\", typeof msg.content === \"string\" ? msg.content : JSON.stringify(msg.content) ?? \"\"),\r\n ],\r\n });\r\n }\r\n }\r\n\r\n if (span.toolCalls) {\r\n for (const tc of span.toolCalls) {\r\n events.push({\r\n name: \"gen_ai.content.tool_call\",\r\n timeUnixNano: msToNano(span.endTime ?? span.startTime),\r\n attributes: [\r\n strAttr(\"gen_ai.tool_call.id\", tc.id),\r\n strAttr(\"gen_ai.tool_call.name\", tc.name),\r\n strAttr(\"gen_ai.tool_call.arguments\", tc.arguments),\r\n ],\r\n });\r\n }\r\n }\r\n\r\n return {\r\n traceId: padHex(span.traceId, 32),\r\n spanId: padHex(span.spanId, 16),\r\n parentSpanId: span.parentSpanId ? padHex(span.parentSpanId, 16) : undefined,\r\n name: span.name,\r\n kind: 3, // SPAN_KIND_CLIENT\r\n startTimeUnixNano: msToNano(span.startTime),\r\n endTimeUnixNano: msToNano(span.endTime ?? span.startTime + (span.duration ?? 0)),\r\n attributes: attrs,\r\n events,\r\n status: span.status === \"error\"\r\n ? 
{ code: 2, message: span.errorMessage }\r\n : { code: 1 },\r\n };\r\n}\r\n\r\n/**\r\n * Convert an array of LLMTap spans to OTLP JSON export format.\r\n */\r\nexport function spansToOtlp(spans: Span[], serviceName = \"llmtap\"): OtlpExportPayload {\r\n return {\r\n resourceSpans: [\r\n {\r\n resource: {\r\n attributes: [\r\n strAttr(\"service.name\", serviceName),\r\n strAttr(\"service.version\", \"0.1.0\"),\r\n strAttr(\"telemetry.sdk.name\", \"llmtap\"),\r\n strAttr(\"telemetry.sdk.language\", \"nodejs\"),\r\n ],\r\n },\r\n scopeSpans: [\r\n {\r\n scope: { name: \"@llmtap/sdk\", version: \"0.1.0\" },\r\n spans: spans.map(convertSpanToOtlp),\r\n },\r\n ],\r\n },\r\n ],\r\n };\r\n}\r\n"],"mappings":";AAAO,IAAM,yBAAyB;AAC/B,IAAM,wBAAwB,oBAAoB,sBAAsB;AACxE,IAAM,0BAA0B;AAChC,IAAM,cAAc;AACpB,IAAM,eAAe;AACrB,IAAM,UAAU;AAGhB,IAAM,SAAS;AAAA,EACpB,cAAc;AAAA,EACd,aAAa;AAAA,EACb,iBAAiB;AAAA,EACjB,WAAW;AAAA,EACX,cAAc;AAAA,EACd,aAAa;AAAA,EACb,YAAY;AACd;;;ACbA,IAAM,mBAAmC,CAAC;AAKnC,SAAS,WACd,UACA,OACA,gBACA,iBACM;AACN,QAAM,WAAW,iBAAiB;AAAA,IAChC,CAAC,MAAM,EAAE,aAAa,SAAS,YAAY,KAAK,EAAE,UAAU,MAAM,YAAY;AAAA,EAChF;AACA,QAAM,QAAsB;AAAA,IAC1B,UAAU,SAAS,YAAY;AAAA,IAC/B,OAAO,MAAM,YAAY;AAAA,IACzB;AAAA,IACA;AAAA,EACF;AACA,MAAI,YAAY,GAAG;AACjB,qBAAiB,QAAQ,IAAI;AAAA,EAC/B,OAAO;AACL,qBAAiB,KAAK,KAAK;AAAA,EAC7B;AACF;AAKO,SAAS,eAAe,SAA+B;AAC5D,aAAW,SAAS,SAAS;AAC3B,eAAW,MAAM,UAAU,MAAM,OAAO,MAAM,gBAAgB,MAAM,eAAe;AAAA,EACrF;AACF;AAOA,eAAsB,mBACpB,KACA,SACe;AACf,QAAM,MAAM,MAAM,QAAQ,GAAG;AAC7B,MAAI,CAAC,IAAI,GAAI,OAAM,IAAI,MAAM,+BAA+B,GAAG,UAAU,IAAI,MAAM,EAAE;AACrF,QAAM,OAAQ,MAAM,IAAI,KAAK;AAC7B,iBAAe,IAAI;AACrB;AAKO,SAAS,wBAA8B;AAC5C,mBAAiB,SAAS;AAC5B;AAKO,SAAS,gBAAgC;AAC9C,QAAM,SAAS,oBAAI,IAA0B;AAC7C,aAAW,SAAS,eAAe;AACjC,WAAO,IAAI,GAAG,MAAM,QAAQ,KAAK,MAAM,KAAK,IAAI,KAAK;AAAA,EACvD;AACA,aAAW,SAAS,kBAAkB;AACpC,WAAO,IAAI,GAAG,MAAM,QAAQ,KAAK,MAAM,KAAK,IAAI,KAAK;AAAA,EACvD;AACA,SAAO,CAAC,GAAG,OAAO,OAAO,CAAC;AAC5B;AAOO,IAAM,gBAAgC;AAAA;AAAA,EAE3C,EAAE,UAAU,UAAU,OAAO,UAAU,gBAAgB,KAAK,iBAAiB,GAAK;AAAA,E
AClF,EAAE,UAAU,UAAU,OAAO,qBAAqB,gBAAgB,KAAK,iBAAiB,GAAK;AAAA,EAC7F,EAAE,UAAU,UAAU,OAAO,qBAAqB,gBAAgB,KAAK,iBAAiB,GAAK;AAAA,EAC7F,EAAE,UAAU,UAAU,OAAO,eAAe,gBAAgB,MAAM,iBAAiB,IAAI;AAAA,EACvF,EAAE,UAAU,UAAU,OAAO,0BAA0B,gBAAgB,MAAM,iBAAiB,IAAI;AAAA,EAClG,EAAE,UAAU,UAAU,OAAO,eAAe,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EACxF,EAAE,UAAU,UAAU,OAAO,SAAS,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EAClF,EAAE,UAAU,UAAU,OAAO,iBAAiB,gBAAgB,KAAK,iBAAiB,IAAI;AAAA,EACxF,EAAE,UAAU,UAAU,OAAO,MAAM,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EAC/E,EAAE,UAAU,UAAU,OAAO,WAAW,gBAAgB,GAAK,iBAAiB,GAAK;AAAA,EACnF,EAAE,UAAU,UAAU,OAAO,cAAc,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EACvF,EAAE,UAAU,UAAU,OAAO,WAAW,gBAAgB,KAAK,iBAAiB,IAAI;AAAA,EAClF,EAAE,UAAU,UAAU,OAAO,MAAM,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EAC/E,EAAE,UAAU,UAAU,OAAO,WAAW,gBAAgB,KAAK,iBAAiB,IAAI;AAAA;AAAA,EAGlF,EAAE,UAAU,aAAa,OAAO,4BAA4B,gBAAgB,GAAK,iBAAiB,GAAK;AAAA,EACvG,EAAE,UAAU,aAAa,OAAO,8BAA8B,gBAAgB,GAAK,iBAAiB,GAAK;AAAA,EACzG,EAAE,UAAU,aAAa,OAAO,8BAA8B,gBAAgB,GAAK,iBAAiB,GAAK;AAAA,EACzG,EAAE,UAAU,aAAa,OAAO,6BAA6B,gBAAgB,KAAK,iBAAiB,EAAI;AAAA,EACvG,EAAE,UAAU,aAAa,OAAO,0BAA0B,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EACtG,EAAE,UAAU,aAAa,OAAO,2BAA2B,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EACvG,EAAE,UAAU,aAAa,OAAO,iBAAiB,gBAAgB,IAAM,iBAAiB,GAAK;AAAA,EAC7F,EAAE,UAAU,aAAa,OAAO,mBAAmB,gBAAgB,GAAK,iBAAiB,GAAK;AAAA;AAAA,EAG9F,EAAE,UAAU,UAAU,OAAO,oBAAoB,gBAAgB,KAAK,iBAAiB,IAAI;AAAA,EAC3F,EAAE,UAAU,UAAU,OAAO,yBAAyB,gBAAgB,OAAO,iBAAiB,IAAI;AAAA,EAClG,EAAE,UAAU,UAAU,OAAO,kBAAkB,gBAAgB,MAAM,iBAAiB,EAAI;AAAA,EAC1F,EAAE,UAAU,UAAU,OAAO,oBAAoB,gBAAgB,OAAO,iBAAiB,IAAI;AAAA,EAC7F,EAAE,UAAU,UAAU,OAAO,kBAAkB,gBAAgB,MAAM,iBAAiB,GAAK;AAAA,EAC3F,EAAE,UAAU,UAAU,OAAO,oBAAoB,gBAAgB,KAAK,iBAAiB,IAAI;AAAA;AAAA,EAG3F,EAAE,UAAU,YAAY,OAAO,iBAAiB,gBAAgB,MAAM,iBAAiB,IAAI;AAAA,EAC3F,EAAE,UAAU,YAAY,OAAO,eAAe,gBAAgB,MAAM,iBAAiB,IAAI;AAAA,EACzF,EAAE,UAAU,YAAY,OAAO,qBAAqB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAChG,EAAE,UAAU,YAAY,OAAO,eAAe,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAC1F,EAAE,UAAU,YAAY,OAAO,qBAAqB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAChG,EAAE,UAAU,QAAQ,OAA
O,2BAA2B,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAClG,EAAE,UAAU,QAAQ,OAAO,wBAAwB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAC/F,EAAE,UAAU,QAAQ,OAAO,2BAA2B,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAClG,EAAE,UAAU,QAAQ,OAAO,mBAAmB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAC1F,EAAE,UAAU,QAAQ,OAAO,oBAAoB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAC3F,EAAE,UAAU,QAAQ,OAAO,gBAAgB,gBAAgB,KAAM,iBAAiB,IAAK;AAAA,EACvF,EAAE,UAAU,QAAQ,OAAO,sBAAsB,gBAAgB,MAAM,iBAAiB,KAAK;AAAA,EAC7F,EAAE,UAAU,OAAO,OAAO,UAAU,gBAAgB,GAAK,iBAAiB,GAAK;AAAA,EAC/E,EAAE,UAAU,OAAO,OAAO,UAAU,gBAAgB,GAAK,iBAAiB,GAAK;AAAA;AAAA,EAG/E,EAAE,UAAU,UAAU,OAAO,UAAU,gBAAgB,GAAG,iBAAiB,EAAE;AAAA,EAC7E,EAAE,UAAU,UAAU,OAAO,YAAY,gBAAgB,GAAG,iBAAiB,EAAE;AAAA,EAC/E,EAAE,UAAU,UAAU,OAAO,WAAW,gBAAgB,GAAG,iBAAiB,EAAE;AAAA,EAC9E,EAAE,UAAU,UAAU,OAAO,aAAa,gBAAgB,GAAG,iBAAiB,EAAE;AAAA,EAChF,EAAE,UAAU,UAAU,OAAO,eAAe,gBAAgB,GAAG,iBAAiB,EAAE;AACpF;AAMO,SAAS,gBACd,UACA,OACqB;AACrB,QAAM,aAAa,MAAM,YAAY;AACrC,QAAM,eAAe,SAAS,YAAY;AAG1C,QAAM,WAAW,iBAAiB;AAAA,IAChC,CAAC,MAAM,EAAE,aAAa,gBAAgB,EAAE,UAAU;AAAA,EACpD;AACA,MAAI,SAAU,QAAO;AAErB,QAAM,kBAAkB,cAAc;AAAA,IACpC,CAAC,YAAY,QAAQ,aAAa;AAAA,EACpC;AAGA,QAAM,QAAQ,gBAAgB,KAAK,CAAC,YAAY,QAAQ,UAAU,UAAU;AAC5E,MAAI,MAAO,QAAO;AAGlB,QAAM,SAAS,gBACZ,OAAO,CAAC,YAAY,WAAW,WAAW,QAAQ,KAAK,CAAC,EACxD,KAAK,CAAC,MAAM,UAAU,MAAM,MAAM,SAAS,KAAK,MAAM,MAAM,EAAE,CAAC;AAClE,MAAI,OAAQ,QAAO;AAEnB,SAAO;AACT;AAKO,SAAS,cACd,UACA,OACA,aACA,cAC8D;AAC9D,QAAM,UAAU,gBAAgB,UAAU,KAAK;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,WAAW,GAAG,YAAY,GAAG,WAAW,EAAE;AAAA,EACrD;AAEA,QAAM,YAAa,cAAc,MAAa,QAAQ;AACtD,QAAM,aAAc,eAAe,MAAa,QAAQ;AAExD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,WAAW,YAAY;AAAA,EACzB;AACF;;;ACjJA,SAAS,QAAQ,KAAa,OAA8B;AAC1D,SAAO,EAAE,KAAK,OAAO,EAAE,aAAa,MAAM,EAAE;AAC9C;AAEA,SAAS,QAAQ,KAAa,OAA8B;AAC1D,SAAO,EAAE,KAAK,OAAO,EAAE,UAAU,OAAO,KAAK,EAAE,EAAE;AACnD;AAEA,SAAS,UAAU,KAAa,OAA8B;AAC5D,SAAO,EAAE,KAAK,OAAO,EAAE,aAAa,MAAM,EAAE;AAC9C;AAEA,SAAS,SAAS,IAAoB;AACpC,SAAO,OAAO,OAAO,EAAE,IAAI,OAAO,GAAS,CAAC;AAC9C;AAEA,SAAS,OAAO,IAAY,QAAwB;AAClD,SAAO,GAAG,SAAS,QAAQ,GAAG,EAAE,MAAM,GAAG,MAAM;AACjD;AAE
A,SAAS,kBAAkB,MAAsB;AAC/C,QAAM,QAAyB;AAAA;AAAA,IAE7B,QAAQ,iBAAiB,KAAK,YAAY;AAAA,IAC1C,QAAQ,wBAAwB,KAAK,YAAY;AAAA,IACjD,QAAQ,yBAAyB,KAAK,aAAa;AAAA,EACrD;AAEA,MAAI,KAAK,cAAe,OAAM,KAAK,QAAQ,yBAAyB,KAAK,aAAa,CAAC;AACvF,MAAI,KAAK,eAAe,KAAM,OAAM,KAAK,QAAQ,6BAA6B,KAAK,WAAW,CAAC;AAC/F,MAAI,KAAK,gBAAgB,KAAM,OAAM,KAAK,QAAQ,8BAA8B,KAAK,YAAY,CAAC;AAClG,MAAI,KAAK,eAAe,KAAM,OAAM,KAAK,QAAQ,6BAA6B,KAAK,WAAW,CAAC;AAC/F,MAAI,KAAK,eAAe,KAAM,OAAM,KAAK,UAAU,8BAA8B,KAAK,WAAW,CAAC;AAClG,MAAI,KAAK,aAAa,KAAM,OAAM,KAAK,QAAQ,6BAA6B,KAAK,SAAS,CAAC;AAC3F,MAAI,KAAK,QAAQ,KAAM,OAAM,KAAK,UAAU,wBAAwB,KAAK,IAAI,CAAC;AAG9E,MAAI,KAAK,aAAa,KAAM,OAAM,KAAK,UAAU,qBAAqB,KAAK,SAAS,CAAC;AACrF,MAAI,KAAK,cAAc,KAAM,OAAM,KAAK,UAAU,sBAAsB,KAAK,UAAU,CAAC;AACxF,MAAI,KAAK,aAAa,KAAM,OAAM,KAAK,UAAU,qBAAqB,KAAK,SAAS,CAAC;AACrF,MAAI,KAAK,UAAW,OAAM,KAAK,QAAQ,cAAc,KAAK,SAAS,CAAC;AACpE,MAAI,KAAK,OAAQ,OAAM,KAAK,QAAQ,cAAc,KAAK,MAAM,CAAC;AAC9D,MAAI,KAAK,UAAW,OAAM,KAAK,QAAQ,cAAc,KAAK,SAAS,CAAC;AAGpE,MAAI,KAAK,MAAM;AACb,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,KAAK,IAAI,GAAG;AACpD,YAAM,KAAK,QAAQ,cAAc,GAAG,IAAI,KAAK,CAAC;AAAA,IAChD;AAAA,EACF;AAGA,QAAM,SAAsB,CAAC;AAE7B,MAAI,KAAK,eAAe;AACtB,eAAW,OAAO,KAAK,eAAe;AACpC,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,cAAc,SAAS,KAAK,SAAS;AAAA,QACrC,YAAY;AAAA,UACV,QAAQ,sBAAsB,IAAI,IAAI;AAAA,UACtC,QAAQ,yBAAyB,OAAO,IAAI,YAAY,WAAW,IAAI,UAAU,KAAK,UAAU,IAAI,OAAO,KAAK,EAAE;AAAA,QACpH;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,KAAK,gBAAgB;AACvB,eAAW,OAAO,KAAK,gBAAgB;AACrC,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,cAAc,SAAS,KAAK,WAAW,KAAK,SAAS;AAAA,QACrD,YAAY;AAAA,UACV,QAAQ,0BAA0B,IAAI,IAAI;AAAA,UAC1C,QAAQ,6BAA6B,OAAO,IAAI,YAAY,WAAW,IAAI,UAAU,KAAK,UAAU,IAAI,OAAO,KAAK,EAAE;AAAA,QACxH;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,KAAK,WAAW;AAClB,eAAW,MAAM,KAAK,WAAW;AAC/B,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,cAAc,SAAS,KAAK,WAAW,KAAK,SAAS;AAAA,QACrD,YAAY;AAAA,UACV,QAAQ,uBAAuB,GAAG,EAAE;AAAA,UACpC,QAAQ,yBAAyB,GAAG,IAAI;AAAA,UACxC,QAAQ,8BAA8B,GAAG,SAAS;AAAA,QACpD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAS,OAAO,
KAAK,SAAS,EAAE;AAAA,IAChC,QAAQ,OAAO,KAAK,QAAQ,EAAE;AAAA,IAC9B,cAAc,KAAK,eAAe,OAAO,KAAK,cAAc,EAAE,IAAI;AAAA,IAClE,MAAM,KAAK;AAAA,IACX,MAAM;AAAA;AAAA,IACN,mBAAmB,SAAS,KAAK,SAAS;AAAA,IAC1C,iBAAiB,SAAS,KAAK,WAAW,KAAK,aAAa,KAAK,YAAY,EAAE;AAAA,IAC/E,YAAY;AAAA,IACZ;AAAA,IACA,QAAQ,KAAK,WAAW,UACpB,EAAE,MAAM,GAAG,SAAS,KAAK,aAAa,IACtC,EAAE,MAAM,EAAE;AAAA,EAChB;AACF;AAKO,SAAS,YAAY,OAAe,cAAc,UAA6B;AACpF,SAAO;AAAA,IACL,eAAe;AAAA,MACb;AAAA,QACE,UAAU;AAAA,UACR,YAAY;AAAA,YACV,QAAQ,gBAAgB,WAAW;AAAA,YACnC,QAAQ,mBAAmB,OAAO;AAAA,YAClC,QAAQ,sBAAsB,QAAQ;AAAA,YACtC,QAAQ,0BAA0B,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA,YAAY;AAAA,UACV;AAAA,YACE,OAAO,EAAE,MAAM,eAAe,SAAS,QAAQ;AAAA,YAC/C,OAAO,MAAM,IAAI,iBAAiB;AAAA,UACpC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
|
package/package.json
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@llmtap/shared",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Shared types, constants, and utilities for LLMTap",
|
|
5
|
+
"main": "./dist/index.js",
|
|
6
|
+
"module": "./dist/index.mjs",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
|
|
10
|
+
"types": "./dist/index.d.ts",
|
|
11
|
+
"import": "./dist/index.mjs",
|
|
12
|
+
"require": "./dist/index.js"
|
|
13
|
+
}
|
|
14
|
+
},
|
|
15
|
+
"files": [
|
|
16
|
+
"dist"
|
|
17
|
+
],
|
|
18
|
+
"devDependencies": {
|
|
19
|
+
"tsup": "^8.4.0",
|
|
20
|
+
"typescript": "^5.7.0",
|
|
21
|
+
"rimraf": "^6.0.0",
|
|
22
|
+
"vitest": "^3.0.0"
|
|
23
|
+
},
|
|
24
|
+
"keywords": [
|
|
25
|
+
"llm",
|
|
26
|
+
"ai",
|
|
27
|
+
"observability",
|
|
28
|
+
"tracing",
|
|
29
|
+
"openai",
|
|
30
|
+
"anthropic"
|
|
31
|
+
],
|
|
32
|
+
"license": "MIT",
|
|
33
|
+
"repository": {
|
|
34
|
+
"type": "git",
|
|
35
|
+
"url": "https://github.com/llmtap/llmtap",
|
|
36
|
+
"directory": "packages/shared"
|
|
37
|
+
},
|
|
38
|
+
"scripts": {
|
|
39
|
+
"build": "tsup",
|
|
40
|
+
"test": "vitest run",
|
|
41
|
+
"clean": "rimraf dist"
|
|
42
|
+
}
|
|
43
|
+
}
|