@llmops/sdk 1.0.0-beta.2 → 1.0.0-beta.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/dist/agents.cjs +1 -1
  2. package/dist/agents.d.cts +1 -1
  3. package/dist/agents.d.mts +1 -1
  4. package/dist/agents.mjs +1 -1
  5. package/dist/chunk-CxwUPGYo.mjs +21 -0
  6. package/dist/constants--ywcWP7q.cjs +18 -0
  7. package/dist/constants-BvnYU_pl.mjs +12 -0
  8. package/dist/eval.cjs +367 -0
  9. package/dist/eval.d.cts +200 -0
  10. package/dist/eval.d.mts +200 -0
  11. package/dist/eval.mjs +364 -0
  12. package/dist/express.cjs +29 -2
  13. package/dist/express.d.cts +7 -3
  14. package/dist/express.d.mts +7 -3
  15. package/dist/express.mjs +28 -1
  16. package/dist/hono.d.cts +2 -2
  17. package/dist/hono.d.mts +2 -2
  18. package/dist/{index-05byZKeu.d.mts → index-BZLzywwb.d.mts} +1 -1
  19. package/dist/{index-Beb26ZNG.d.cts → index-lgspeSNr.d.cts} +1 -1
  20. package/dist/index.cjs +3 -3
  21. package/dist/index.d.cts +4 -4
  22. package/dist/index.d.mts +4 -4
  23. package/dist/index.mjs +3 -3
  24. package/dist/interface-BbAwy96d.d.cts +223 -0
  25. package/dist/interface-Dz7B6QN1.d.mts +223 -0
  26. package/dist/nextjs.d.cts +2 -2
  27. package/dist/nextjs.d.mts +2 -2
  28. package/dist/store/d1.cjs +512 -0
  29. package/dist/store/d1.d.cts +60 -0
  30. package/dist/store/d1.d.mts +60 -0
  31. package/dist/store/d1.mjs +511 -0
  32. package/dist/store/pg.cjs +13634 -6
  33. package/dist/store/pg.d.cts +38 -2
  34. package/dist/store/pg.d.mts +38 -2
  35. package/dist/store/pg.mjs +13618 -2
  36. package/dist/store/sqlite.cjs +541 -0
  37. package/dist/store/sqlite.d.cts +50 -0
  38. package/dist/store/sqlite.d.mts +50 -0
  39. package/dist/store/sqlite.mjs +541 -0
  40. package/dist/types.d.cts +2 -2
  41. package/dist/types.d.mts +2 -2
  42. package/package.json +48 -3
  43. package/dist/express-B-wbCza5.cjs +0 -35
  44. package/dist/express-DMtc0d_Y.mjs +0 -30
  45. package/dist/index-DnWGper4.d.cts +0 -7
  46. package/dist/index-Dvz-L2Hf.d.mts +0 -7
  47. /package/dist/{agents-exporter-vcpgCF69.mjs → agents-exporter-CGxTzDeQ.mjs} +0 -0
  48. /package/dist/{agents-exporter-BZHCcFSd.d.mts → agents-exporter-CehKIArI.d.mts} +0 -0
  49. /package/dist/{agents-exporter-BuTq2n2y.cjs → agents-exporter-DizRE7CQ.cjs} +0 -0
  50. /package/dist/{agents-exporter-uzN3bkth.d.cts → agents-exporter-DkqkCcIx.d.cts} +0 -0
@@ -0,0 +1,511 @@
1
+ import { randomUUID } from "node:crypto";
2
+
3
//#region src/store/d1/d1-store.ts
/** JSON columns stored as TEXT in D1 — parse them back to objects on read */
const JSON_COLUMNS = new Set([
  "tags",
  "metadata",
  "attributes",
  "guardrailResults",
  "input",
  "output"
]);
/**
 * Revive the JSON-typed TEXT columns of a single row.
 * Non-object inputs are returned unchanged; strings that fail to parse are
 * kept as their raw text (best-effort revival).
 */
function parseJsonColumns(row) {
  if (!row || typeof row !== "object") return row;
  const out = { ...row };
  for (const [key, value] of Object.entries(out)) {
    if (!JSON_COLUMNS.has(key) || typeof value !== "string") continue;
    try {
      out[key] = JSON.parse(value);
    } catch {
      // malformed JSON: leave the raw string in place
    }
  }
  return out;
}
/** Revive JSON columns across a result set. */
function parseJsonRows(rows) {
  return rows.map(parseJsonColumns);
}
24
/**
 * Translate a `{ tagKey: [values...] }` filter map into parameterized SQL
 * fragments matching the JSON "tags" column via json_extract.
 * Keys with an empty value list are skipped; a nullish map yields no filters.
 * Returns `{ conditions, params }` ready to be appended to a WHERE clause.
 */
function buildTagFilters(tags) {
  const result = { conditions: [], params: [] };
  if (!tags) return result;
  for (const [key, values] of Object.entries(tags)) {
    if (values.length === 0) continue;
    if (values.length === 1) {
      result.conditions.push(`json_extract("tags", '$.' || ?) = ?`);
      result.params.push(key, values[0]);
    } else {
      const slots = values.map(() => "?").join(", ");
      result.conditions.push(`json_extract("tags", '$.' || ?) IN (${slots})`);
      result.params.push(key, ...values);
    }
  }
  return result;
}
47
+ // Max prepared statements submitted per db.batch() call — presumably chosen
+ // to stay under Cloudflare D1's per-batch/subrequest limits (TODO confirm).
+ const D1_BATCH_LIMIT = 100;
48
//#region createD1LLMRequestsStore
/**
 * Build the LLM-request half of the D1-backed telemetry store.
 *
 * JSON-valued fields (tags, guardrailResults) are serialized to TEXT on write
 * and revived via parseJsonColumns/parseJsonRows on read.
 *
 * @param db - Cloudflare D1 database binding (prepare/bind/first/all/batch).
 * @returns Store methods for inserting and querying "llm_requests" rows.
 */
function createD1LLMRequestsStore(db) {
  // Single source of truth for the 30-column insert. Previously this SQL and
  // its 30-item bind list were duplicated verbatim in batchInsertRequests and
  // insertRequest, which risked the two copies drifting apart.
  const INSERT_SQL = `
    INSERT INTO "llm_requests" (
      "id","requestId","configId","variantId","environmentId",
      "providerConfigId","provider","model","promptTokens",
      "completionTokens","totalTokens","cachedTokens",
      "cacheCreationTokens","cost","cacheSavings","inputCost",
      "outputCost","endpoint","statusCode","latencyMs","isStreaming",
      "userId","tags","guardrailResults","traceId","spanId",
      "parentSpanId","sessionId","createdAt","updatedAt"
    ) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
  `;
  /** Bind values for one request row, in INSERT_SQL column order. */
  const insertArgs = (req, now) => [
    randomUUID(),
    req.requestId,
    req.configId ?? null,
    req.variantId ?? null,
    req.environmentId ?? null,
    req.providerConfigId ?? null,
    req.provider,
    req.model,
    req.promptTokens ?? 0,
    req.completionTokens ?? 0,
    req.totalTokens ?? 0,
    req.cachedTokens ?? 0,
    req.cacheCreationTokens ?? 0,
    req.cost ?? 0,
    req.cacheSavings ?? 0,
    req.inputCost ?? 0,
    req.outputCost ?? 0,
    req.endpoint,
    req.statusCode,
    req.latencyMs ?? 0,
    req.isStreaming ? 1 : 0, // SQLite/D1 has no boolean type; store 0/1
    req.userId ?? null,
    JSON.stringify(req.tags ?? {}),
    req.guardrailResults ? JSON.stringify(req.guardrailResults) : null,
    req.traceId ?? null,
    req.spanId ?? null,
    req.parentSpanId ?? null,
    req.sessionId ?? null,
    now,
    now,
  ];
  /**
   * WHERE fragments shared by the date-scoped aggregate queries
   * (getTotalCost, getCostSummary, getRequestStats): mandatory createdAt
   * range plus optional config/variant/environment and tag filters.
   */
  const scopedFilters = ({ startDate, endDate, configId, variantId, environmentId, tags }) => {
    const conditions = [`"createdAt" >= ?`, `"createdAt" <= ?`];
    const params = [startDate.toISOString(), endDate.toISOString()];
    if (configId) {
      conditions.push(`"configId" = ?`);
      params.push(configId);
    }
    if (variantId) {
      conditions.push(`"variantId" = ?`);
      params.push(variantId);
    }
    if (environmentId) {
      conditions.push(`"environmentId" = ?`);
      params.push(environmentId);
    }
    const tagFilter = buildTagFilters(tags);
    conditions.push(...tagFilter.conditions);
    params.push(...tagFilter.params);
    return { conditions, params };
  };
  return {
    /** Insert many rows, chunked so each db.batch() stays within D1_BATCH_LIMIT. */
    batchInsertRequests: async (requests) => {
      if (requests.length === 0) return { count: 0 };
      const now = new Date().toISOString();
      const stmts = requests.map((req) => db.prepare(INSERT_SQL).bind(...insertArgs(req, now)));
      for (let i = 0; i < stmts.length; i += D1_BATCH_LIMIT) {
        await db.batch(stmts.slice(i, i + D1_BATCH_LIMIT));
      }
      return { count: requests.length };
    },
    /** Insert one row and return the stored row via RETURNING *. */
    insertRequest: async (req) => {
      const now = new Date().toISOString();
      // NOTE(review): the returned row is raw — JSON columns are still TEXT
      // here, unlike getRequestByRequestId which revives them. Preserved as-is
      // for backward compatibility; confirm whether callers expect parsing.
      return db.prepare(`${INSERT_SQL} RETURNING *`).bind(...insertArgs(req, now)).first();
    },
    /** Paginated listing with optional equality, date-range, and tag filters. */
    listRequests: async (params) => {
      const { limit = 100, offset = 0, configId, variantId, environmentId, providerConfigId, provider, model, startDate, endDate, tags } = params ?? {};
      const conditions = ["1=1"]; // keeps the WHERE clause valid with no filters
      const queryParams = [];
      if (configId) {
        conditions.push(`"configId" = ?`);
        queryParams.push(configId);
      }
      if (variantId) {
        conditions.push(`"variantId" = ?`);
        queryParams.push(variantId);
      }
      if (environmentId) {
        conditions.push(`"environmentId" = ?`);
        queryParams.push(environmentId);
      }
      if (providerConfigId) {
        conditions.push(`"providerConfigId" = ?`);
        queryParams.push(providerConfigId);
      }
      if (provider) {
        conditions.push(`"provider" = ?`);
        queryParams.push(provider);
      }
      if (model) {
        conditions.push(`"model" = ?`);
        queryParams.push(model);
      }
      if (startDate) {
        conditions.push(`"createdAt" >= ?`);
        queryParams.push(startDate.toISOString());
      }
      if (endDate) {
        conditions.push(`"createdAt" <= ?`);
        queryParams.push(endDate.toISOString());
      }
      const tagFilter = buildTagFilters(tags);
      conditions.push(...tagFilter.conditions);
      queryParams.push(...tagFilter.params);
      const where = conditions.join(" AND ");
      const total = (await db.prepare(`SELECT COUNT(*) AS "total" FROM "llm_requests" WHERE ${where}`).bind(...queryParams).first())?.total ?? 0;
      const { results } = await db.prepare(`SELECT * FROM "llm_requests" WHERE ${where} ORDER BY "createdAt" DESC LIMIT ? OFFSET ?`).bind(...queryParams, limit, offset).all();
      return {
        data: parseJsonRows(results),
        total,
        limit,
        offset
      };
    },
    /** Fetch a single row by its gateway requestId; undefined when absent. */
    getRequestByRequestId: async (requestId) => {
      const row = await db.prepare(`SELECT * FROM "llm_requests" WHERE "requestId" = ?`).bind(requestId).first();
      return row ? parseJsonColumns(row) : void 0;
    },
    /** Aggregate cost/token totals over a date range with optional scope filters. */
    getTotalCost: async (params) => {
      const { conditions, params: queryParams } = scopedFilters(params);
      return db.prepare(`
        SELECT
          COALESCE(SUM("cost"), 0) AS "totalCost",
          COALESCE(SUM("inputCost"), 0) AS "totalInputCost",
          COALESCE(SUM("outputCost"), 0) AS "totalOutputCost",
          COALESCE(SUM("promptTokens"), 0) AS "totalPromptTokens",
          COALESCE(SUM("completionTokens"), 0) AS "totalCompletionTokens",
          COALESCE(SUM("totalTokens"), 0) AS "totalTokens",
          COALESCE(SUM("cachedTokens"), 0) AS "totalCachedTokens",
          COALESCE(SUM("cacheSavings"), 0) AS "totalCacheSavings",
          COUNT(*) AS "requestCount"
        FROM "llm_requests" WHERE ${conditions.join(" AND ")}
      `).bind(...queryParams).first();
    },
    /** Per provider/model cost breakdown over a date range, costliest first. */
    getCostByModel: async (params) => {
      const { results } = await db.prepare(`
        SELECT "provider", "model",
          COALESCE(SUM("cost"), 0) AS "totalCost",
          COALESCE(SUM("inputCost"), 0) AS "totalInputCost",
          COALESCE(SUM("outputCost"), 0) AS "totalOutputCost",
          COALESCE(SUM("totalTokens"), 0) AS "totalTokens",
          COUNT(*) AS "requestCount",
          AVG("latencyMs") AS "avgLatencyMs"
        FROM "llm_requests"
        WHERE "createdAt" >= ? AND "createdAt" <= ?
        GROUP BY "provider", "model"
        ORDER BY SUM("cost") DESC
      `).bind(params.startDate.toISOString(), params.endDate.toISOString()).all();
      return results;
    },
    /** Per-provider cost breakdown over a date range, costliest first. */
    getCostByProvider: async (params) => {
      const { results } = await db.prepare(`
        SELECT "provider",
          COALESCE(SUM("cost"), 0) AS "totalCost",
          COALESCE(SUM("inputCost"), 0) AS "totalInputCost",
          COALESCE(SUM("outputCost"), 0) AS "totalOutputCost",
          COALESCE(SUM("totalTokens"), 0) AS "totalTokens",
          COUNT(*) AS "requestCount",
          AVG("latencyMs") AS "avgLatencyMs"
        FROM "llm_requests"
        WHERE "createdAt" >= ? AND "createdAt" <= ?
        GROUP BY "provider"
        ORDER BY SUM("cost") DESC
      `).bind(params.startDate.toISOString(), params.endDate.toISOString()).all();
      return results;
    },
    /** Day-by-day cost/token totals over a date range, ascending by date. */
    getDailyCosts: async (params) => {
      const { results } = await db.prepare(`
        SELECT date("createdAt") AS "date",
          COALESCE(SUM("cost"), 0) AS "totalCost",
          COALESCE(SUM("inputCost"), 0) AS "totalInputCost",
          COALESCE(SUM("outputCost"), 0) AS "totalOutputCost",
          COALESCE(SUM("totalTokens"), 0) AS "totalTokens",
          COUNT(*) AS "requestCount"
        FROM "llm_requests"
        WHERE "createdAt" >= ? AND "createdAt" <= ?
        GROUP BY date("createdAt")
        ORDER BY date("createdAt") ASC
      `).bind(params.startDate.toISOString(), params.endDate.toISOString()).all();
      return results;
    },
    /**
     * Grouped cost summary. groupBy selects the dimension (day/hour/model/
     * provider/endpoint/tags); any other (or absent) value yields one 'total'
     * row. The 'tags' grouping explodes the JSON "tags" column via json_each,
     * optionally restricted to tagKeys.
     */
    getCostSummary: async (params) => {
      const { startDate, endDate, groupBy, configId, variantId, environmentId, tags, tagKeys } = params;
      const { conditions, params: queryParams } = scopedFilters({ startDate, endDate, configId, variantId, environmentId, tags });
      const where = conditions.join(" AND ");
      if (groupBy === "tags") {
        const tagConditions = [...conditions];
        const tagParams = [...queryParams];
        if (tagKeys && tagKeys.length > 0) {
          tagConditions.push(`json_each.key IN (${tagKeys.map(() => "?").join(",")})`);
          tagParams.push(...tagKeys);
        }
        const tagWhere = tagConditions.join(" AND ");
        const { results: tagResults } = await db.prepare(`
          SELECT json_each.key || ':' || json_each.value AS "groupKey",
            COALESCE(SUM("cost"), 0) AS "totalCost",
            COUNT(*) AS "requestCount"
          FROM "llm_requests", json_each("tags")
          WHERE ${tagWhere}
          GROUP BY json_each.key, json_each.value
          ORDER BY SUM("cost") DESC
        `).bind(...tagParams).all();
        return tagResults;
      }
      const sqlMap = {
        day: `SELECT date("createdAt") AS "groupKey", COALESCE(SUM("cost"),0) AS "totalCost", COUNT(*) AS "requestCount", COALESCE(SUM("totalTokens"),0) AS "totalTokens" FROM "llm_requests" WHERE ${where} GROUP BY date("createdAt") ORDER BY date("createdAt") ASC`,
        hour: `SELECT strftime('%Y-%m-%d %H:00:00',"createdAt") AS "groupKey", COALESCE(SUM("cost"),0) AS "totalCost", COUNT(*) AS "requestCount", COALESCE(SUM("totalTokens"),0) AS "totalTokens" FROM "llm_requests" WHERE ${where} GROUP BY strftime('%Y-%m-%d %H:00:00',"createdAt") ORDER BY strftime('%Y-%m-%d %H:00:00',"createdAt") ASC`,
        model: `SELECT "provider"||'/'||"model" AS "groupKey", COALESCE(SUM("cost"),0) AS "totalCost", COUNT(*) AS "requestCount" FROM "llm_requests" WHERE ${where} GROUP BY "provider","model" ORDER BY SUM("cost") DESC`,
        provider: `SELECT "provider" AS "groupKey", COALESCE(SUM("cost"),0) AS "totalCost", COUNT(*) AS "requestCount" FROM "llm_requests" WHERE ${where} GROUP BY "provider" ORDER BY SUM("cost") DESC`,
        endpoint: `SELECT COALESCE("endpoint",'unknown') AS "groupKey", COALESCE(SUM("cost"),0) AS "totalCost", COUNT(*) AS "requestCount" FROM "llm_requests" WHERE ${where} GROUP BY "endpoint" ORDER BY SUM("cost") DESC`
      };
      const totalSql = `SELECT 'total' AS "groupKey", COALESCE(SUM("cost"),0) AS "totalCost", COUNT(*) AS "requestCount" FROM "llm_requests" WHERE ${where}`;
      const sql = groupBy ? sqlMap[groupBy] ?? totalSql : totalSql;
      const { results } = await db.prepare(sql).bind(...queryParams).all();
      return results;
    },
    /** Success/failure/streaming counts and latency stats over a date range. */
    getRequestStats: async (params) => {
      const { conditions, params: queryParams } = scopedFilters(params);
      return db.prepare(`
        SELECT
          COUNT(*) AS "totalRequests",
          COUNT(CASE WHEN "statusCode">=200 AND "statusCode"<300 THEN 1 END) AS "successfulRequests",
          COUNT(CASE WHEN "statusCode">=400 THEN 1 END) AS "failedRequests",
          COUNT(CASE WHEN "isStreaming"=1 THEN 1 END) AS "streamingRequests",
          AVG("latencyMs") AS "avgLatencyMs",
          MAX("latencyMs") AS "maxLatencyMs",
          MIN("latencyMs") AS "minLatencyMs"
        FROM "llm_requests" WHERE ${conditions.join(" AND ")}
      `).bind(...queryParams).first();
    },
    /** Every distinct key:value pair present in any row's "tags" JSON. */
    getDistinctTags: async () => {
      const { results } = await db.prepare(`
        SELECT DISTINCT json_each.key AS key, json_each.value AS value
        FROM "llm_requests", json_each("tags")
        WHERE "tags" != '{}'
        ORDER BY json_each.key, json_each.value
      `).all();
      return results;
    }
  };
}
308
//#region createD1TracesStore
/**
 * Build the tracing half of the D1-backed telemetry store: trace aggregate
 * rows, individual spans, and span events.
 *
 * @param db - Cloudflare D1 database binding (prepare/bind/first/all/batch).
 * @returns Store methods for writing and querying traces/spans/span_events.
 */
function createD1TracesStore(db) {
  // Shared by batchInsertSpans and batchInsertSpanEvents (previously the
  // chunking loop was duplicated in both).
  /** Submit prepared statements in chunks that respect D1_BATCH_LIMIT. */
  const runChunkedBatch = async (stmts) => {
    for (let i = 0; i < stmts.length; i += D1_BATCH_LIMIT) {
      await db.batch(stmts.slice(i, i + D1_BATCH_LIMIT));
    }
  };
  return {
    /**
     * Insert or merge a trace aggregate row keyed by traceId. On conflict the
     * existing row is merged: status escalates toward 'error' and never
     * downgrades from it, the start/end window widens (durationMs recomputed
     * from the widened window via julianday), counters and costs accumulate,
     * and tags/metadata are json_patch-merged.
     */
    upsertTrace: async (data) => {
      const now = new Date().toISOString();
      await db.prepare(`
        INSERT INTO "traces" (
          "id","traceId","name","sessionId","userId","status",
          "startTime","endTime","durationMs","spanCount",
          "totalInputTokens","totalOutputTokens","totalTokens","totalCost",
          "tags","metadata","createdAt","updatedAt"
        ) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
        ON CONFLICT ("traceId") DO UPDATE SET
          "name" = COALESCE(EXCLUDED."name", "traces"."name"),
          "sessionId" = COALESCE(EXCLUDED."sessionId", "traces"."sessionId"),
          "userId" = COALESCE(EXCLUDED."userId", "traces"."userId"),
          "status" = CASE
            WHEN EXCLUDED."status" = 'error' THEN 'error'
            WHEN EXCLUDED."status" = 'ok' AND "traces"."status" != 'error' THEN 'ok'
            ELSE "traces"."status"
          END,
          "startTime" = MIN("traces"."startTime", EXCLUDED."startTime"),
          "endTime" = MAX(
            COALESCE("traces"."endTime", EXCLUDED."endTime"),
            COALESCE(EXCLUDED."endTime", "traces"."endTime")
          ),
          "durationMs" = CAST(
            (julianday(MAX(
              COALESCE("traces"."endTime", EXCLUDED."endTime"),
              COALESCE(EXCLUDED."endTime", "traces"."endTime")
            )) - julianday(MIN("traces"."startTime", EXCLUDED."startTime")))
            * 86400000 AS INTEGER
          ),
          "spanCount" = "traces"."spanCount" + EXCLUDED."spanCount",
          "totalInputTokens" = "traces"."totalInputTokens" + EXCLUDED."totalInputTokens",
          "totalOutputTokens" = "traces"."totalOutputTokens" + EXCLUDED."totalOutputTokens",
          "totalTokens" = "traces"."totalTokens" + EXCLUDED."totalTokens",
          "totalCost" = "traces"."totalCost" + EXCLUDED."totalCost",
          "tags" = json_patch("traces"."tags", EXCLUDED."tags"),
          "metadata" = json_patch("traces"."metadata", EXCLUDED."metadata"),
          "updatedAt" = ?
      `).bind(
        randomUUID(),
        data.traceId,
        data.name ?? null,
        data.sessionId ?? null,
        data.userId ?? null,
        data.status ?? "unset",
        data.startTime.toISOString(),
        data.endTime?.toISOString() ?? null,
        data.durationMs ?? null,
        data.spanCount ?? 1,
        data.totalInputTokens ?? 0,
        data.totalOutputTokens ?? 0,
        data.totalTokens ?? 0,
        data.totalCost ?? 0,
        JSON.stringify(data.tags ?? {}),
        JSON.stringify(data.metadata ?? {}),
        now,
        now,
        now // 19th bind: "updatedAt" = ? in the DO UPDATE clause
      ).run();
    },
    /**
     * Insert span rows in chunked batches. INSERT OR IGNORE drops rows that
     * violate constraints (e.g. a duplicate "spanId", which is UNIQUE in the
     * schema), making redelivery idempotent.
     */
    batchInsertSpans: async (spans) => {
      if (spans.length === 0) return { count: 0 };
      const now = new Date().toISOString();
      const stmts = spans.map((s) => db.prepare(`
        INSERT OR IGNORE INTO "spans" (
          "id","traceId","spanId","parentSpanId","name","kind",
          "status","statusMessage","startTime","endTime","durationMs",
          "provider","model","promptTokens","completionTokens",
          "totalTokens","cost","configId","variantId","environmentId",
          "providerConfigId","requestId","source","input","output",
          "attributes","createdAt","updatedAt"
        ) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
      `).bind(
        randomUUID(),
        s.traceId,
        s.spanId,
        s.parentSpanId ?? null,
        s.name,
        s.kind ?? 1,
        s.status ?? 0,
        s.statusMessage ?? null,
        s.startTime.toISOString(),
        s.endTime?.toISOString() ?? null,
        s.durationMs ?? null,
        s.provider ?? null,
        s.model ?? null,
        s.promptTokens ?? 0,
        s.completionTokens ?? 0,
        s.totalTokens ?? 0,
        s.cost ?? 0,
        s.configId ?? null,
        s.variantId ?? null,
        s.environmentId ?? null,
        s.providerConfigId ?? null,
        s.requestId ?? null,
        s.source ?? "gateway",
        s.input != null ? JSON.stringify(s.input) : null,
        s.output != null ? JSON.stringify(s.output) : null,
        JSON.stringify(s.attributes ?? {}),
        now,
        now
      ));
      await runChunkedBatch(stmts);
      return { count: spans.length };
    },
    /** Insert span-event rows in chunked batches. */
    batchInsertSpanEvents: async (events) => {
      if (events.length === 0) return { count: 0 };
      const now = new Date().toISOString();
      const stmts = events.map((e) => db.prepare(`
        INSERT INTO "span_events" (
          "id","traceId","spanId","name","timestamp","attributes","createdAt"
        ) VALUES (?,?,?,?,?,?,?)
      `).bind(randomUUID(), e.traceId, e.spanId, e.name, e.timestamp.toISOString(), JSON.stringify(e.attributes ?? {}), now));
      await runChunkedBatch(stmts);
      return { count: events.length };
    },
    /** Paginated trace listing with optional filters (name is a LIKE match). */
    listTraces: async (params) => {
      const { limit = 50, offset = 0, sessionId, userId, status, name, startDate, endDate, tags } = params ?? {};
      const conditions = ["1=1"]; // keeps the WHERE clause valid with no filters
      const queryParams = [];
      if (sessionId) {
        conditions.push(`"sessionId" = ?`);
        queryParams.push(sessionId);
      }
      if (userId) {
        conditions.push(`"userId" = ?`);
        queryParams.push(userId);
      }
      if (status) {
        conditions.push(`"status" = ?`);
        queryParams.push(status);
      }
      if (name) {
        conditions.push(`"name" LIKE ?`);
        queryParams.push(`%${name}%`);
      }
      if (startDate) {
        conditions.push(`"startTime" >= ?`);
        queryParams.push(startDate.toISOString());
      }
      if (endDate) {
        conditions.push(`"startTime" <= ?`);
        queryParams.push(endDate.toISOString());
      }
      const tagFilter = buildTagFilters(tags);
      conditions.push(...tagFilter.conditions);
      queryParams.push(...tagFilter.params);
      const where = conditions.join(" AND ");
      const total = (await db.prepare(`SELECT COUNT(*) AS "total" FROM "traces" WHERE ${where}`).bind(...queryParams).first())?.total ?? 0;
      const { results } = await db.prepare(`SELECT * FROM "traces" WHERE ${where} ORDER BY "startTime" DESC LIMIT ? OFFSET ?`).bind(...queryParams, limit, offset).all();
      return {
        data: parseJsonRows(results),
        total,
        limit,
        offset
      };
    },
    /**
     * Fetch a trace plus its spans and span events (spans and events are
     * loaded in a single db.batch round trip). Returns undefined when the
     * trace does not exist.
     */
    getTraceWithSpans: async (traceId) => {
      const trace = await db.prepare(`SELECT * FROM "traces" WHERE "traceId" = ?`).bind(traceId).first();
      if (!trace) return void 0;
      const [spanResult, eventResult] = await db.batch([
        db.prepare(`SELECT * FROM "spans" WHERE "traceId" = ? ORDER BY "startTime" ASC`).bind(traceId),
        db.prepare(`SELECT * FROM "span_events" WHERE "traceId" = ? ORDER BY "timestamp" ASC`).bind(traceId)
      ]);
      return {
        trace: parseJsonColumns(trace),
        spans: parseJsonRows(spanResult.results ?? []),
        events: parseJsonRows(eventResult.results ?? [])
      };
    },
    /** Aggregate trace counts/durations/costs over a startTime range. */
    getTraceStats: async (params) => {
      const conditions = [`"startTime" >= ?`, `"startTime" <= ?`];
      const queryParams = [params.startDate.toISOString(), params.endDate.toISOString()];
      if (params.sessionId) {
        conditions.push(`"sessionId" = ?`);
        queryParams.push(params.sessionId);
      }
      if (params.userId) {
        conditions.push(`"userId" = ?`);
        queryParams.push(params.userId);
      }
      const where = conditions.join(" AND ");
      return db.prepare(`
        SELECT
          COUNT(*) AS "totalTraces",
          COALESCE(AVG("durationMs"), 0) AS "avgDurationMs",
          COUNT(CASE WHEN "status" = 'error' THEN 1 END) AS "errorCount",
          COALESCE(SUM("totalCost"), 0) AS "totalCost",
          COALESCE(SUM("totalTokens"), 0) AS "totalTokens",
          COALESCE(SUM("spanCount"), 0) AS "totalSpans"
        FROM "traces" WHERE ${where}
      `).bind(...queryParams).first();
    }
  };
}
453
/**
 * Create a Cloudflare D1-backed telemetry store.
 *
 * Usage:
 * ```ts
 * import { d1Store } from '@llmops/sdk/store/d1'
 *
 * export default {
 *   async fetch(request, env) {
 *     const ops = llmops({
 *       telemetry: d1Store(env.DB),
 *     })
 *   }
 * }
 * ```
 */
function createD1Store(db) {
  const llmRequests = createD1LLMRequestsStore(db);
  const traces = createD1TracesStore(db);
  // `_db` exposes the raw binding alongside the combined store methods.
  return { ...llmRequests, ...traces, _db: db };
}
476
+
477
+ //#endregion
478
+ //#region src/store/d1/migrations/000001_e44e1c4f.sql
479
+ // Bundled initial-schema migration (SQLite/D1 dialect): creates the
+ // llm_requests, span_events, spans, and traces tables used by the store above.
+ var _000001_e44e1c4f_default = "CREATE TABLE IF NOT EXISTS llm_requests (\n id TEXT PRIMARY KEY,\n \"requestId\" TEXT NOT NULL,\n \"configId\" TEXT,\n \"variantId\" TEXT,\n \"environmentId\" TEXT,\n \"providerConfigId\" TEXT,\n provider TEXT NOT NULL,\n model TEXT NOT NULL,\n \"promptTokens\" INTEGER NOT NULL DEFAULT 0,\n \"completionTokens\" INTEGER NOT NULL DEFAULT 0,\n \"totalTokens\" INTEGER NOT NULL DEFAULT 0,\n \"cachedTokens\" INTEGER NOT NULL DEFAULT 0,\n \"cacheCreationTokens\" INTEGER NOT NULL DEFAULT 0,\n cost INTEGER NOT NULL DEFAULT 0,\n \"cacheSavings\" INTEGER NOT NULL DEFAULT 0,\n \"inputCost\" INTEGER NOT NULL DEFAULT 0,\n \"outputCost\" INTEGER NOT NULL DEFAULT 0,\n endpoint TEXT NOT NULL,\n \"statusCode\" INTEGER NOT NULL,\n \"latencyMs\" INTEGER NOT NULL DEFAULT 0,\n \"isStreaming\" INTEGER NOT NULL DEFAULT 0,\n \"userId\" TEXT,\n tags TEXT NOT NULL DEFAULT '{}',\n \"guardrailResults\" TEXT,\n \"traceId\" TEXT,\n \"spanId\" TEXT,\n \"parentSpanId\" TEXT,\n \"sessionId\" TEXT,\n \"createdAt\" TEXT NOT NULL DEFAULT (datetime('now')),\n \"updatedAt\" TEXT NOT NULL DEFAULT (datetime('now'))\n);\n\nCREATE TABLE IF NOT EXISTS span_events (\n id TEXT PRIMARY KEY,\n \"traceId\" TEXT NOT NULL,\n \"spanId\" TEXT NOT NULL,\n name TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n attributes TEXT NOT NULL DEFAULT '{}',\n \"createdAt\" TEXT NOT NULL DEFAULT (datetime('now'))\n);\n\nCREATE TABLE IF NOT EXISTS spans (\n id TEXT PRIMARY KEY,\n \"traceId\" TEXT NOT NULL,\n \"spanId\" TEXT NOT NULL UNIQUE,\n \"parentSpanId\" TEXT,\n name TEXT NOT NULL,\n kind INTEGER NOT NULL DEFAULT 1,\n status INTEGER NOT NULL DEFAULT 0,\n \"statusMessage\" TEXT,\n \"startTime\" TEXT NOT NULL,\n \"endTime\" TEXT,\n \"durationMs\" INTEGER,\n provider TEXT,\n model TEXT,\n \"promptTokens\" INTEGER NOT NULL DEFAULT 0,\n \"completionTokens\" INTEGER NOT NULL DEFAULT 0,\n \"totalTokens\" INTEGER NOT NULL DEFAULT 0,\n cost INTEGER NOT NULL DEFAULT 0,\n \"configId\" TEXT,\n \"variantId\" 
TEXT,\n \"environmentId\" TEXT,\n \"providerConfigId\" TEXT,\n \"requestId\" TEXT,\n source TEXT NOT NULL DEFAULT 'gateway',\n input TEXT,\n output TEXT,\n attributes TEXT NOT NULL DEFAULT '{}',\n \"createdAt\" TEXT NOT NULL DEFAULT (datetime('now')),\n \"updatedAt\" TEXT NOT NULL DEFAULT (datetime('now'))\n);\n\nCREATE TABLE IF NOT EXISTS traces (\n id TEXT PRIMARY KEY,\n \"traceId\" TEXT NOT NULL UNIQUE,\n name TEXT,\n \"sessionId\" TEXT,\n \"userId\" TEXT,\n status TEXT NOT NULL DEFAULT 'unset',\n \"startTime\" TEXT NOT NULL,\n \"endTime\" TEXT,\n \"durationMs\" INTEGER,\n \"spanCount\" INTEGER NOT NULL DEFAULT 0,\n \"totalInputTokens\" INTEGER NOT NULL DEFAULT 0,\n \"totalOutputTokens\" INTEGER NOT NULL DEFAULT 0,\n \"totalTokens\" INTEGER NOT NULL DEFAULT 0,\n \"totalCost\" INTEGER NOT NULL DEFAULT 0,\n tags TEXT NOT NULL DEFAULT '{}',\n metadata TEXT NOT NULL DEFAULT '{}',\n \"createdAt\" TEXT NOT NULL DEFAULT (datetime('now')),\n \"updatedAt\" TEXT NOT NULL DEFAULT (datetime('now'))\n);\n";
480
+
481
+ //#endregion
482
+ //#region src/store/d1/migrations/index.ts
483
+ // Ordered [name, sql] pairs consumed by runD1Migrations; each name is
+ // recorded in _llmops_migrations so a migration runs at most once.
+ const migrations = [["000001_e44e1c4f", _000001_e44e1c4f_default]];
484
+
485
+ //#endregion
486
+ //#region src/store/d1/migrate.ts
487
/**
 * Run pending migrations against a D1 database.
 *
 * Creates the `_llmops_migrations` bookkeeping table on first use, then
 * applies every migration whose name is not yet recorded. Each migration's
 * statements plus its bookkeeping INSERT are submitted as one db.batch call.
 *
 * @param db - Cloudflare D1 database binding.
 * @param migrationList - Ordered [name, sql] pairs to consider; defaults to
 *   the bundled migrations (parameter added for testability/custom sets —
 *   callers passing only `db` behave exactly as before).
 * @returns `{ applied }` — names of migrations applied during this call.
 */
async function runD1Migrations(db, migrationList = migrations) {
  await db.prepare(`
    CREATE TABLE IF NOT EXISTS _llmops_migrations (
      name TEXT PRIMARY KEY,
      applied_at TEXT NOT NULL DEFAULT (datetime('now'))
    )
  `).run();
  const { results } = await db.prepare("SELECT name FROM _llmops_migrations ORDER BY name").all();
  const applied = new Set(results.map((r) => r.name));
  const newlyApplied = [];
  for (const [name, sql] of migrationList) {
    if (applied.has(name)) continue;
    // NOTE(review): naive statement split — would break on a ';' embedded in
    // a string literal or trigger body. The bundled migrations contain none,
    // but keep this in mind when authoring new migration SQL.
    const statements = sql
      .split(";")
      .map((s) => s.trim())
      .filter(Boolean)
      .map((s) => db.prepare(s));
    statements.push(db.prepare("INSERT INTO _llmops_migrations (name) VALUES (?)").bind(name));
    await db.batch(statements);
    newlyApplied.push(name);
  }
  return { applied: newlyApplied };
}
509
+
510
+ //#endregion
511
+ export { createD1Store as d1Store, runD1Migrations };