tracia 0.2.0 → 0.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +457 -3
- package/dist/index.d.ts +457 -3
- package/dist/index.js +1155 -2
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1144 -2
- package/dist/index.mjs.map +1 -1
- package/package.json +28 -3
package/dist/index.js
CHANGED
|
@@ -1,7 +1,9 @@
|
|
|
1
1
|
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
2
3
|
var __defProp = Object.defineProperty;
|
|
3
4
|
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
5
|
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
5
7
|
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
8
|
var __export = (target, all) => {
|
|
7
9
|
for (var name in all)
|
|
@@ -15,12 +17,21 @@ var __copyProps = (to, from, except, desc) => {
|
|
|
15
17
|
}
|
|
16
18
|
return to;
|
|
17
19
|
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
18
28
|
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
29
|
|
|
20
30
|
// src/index.ts
|
|
21
31
|
var index_exports = {};
|
|
22
32
|
__export(index_exports, {
|
|
23
33
|
Eval: () => Eval,
|
|
34
|
+
LLMProvider: () => LLMProvider,
|
|
24
35
|
Tracia: () => Tracia,
|
|
25
36
|
TraciaError: () => TraciaError,
|
|
26
37
|
TraciaErrorCode: () => TraciaErrorCode
|
|
@@ -49,12 +60,22 @@ var TraciaErrorCode = /* @__PURE__ */ ((TraciaErrorCode2) => {
|
|
|
49
60
|
TraciaErrorCode2["INVALID_REQUEST"] = "INVALID_REQUEST";
|
|
50
61
|
TraciaErrorCode2["NETWORK_ERROR"] = "NETWORK_ERROR";
|
|
51
62
|
TraciaErrorCode2["TIMEOUT"] = "TIMEOUT";
|
|
63
|
+
TraciaErrorCode2["ABORTED"] = "ABORTED";
|
|
52
64
|
TraciaErrorCode2["UNKNOWN"] = "UNKNOWN";
|
|
65
|
+
TraciaErrorCode2["MISSING_PROVIDER_SDK"] = "MISSING_PROVIDER_SDK";
|
|
66
|
+
TraciaErrorCode2["MISSING_PROVIDER_API_KEY"] = "MISSING_PROVIDER_API_KEY";
|
|
67
|
+
TraciaErrorCode2["UNSUPPORTED_MODEL"] = "UNSUPPORTED_MODEL";
|
|
53
68
|
return TraciaErrorCode2;
|
|
54
69
|
})(TraciaErrorCode || {});
|
|
70
|
+
var LLMProvider = /* @__PURE__ */ ((LLMProvider2) => {
|
|
71
|
+
LLMProvider2["OPENAI"] = "openai";
|
|
72
|
+
LLMProvider2["ANTHROPIC"] = "anthropic";
|
|
73
|
+
LLMProvider2["GOOGLE"] = "google";
|
|
74
|
+
return LLMProvider2;
|
|
75
|
+
})(LLMProvider || {});
|
|
55
76
|
|
|
56
77
|
// src/client.ts
|
|
57
|
-
var SDK_VERSION = "0.2.
|
|
78
|
+
var SDK_VERSION = "0.2.3";
|
|
58
79
|
var DEFAULT_TIMEOUT_MS = 12e4;
|
|
59
80
|
function mapApiErrorCodeToTraciaErrorCode(apiCode) {
|
|
60
81
|
const codeMap = {
|
|
@@ -203,10 +224,510 @@ var Prompts = class {
|
|
|
203
224
|
}
|
|
204
225
|
};
|
|
205
226
|
|
|
227
|
+
// src/models.ts
|
|
228
|
+
var MODEL_TO_PROVIDER = {
|
|
229
|
+
// OpenAI
|
|
230
|
+
"chatgpt-4o-latest": "openai" /* OPENAI */,
|
|
231
|
+
"gpt-3.5-turbo": "openai" /* OPENAI */,
|
|
232
|
+
"gpt-3.5-turbo-0125": "openai" /* OPENAI */,
|
|
233
|
+
"gpt-3.5-turbo-1106": "openai" /* OPENAI */,
|
|
234
|
+
"gpt-3.5-turbo-16k": "openai" /* OPENAI */,
|
|
235
|
+
"gpt-4": "openai" /* OPENAI */,
|
|
236
|
+
"gpt-4-0125-preview": "openai" /* OPENAI */,
|
|
237
|
+
"gpt-4-1106-preview": "openai" /* OPENAI */,
|
|
238
|
+
"gpt-4-turbo": "openai" /* OPENAI */,
|
|
239
|
+
"gpt-4-turbo-2024-04-09": "openai" /* OPENAI */,
|
|
240
|
+
"gpt-4-turbo-preview": "openai" /* OPENAI */,
|
|
241
|
+
"gpt-4.1": "openai" /* OPENAI */,
|
|
242
|
+
"gpt-4.1-2025-04-14": "openai" /* OPENAI */,
|
|
243
|
+
"gpt-4.1-mini": "openai" /* OPENAI */,
|
|
244
|
+
"gpt-4.1-mini-2025-04-14": "openai" /* OPENAI */,
|
|
245
|
+
"gpt-4.1-nano": "openai" /* OPENAI */,
|
|
246
|
+
"gpt-4.1-nano-2025-04-14": "openai" /* OPENAI */,
|
|
247
|
+
"gpt-4o": "openai" /* OPENAI */,
|
|
248
|
+
"gpt-4o-2024-05-13": "openai" /* OPENAI */,
|
|
249
|
+
"gpt-4o-2024-08-06": "openai" /* OPENAI */,
|
|
250
|
+
"gpt-4o-2024-11-20": "openai" /* OPENAI */,
|
|
251
|
+
"gpt-4o-mini": "openai" /* OPENAI */,
|
|
252
|
+
"gpt-4o-mini-2024-07-18": "openai" /* OPENAI */,
|
|
253
|
+
"gpt-4o-mini-search-preview": "openai" /* OPENAI */,
|
|
254
|
+
"gpt-4o-mini-search-preview-2025-03-11": "openai" /* OPENAI */,
|
|
255
|
+
"gpt-4o-search-preview": "openai" /* OPENAI */,
|
|
256
|
+
"gpt-4o-search-preview-2025-03-11": "openai" /* OPENAI */,
|
|
257
|
+
"gpt-5": "openai" /* OPENAI */,
|
|
258
|
+
"gpt-5.1": "openai" /* OPENAI */,
|
|
259
|
+
"gpt-5.1-2025-11-13": "openai" /* OPENAI */,
|
|
260
|
+
"gpt-5.1-chat-latest": "openai" /* OPENAI */,
|
|
261
|
+
"gpt-5.2": "openai" /* OPENAI */,
|
|
262
|
+
"gpt-5.2-2025-12-11": "openai" /* OPENAI */,
|
|
263
|
+
"gpt-5.2-chat-latest": "openai" /* OPENAI */,
|
|
264
|
+
"gpt-5.2-pro": "openai" /* OPENAI */,
|
|
265
|
+
"gpt-5.2-pro-2025-12-11": "openai" /* OPENAI */,
|
|
266
|
+
"gpt-5-pro": "openai" /* OPENAI */,
|
|
267
|
+
"gpt-5-pro-2025-10-06": "openai" /* OPENAI */,
|
|
268
|
+
"gpt-5-2025-08-07": "openai" /* OPENAI */,
|
|
269
|
+
"gpt-5-chat-latest": "openai" /* OPENAI */,
|
|
270
|
+
"gpt-5-codex": "openai" /* OPENAI */,
|
|
271
|
+
"gpt-5.1-codex": "openai" /* OPENAI */,
|
|
272
|
+
"gpt-5.1-codex-max": "openai" /* OPENAI */,
|
|
273
|
+
"gpt-5.1-codex-mini": "openai" /* OPENAI */,
|
|
274
|
+
"gpt-5-mini": "openai" /* OPENAI */,
|
|
275
|
+
"gpt-5-mini-2025-08-07": "openai" /* OPENAI */,
|
|
276
|
+
"gpt-5-nano": "openai" /* OPENAI */,
|
|
277
|
+
"gpt-5-nano-2025-08-07": "openai" /* OPENAI */,
|
|
278
|
+
"o1": "openai" /* OPENAI */,
|
|
279
|
+
"o1-2024-12-17": "openai" /* OPENAI */,
|
|
280
|
+
"o1-pro": "openai" /* OPENAI */,
|
|
281
|
+
"o1-pro-2025-03-19": "openai" /* OPENAI */,
|
|
282
|
+
"o3": "openai" /* OPENAI */,
|
|
283
|
+
"o3-2025-04-16": "openai" /* OPENAI */,
|
|
284
|
+
"o3-mini": "openai" /* OPENAI */,
|
|
285
|
+
"o3-mini-2025-01-31": "openai" /* OPENAI */,
|
|
286
|
+
"o4-mini": "openai" /* OPENAI */,
|
|
287
|
+
"o4-mini-2025-04-16": "openai" /* OPENAI */,
|
|
288
|
+
// Anthropic
|
|
289
|
+
"claude-haiku-4-5-20251001": "anthropic" /* ANTHROPIC */,
|
|
290
|
+
"claude-haiku-4-5": "anthropic" /* ANTHROPIC */,
|
|
291
|
+
"claude-3-7-sonnet-20250219": "anthropic" /* ANTHROPIC */,
|
|
292
|
+
"claude-3-haiku-20240307": "anthropic" /* ANTHROPIC */,
|
|
293
|
+
"claude-3-opus-20240229": "anthropic" /* ANTHROPIC */,
|
|
294
|
+
"claude-4-opus-20250514": "anthropic" /* ANTHROPIC */,
|
|
295
|
+
"claude-4-sonnet-20250514": "anthropic" /* ANTHROPIC */,
|
|
296
|
+
"claude-sonnet-4-5": "anthropic" /* ANTHROPIC */,
|
|
297
|
+
"claude-sonnet-4-5-20250929": "anthropic" /* ANTHROPIC */,
|
|
298
|
+
"claude-opus-4-1": "anthropic" /* ANTHROPIC */,
|
|
299
|
+
"claude-opus-4-1-20250805": "anthropic" /* ANTHROPIC */,
|
|
300
|
+
"claude-opus-4-20250514": "anthropic" /* ANTHROPIC */,
|
|
301
|
+
"claude-opus-4-5-20251101": "anthropic" /* ANTHROPIC */,
|
|
302
|
+
"claude-opus-4-5": "anthropic" /* ANTHROPIC */,
|
|
303
|
+
"claude-sonnet-4-20250514": "anthropic" /* ANTHROPIC */,
|
|
304
|
+
// Google
|
|
305
|
+
"gemini-2.0-flash": "google" /* GOOGLE */,
|
|
306
|
+
"gemini-2.0-flash-001": "google" /* GOOGLE */,
|
|
307
|
+
"gemini-2.0-flash-exp": "google" /* GOOGLE */,
|
|
308
|
+
"gemini-2.0-flash-lite": "google" /* GOOGLE */,
|
|
309
|
+
"gemini-2.0-flash-lite-001": "google" /* GOOGLE */,
|
|
310
|
+
"gemini-2.5-flash": "google" /* GOOGLE */,
|
|
311
|
+
"gemini-2.5-flash-lite": "google" /* GOOGLE */,
|
|
312
|
+
"gemini-2.5-flash-lite-preview-09-2025": "google" /* GOOGLE */,
|
|
313
|
+
"gemini-2.5-flash-preview-09-2025": "google" /* GOOGLE */,
|
|
314
|
+
"gemini-2.5-pro": "google" /* GOOGLE */,
|
|
315
|
+
"gemini-3-pro-preview": "google" /* GOOGLE */,
|
|
316
|
+
"gemini-3-flash-preview": "google" /* GOOGLE */
|
|
317
|
+
};
|
|
318
|
+
function getProviderForModel(modelId) {
|
|
319
|
+
return MODEL_TO_PROVIDER[modelId];
|
|
320
|
+
}
|
|
321
|
+
|
|
322
|
+
// src/providers/ai-sdk.ts
|
|
323
|
+
var aiSdk = null;
|
|
324
|
+
var openaiProvider = null;
|
|
325
|
+
var anthropicProvider = null;
|
|
326
|
+
var googleProvider = null;
|
|
327
|
+
async function loadAISdk() {
|
|
328
|
+
if (aiSdk) return aiSdk;
|
|
329
|
+
try {
|
|
330
|
+
aiSdk = await import("ai");
|
|
331
|
+
return aiSdk;
|
|
332
|
+
} catch {
|
|
333
|
+
throw new TraciaError(
|
|
334
|
+
"MISSING_PROVIDER_SDK" /* MISSING_PROVIDER_SDK */,
|
|
335
|
+
"Vercel AI SDK not installed. Run: npm install ai"
|
|
336
|
+
);
|
|
337
|
+
}
|
|
338
|
+
}
|
|
339
|
+
async function loadOpenAIProvider() {
|
|
340
|
+
if (openaiProvider) return openaiProvider;
|
|
341
|
+
try {
|
|
342
|
+
openaiProvider = await import("@ai-sdk/openai");
|
|
343
|
+
return openaiProvider;
|
|
344
|
+
} catch {
|
|
345
|
+
throw new TraciaError(
|
|
346
|
+
"MISSING_PROVIDER_SDK" /* MISSING_PROVIDER_SDK */,
|
|
347
|
+
"OpenAI provider not installed. Run: npm install @ai-sdk/openai"
|
|
348
|
+
);
|
|
349
|
+
}
|
|
350
|
+
}
|
|
351
|
+
async function loadAnthropicProvider() {
|
|
352
|
+
if (anthropicProvider) return anthropicProvider;
|
|
353
|
+
try {
|
|
354
|
+
anthropicProvider = await import("@ai-sdk/anthropic");
|
|
355
|
+
return anthropicProvider;
|
|
356
|
+
} catch {
|
|
357
|
+
throw new TraciaError(
|
|
358
|
+
"MISSING_PROVIDER_SDK" /* MISSING_PROVIDER_SDK */,
|
|
359
|
+
"Anthropic provider not installed. Run: npm install @ai-sdk/anthropic"
|
|
360
|
+
);
|
|
361
|
+
}
|
|
362
|
+
}
|
|
363
|
+
async function loadGoogleProvider() {
|
|
364
|
+
if (googleProvider) return googleProvider;
|
|
365
|
+
try {
|
|
366
|
+
googleProvider = await import("@ai-sdk/google");
|
|
367
|
+
return googleProvider;
|
|
368
|
+
} catch {
|
|
369
|
+
throw new TraciaError(
|
|
370
|
+
"MISSING_PROVIDER_SDK" /* MISSING_PROVIDER_SDK */,
|
|
371
|
+
"Google provider not installed. Run: npm install @ai-sdk/google"
|
|
372
|
+
);
|
|
373
|
+
}
|
|
374
|
+
}
|
|
375
|
+
function combineAbortSignals(userSignal, timeoutMs) {
|
|
376
|
+
if (!timeoutMs && !userSignal) return void 0;
|
|
377
|
+
if (timeoutMs && !userSignal) return AbortSignal.timeout(timeoutMs);
|
|
378
|
+
if (!timeoutMs && userSignal) return userSignal;
|
|
379
|
+
const timeoutSignal = AbortSignal.timeout(timeoutMs);
|
|
380
|
+
const controller = new AbortController();
|
|
381
|
+
const cleanup = () => {
|
|
382
|
+
userSignal.removeEventListener("abort", onAbort);
|
|
383
|
+
timeoutSignal.removeEventListener("abort", onAbort);
|
|
384
|
+
};
|
|
385
|
+
const onAbort = () => {
|
|
386
|
+
cleanup();
|
|
387
|
+
controller.abort();
|
|
388
|
+
};
|
|
389
|
+
userSignal.addEventListener("abort", onAbort, { once: true });
|
|
390
|
+
timeoutSignal.addEventListener("abort", onAbort, { once: true });
|
|
391
|
+
return controller.signal;
|
|
392
|
+
}
|
|
393
|
+
function sanitizeErrorMessage(message) {
|
|
394
|
+
return message.replace(/\b(sk-|tr_|key-|api[_-]?key[=:\s]+)[a-zA-Z0-9_-]{10,}\b/gi, "[REDACTED]").replace(/Bearer\s+[a-zA-Z0-9_.-]+/gi, "Bearer [REDACTED]").replace(/Basic\s+[a-zA-Z0-9+/=]{20,}/gi, "Basic [REDACTED]").replace(/(authorization[=:\s]+)[^\s,}]+/gi, "$1[REDACTED]");
|
|
395
|
+
}
|
|
396
|
+
function resolveProvider(model, explicitProvider) {
|
|
397
|
+
if (explicitProvider) return explicitProvider;
|
|
398
|
+
const fromRegistry = getProviderForModel(model);
|
|
399
|
+
if (fromRegistry) return fromRegistry;
|
|
400
|
+
if (model.startsWith("gpt-") || model.startsWith("o1") || model.startsWith("o3") || model.startsWith("o4")) {
|
|
401
|
+
return "openai" /* OPENAI */;
|
|
402
|
+
}
|
|
403
|
+
if (model.startsWith("claude-")) {
|
|
404
|
+
return "anthropic" /* ANTHROPIC */;
|
|
405
|
+
}
|
|
406
|
+
if (model.startsWith("gemini-")) {
|
|
407
|
+
return "google" /* GOOGLE */;
|
|
408
|
+
}
|
|
409
|
+
throw new TraciaError(
|
|
410
|
+
"UNSUPPORTED_MODEL" /* UNSUPPORTED_MODEL */,
|
|
411
|
+
`Cannot determine provider for model: ${model}. Specify provider explicitly.`
|
|
412
|
+
);
|
|
413
|
+
}
|
|
414
|
+
async function getLanguageModel(provider, model, apiKey) {
|
|
415
|
+
switch (provider) {
|
|
416
|
+
case "openai" /* OPENAI */: {
|
|
417
|
+
const { createOpenAI } = await loadOpenAIProvider();
|
|
418
|
+
const openai = createOpenAI({ apiKey });
|
|
419
|
+
return openai(model);
|
|
420
|
+
}
|
|
421
|
+
case "anthropic" /* ANTHROPIC */: {
|
|
422
|
+
const { createAnthropic } = await loadAnthropicProvider();
|
|
423
|
+
const anthropic = createAnthropic({ apiKey });
|
|
424
|
+
return anthropic(model);
|
|
425
|
+
}
|
|
426
|
+
case "google" /* GOOGLE */: {
|
|
427
|
+
const { createGoogleGenerativeAI } = await loadGoogleProvider();
|
|
428
|
+
const google = createGoogleGenerativeAI({ apiKey });
|
|
429
|
+
return google(model);
|
|
430
|
+
}
|
|
431
|
+
default:
|
|
432
|
+
throw new TraciaError(
|
|
433
|
+
"UNSUPPORTED_MODEL" /* UNSUPPORTED_MODEL */,
|
|
434
|
+
`Unsupported provider: ${provider}`
|
|
435
|
+
);
|
|
436
|
+
}
|
|
437
|
+
}
|
|
438
|
+
function convertMessages(messages) {
|
|
439
|
+
return messages.map((msg) => {
|
|
440
|
+
if (msg.role === "tool") {
|
|
441
|
+
return {
|
|
442
|
+
role: "tool",
|
|
443
|
+
content: [{
|
|
444
|
+
type: "tool-result",
|
|
445
|
+
toolCallId: msg.toolCallId,
|
|
446
|
+
toolName: msg.toolName ?? msg.toolCallId,
|
|
447
|
+
// Use toolName, fallback to toolCallId
|
|
448
|
+
output: { type: "text", value: msg.content }
|
|
449
|
+
}]
|
|
450
|
+
};
|
|
451
|
+
}
|
|
452
|
+
if (msg.role === "assistant" && Array.isArray(msg.content)) {
|
|
453
|
+
if (msg.content.length === 0) {
|
|
454
|
+
return { role: "assistant", content: "" };
|
|
455
|
+
}
|
|
456
|
+
const convertedContent = msg.content.map((part) => {
|
|
457
|
+
if (part.type === "tool_call") {
|
|
458
|
+
const toolCall = part;
|
|
459
|
+
return {
|
|
460
|
+
type: "tool-call",
|
|
461
|
+
toolCallId: toolCall.id,
|
|
462
|
+
toolName: toolCall.name,
|
|
463
|
+
input: toolCall.arguments
|
|
464
|
+
};
|
|
465
|
+
}
|
|
466
|
+
return part;
|
|
467
|
+
});
|
|
468
|
+
return {
|
|
469
|
+
role: "assistant",
|
|
470
|
+
content: convertedContent
|
|
471
|
+
};
|
|
472
|
+
}
|
|
473
|
+
return {
|
|
474
|
+
role: msg.role,
|
|
475
|
+
content: typeof msg.content === "string" ? msg.content : msg.content.map((b) => b.type === "text" ? b.text : "").join("")
|
|
476
|
+
};
|
|
477
|
+
});
|
|
478
|
+
}
|
|
479
|
+
async function convertTools(tools) {
|
|
480
|
+
if (!tools || tools.length === 0) return void 0;
|
|
481
|
+
const { tool, jsonSchema } = await loadAISdk();
|
|
482
|
+
const result = {};
|
|
483
|
+
for (const toolDef of tools) {
|
|
484
|
+
result[toolDef.name] = tool({
|
|
485
|
+
description: toolDef.description,
|
|
486
|
+
inputSchema: jsonSchema(toolDef.parameters),
|
|
487
|
+
execute: async (args) => args
|
|
488
|
+
// No-op execute function
|
|
489
|
+
});
|
|
490
|
+
}
|
|
491
|
+
return result;
|
|
492
|
+
}
|
|
493
|
+
function convertToolChoice(toolChoice) {
|
|
494
|
+
if (!toolChoice) return void 0;
|
|
495
|
+
if (toolChoice === "auto") return "auto";
|
|
496
|
+
if (toolChoice === "none") return "none";
|
|
497
|
+
if (toolChoice === "required") return "required";
|
|
498
|
+
return { type: "tool", toolName: toolChoice.tool };
|
|
499
|
+
}
|
|
500
|
+
function parseFinishReason(reason) {
|
|
501
|
+
if (reason === "tool-calls") return "tool_calls";
|
|
502
|
+
if (reason === "length") return "max_tokens";
|
|
503
|
+
return "stop";
|
|
504
|
+
}
|
|
505
|
+
function extractToolCalls(toolCalls) {
|
|
506
|
+
if (!toolCalls) return [];
|
|
507
|
+
return toolCalls.filter((tc) => tc.toolCallId && tc.toolName).map((tc) => ({
|
|
508
|
+
id: tc.toolCallId,
|
|
509
|
+
name: tc.toolName,
|
|
510
|
+
arguments: tc.input ?? {}
|
|
511
|
+
}));
|
|
512
|
+
}
|
|
513
|
+
async function complete(options) {
|
|
514
|
+
const { generateText } = await loadAISdk();
|
|
515
|
+
const provider = resolveProvider(options.model, options.provider);
|
|
516
|
+
const model = await getLanguageModel(provider, options.model, options.apiKey);
|
|
517
|
+
const convertedMessages = convertMessages(options.messages);
|
|
518
|
+
const convertedTools = await convertTools(options.tools);
|
|
519
|
+
const convertedToolChoice = convertToolChoice(options.toolChoice);
|
|
520
|
+
try {
|
|
521
|
+
const result = await generateText({
|
|
522
|
+
model,
|
|
523
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
524
|
+
messages: convertedMessages,
|
|
525
|
+
temperature: options.temperature,
|
|
526
|
+
maxOutputTokens: options.maxOutputTokens,
|
|
527
|
+
topP: options.topP,
|
|
528
|
+
stopSequences: options.stopSequences,
|
|
529
|
+
tools: convertedTools,
|
|
530
|
+
toolChoice: convertedToolChoice,
|
|
531
|
+
abortSignal: options.timeoutMs ? AbortSignal.timeout(options.timeoutMs) : void 0
|
|
532
|
+
});
|
|
533
|
+
const toolCalls = extractToolCalls(result.toolCalls);
|
|
534
|
+
return {
|
|
535
|
+
text: result.text,
|
|
536
|
+
inputTokens: result.usage?.inputTokens ?? 0,
|
|
537
|
+
outputTokens: result.usage?.outputTokens ?? 0,
|
|
538
|
+
totalTokens: result.usage?.totalTokens ?? 0,
|
|
539
|
+
toolCalls,
|
|
540
|
+
finishReason: parseFinishReason(result.finishReason),
|
|
541
|
+
provider
|
|
542
|
+
};
|
|
543
|
+
} catch (error) {
|
|
544
|
+
if (error instanceof TraciaError) throw error;
|
|
545
|
+
const rawMessage = error instanceof Error ? error.message : String(error);
|
|
546
|
+
throw new TraciaError(
|
|
547
|
+
"PROVIDER_ERROR" /* PROVIDER_ERROR */,
|
|
548
|
+
`${provider} error: ${sanitizeErrorMessage(rawMessage)}`
|
|
549
|
+
);
|
|
550
|
+
}
|
|
551
|
+
}
|
|
552
|
+
function stream(options) {
|
|
553
|
+
const provider = resolveProvider(options.model, options.provider);
|
|
554
|
+
let resolveResult;
|
|
555
|
+
let rejectResult;
|
|
556
|
+
const resultPromise = new Promise((resolve, reject) => {
|
|
557
|
+
resolveResult = resolve;
|
|
558
|
+
rejectResult = reject;
|
|
559
|
+
});
|
|
560
|
+
async function* generateChunks() {
|
|
561
|
+
try {
|
|
562
|
+
const { streamText } = await loadAISdk();
|
|
563
|
+
const model = await getLanguageModel(provider, options.model, options.apiKey);
|
|
564
|
+
const convertedMessages = convertMessages(options.messages);
|
|
565
|
+
const convertedTools = await convertTools(options.tools);
|
|
566
|
+
const convertedToolChoice = convertToolChoice(options.toolChoice);
|
|
567
|
+
const abortSignal = combineAbortSignals(options.signal, options.timeoutMs);
|
|
568
|
+
const result = streamText({
|
|
569
|
+
model,
|
|
570
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
571
|
+
messages: convertedMessages,
|
|
572
|
+
temperature: options.temperature,
|
|
573
|
+
maxOutputTokens: options.maxOutputTokens,
|
|
574
|
+
topP: options.topP,
|
|
575
|
+
stopSequences: options.stopSequences,
|
|
576
|
+
tools: convertedTools,
|
|
577
|
+
toolChoice: convertedToolChoice,
|
|
578
|
+
abortSignal
|
|
579
|
+
});
|
|
580
|
+
for await (const chunk of result.textStream) {
|
|
581
|
+
yield chunk;
|
|
582
|
+
}
|
|
583
|
+
const [text, usageData, toolCallsData, finishReasonData] = await Promise.all([
|
|
584
|
+
result.text,
|
|
585
|
+
result.usage,
|
|
586
|
+
result.toolCalls,
|
|
587
|
+
result.finishReason
|
|
588
|
+
]);
|
|
589
|
+
const toolCalls = extractToolCalls(toolCallsData);
|
|
590
|
+
resolveResult({
|
|
591
|
+
text,
|
|
592
|
+
inputTokens: usageData?.inputTokens ?? 0,
|
|
593
|
+
outputTokens: usageData?.outputTokens ?? 0,
|
|
594
|
+
totalTokens: usageData?.totalTokens ?? 0,
|
|
595
|
+
toolCalls,
|
|
596
|
+
finishReason: parseFinishReason(finishReasonData),
|
|
597
|
+
provider
|
|
598
|
+
});
|
|
599
|
+
} catch (error) {
|
|
600
|
+
if (error instanceof Error && error.name === "AbortError") {
|
|
601
|
+
const traciaError2 = new TraciaError("ABORTED" /* ABORTED */, "Stream aborted");
|
|
602
|
+
rejectResult(traciaError2);
|
|
603
|
+
throw traciaError2;
|
|
604
|
+
}
|
|
605
|
+
const rawMessage = error instanceof Error ? error.message : String(error);
|
|
606
|
+
const traciaError = error instanceof TraciaError ? error : new TraciaError(
|
|
607
|
+
"PROVIDER_ERROR" /* PROVIDER_ERROR */,
|
|
608
|
+
`${provider} error: ${sanitizeErrorMessage(rawMessage)}`
|
|
609
|
+
);
|
|
610
|
+
rejectResult(traciaError);
|
|
611
|
+
throw traciaError;
|
|
612
|
+
}
|
|
613
|
+
}
|
|
614
|
+
return {
|
|
615
|
+
chunks: generateChunks(),
|
|
616
|
+
result: resultPromise
|
|
617
|
+
};
|
|
618
|
+
}
|
|
619
|
+
function responsesStream(options) {
|
|
620
|
+
let resolveResult;
|
|
621
|
+
let rejectResult;
|
|
622
|
+
const resultPromise = new Promise((resolve, reject) => {
|
|
623
|
+
resolveResult = resolve;
|
|
624
|
+
rejectResult = reject;
|
|
625
|
+
});
|
|
626
|
+
async function* generateEvents() {
|
|
627
|
+
let fullText = "";
|
|
628
|
+
let usage = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
|
|
629
|
+
const outputItems = [];
|
|
630
|
+
const toolCalls = [];
|
|
631
|
+
let aborted = false;
|
|
632
|
+
try {
|
|
633
|
+
const { createOpenAI } = await loadOpenAIProvider();
|
|
634
|
+
const openai = createOpenAI({ apiKey: options.apiKey });
|
|
635
|
+
const model = openai.responses(options.model);
|
|
636
|
+
const { streamText } = await loadAISdk();
|
|
637
|
+
const convertedTools = options.tools ? await convertTools(options.tools) : void 0;
|
|
638
|
+
const abortSignal = combineAbortSignals(options.signal, options.timeoutMs);
|
|
639
|
+
const result = streamText({
|
|
640
|
+
model,
|
|
641
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
642
|
+
messages: options.input,
|
|
643
|
+
maxOutputTokens: options.maxOutputTokens,
|
|
644
|
+
tools: convertedTools,
|
|
645
|
+
abortSignal
|
|
646
|
+
});
|
|
647
|
+
for await (const chunk of result.textStream) {
|
|
648
|
+
fullText += chunk;
|
|
649
|
+
yield { type: "text_delta", data: chunk };
|
|
650
|
+
}
|
|
651
|
+
const [usageData, toolCallsData] = await Promise.all([
|
|
652
|
+
result.usage,
|
|
653
|
+
result.toolCalls
|
|
654
|
+
]);
|
|
655
|
+
usage = {
|
|
656
|
+
inputTokens: usageData?.inputTokens ?? 0,
|
|
657
|
+
outputTokens: usageData?.outputTokens ?? 0,
|
|
658
|
+
totalTokens: usageData?.totalTokens ?? 0
|
|
659
|
+
};
|
|
660
|
+
if (toolCallsData) {
|
|
661
|
+
for (const tc of toolCallsData) {
|
|
662
|
+
if (!tc.toolCallId || !tc.toolName) continue;
|
|
663
|
+
const toolCall = {
|
|
664
|
+
id: tc.toolCallId,
|
|
665
|
+
callId: tc.toolCallId,
|
|
666
|
+
name: tc.toolName,
|
|
667
|
+
arguments: tc.input ?? {}
|
|
668
|
+
};
|
|
669
|
+
toolCalls.push(toolCall);
|
|
670
|
+
yield {
|
|
671
|
+
type: "tool_call",
|
|
672
|
+
id: toolCall.id,
|
|
673
|
+
callId: toolCall.callId,
|
|
674
|
+
name: toolCall.name,
|
|
675
|
+
arguments: toolCall.arguments
|
|
676
|
+
};
|
|
677
|
+
}
|
|
678
|
+
}
|
|
679
|
+
if (fullText) {
|
|
680
|
+
yield { type: "text", data: fullText };
|
|
681
|
+
outputItems.push({ type: "message", content: fullText });
|
|
682
|
+
}
|
|
683
|
+
yield { type: "done", usage };
|
|
684
|
+
resolveResult({
|
|
685
|
+
text: fullText,
|
|
686
|
+
usage,
|
|
687
|
+
outputItems,
|
|
688
|
+
toolCalls,
|
|
689
|
+
aborted
|
|
690
|
+
});
|
|
691
|
+
} catch (error) {
|
|
692
|
+
if (error instanceof Error && error.name === "AbortError") {
|
|
693
|
+
aborted = true;
|
|
694
|
+
resolveResult({
|
|
695
|
+
text: fullText,
|
|
696
|
+
usage,
|
|
697
|
+
outputItems,
|
|
698
|
+
toolCalls,
|
|
699
|
+
aborted
|
|
700
|
+
});
|
|
701
|
+
return;
|
|
702
|
+
}
|
|
703
|
+
const rawMessage = error instanceof Error ? error.message : String(error);
|
|
704
|
+
const traciaError = new TraciaError(
|
|
705
|
+
"PROVIDER_ERROR" /* PROVIDER_ERROR */,
|
|
706
|
+
`OpenAI Responses API error: ${sanitizeErrorMessage(rawMessage)}`
|
|
707
|
+
);
|
|
708
|
+
rejectResult(traciaError);
|
|
709
|
+
throw traciaError;
|
|
710
|
+
}
|
|
711
|
+
}
|
|
712
|
+
return {
|
|
713
|
+
events: generateEvents(),
|
|
714
|
+
result: resultPromise
|
|
715
|
+
};
|
|
716
|
+
}
|
|
717
|
+
|
|
206
718
|
// src/traces.ts
|
|
719
|
+
var INTERNAL_SET_PENDING_TRACES = /* @__PURE__ */ Symbol("setPendingTracesMap");
|
|
207
720
|
var Traces = class {
|
|
208
721
|
constructor(client) {
|
|
209
722
|
this.client = client;
|
|
723
|
+
this.pendingTraces = null;
|
|
724
|
+
}
|
|
725
|
+
/** @internal */
|
|
726
|
+
[INTERNAL_SET_PENDING_TRACES](map) {
|
|
727
|
+
this.pendingTraces = map;
|
|
728
|
+
}
|
|
729
|
+
async create(payload) {
|
|
730
|
+
return this.client.post("/api/v1/traces", payload);
|
|
210
731
|
}
|
|
211
732
|
async get(traceId) {
|
|
212
733
|
return this.client.get(`/api/v1/traces/${encodeURIComponent(traceId)}`);
|
|
@@ -245,6 +766,12 @@ var Traces = class {
|
|
|
245
766
|
return this.client.get(path);
|
|
246
767
|
}
|
|
247
768
|
async evaluate(traceId, options) {
|
|
769
|
+
if (this.pendingTraces) {
|
|
770
|
+
const pendingTrace = this.pendingTraces.get(traceId);
|
|
771
|
+
if (pendingTrace) {
|
|
772
|
+
await pendingTrace;
|
|
773
|
+
}
|
|
774
|
+
}
|
|
248
775
|
if (typeof options.value !== "number") {
|
|
249
776
|
throw new TraciaError(
|
|
250
777
|
"INVALID_REQUEST" /* INVALID_REQUEST */,
|
|
@@ -265,28 +792,654 @@ var Traces = class {
|
|
|
265
792
|
}
|
|
266
793
|
};
|
|
267
794
|
|
|
795
|
+
// src/utils.ts
|
|
796
|
+
var import_crypto = __toESM(require("crypto"));
|
|
797
|
+
var TRACE_ID_REGEX = /^tr_[a-f0-9]{16}$/i;
|
|
798
|
+
function generateTraceId() {
|
|
799
|
+
const randomPart = import_crypto.default.randomBytes(8).toString("hex");
|
|
800
|
+
return `tr_${randomPart}`;
|
|
801
|
+
}
|
|
802
|
+
function isValidTraceIdFormat(traceId) {
|
|
803
|
+
return TRACE_ID_REGEX.test(traceId);
|
|
804
|
+
}
|
|
805
|
+
|
|
268
806
|
// src/index.ts
|
|
269
807
|
var Eval = {
|
|
270
808
|
POSITIVE: 1,
|
|
271
809
|
NEGATIVE: 0
|
|
272
810
|
};
|
|
273
811
|
var DEFAULT_BASE_URL = "https://app.tracia.io";
|
|
812
|
+
var MAX_PENDING_TRACES = 1e3;
|
|
813
|
+
var TRACE_RETRY_ATTEMPTS = 2;
|
|
814
|
+
var TRACE_RETRY_DELAY_MS = 500;
|
|
815
|
+
var TRACE_STATUS_SUCCESS = "SUCCESS";
|
|
816
|
+
var TRACE_STATUS_ERROR = "ERROR";
|
|
817
|
+
var ENV_VAR_MAP = {
|
|
818
|
+
["openai" /* OPENAI */]: "OPENAI_API_KEY",
|
|
819
|
+
["anthropic" /* ANTHROPIC */]: "ANTHROPIC_API_KEY",
|
|
820
|
+
["google" /* GOOGLE */]: "GOOGLE_API_KEY"
|
|
821
|
+
};
|
|
822
|
+
function convertResponsesItemToMessage(item) {
|
|
823
|
+
if ("role" in item && (item.role === "developer" || item.role === "user")) {
|
|
824
|
+
const messageItem = item;
|
|
825
|
+
return {
|
|
826
|
+
role: messageItem.role === "developer" ? "system" : "user",
|
|
827
|
+
content: messageItem.content
|
|
828
|
+
};
|
|
829
|
+
}
|
|
830
|
+
if ("type" in item && item.type === "function_call_output") {
|
|
831
|
+
const outputItem = item;
|
|
832
|
+
return {
|
|
833
|
+
role: "tool",
|
|
834
|
+
toolCallId: outputItem.call_id,
|
|
835
|
+
content: outputItem.output
|
|
836
|
+
};
|
|
837
|
+
}
|
|
838
|
+
if ("type" in item) {
|
|
839
|
+
return {
|
|
840
|
+
role: "assistant",
|
|
841
|
+
content: JSON.stringify(item)
|
|
842
|
+
};
|
|
843
|
+
}
|
|
844
|
+
return {
|
|
845
|
+
role: "user",
|
|
846
|
+
content: JSON.stringify(item)
|
|
847
|
+
};
|
|
848
|
+
}
|
|
274
849
|
var Tracia = class {
|
|
275
850
|
constructor(options) {
|
|
851
|
+
this.pendingTraces = /* @__PURE__ */ new Map();
|
|
276
852
|
if (!options.apiKey) {
|
|
277
|
-
throw new
|
|
853
|
+
throw new TraciaError(
|
|
854
|
+
"INVALID_REQUEST" /* INVALID_REQUEST */,
|
|
855
|
+
"apiKey is required"
|
|
856
|
+
);
|
|
278
857
|
}
|
|
279
858
|
this.client = new HttpClient({
|
|
280
859
|
apiKey: options.apiKey,
|
|
281
860
|
baseUrl: DEFAULT_BASE_URL
|
|
282
861
|
});
|
|
862
|
+
this.onTraceError = options.onTraceError;
|
|
283
863
|
this.prompts = new Prompts(this.client);
|
|
284
864
|
this.traces = new Traces(this.client);
|
|
865
|
+
this.traces[INTERNAL_SET_PENDING_TRACES](this.pendingTraces);
|
|
866
|
+
}
|
|
867
|
+
runLocal(input) {
|
|
868
|
+
if (input.stream === true) {
|
|
869
|
+
return this.runLocalStreaming(input);
|
|
870
|
+
}
|
|
871
|
+
return this.runLocalNonStreaming(input);
|
|
872
|
+
}
|
|
873
|
+
async runLocalNonStreaming(input) {
|
|
874
|
+
this.validateRunLocalInput(input);
|
|
875
|
+
let traceId = "";
|
|
876
|
+
if (input.sendTrace !== false) {
|
|
877
|
+
if (input.traceId && !isValidTraceIdFormat(input.traceId)) {
|
|
878
|
+
throw new TraciaError(
|
|
879
|
+
"INVALID_REQUEST" /* INVALID_REQUEST */,
|
|
880
|
+
`Invalid trace ID format. Must match: tr_ + 16 hex characters (e.g., tr_1234567890abcdef)`
|
|
881
|
+
);
|
|
882
|
+
}
|
|
883
|
+
traceId = input.traceId || generateTraceId();
|
|
884
|
+
}
|
|
885
|
+
const interpolatedMessages = this.interpolateMessages(input.messages, input.variables);
|
|
886
|
+
const provider = resolveProvider(input.model, input.provider);
|
|
887
|
+
const apiKey = this.getProviderApiKey(provider, input.providerApiKey);
|
|
888
|
+
const startTime = Date.now();
|
|
889
|
+
let completionResult = null;
|
|
890
|
+
let errorMessage = null;
|
|
891
|
+
try {
|
|
892
|
+
completionResult = await complete({
|
|
893
|
+
model: input.model,
|
|
894
|
+
messages: interpolatedMessages,
|
|
895
|
+
apiKey,
|
|
896
|
+
provider: input.provider,
|
|
897
|
+
temperature: input.temperature,
|
|
898
|
+
maxOutputTokens: input.maxOutputTokens,
|
|
899
|
+
topP: input.topP,
|
|
900
|
+
stopSequences: input.stopSequences,
|
|
901
|
+
tools: input.tools,
|
|
902
|
+
toolChoice: input.toolChoice,
|
|
903
|
+
timeoutMs: input.timeoutMs
|
|
904
|
+
});
|
|
905
|
+
} catch (error) {
|
|
906
|
+
if (error instanceof TraciaError) {
|
|
907
|
+
errorMessage = error.message;
|
|
908
|
+
} else {
|
|
909
|
+
errorMessage = error instanceof Error ? error.message : String(error);
|
|
910
|
+
}
|
|
911
|
+
}
|
|
912
|
+
const latencyMs = Date.now() - startTime;
|
|
913
|
+
if (traceId) {
|
|
914
|
+
this.scheduleTraceCreation(traceId, {
|
|
915
|
+
traceId,
|
|
916
|
+
model: input.model,
|
|
917
|
+
provider: completionResult?.provider ?? provider,
|
|
918
|
+
input: { messages: interpolatedMessages },
|
|
919
|
+
variables: input.variables ?? null,
|
|
920
|
+
output: completionResult?.text ?? null,
|
|
921
|
+
status: errorMessage ? TRACE_STATUS_ERROR : TRACE_STATUS_SUCCESS,
|
|
922
|
+
error: errorMessage,
|
|
923
|
+
latencyMs,
|
|
924
|
+
inputTokens: completionResult?.inputTokens ?? 0,
|
|
925
|
+
outputTokens: completionResult?.outputTokens ?? 0,
|
|
926
|
+
totalTokens: completionResult?.totalTokens ?? 0,
|
|
927
|
+
tags: input.tags,
|
|
928
|
+
userId: input.userId,
|
|
929
|
+
sessionId: input.sessionId,
|
|
930
|
+
temperature: input.temperature,
|
|
931
|
+
maxOutputTokens: input.maxOutputTokens,
|
|
932
|
+
topP: input.topP,
|
|
933
|
+
tools: input.tools,
|
|
934
|
+
toolCalls: completionResult?.toolCalls
|
|
935
|
+
});
|
|
936
|
+
}
|
|
937
|
+
if (errorMessage) {
|
|
938
|
+
throw new TraciaError("PROVIDER_ERROR" /* PROVIDER_ERROR */, errorMessage);
|
|
939
|
+
}
|
|
940
|
+
const toolCalls = completionResult.toolCalls;
|
|
941
|
+
const finishReason = completionResult.finishReason;
|
|
942
|
+
const message = this.buildAssistantMessage(completionResult.text, toolCalls);
|
|
943
|
+
return {
|
|
944
|
+
text: completionResult.text,
|
|
945
|
+
traceId,
|
|
946
|
+
latencyMs,
|
|
947
|
+
usage: {
|
|
948
|
+
inputTokens: completionResult.inputTokens,
|
|
949
|
+
outputTokens: completionResult.outputTokens,
|
|
950
|
+
totalTokens: completionResult.totalTokens
|
|
951
|
+
},
|
|
952
|
+
cost: null,
|
|
953
|
+
provider: completionResult.provider,
|
|
954
|
+
model: input.model,
|
|
955
|
+
toolCalls,
|
|
956
|
+
finishReason,
|
|
957
|
+
message
|
|
958
|
+
};
|
|
959
|
+
}
|
|
960
|
+
runLocalStreaming(input) {
|
|
961
|
+
this.validateRunLocalInput(input);
|
|
962
|
+
let traceId = "";
|
|
963
|
+
if (input.sendTrace !== false) {
|
|
964
|
+
if (input.traceId && !isValidTraceIdFormat(input.traceId)) {
|
|
965
|
+
throw new TraciaError(
|
|
966
|
+
"INVALID_REQUEST" /* INVALID_REQUEST */,
|
|
967
|
+
`Invalid trace ID format. Must match: tr_ + 16 hex characters (e.g., tr_1234567890abcdef)`
|
|
968
|
+
);
|
|
969
|
+
}
|
|
970
|
+
traceId = input.traceId || generateTraceId();
|
|
971
|
+
}
|
|
972
|
+
const interpolatedMessages = this.interpolateMessages(input.messages, input.variables);
|
|
973
|
+
const provider = resolveProvider(input.model, input.provider);
|
|
974
|
+
const apiKey = this.getProviderApiKey(provider, input.providerApiKey);
|
|
975
|
+
const abortController = new AbortController();
|
|
976
|
+
const combinedSignal = input.signal ? this.combineAbortSignals(input.signal, abortController.signal) : abortController.signal;
|
|
977
|
+
return this.createLocalStream(
|
|
978
|
+
input,
|
|
979
|
+
interpolatedMessages,
|
|
980
|
+
provider,
|
|
981
|
+
apiKey,
|
|
982
|
+
traceId,
|
|
983
|
+
combinedSignal,
|
|
984
|
+
abortController
|
|
985
|
+
);
|
|
986
|
+
}
|
|
987
|
+
runResponses(input) {
|
|
988
|
+
if (input.stream === true) {
|
|
989
|
+
return this.runResponsesStreaming(input);
|
|
990
|
+
}
|
|
991
|
+
return this.runResponsesNonStreaming(input);
|
|
992
|
+
}
|
|
993
|
+
async runResponsesNonStreaming(input) {
|
|
994
|
+
const stream2 = this.runResponsesStreaming(input);
|
|
995
|
+
for await (const _event of stream2) {
|
|
996
|
+
}
|
|
997
|
+
return stream2.result;
|
|
998
|
+
}
|
|
999
|
+
runResponsesStreaming(input) {
|
|
1000
|
+
this.validateResponsesInput(input);
|
|
1001
|
+
let traceId = "";
|
|
1002
|
+
if (input.sendTrace !== false) {
|
|
1003
|
+
if (input.traceId && !isValidTraceIdFormat(input.traceId)) {
|
|
1004
|
+
throw new TraciaError(
|
|
1005
|
+
"INVALID_REQUEST" /* INVALID_REQUEST */,
|
|
1006
|
+
`Invalid trace ID format. Must match: tr_ + 16 hex characters (e.g., tr_1234567890abcdef)`
|
|
1007
|
+
);
|
|
1008
|
+
}
|
|
1009
|
+
traceId = input.traceId || generateTraceId();
|
|
1010
|
+
}
|
|
1011
|
+
const apiKey = this.getProviderApiKey("openai" /* OPENAI */, input.providerApiKey);
|
|
1012
|
+
const abortController = new AbortController();
|
|
1013
|
+
const combinedSignal = input.signal ? this.combineAbortSignals(input.signal, abortController.signal) : abortController.signal;
|
|
1014
|
+
return this.createResponsesStream(
|
|
1015
|
+
input,
|
|
1016
|
+
apiKey,
|
|
1017
|
+
traceId,
|
|
1018
|
+
combinedSignal,
|
|
1019
|
+
abortController
|
|
1020
|
+
);
|
|
1021
|
+
}
|
|
1022
|
+
validateResponsesInput(input) {
|
|
1023
|
+
if (!input.model || input.model.trim() === "") {
|
|
1024
|
+
throw new TraciaError(
|
|
1025
|
+
"INVALID_REQUEST" /* INVALID_REQUEST */,
|
|
1026
|
+
"model is required and cannot be empty"
|
|
1027
|
+
);
|
|
1028
|
+
}
|
|
1029
|
+
if (!input.input || input.input.length === 0) {
|
|
1030
|
+
throw new TraciaError(
|
|
1031
|
+
"INVALID_REQUEST" /* INVALID_REQUEST */,
|
|
1032
|
+
"input array is required and cannot be empty"
|
|
1033
|
+
);
|
|
1034
|
+
}
|
|
1035
|
+
}
|
|
1036
|
+
// Builds the streaming handle for a Responses API run. Returns an object
// exposing the trace ID, an async iterator of provider events, a `result`
// promise with the aggregated outcome, and an `abort()` method.
// NOTE: the result promise only settles once the event iterator is consumed
// (the wrapping generator is what resolves/rejects it).
createResponsesStream(input, apiKey, traceId, signal, abortController) {
  const startTime = Date.now();
  let aborted = false;
  // Deferred-style promise: settled from inside the generator below.
  let resolveResult;
  let rejectResult;
  const resultPromise = new Promise((resolve, reject) => {
    resolveResult = resolve;
    rejectResult = reject;
  });
  const providerStream = responsesStream({
    model: input.model,
    input: input.input,
    apiKey,
    tools: input.tools,
    maxOutputTokens: input.maxOutputTokens,
    timeoutMs: input.timeoutMs,
    signal
  });
  // Accumulates text deltas so partial output survives errors/aborts.
  let collectedText = "";
  // Bound copy: the generator below is a plain function with no `this`.
  const scheduleTrace = this.scheduleTraceCreation.bind(this);
  async function* wrappedEvents() {
    try {
      for await (const event of providerStream.events) {
        if (event.type === "text_delta") {
          collectedText += event.data;
        }
        yield event;
      }
      const providerResult = await providerStream.result;
      const latencyMs = Date.now() - startTime;
      // Fire-and-forget trace upload (only when tracing is enabled).
      if (traceId) {
        scheduleTrace(traceId, {
          traceId,
          model: input.model,
          provider: "openai" /* OPENAI */,
          input: { messages: input.input.map((item) => convertResponsesItemToMessage(item)) },
          variables: null,
          output: providerResult.text,
          status: providerResult.aborted ? TRACE_STATUS_ERROR : TRACE_STATUS_SUCCESS,
          error: providerResult.aborted ? "Stream aborted" : null,
          latencyMs,
          inputTokens: providerResult.usage.inputTokens,
          outputTokens: providerResult.usage.outputTokens,
          totalTokens: providerResult.usage.totalTokens,
          tags: input.tags,
          userId: input.userId,
          sessionId: input.sessionId,
          tools: input.tools,
          toolCalls: providerResult.toolCalls.map((tc) => ({
            id: tc.id,
            name: tc.name,
            arguments: tc.arguments
          }))
        });
      }
      resolveResult({
        text: providerResult.text,
        traceId,
        latencyMs,
        usage: providerResult.usage,
        outputItems: providerResult.outputItems,
        toolCalls: providerResult.toolCalls,
        aborted: providerResult.aborted
      });
    } catch (error) {
      const latencyMs = Date.now() - startTime;
      // Abort can surface here as an exception from the provider stream.
      const isAborted = aborted || signal.aborted;
      const errorMessage = isAborted ? "Stream aborted" : error instanceof Error ? error.message : String(error);
      if (traceId) {
        // Error-path trace: token counts are unknown, report zeros.
        scheduleTrace(traceId, {
          traceId,
          model: input.model,
          provider: "openai" /* OPENAI */,
          input: { messages: input.input.map((item) => convertResponsesItemToMessage(item)) },
          variables: null,
          output: collectedText || null,
          status: TRACE_STATUS_ERROR,
          error: errorMessage,
          latencyMs,
          inputTokens: 0,
          outputTokens: 0,
          totalTokens: 0,
          tags: input.tags,
          userId: input.userId,
          sessionId: input.sessionId,
          tools: input.tools
        });
      }
      if (isAborted) {
        // Abort is not treated as failure: resolve with partial text.
        resolveResult({
          text: collectedText,
          traceId,
          latencyMs,
          usage: { inputTokens: 0, outputTokens: 0, totalTokens: 0 },
          outputItems: [],
          toolCalls: [],
          aborted: true
        });
      } else {
        // Reject the result promise AND rethrow so the consuming
        // for-await loop also observes the failure.
        const traciaError = error instanceof TraciaError ? error : new TraciaError("PROVIDER_ERROR" /* PROVIDER_ERROR */, errorMessage);
        rejectResult(traciaError);
        throw traciaError;
      }
    }
  }
  const asyncIterator = wrappedEvents();
  return {
    traceId,
    [Symbol.asyncIterator]() {
      return asyncIterator;
    },
    result: resultPromise,
    abort() {
      aborted = true;
      abortController.abort();
    }
  };
}
|
|
1154
|
+
// Builds the streaming handle for a local completion run. Returns an object
// exposing the trace ID, an async iterator of text chunks, a `result`
// promise with the aggregated outcome, and an `abort()` method.
// NOTE: the result promise only settles once the chunk iterator is consumed
// (the wrapping generator is what resolves/rejects it).
createLocalStream(input, interpolatedMessages, provider, apiKey, traceId, signal, abortController) {
  const startTime = Date.now();
  let aborted = false;
  // Deferred-style promise: settled from inside the generator below.
  let resolveResult;
  let rejectResult;
  const resultPromise = new Promise((resolve, reject) => {
    resolveResult = resolve;
    rejectResult = reject;
  });
  const providerStream = stream({
    model: input.model,
    messages: interpolatedMessages,
    apiKey,
    provider: input.provider,
    temperature: input.temperature,
    maxOutputTokens: input.maxOutputTokens,
    topP: input.topP,
    stopSequences: input.stopSequences,
    tools: input.tools,
    toolChoice: input.toolChoice,
    timeoutMs: input.timeoutMs,
    signal
  });
  // Accumulates streamed text so partial output survives errors/aborts.
  let collectedText = "";
  // Bound copies: the generator below is a plain function with no `this`.
  const scheduleTrace = this.scheduleTraceCreation.bind(this);
  const buildAssistantMessage = this.buildAssistantMessage.bind(this);
  async function* wrappedChunks() {
    try {
      for await (const chunk of providerStream.chunks) {
        collectedText += chunk;
        yield chunk;
      }
      const completionResult = await providerStream.result;
      const latencyMs = Date.now() - startTime;
      // Fire-and-forget trace upload (only when tracing is enabled).
      if (traceId) {
        scheduleTrace(traceId, {
          traceId,
          model: input.model,
          provider: completionResult.provider,
          input: { messages: interpolatedMessages },
          variables: input.variables ?? null,
          output: completionResult.text,
          status: TRACE_STATUS_SUCCESS,
          error: null,
          latencyMs,
          inputTokens: completionResult.inputTokens,
          outputTokens: completionResult.outputTokens,
          totalTokens: completionResult.totalTokens,
          tags: input.tags,
          userId: input.userId,
          sessionId: input.sessionId,
          temperature: input.temperature,
          maxOutputTokens: input.maxOutputTokens,
          topP: input.topP,
          tools: input.tools,
          toolCalls: completionResult.toolCalls
        });
      }
      const toolCalls = completionResult.toolCalls;
      const finishReason = completionResult.finishReason;
      const message = buildAssistantMessage(completionResult.text, toolCalls);
      resolveResult({
        text: completionResult.text,
        traceId,
        latencyMs,
        usage: {
          inputTokens: completionResult.inputTokens,
          outputTokens: completionResult.outputTokens,
          totalTokens: completionResult.totalTokens
        },
        cost: null,
        provider: completionResult.provider,
        model: input.model,
        aborted: false,
        toolCalls,
        finishReason,
        message
      });
    } catch (error) {
      const latencyMs = Date.now() - startTime;
      // Abort can surface here as an exception from the provider stream.
      const isAborted = aborted || signal.aborted;
      const errorMessage = isAborted ? "Stream aborted" : error instanceof Error ? error.message : String(error);
      if (traceId) {
        // Error-path trace: token counts are unknown, report zeros.
        scheduleTrace(traceId, {
          traceId,
          model: input.model,
          provider,
          input: { messages: interpolatedMessages },
          variables: input.variables ?? null,
          output: collectedText || null,
          status: TRACE_STATUS_ERROR,
          error: errorMessage,
          latencyMs,
          inputTokens: 0,
          outputTokens: 0,
          totalTokens: 0,
          tags: input.tags,
          userId: input.userId,
          sessionId: input.sessionId,
          temperature: input.temperature,
          maxOutputTokens: input.maxOutputTokens,
          topP: input.topP
        });
      }
      if (isAborted) {
        // Abort is not treated as failure: resolve with partial text.
        const abortedMessage = buildAssistantMessage(collectedText, []);
        resolveResult({
          text: collectedText,
          traceId,
          latencyMs,
          usage: {
            inputTokens: 0,
            outputTokens: 0,
            totalTokens: 0
          },
          cost: null,
          provider,
          model: input.model,
          aborted: true,
          toolCalls: [],
          finishReason: "stop",
          message: abortedMessage
        });
      } else {
        // Reject the result promise AND rethrow so the consuming
        // for-await loop also observes the failure.
        const traciaError = error instanceof TraciaError ? error : new TraciaError("PROVIDER_ERROR" /* PROVIDER_ERROR */, errorMessage);
        rejectResult(traciaError);
        throw traciaError;
      }
    }
  }
  const asyncIterator = wrappedChunks();
  return {
    traceId,
    [Symbol.asyncIterator]() {
      return asyncIterator;
    },
    result: resultPromise,
    abort() {
      aborted = true;
      abortController.abort();
    }
  };
}
|
|
1297
|
+
combineAbortSignals(signal1, signal2) {
|
|
1298
|
+
const controller = new AbortController();
|
|
1299
|
+
if (signal1.aborted || signal2.aborted) {
|
|
1300
|
+
controller.abort();
|
|
1301
|
+
return controller.signal;
|
|
1302
|
+
}
|
|
1303
|
+
const onAbort = () => {
|
|
1304
|
+
signal1.removeEventListener("abort", onAbort);
|
|
1305
|
+
signal2.removeEventListener("abort", onAbort);
|
|
1306
|
+
controller.abort();
|
|
1307
|
+
};
|
|
1308
|
+
signal1.addEventListener("abort", onAbort, { once: true });
|
|
1309
|
+
signal2.addEventListener("abort", onAbort, { once: true });
|
|
1310
|
+
return controller.signal;
|
|
1311
|
+
}
|
|
1312
|
+
async flush() {
|
|
1313
|
+
await Promise.all(this.pendingTraces.values());
|
|
1314
|
+
}
|
|
1315
|
+
validateRunLocalInput(input) {
|
|
1316
|
+
if (!input.model || input.model.trim() === "") {
|
|
1317
|
+
throw new TraciaError(
|
|
1318
|
+
"INVALID_REQUEST" /* INVALID_REQUEST */,
|
|
1319
|
+
"model is required and cannot be empty"
|
|
1320
|
+
);
|
|
1321
|
+
}
|
|
1322
|
+
if (!input.messages || input.messages.length === 0) {
|
|
1323
|
+
throw new TraciaError(
|
|
1324
|
+
"INVALID_REQUEST" /* INVALID_REQUEST */,
|
|
1325
|
+
"messages array is required and cannot be empty"
|
|
1326
|
+
);
|
|
1327
|
+
}
|
|
1328
|
+
for (const message of input.messages) {
|
|
1329
|
+
if (message.role === "tool") {
|
|
1330
|
+
if (!message.toolCallId) {
|
|
1331
|
+
throw new TraciaError(
|
|
1332
|
+
"INVALID_REQUEST" /* INVALID_REQUEST */,
|
|
1333
|
+
`Tool messages must include toolCallId. Example: { role: "tool", toolCallId: "call_123", content: '{"result": "data"}' }`
|
|
1334
|
+
);
|
|
1335
|
+
}
|
|
1336
|
+
if (typeof message.content !== "string") {
|
|
1337
|
+
throw new TraciaError(
|
|
1338
|
+
"INVALID_REQUEST" /* INVALID_REQUEST */,
|
|
1339
|
+
`Tool message content must be a string (the tool result). Example: { role: "tool", toolCallId: "call_123", content: '{"result": "data"}' }`
|
|
1340
|
+
);
|
|
1341
|
+
}
|
|
1342
|
+
}
|
|
1343
|
+
}
|
|
1344
|
+
}
|
|
1345
|
+
scheduleTraceCreation(traceId, payload) {
|
|
1346
|
+
if (this.pendingTraces.size >= MAX_PENDING_TRACES) {
|
|
1347
|
+
const oldestTraceId = this.pendingTraces.keys().next().value;
|
|
1348
|
+
if (oldestTraceId) {
|
|
1349
|
+
this.pendingTraces.delete(oldestTraceId);
|
|
1350
|
+
}
|
|
1351
|
+
}
|
|
1352
|
+
const tracePromise = this.createTraceWithRetry(traceId, payload);
|
|
1353
|
+
this.pendingTraces.set(traceId, tracePromise);
|
|
1354
|
+
tracePromise.finally(() => this.pendingTraces.delete(traceId));
|
|
1355
|
+
}
|
|
1356
|
+
async createTraceWithRetry(traceId, payload) {
|
|
1357
|
+
let lastError = null;
|
|
1358
|
+
for (let attempt = 0; attempt <= TRACE_RETRY_ATTEMPTS; attempt++) {
|
|
1359
|
+
try {
|
|
1360
|
+
await this.traces.create(payload);
|
|
1361
|
+
return;
|
|
1362
|
+
} catch (error) {
|
|
1363
|
+
lastError = error instanceof Error ? error : new Error(String(error));
|
|
1364
|
+
if (attempt < TRACE_RETRY_ATTEMPTS) {
|
|
1365
|
+
await this.delay(TRACE_RETRY_DELAY_MS * (attempt + 1));
|
|
1366
|
+
}
|
|
1367
|
+
}
|
|
1368
|
+
}
|
|
1369
|
+
if (this.onTraceError && lastError) {
|
|
1370
|
+
this.onTraceError(lastError, traceId);
|
|
1371
|
+
}
|
|
1372
|
+
}
|
|
1373
|
+
delay(ms) {
|
|
1374
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
1375
|
+
}
|
|
1376
|
+
interpolateMessages(messages, variables) {
|
|
1377
|
+
if (!variables) return messages;
|
|
1378
|
+
return messages.map((message) => {
|
|
1379
|
+
if (typeof message.content === "string") {
|
|
1380
|
+
return {
|
|
1381
|
+
...message,
|
|
1382
|
+
content: message.content.replace(
|
|
1383
|
+
/\{\{(\w+)\}\}/g,
|
|
1384
|
+
(match, key) => variables[key] ?? match
|
|
1385
|
+
)
|
|
1386
|
+
};
|
|
1387
|
+
}
|
|
1388
|
+
if (message.role === "tool") {
|
|
1389
|
+
return message;
|
|
1390
|
+
}
|
|
1391
|
+
return {
|
|
1392
|
+
...message,
|
|
1393
|
+
content: message.content.map((block) => {
|
|
1394
|
+
if (block.type === "text") {
|
|
1395
|
+
return {
|
|
1396
|
+
...block,
|
|
1397
|
+
text: block.text.replace(
|
|
1398
|
+
/\{\{(\w+)\}\}/g,
|
|
1399
|
+
(match, key) => variables[key] ?? match
|
|
1400
|
+
)
|
|
1401
|
+
};
|
|
1402
|
+
}
|
|
1403
|
+
return block;
|
|
1404
|
+
})
|
|
1405
|
+
};
|
|
1406
|
+
});
|
|
1407
|
+
}
|
|
1408
|
+
buildAssistantMessage(text, toolCalls) {
|
|
1409
|
+
if (toolCalls.length === 0) {
|
|
1410
|
+
return { role: "assistant", content: text };
|
|
1411
|
+
}
|
|
1412
|
+
const contentParts = [];
|
|
1413
|
+
if (text) {
|
|
1414
|
+
contentParts.push({ type: "text", text });
|
|
1415
|
+
}
|
|
1416
|
+
for (const toolCall of toolCalls) {
|
|
1417
|
+
contentParts.push({
|
|
1418
|
+
type: "tool_call",
|
|
1419
|
+
id: toolCall.id,
|
|
1420
|
+
name: toolCall.name,
|
|
1421
|
+
arguments: toolCall.arguments
|
|
1422
|
+
});
|
|
1423
|
+
}
|
|
1424
|
+
return { role: "assistant", content: contentParts };
|
|
1425
|
+
}
|
|
1426
|
+
getProviderApiKey(provider, override) {
|
|
1427
|
+
if (override) return override;
|
|
1428
|
+
const envVar = ENV_VAR_MAP[provider];
|
|
1429
|
+
const key = process.env[envVar];
|
|
1430
|
+
if (!key) {
|
|
1431
|
+
throw new TraciaError(
|
|
1432
|
+
"MISSING_PROVIDER_API_KEY" /* MISSING_PROVIDER_API_KEY */,
|
|
1433
|
+
`Missing API key for ${provider}. Set the ${envVar} environment variable or provide providerApiKey in options.`
|
|
1434
|
+
);
|
|
1435
|
+
}
|
|
1436
|
+
return key;
|
|
285
1437
|
}
|
|
286
1438
|
};
|
|
287
1439
|
// Annotate the CommonJS export names for ESM import in node:
|
|
288
1440
|
0 && (module.exports = {
|
|
289
1441
|
Eval,
|
|
1442
|
+
LLMProvider,
|
|
290
1443
|
Tracia,
|
|
291
1444
|
TraciaError,
|
|
292
1445
|
TraciaErrorCode
|