@superatomai/sdk-node 0.0.39 → 0.0.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +421 -1
- package/dist/index.d.ts +421 -1
- package/dist/index.js +1247 -239
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1240 -238
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -1276,13 +1276,16 @@ var init_prompt_loader = __esm({
       const contextMarker = "---\n\n## CONTEXT";
       if (template.system.includes(contextMarker)) {
         const [staticPart, contextPart] = template.system.split(contextMarker);
-
+        const processedStatic = this.replaceVariables(staticPart, variables);
         const processedContext = this.replaceVariables(contextMarker + contextPart, variables);
+        const staticLength = processedStatic.length;
+        const contextLength = processedContext.length;
+        logger.debug(`\u2713 Prompt caching enabled for '${promptName}' (cached: ${staticLength} chars, dynamic: ${contextLength} chars)`);
         return {
           system: [
             {
               type: "text",
-              text:
+              text: processedStatic.trim(),
               cache_control: { type: "ephemeral" }
             },
             {
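
The hunk above is the prompt-caching change: the system prompt is split at a `## CONTEXT` marker so the static part can be sent as a cacheable block (`cache_control: { type: "ephemeral" }`) while the per-request context stays dynamic. A minimal TypeScript sketch of the same idea — `CONTEXT_MARKER` and `render` are hypothetical stand-ins for the SDK's internal marker and `replaceVariables` helper:

// Sketch: split a system prompt into a cacheable static block plus a dynamic block.
type SystemBlock = { type: "text"; text: string; cache_control?: { type: "ephemeral" } };

const CONTEXT_MARKER = "---\n\n## CONTEXT";

// Stand-in for the SDK's replaceVariables: fills {{name}} placeholders.
function render(template: string, vars: Record<string, string>): string {
  return template.replace(/\{\{(\w+)\}\}/g, (_, k) => vars[k] ?? "");
}

function buildSystem(template: string, vars: Record<string, string>): SystemBlock[] {
  const idx = template.indexOf(CONTEXT_MARKER);
  if (idx === -1) return [{ type: "text", text: render(template, vars) }];
  const staticPart = render(template.slice(0, idx), vars);
  const contextPart = render(template.slice(idx), vars);
  return [
    // Static block: eligible for provider-side caching across requests.
    { type: "text", text: staticPart.trim(), cache_control: { type: "ephemeral" } },
    // Dynamic block: re-sent and billed at the full input rate each call.
    { type: "text", text: contextPart },
  ];
}

The payoff is that the provider can reuse the cached static block across requests, so only the context portion is billed at the full input rate.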
@@ -1620,10 +1623,16 @@ __export(index_exports, {
   UIBlock: () => UIBlock,
   UILogCollector: () => UILogCollector,
   UserManager: () => UserManager,
+  anthropicLLM: () => anthropicLLM,
+  geminiLLM: () => geminiLLM,
+  groqLLM: () => groqLLM,
   hybridRerank: () => hybridRerank,
+  llmUsageLogger: () => llmUsageLogger,
   logger: () => logger,
+  openaiLLM: () => openaiLLM,
   rerankChromaResults: () => rerankChromaResults,
-  rerankConversationResults: () => rerankConversationResults
+  rerankConversationResults: () => rerankConversationResults,
+  userPromptErrorLogger: () => userPromptErrorLogger
 });
 module.exports = __toCommonJS(index_exports);
 
@@ -3422,6 +3431,465 @@ var import_groq_sdk = __toESM(require("groq-sdk"));
 var import_generative_ai = require("@google/generative-ai");
 var import_openai = __toESM(require("openai"));
 var import_jsonrepair = require("jsonrepair");
+
+// src/utils/llm-usage-logger.ts
+var import_fs4 = __toESM(require("fs"));
+var import_path3 = __toESM(require("path"));
+var PRICING = {
+  // Anthropic (December 2025)
+  "claude-opus-4-5": { input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 },
+  "claude-opus-4-5-20251101": { input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 },
+  "claude-sonnet-4-5": { input: 3, output: 15, cacheRead: 0.3, cacheWrite: 3.75 },
+  "claude-sonnet-4-5-20250929": { input: 3, output: 15, cacheRead: 0.3, cacheWrite: 3.75 },
+  "claude-haiku-4-5": { input: 1, output: 5, cacheRead: 0.1, cacheWrite: 1.25 },
+  "claude-haiku-4-5-20251001": { input: 1, output: 5, cacheRead: 0.1, cacheWrite: 1.25 },
+  "claude-3-5-sonnet-20241022": { input: 3, output: 15, cacheRead: 0.3, cacheWrite: 3.75 },
+  "claude-3-5-haiku-20241022": { input: 1, output: 5, cacheRead: 0.1, cacheWrite: 1.25 },
+  "claude-3-opus-20240229": { input: 15, output: 75, cacheRead: 1.5, cacheWrite: 18.75 },
+  "claude-3-sonnet-20240229": { input: 3, output: 15, cacheRead: 0.3, cacheWrite: 3.75 },
+  "claude-3-haiku-20240307": { input: 0.25, output: 1.25, cacheRead: 0.03, cacheWrite: 0.3 },
+  // OpenAI (December 2025)
+  "gpt-5": { input: 1.25, output: 10 },
+  "gpt-5-mini": { input: 0.25, output: 2 },
+  "gpt-4o": { input: 5, output: 15 },
+  // Updated pricing as of late 2025
+  "gpt-4o-mini": { input: 0.15, output: 0.6 },
+  "gpt-4-turbo": { input: 10, output: 30 },
+  "gpt-4": { input: 30, output: 60 },
+  "gpt-3.5-turbo": { input: 0.5, output: 1.5 },
+  // Google Gemini (December 2025)
+  "gemini-3-pro": { input: 2, output: 8 },
+  // New Gemini 3
+  "gemini-2.5-pro": { input: 1.25, output: 10 },
+  // For prompts ≤200K tokens, 2x for >200K
+  "gemini-2.5-flash": { input: 0.15, output: 0.6 },
+  // Standard mode (thinking disabled: $0.60, thinking enabled: $3.50)
+  "gemini-2.5-flash-lite": { input: 0.1, output: 0.4 },
+  "gemini-2.0-flash": { input: 0.1, output: 0.4 },
+  "gemini-2.0-flash-lite": { input: 0.075, output: 0.3 },
+  "gemini-1.5-pro": { input: 1.25, output: 5 },
+  "gemini-1.5-flash": { input: 0.075, output: 0.3 },
+  // Groq (December 2025)
+  "llama-3.3-70b-versatile": { input: 0.59, output: 0.79 },
+  "llama-3.1-70b-versatile": { input: 0.59, output: 0.79 },
+  "llama-3.1-8b-instant": { input: 0.05, output: 0.08 },
+  "llama-4-scout-17b-16e": { input: 0.11, output: 0.34 },
+  "llama-4-maverick-17b-128e": { input: 0.2, output: 0.6 },
+  "mixtral-8x7b-32768": { input: 0.27, output: 0.27 },
+  "qwen3-32b": { input: 0.29, output: 0.59 }
+};
+var DEFAULT_PRICING = { input: 3, output: 15 };
+var LLMUsageLogger = class {
+  constructor() {
+    this.logStream = null;
+    this.sessionStats = {
+      totalCalls: 0,
+      totalInputTokens: 0,
+      totalOutputTokens: 0,
+      totalCacheReadTokens: 0,
+      totalCacheWriteTokens: 0,
+      totalCostUSD: 0,
+      totalDurationMs: 0
+    };
+    this.logPath = process.env.LLM_USAGE_LOG_PATH || import_path3.default.join(process.cwd(), "llm-usage-logs");
+    this.enabled = process.env.LLM_USAGE_LOGGING !== "false";
+    if (this.enabled) {
+      this.initLogStream();
+    }
+  }
+  initLogStream() {
+    try {
+      const dir = import_path3.default.dirname(this.logPath);
+      if (!import_fs4.default.existsSync(dir)) {
+        import_fs4.default.mkdirSync(dir, { recursive: true });
+      }
+      this.logStream = import_fs4.default.createWriteStream(this.logPath, { flags: "a" });
+      if (!import_fs4.default.existsSync(this.logPath) || import_fs4.default.statSync(this.logPath).size === 0) {
+        this.writeHeader();
+      }
+    } catch (error) {
+      console.error("[LLM-Usage-Logger] Failed to initialize log stream:", error);
+      this.enabled = false;
+    }
+  }
+  writeHeader() {
+    const header = `
+================================================================================
+LLM USAGE LOG - Session Started: ${(/* @__PURE__ */ new Date()).toISOString()}
+================================================================================
+Format: [TIMESTAMP] [REQUEST_ID] [PROVIDER/MODEL] [METHOD]
+Tokens: IN=input OUT=output CACHE_R=cache_read CACHE_W=cache_write TOTAL=total
+Cost: $X.XXXXXX | Time: Xms
+================================================================================
+
+`;
+    this.logStream?.write(header);
+  }
+  /**
+   * Calculate cost based on token usage and model
+   */
+  calculateCost(model, inputTokens, outputTokens, cacheReadTokens = 0, cacheWriteTokens = 0) {
+    let pricing = PRICING[model];
+    if (!pricing) {
+      const modelLower = model.toLowerCase();
+      for (const [key, value] of Object.entries(PRICING)) {
+        if (modelLower.includes(key.toLowerCase()) || key.toLowerCase().includes(modelLower)) {
+          pricing = value;
+          break;
+        }
+      }
+    }
+    pricing = pricing || DEFAULT_PRICING;
+    const inputCost = inputTokens / 1e6 * pricing.input;
+    const outputCost = outputTokens / 1e6 * pricing.output;
+    const cacheReadCost = cacheReadTokens / 1e6 * (pricing.cacheRead || pricing.input * 0.1);
+    const cacheWriteCost = cacheWriteTokens / 1e6 * (pricing.cacheWrite || pricing.input * 1.25);
+    return inputCost + outputCost + cacheReadCost + cacheWriteCost;
+  }
+  /**
+   * Log an LLM API call
+   */
+  log(entry) {
+    if (!this.enabled) return;
+    this.sessionStats.totalCalls++;
+    this.sessionStats.totalInputTokens += entry.inputTokens;
+    this.sessionStats.totalOutputTokens += entry.outputTokens;
+    this.sessionStats.totalCacheReadTokens += entry.cacheReadTokens || 0;
+    this.sessionStats.totalCacheWriteTokens += entry.cacheWriteTokens || 0;
+    this.sessionStats.totalCostUSD += entry.costUSD;
+    this.sessionStats.totalDurationMs += entry.durationMs;
+    const cacheInfo = entry.cacheReadTokens || entry.cacheWriteTokens ? ` CACHE_R=${entry.cacheReadTokens || 0} CACHE_W=${entry.cacheWriteTokens || 0}` : "";
+    const toolInfo = entry.toolCalls ? ` | Tools: ${entry.toolCalls}` : "";
+    const errorInfo = entry.error ? ` | ERROR: ${entry.error}` : "";
+    const status = entry.success ? "\u2713" : "\u2717";
+    let cacheStatus = "";
+    if (entry.cacheReadTokens && entry.cacheReadTokens > 0) {
+      const savedCost = entry.cacheReadTokens / 1e6 * 2.7;
+      cacheStatus = ` \u26A1 CACHE HIT! Saved ~$${savedCost.toFixed(4)}`;
+    } else if (entry.cacheWriteTokens && entry.cacheWriteTokens > 0) {
+      cacheStatus = " \u{1F4DD} Cache created (next request will be cheaper)";
+    }
+    const logLine = `[${entry.timestamp}] [${entry.requestId}] ${status} ${entry.provider}/${entry.model} [${entry.method}]
+Tokens: IN=${entry.inputTokens} OUT=${entry.outputTokens}${cacheInfo} TOTAL=${entry.totalTokens}
+Cost: $${entry.costUSD.toFixed(6)} | Time: ${entry.durationMs}ms${toolInfo}${errorInfo}${cacheStatus}
+`;
+    this.logStream?.write(logLine);
+    if (entry.cacheReadTokens && entry.cacheReadTokens > 0) {
+      console.log(`[LLM] \u26A1 CACHE HIT: ${entry.cacheReadTokens.toLocaleString()} tokens read from cache (${entry.method})`);
+    } else if (entry.cacheWriteTokens && entry.cacheWriteTokens > 0) {
+      console.log(`[LLM] \u{1F4DD} CACHE WRITE: ${entry.cacheWriteTokens.toLocaleString()} tokens cached for future requests (${entry.method})`);
+    }
+    if (process.env.SUPERATOM_LOG_LEVEL === "verbose") {
+      console.log("\n[LLM-Usage]", logLine);
+    }
+  }
+  /**
+   * Log session summary (call at end of request)
+   */
+  logSessionSummary(requestContext) {
+    if (!this.enabled || this.sessionStats.totalCalls === 0) return;
+    const cacheReadSavings = this.sessionStats.totalCacheReadTokens / 1e6 * 2.7;
+    const hasCaching = this.sessionStats.totalCacheReadTokens > 0 || this.sessionStats.totalCacheWriteTokens > 0;
+    let cacheSection = "";
+    if (hasCaching) {
+      cacheSection = `
+Cache Statistics:
+Cache Read Tokens: ${this.sessionStats.totalCacheReadTokens.toLocaleString()}${this.sessionStats.totalCacheReadTokens > 0 ? " \u26A1" : ""}
+Cache Write Tokens: ${this.sessionStats.totalCacheWriteTokens.toLocaleString()}${this.sessionStats.totalCacheWriteTokens > 0 ? " \u{1F4DD}" : ""}
+Estimated Savings: $${cacheReadSavings.toFixed(4)}`;
+    }
+    const summary = `
+--------------------------------------------------------------------------------
+SESSION SUMMARY${requestContext ? ` (${requestContext})` : ""}
+--------------------------------------------------------------------------------
+Total LLM Calls: ${this.sessionStats.totalCalls}
+Total Input Tokens: ${this.sessionStats.totalInputTokens.toLocaleString()}
+Total Output Tokens: ${this.sessionStats.totalOutputTokens.toLocaleString()}
+Total Tokens: ${(this.sessionStats.totalInputTokens + this.sessionStats.totalOutputTokens).toLocaleString()}
+Total Cost: $${this.sessionStats.totalCostUSD.toFixed(6)}
+Total Time: ${this.sessionStats.totalDurationMs}ms (${(this.sessionStats.totalDurationMs / 1e3).toFixed(2)}s)
+Avg Cost/Call: $${(this.sessionStats.totalCostUSD / this.sessionStats.totalCalls).toFixed(6)}
+Avg Time/Call: ${Math.round(this.sessionStats.totalDurationMs / this.sessionStats.totalCalls)}ms${cacheSection}
+--------------------------------------------------------------------------------
+
+`;
+    this.logStream?.write(summary);
+    console.log("\n[LLM-Usage] Session Summary:");
+    console.log(` Calls: ${this.sessionStats.totalCalls} | Tokens: ${(this.sessionStats.totalInputTokens + this.sessionStats.totalOutputTokens).toLocaleString()} | Cost: $${this.sessionStats.totalCostUSD.toFixed(4)} | Time: ${(this.sessionStats.totalDurationMs / 1e3).toFixed(2)}s`);
+    if (hasCaching) {
+      console.log(` Cache: ${this.sessionStats.totalCacheReadTokens.toLocaleString()} read, ${this.sessionStats.totalCacheWriteTokens.toLocaleString()} written | Savings: ~$${cacheReadSavings.toFixed(4)}`);
+    }
+  }
+  /**
+   * Reset session stats (call at start of new user request)
+   */
+  resetSession() {
+    this.sessionStats = {
+      totalCalls: 0,
+      totalInputTokens: 0,
+      totalOutputTokens: 0,
+      totalCacheReadTokens: 0,
+      totalCacheWriteTokens: 0,
+      totalCostUSD: 0,
+      totalDurationMs: 0
+    };
+  }
+  /**
+   * Reset the log file for a new request (clears previous logs)
+   * Call this at the start of each USER_PROMPT_REQ
+   */
+  resetLogFile(requestContext) {
+    if (!this.enabled) return;
+    try {
+      if (this.logStream) {
+        this.logStream.end();
+        this.logStream = null;
+      }
+      this.logStream = import_fs4.default.createWriteStream(this.logPath, { flags: "w" });
+      const header = `
+================================================================================
+LLM USAGE LOG - Request Started: ${(/* @__PURE__ */ new Date()).toISOString()}
+${requestContext ? `Context: ${requestContext}` : ""}
+================================================================================
+Format: [TIMESTAMP] [REQUEST_ID] [PROVIDER/MODEL] [METHOD]
+Tokens: IN=input OUT=output CACHE_R=cache_read CACHE_W=cache_write TOTAL=total
+Cost: $X.XXXXXX | Time: Xms
+================================================================================
+
+`;
+      this.logStream.write(header);
+      this.resetSession();
+      console.log(`[LLM-Usage] Log file reset for new request: ${this.logPath}`);
+    } catch (error) {
+      console.error("[LLM-Usage-Logger] Failed to reset log file:", error);
+    }
+  }
+  /**
+   * Get current session stats
+   */
+  getSessionStats() {
+    return { ...this.sessionStats };
+  }
+  /**
+   * Generate a unique request ID
+   */
+  generateRequestId() {
+    return `req-${Date.now()}-${Math.random().toString(36).substring(2, 8)}`;
+  }
+};
+var llmUsageLogger = new LLMUsageLogger();
+
+// src/utils/user-prompt-error-logger.ts
+var import_fs5 = __toESM(require("fs"));
+var import_path4 = __toESM(require("path"));
+var UserPromptErrorLogger = class {
+  constructor() {
+    this.logStream = null;
+    this.hasErrors = false;
+    this.logPath = process.env.USER_PROMPT_ERROR_LOG_PATH || import_path4.default.join(process.cwd(), "user-prompt-req-errors");
+    this.enabled = process.env.USER_PROMPT_ERROR_LOGGING !== "false";
+  }
+  /**
+   * Reset the error log file for a new request
+   */
+  resetLogFile(requestContext) {
+    if (!this.enabled) return;
+    try {
+      if (this.logStream) {
+        this.logStream.end();
+        this.logStream = null;
+      }
+      const dir = import_path4.default.dirname(this.logPath);
+      if (dir !== "." && !import_fs5.default.existsSync(dir)) {
+        import_fs5.default.mkdirSync(dir, { recursive: true });
+      }
+      this.logStream = import_fs5.default.createWriteStream(this.logPath, { flags: "w" });
+      this.hasErrors = false;
+      const header = `================================================================================
+USER PROMPT REQUEST ERROR LOG
+Request Started: ${(/* @__PURE__ */ new Date()).toISOString()}
+${requestContext ? `Context: ${requestContext}` : ""}
+================================================================================
+
+`;
+      this.logStream.write(header);
+    } catch (error) {
+      console.error("[UserPromptErrorLogger] Failed to reset log file:", error);
+    }
+  }
+  /**
+   * Log a JSON parse error with the raw string that failed
+   */
+  logJsonParseError(context, rawString, error) {
+    if (!this.enabled) return;
+    this.hasErrors = true;
+    const entry = `
+--------------------------------------------------------------------------------
+[${(/* @__PURE__ */ new Date()).toISOString()}] JSON PARSE ERROR
+--------------------------------------------------------------------------------
+Context: ${context}
+Error: ${error.message}
+
+Raw String (${rawString.length} chars):
+--------------------------------------------------------------------------------
+${rawString}
+--------------------------------------------------------------------------------
+
+Stack Trace:
+${error.stack || "No stack trace available"}
+
+`;
+    this.write(entry);
+    console.error(`[UserPromptError] JSON Parse Error in ${context}: ${error.message}`);
+  }
+  /**
+   * Log a general error with full details
+   */
+  logError(context, error, additionalData) {
+    if (!this.enabled) return;
+    this.hasErrors = true;
+    const errorMessage = error instanceof Error ? error.message : error;
+    const errorStack = error instanceof Error ? error.stack : void 0;
+    let entry = `
+--------------------------------------------------------------------------------
+[${(/* @__PURE__ */ new Date()).toISOString()}] ERROR
+--------------------------------------------------------------------------------
+Context: ${context}
+Error: ${errorMessage}
+`;
+    if (additionalData) {
+      entry += `
+Additional Data:
+${JSON.stringify(additionalData, null, 2)}
+`;
+    }
+    if (errorStack) {
+      entry += `
+Stack Trace:
+${errorStack}
+`;
+    }
+    entry += `--------------------------------------------------------------------------------
+
+`;
+    this.write(entry);
+    console.error(`[UserPromptError] ${context}: ${errorMessage}`);
+  }
+  /**
+   * Log a SQL query error with the full query
+   */
+  logSqlError(query, error, params) {
+    if (!this.enabled) return;
+    this.hasErrors = true;
+    const errorMessage = error instanceof Error ? error.message : error;
+    const entry = `
+--------------------------------------------------------------------------------
+[${(/* @__PURE__ */ new Date()).toISOString()}] SQL QUERY ERROR
+--------------------------------------------------------------------------------
+Error: ${errorMessage}
+
+Query (${query.length} chars):
+--------------------------------------------------------------------------------
+${query}
+--------------------------------------------------------------------------------
+${params ? `
+Parameters: ${JSON.stringify(params)}` : ""}
+
+`;
+    this.write(entry);
+    console.error(`[UserPromptError] SQL Error: ${errorMessage}`);
+  }
+  /**
+   * Log an LLM API error
+   */
+  logLlmError(provider, model, method, error, requestData) {
+    if (!this.enabled) return;
+    this.hasErrors = true;
+    const errorMessage = error instanceof Error ? error.message : error;
+    const errorStack = error instanceof Error ? error.stack : void 0;
+    let entry = `
+--------------------------------------------------------------------------------
+[${(/* @__PURE__ */ new Date()).toISOString()}] LLM API ERROR
+--------------------------------------------------------------------------------
+Provider: ${provider}
+Model: ${model}
+Method: ${method}
+Error: ${errorMessage}
+`;
+    if (requestData) {
+      const dataStr = JSON.stringify(requestData, null, 2);
+      const truncated = dataStr.length > 5e3 ? dataStr.substring(0, 5e3) + "\n... [truncated]" : dataStr;
+      entry += `
+Request Data:
+${truncated}
+`;
+    }
+    if (errorStack) {
+      entry += `
+Stack Trace:
+${errorStack}
+`;
+    }
+    entry += `--------------------------------------------------------------------------------
+
+`;
+    this.write(entry);
+    console.error(`[UserPromptError] LLM Error (${provider}/${model}): ${errorMessage}`);
+  }
+  /**
+   * Log tool execution error
+   */
+  logToolError(toolName, toolInput, error) {
+    if (!this.enabled) return;
+    this.hasErrors = true;
+    const errorMessage = error instanceof Error ? error.message : error;
+    const errorStack = error instanceof Error ? error.stack : void 0;
+    const entry = `
+--------------------------------------------------------------------------------
+[${(/* @__PURE__ */ new Date()).toISOString()}] TOOL EXECUTION ERROR
+--------------------------------------------------------------------------------
+Tool: ${toolName}
+Error: ${errorMessage}
+
+Tool Input:
+${JSON.stringify(toolInput, null, 2)}
+${errorStack ? `
+Stack Trace:
+${errorStack}` : ""}
+--------------------------------------------------------------------------------
+
+`;
+    this.write(entry);
+    console.error(`[UserPromptError] Tool Error (${toolName}): ${errorMessage}`);
+  }
+  /**
+   * Write final summary if there were errors
+   */
+  writeSummary() {
+    if (!this.enabled || !this.hasErrors) return;
+    const summary = `
+================================================================================
+REQUEST COMPLETED WITH ERRORS
+Time: ${(/* @__PURE__ */ new Date()).toISOString()}
+================================================================================
+`;
+    this.write(summary);
+  }
+  /**
+   * Check if any errors were logged
+   */
+  hadErrors() {
+    return this.hasErrors;
+  }
+  write(content) {
+    if (this.logStream) {
+      this.logStream.write(content);
+    }
+  }
+};
+var userPromptErrorLogger = new UserPromptErrorLogger();
+
+// src/llm.ts
 var LLM = class {
   /* Get a complete text response from an LLM (Anthropic or Groq) */
   static async text(messages, options = {}) {
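
The `PRICING` table above is keyed by model name with rates in USD per million tokens, and `calculateCost` is a linear sum over the four token buckets; when a model has no explicit cache rates, cache reads fall back to 10% of the input rate and cache writes to 125%. A worked example under the table's claude-sonnet-4-5 rates:

// Worked example of the cost formula (rates in USD per 1M tokens, from PRICING above).
// claude-sonnet-4-5: input $3, output $15, cacheRead $0.30, cacheWrite $3.75.
const inputTokens = 2_000, outputTokens = 500, cacheRead = 10_000, cacheWrite = 0;
const cost =
  (inputTokens / 1e6) * 3 +     // $0.006000
  (outputTokens / 1e6) * 15 +   // $0.007500
  (cacheRead / 1e6) * 0.3 +     // $0.003000
  (cacheWrite / 1e6) * 3.75;    // $0
console.log(cost.toFixed(6));   // "0.016500"

Note that the "Saved ~$" figures in the log lines use a flat 2.7/1M-token rate rather than per-model math, so they are a rough estimate, not an exact savings number.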
@@ -3564,68 +4032,156 @@ var LLM = class {
   // ANTHROPIC IMPLEMENTATION
   // ============================================================
   static async _anthropicText(messages, modelName, options) {
+    const startTime = Date.now();
+    const requestId = llmUsageLogger.generateRequestId();
     const apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY || "";
     const client = new import_sdk.default({
       apiKey
     });
-
-
-
-
-
-
-
-
-
-
-
-
+    try {
+      const response = await client.messages.create({
+        model: modelName,
+        max_tokens: options.maxTokens || 1e3,
+        temperature: options.temperature,
+        system: this._normalizeSystemPrompt(messages.sys),
+        messages: [{
+          role: "user",
+          content: messages.user
+        }]
+      });
+      const durationMs = Date.now() - startTime;
+      const usage = response.usage;
+      const inputTokens = usage?.input_tokens || 0;
+      const outputTokens = usage?.output_tokens || 0;
+      const cacheReadTokens = usage?.cache_read_input_tokens || 0;
+      const cacheWriteTokens = usage?.cache_creation_input_tokens || 0;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "anthropic",
+        model: modelName,
+        method: "text",
+        inputTokens,
+        outputTokens,
+        cacheReadTokens,
+        cacheWriteTokens,
+        totalTokens: inputTokens + outputTokens + cacheReadTokens + cacheWriteTokens,
+        costUSD: llmUsageLogger.calculateCost(modelName, inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens),
+        durationMs,
+        success: true
+      });
+      const textBlock = response.content.find((block) => block.type === "text");
+      return textBlock?.type === "text" ? textBlock.text : "";
+    } catch (error) {
+      const durationMs = Date.now() - startTime;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "anthropic",
+        model: modelName,
+        method: "text",
+        inputTokens: 0,
+        outputTokens: 0,
+        totalTokens: 0,
+        costUSD: 0,
+        durationMs,
+        success: false,
+        error: error instanceof Error ? error.message : String(error)
+      });
+      throw error;
+    }
   }
   static async _anthropicStream(messages, modelName, options, json) {
+    const startTime = Date.now();
+    const requestId = llmUsageLogger.generateRequestId();
    const apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY || "";
    const client = new import_sdk.default({
      apiKey
    });
-
-
-
-
-
-
-
-
-
+    try {
+      const apiMessages = [{
+        role: "user",
+        content: messages.user
+      }];
+      const prefill = messages.prefill || (json ? "{" : void 0);
+      if (prefill) {
+        apiMessages.push({
+          role: "assistant",
+          content: prefill
+        });
+      }
+      const stream = await client.messages.create({
+        model: modelName,
+        max_tokens: options.maxTokens || 1e3,
+        temperature: options.temperature,
+        system: this._normalizeSystemPrompt(messages.sys),
+        messages: apiMessages,
+        stream: true
       });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      let fullText = prefill || "";
+      let usage = null;
+      let inputTokens = 0;
+      let outputTokens = 0;
+      let cacheReadTokens = 0;
+      let cacheWriteTokens = 0;
+      for await (const chunk of stream) {
+        if (chunk.type === "content_block_delta" && chunk.delta.type === "text_delta") {
+          const text = chunk.delta.text;
+          fullText += text;
+          if (options.partial) {
+            options.partial(text);
+          }
+        } else if (chunk.type === "message_start" && chunk.message?.usage) {
+          const msgUsage = chunk.message.usage;
+          inputTokens = msgUsage.input_tokens || 0;
+          cacheReadTokens = msgUsage.cache_read_input_tokens || 0;
+          cacheWriteTokens = msgUsage.cache_creation_input_tokens || 0;
+        } else if (chunk.type === "message_delta" && chunk.usage) {
+          usage = chunk.usage;
+          outputTokens = usage.output_tokens || 0;
         }
-      } else if (chunk.type === "message_delta" && chunk.usage) {
-        usage = chunk.usage;
       }
+      const durationMs = Date.now() - startTime;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "anthropic",
+        model: modelName,
+        method: "stream",
+        inputTokens,
+        outputTokens,
+        cacheReadTokens,
+        cacheWriteTokens,
+        totalTokens: inputTokens + outputTokens + cacheReadTokens + cacheWriteTokens,
+        costUSD: llmUsageLogger.calculateCost(modelName, inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens),
+        durationMs,
+        success: true
+      });
+      if (json) {
+        return this._parseJSON(fullText);
+      }
+      return fullText;
+    } catch (error) {
+      const durationMs = Date.now() - startTime;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "anthropic",
+        model: modelName,
+        method: "stream",
+        inputTokens: 0,
+        outputTokens: 0,
+        totalTokens: 0,
+        costUSD: 0,
+        durationMs,
+        success: false,
+        error: error instanceof Error ? error.message : String(error)
+      });
+      throw error;
    }
-    if (usage) {
-    }
-    if (json) {
-      return this._parseJSON(fullText);
-    }
-    return fullText;
   }
   static async _anthropicStreamWithTools(messages, tools, toolHandler, modelName, options, maxIterations) {
+    const methodStartTime = Date.now();
     const apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY || "";
     const client = new import_sdk.default({
       apiKey
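
Both Anthropic methods above now follow one instrumentation pattern: capture a start time and request ID, run the API call inside try/catch, and emit exactly one usage entry on success or failure. A generic sketch of that pattern — `withUsageLog` and its `logUsage` callback are hypothetical stand-ins for the repeated inline code and `llmUsageLogger.log`:

// Sketch of the instrumentation pattern the diff repeats per provider method.
type UsageSink = (entry: Record<string, unknown>) => void;

async function withUsageLog<T>(
  provider: string, model: string, method: string,
  call: () => Promise<{ result: T; inputTokens: number; outputTokens: number }>,
  logUsage: UsageSink, // stand-in for llmUsageLogger.log
): Promise<T> {
  const startTime = Date.now();
  const requestId = `req-${Date.now()}-${Math.random().toString(36).substring(2, 8)}`;
  try {
    const { result, inputTokens, outputTokens } = await call();
    logUsage({ requestId, provider, model, method, inputTokens, outputTokens,
               totalTokens: inputTokens + outputTokens,
               durationMs: Date.now() - startTime, success: true });
    return result;
  } catch (error) {
    // Failures are logged with zeroed token counts, then rethrown, as in the diff.
    logUsage({ requestId, provider, model, method, inputTokens: 0, outputTokens: 0,
               totalTokens: 0, durationMs: Date.now() - startTime, success: false,
               error: error instanceof Error ? error.message : String(error) });
    throw error;
  }
}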
@@ -3636,8 +4192,15 @@ var LLM = class {
     }];
     let iterations = 0;
     let finalText = "";
+    let totalToolCalls = 0;
+    let totalInputTokens = 0;
+    let totalOutputTokens = 0;
+    let totalCacheReadTokens = 0;
+    let totalCacheWriteTokens = 0;
     while (iterations < maxIterations) {
       iterations++;
+      const iterationStartTime = Date.now();
+      const requestId = llmUsageLogger.generateRequestId();
       const stream = await client.messages.create({
         model: modelName,
         max_tokens: options.maxTokens || 4e3,
@@ -3652,12 +4215,21 @@ var LLM = class {
       const contentBlocks = [];
       let currentTextBlock = "";
       let currentToolUse = null;
-      let
+      let inputTokens = 0;
+      let outputTokens = 0;
+      let cacheReadTokens = 0;
+      let cacheWriteTokens = 0;
       for await (const chunk of stream) {
         if (chunk.type === "message_start") {
           contentBlocks.length = 0;
           currentTextBlock = "";
           currentToolUse = null;
+          const msgUsage = chunk.message?.usage;
+          if (msgUsage) {
+            inputTokens = msgUsage.input_tokens || 0;
+            cacheReadTokens = msgUsage.cache_read_input_tokens || 0;
+            cacheWriteTokens = msgUsage.cache_creation_input_tokens || 0;
+          }
         }
         if (chunk.type === "content_block_start") {
           if (chunk.content_block.type === "text") {
@@ -3704,15 +4276,36 @@ var LLM = class {
         if (chunk.type === "message_delta") {
           stopReason = chunk.delta.stop_reason || stopReason;
           if (chunk.usage) {
-
+            outputTokens = chunk.usage.output_tokens || 0;
           }
         }
         if (chunk.type === "message_stop") {
           break;
         }
       }
-
-
+      const iterationDuration = Date.now() - iterationStartTime;
+      const toolUsesInIteration = contentBlocks.filter((block) => block.type === "tool_use").length;
+      totalToolCalls += toolUsesInIteration;
+      totalInputTokens += inputTokens;
+      totalOutputTokens += outputTokens;
+      totalCacheReadTokens += cacheReadTokens;
+      totalCacheWriteTokens += cacheWriteTokens;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "anthropic",
+        model: modelName,
+        method: `streamWithTools[iter=${iterations}]`,
+        inputTokens,
+        outputTokens,
+        cacheReadTokens,
+        cacheWriteTokens,
+        totalTokens: inputTokens + outputTokens + cacheReadTokens + cacheWriteTokens,
+        costUSD: llmUsageLogger.calculateCost(modelName, inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens),
+        durationMs: iterationDuration,
+        toolCalls: toolUsesInIteration,
+        success: true
+      });
       if (stopReason === "end_turn") {
         break;
       }
@@ -3756,6 +4349,25 @@ var LLM = class {
         break;
       }
     }
+    const totalDuration = Date.now() - methodStartTime;
+    if (iterations > 1) {
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId: llmUsageLogger.generateRequestId(),
+        provider: "anthropic",
+        model: modelName,
+        method: `streamWithTools[TOTAL:${iterations}iters]`,
+        inputTokens: totalInputTokens,
+        outputTokens: totalOutputTokens,
+        cacheReadTokens: totalCacheReadTokens,
+        cacheWriteTokens: totalCacheWriteTokens,
+        totalTokens: totalInputTokens + totalOutputTokens + totalCacheReadTokens + totalCacheWriteTokens,
+        costUSD: llmUsageLogger.calculateCost(modelName, totalInputTokens, totalOutputTokens, totalCacheReadTokens, totalCacheWriteTokens),
+        durationMs: totalDuration,
+        toolCalls: totalToolCalls,
+        success: true
+      });
+    }
    if (iterations >= maxIterations) {
      throw new Error(`Max iterations (${maxIterations}) reached in tool calling loop`);
    }
@@ -3765,100 +4377,272 @@ var LLM = class {
   // GROQ IMPLEMENTATION
   // ============================================================
   static async _groqText(messages, modelName, options) {
+    const startTime = Date.now();
+    const requestId = llmUsageLogger.generateRequestId();
     const client = new import_groq_sdk.default({
       apiKey: options.apiKey || process.env.GROQ_API_KEY || ""
     });
-
-
-
-
-
-
-
-
-
-
+    try {
+      const response = await client.chat.completions.create({
+        model: modelName,
+        messages: [
+          { role: "system", content: messages.sys },
+          { role: "user", content: messages.user }
+        ],
+        temperature: options.temperature,
+        max_tokens: options.maxTokens || 1e3
+      });
+      const durationMs = Date.now() - startTime;
+      const usage = response.usage;
+      const inputTokens = usage?.prompt_tokens || 0;
+      const outputTokens = usage?.completion_tokens || 0;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "groq",
+        model: modelName,
+        method: "text",
+        inputTokens,
+        outputTokens,
+        totalTokens: inputTokens + outputTokens,
+        costUSD: llmUsageLogger.calculateCost(modelName, inputTokens, outputTokens),
+        durationMs,
+        success: true
+      });
+      return response.choices[0]?.message?.content || "";
+    } catch (error) {
+      const durationMs = Date.now() - startTime;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "groq",
+        model: modelName,
+        method: "text",
+        inputTokens: 0,
+        outputTokens: 0,
+        totalTokens: 0,
+        costUSD: 0,
+        durationMs,
+        success: false,
+        error: error instanceof Error ? error.message : String(error)
+      });
+      throw error;
+    }
   }
   static async _groqStream(messages, modelName, options, json) {
+    const startTime = Date.now();
+    const requestId = llmUsageLogger.generateRequestId();
     const apiKey = options.apiKey || process.env.GROQ_API_KEY || "";
     const client = new import_groq_sdk.default({
       apiKey
     });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try {
+      const stream = await client.chat.completions.create({
+        model: modelName,
+        messages: [
+          { role: "system", content: messages.sys },
+          { role: "user", content: messages.user }
+        ],
+        temperature: options.temperature,
+        max_tokens: options.maxTokens || 1e3,
+        stream: true,
+        response_format: json ? { type: "json_object" } : void 0
+      });
+      let fullText = "";
+      let inputTokens = 0;
+      let outputTokens = 0;
+      for await (const chunk of stream) {
+        const text = chunk.choices[0]?.delta?.content || "";
+        if (text) {
+          fullText += text;
+          if (options.partial) {
+            options.partial(text);
+          }
+        }
+        if (chunk.x_groq?.usage) {
+          inputTokens = chunk.x_groq.usage.prompt_tokens || 0;
+          outputTokens = chunk.x_groq.usage.completion_tokens || 0;
        }
      }
+      const durationMs = Date.now() - startTime;
+      if (inputTokens === 0) {
+        const sysPrompt = typeof messages.sys === "string" ? messages.sys : messages.sys.map((b) => b.text).join("");
+        inputTokens = Math.ceil((sysPrompt.length + messages.user.length) / 4);
+      }
+      if (outputTokens === 0) {
+        outputTokens = Math.ceil(fullText.length / 4);
+      }
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "groq",
+        model: modelName,
+        method: "stream",
+        inputTokens,
+        outputTokens,
+        totalTokens: inputTokens + outputTokens,
+        costUSD: llmUsageLogger.calculateCost(modelName, inputTokens, outputTokens),
+        durationMs,
+        success: true
+      });
+      if (json) {
+        return this._parseJSON(fullText);
+      }
+      return fullText;
+    } catch (error) {
+      const durationMs = Date.now() - startTime;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "groq",
+        model: modelName,
+        method: "stream",
+        inputTokens: 0,
+        outputTokens: 0,
+        totalTokens: 0,
+        costUSD: 0,
+        durationMs,
+        success: false,
+        error: error instanceof Error ? error.message : String(error)
+      });
+      throw error;
    }
-    if (json) {
-      return this._parseJSON(fullText);
-    }
-    return fullText;
   }
   // ============================================================
   // GEMINI IMPLEMENTATION
   // ============================================================
   static async _geminiText(messages, modelName, options) {
+    const startTime = Date.now();
+    const requestId = llmUsageLogger.generateRequestId();
     const apiKey = options.apiKey || process.env.GEMINI_API_KEY || "";
     const genAI = new import_generative_ai.GoogleGenerativeAI(apiKey);
     const systemPrompt = typeof messages.sys === "string" ? messages.sys : messages.sys.map((block) => block.text).join("\n");
-
-    model
-
-
-
-
-
-
-
-
-
-
+    try {
+      const model = genAI.getGenerativeModel({
+        model: modelName,
+        systemInstruction: systemPrompt,
+        generationConfig: {
+          maxOutputTokens: options.maxTokens || 1e3,
+          temperature: options.temperature,
+          topP: options.topP
+        }
+      });
+      const result = await model.generateContent(messages.user);
+      const response = await result.response;
+      const text = response.text();
+      const durationMs = Date.now() - startTime;
+      const usage = response.usageMetadata;
+      const inputTokens = usage?.promptTokenCount || Math.ceil((systemPrompt.length + messages.user.length) / 4);
+      const outputTokens = usage?.candidatesTokenCount || Math.ceil(text.length / 4);
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "gemini",
+        model: modelName,
+        method: "text",
+        inputTokens,
+        outputTokens,
+        totalTokens: inputTokens + outputTokens,
+        costUSD: llmUsageLogger.calculateCost(modelName, inputTokens, outputTokens),
+        durationMs,
+        success: true
+      });
+      return text;
+    } catch (error) {
+      const durationMs = Date.now() - startTime;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "gemini",
+        model: modelName,
+        method: "text",
+        inputTokens: 0,
+        outputTokens: 0,
+        totalTokens: 0,
+        costUSD: 0,
+        durationMs,
+        success: false,
+        error: error instanceof Error ? error.message : String(error)
+      });
+      throw error;
+    }
   }
   static async _geminiStream(messages, modelName, options, json) {
+    const startTime = Date.now();
+    const requestId = llmUsageLogger.generateRequestId();
     const apiKey = options.apiKey || process.env.GEMINI_API_KEY || "";
     const genAI = new import_generative_ai.GoogleGenerativeAI(apiKey);
     const systemPrompt = typeof messages.sys === "string" ? messages.sys : messages.sys.map((block) => block.text).join("\n");
-
-    model
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try {
+      const model = genAI.getGenerativeModel({
+        model: modelName,
+        systemInstruction: systemPrompt,
+        generationConfig: {
+          maxOutputTokens: options.maxTokens || 1e3,
+          temperature: options.temperature,
+          topP: options.topP,
+          responseMimeType: json ? "application/json" : void 0
+        }
+      });
+      const result = await model.generateContentStream(messages.user);
+      let fullText = "";
+      let inputTokens = 0;
+      let outputTokens = 0;
+      for await (const chunk of result.stream) {
+        const text = chunk.text();
+        if (text) {
+          fullText += text;
+          if (options.partial) {
+            options.partial(text);
+          }
+        }
+        if (chunk.usageMetadata) {
+          inputTokens = chunk.usageMetadata.promptTokenCount || 0;
+          outputTokens = chunk.usageMetadata.candidatesTokenCount || 0;
        }
      }
+      const durationMs = Date.now() - startTime;
+      if (inputTokens === 0) {
+        inputTokens = Math.ceil((systemPrompt.length + messages.user.length) / 4);
+      }
+      if (outputTokens === 0) {
+        outputTokens = Math.ceil(fullText.length / 4);
+      }
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "gemini",
+        model: modelName,
+        method: "stream",
+        inputTokens,
+        outputTokens,
+        totalTokens: inputTokens + outputTokens,
+        costUSD: llmUsageLogger.calculateCost(modelName, inputTokens, outputTokens),
+        durationMs,
+        success: true
+      });
+      if (json) {
+        return this._parseJSON(fullText);
+      }
+      return fullText;
+    } catch (error) {
+      const durationMs = Date.now() - startTime;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "gemini",
+        model: modelName,
+        method: "stream",
+        inputTokens: 0,
+        outputTokens: 0,
+        totalTokens: 0,
+        costUSD: 0,
+        durationMs,
+        success: false,
+        error: error instanceof Error ? error.message : String(error)
+      });
+      throw error;
    }
-    if (json) {
-      return this._parseJSON(fullText);
-    }
-    return fullText;
   }
   static async _geminiStreamWithTools(messages, tools, toolHandler, modelName, options, maxIterations) {
     const apiKey = options.apiKey || process.env.GEMINI_API_KEY || "";
@@ -3952,51 +4736,138 @@ var LLM = class {
   // OPENAI IMPLEMENTATION
   // ============================================================
   static async _openaiText(messages, modelName, options) {
+    const startTime = Date.now();
+    const requestId = llmUsageLogger.generateRequestId();
     const apiKey = options.apiKey || process.env.OPENAI_API_KEY || "";
     const openai = new import_openai.default({ apiKey });
     const systemPrompt = typeof messages.sys === "string" ? messages.sys : messages.sys.map((block) => block.text).join("\n");
-
-
-
-
-
-
-
-
-
-
-
+    try {
+      const response = await openai.chat.completions.create({
+        model: modelName,
+        messages: [
+          { role: "system", content: systemPrompt },
+          { role: "user", content: messages.user }
+        ],
+        max_tokens: options.maxTokens || 1e3,
+        temperature: options.temperature,
+        top_p: options.topP
+      });
+      const durationMs = Date.now() - startTime;
+      const usage = response.usage;
+      const inputTokens = usage?.prompt_tokens || 0;
+      const outputTokens = usage?.completion_tokens || 0;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "openai",
+        model: modelName,
+        method: "text",
+        inputTokens,
+        outputTokens,
+        totalTokens: inputTokens + outputTokens,
+        costUSD: llmUsageLogger.calculateCost(modelName, inputTokens, outputTokens),
+        durationMs,
+        success: true
+      });
+      return response.choices[0]?.message?.content || "";
+    } catch (error) {
+      const durationMs = Date.now() - startTime;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "openai",
+        model: modelName,
+        method: "text",
+        inputTokens: 0,
+        outputTokens: 0,
+        totalTokens: 0,
+        costUSD: 0,
+        durationMs,
+        success: false,
+        error: error instanceof Error ? error.message : String(error)
+      });
+      throw error;
+    }
   }
   static async _openaiStream(messages, modelName, options, json) {
+    const startTime = Date.now();
+    const requestId = llmUsageLogger.generateRequestId();
     const apiKey = options.apiKey || process.env.OPENAI_API_KEY || "";
     const openai = new import_openai.default({ apiKey });
     const systemPrompt = typeof messages.sys === "string" ? messages.sys : messages.sys.map((block) => block.text).join("\n");
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try {
+      const stream = await openai.chat.completions.create({
+        model: modelName,
+        messages: [
+          { role: "system", content: systemPrompt },
+          { role: "user", content: messages.user }
+        ],
+        max_tokens: options.maxTokens || 1e3,
+        temperature: options.temperature,
+        top_p: options.topP,
+        response_format: json ? { type: "json_object" } : void 0,
+        stream: true,
+        stream_options: { include_usage: true }
+        // Request usage info in stream
+      });
+      let fullText = "";
+      let inputTokens = 0;
+      let outputTokens = 0;
+      for await (const chunk of stream) {
+        const content = chunk.choices[0]?.delta?.content || "";
+        if (content) {
+          fullText += content;
+          if (options.partial) {
+            options.partial(content);
+          }
+        }
+        if (chunk.usage) {
+          inputTokens = chunk.usage.prompt_tokens || 0;
+          outputTokens = chunk.usage.completion_tokens || 0;
        }
      }
+      const durationMs = Date.now() - startTime;
+      if (inputTokens === 0) {
+        inputTokens = Math.ceil((systemPrompt.length + messages.user.length) / 4);
+      }
+      if (outputTokens === 0) {
+        outputTokens = Math.ceil(fullText.length / 4);
+      }
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "openai",
+        model: modelName,
+        method: "stream",
+        inputTokens,
+        outputTokens,
+        totalTokens: inputTokens + outputTokens,
+        costUSD: llmUsageLogger.calculateCost(modelName, inputTokens, outputTokens),
+        durationMs,
+        success: true
+      });
+      if (json) {
+        return this._parseJSON(fullText);
+      }
+      return fullText;
+    } catch (error) {
+      const durationMs = Date.now() - startTime;
+      llmUsageLogger.log({
+        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+        requestId,
+        provider: "openai",
+        model: modelName,
+        method: "stream",
+        inputTokens: 0,
+        outputTokens: 0,
+        totalTokens: 0,
+        costUSD: 0,
+        durationMs,
+        success: false,
+        error: error instanceof Error ? error.message : String(error)
+      });
+      throw error;
    }
-    if (json) {
-      return this._parseJSON(fullText);
-    }
-    return fullText;
   }
   static async _openaiStreamWithTools(messages, tools, toolHandler, modelName, options, maxIterations) {
     const apiKey = options.apiKey || process.env.OPENAI_API_KEY || "";
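
When a provider's stream ends without usage metadata, the code above falls back to estimating tokens as `Math.ceil(chars / 4)` — a rough rule of thumb for English text rather than a real tokenizer count. For example:

// The chars/4 fallback, with hypothetical example strings.
const sysPrompt = "You are a helpful assistant.";  // 28 chars
const userMsg = "Summarize this paragraph...";     // 27 chars
const estimatedInput = Math.ceil((sysPrompt.length + userMsg.length) / 4); // ceil(55 / 4) = 14

This keeps cost reporting non-zero in the worst case, at the price of accuracy for non-English or code-heavy prompts.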
@@ -4142,11 +5013,9 @@ var LLM = class {
       closeChar = "]";
     }
     if (startIdx === -1) {
-      const
-
-
-Full response:
-${preview}`);
+      const error = new Error(`No JSON found in response. LLM returned plain text instead of JSON.`);
+      userPromptErrorLogger.logJsonParseError("LLM._parseJSON - No JSON structure found", text, error);
+      throw error;
     }
     let depth = 0;
     let inString = false;
@@ -4173,24 +5042,17 @@ ${preview}`);
 if (endIdx !== -1) {
 jsonText = jsonText.substring(startIdx, endIdx + 1);
 } else {
-const …
-…
-…
-Full response:
-${preview}`);
+const error = new Error(`Incomplete JSON - no matching closing ${closeChar} found.`);
+userPromptErrorLogger.logJsonParseError("LLM._parseJSON - Incomplete JSON", text, error);
+throw error;
 }
 try {
 const repairedJson = (0, import_jsonrepair.jsonrepair)(jsonText);
 return JSON.parse(repairedJson);
 } catch (error) {
-const …
-…
-…
-Extracted JSON:
-${jsonText.substring(0, 300)}...
-…
-Full response:
-${preview}`);
+const parseError = error instanceof Error ? error : new Error(String(error));
+userPromptErrorLogger.logJsonParseError("LLM._parseJSON - JSON parse/repair failed", text, parseError);
+throw new Error(`Failed to parse JSON: ${parseError.message}`);
 }
 }
 };
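Note: `_parseJSON` now routes every failure through `userPromptErrorLogger` and repairs near-valid JSON with the `jsonrepair` package before parsing. A minimal sketch of the same extract-then-repair idea (standalone, assumes object payloads only):

const { jsonrepair } = require("jsonrepair");

// Pull the first {...} span out of an LLM reply, repair it, then parse it.
function parseLooseJson(text) {
  const start = text.indexOf("{");
  const end = text.lastIndexOf("}");
  if (start === -1 || end === -1) throw new Error("No JSON found in response");
  return JSON.parse(jsonrepair(text.slice(start, end + 1)));
}
// parseLooseJson("Sure! {name: 'a', n: 1,}") -> { name: "a", n: 1 }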
@@ -4538,8 +5400,41 @@ var conversation_search_default = ConversationSearch;
 var BaseLLM = class {
 constructor(config) {
 this.model = config?.model || this.getDefaultModel();
+this.fastModel = config?.fastModel || this.getDefaultFastModel();
 this.defaultLimit = config?.defaultLimit || 50;
 this.apiKey = config?.apiKey;
+this.modelStrategy = config?.modelStrategy || "fast";
+}
+/**
+* Get the appropriate model based on task type and model strategy
+* @param taskType - 'complex' for text generation/matching, 'simple' for classification/actions
+* @returns The model string to use for this task
+*/
+getModelForTask(taskType) {
+switch (this.modelStrategy) {
+case "best":
+return this.model;
+case "fast":
+return this.fastModel;
+case "balanced":
+default:
+return taskType === "complex" ? this.model : this.fastModel;
+}
+}
+/**
+* Set the model strategy at runtime
+* @param strategy - 'best', 'fast', or 'balanced'
+*/
+setModelStrategy(strategy) {
+this.modelStrategy = strategy;
+logger.info(`[${this.getProviderName()}] Model strategy set to: ${strategy}`);
+}
+/**
+* Get the current model strategy
+* @returns The current model strategy
+*/
+getModelStrategy() {
+return this.modelStrategy;
 }
 /**
 * Get the API key (from instance, parameter, or environment)
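Note: `getModelForTask` gives every provider a two-tier model pool: 'best' always picks the flagship model, 'fast' always picks the cheap one, and 'balanced' routes by task type. A standalone sketch of the same routing logic (illustrative names, not the SDK's API):

// Two-tier model routing, mirroring BaseLLM.getModelForTask above.
function pickModel(strategy, taskType, model, fastModel) {
  if (strategy === "best") return model;
  if (strategy === "fast") return fastModel;
  // "balanced": flagship for complex work, cheap model for simple classification
  return taskType === "complex" ? model : fastModel;
}
// pickModel("balanced", "simple", "openai/gpt-4.1", "openai/gpt-4o-mini") === "openai/gpt-4o-mini"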
@@ -4724,7 +5619,7 @@ ${JSON.stringify(tool.requiredFields || [], null, 2)}`;
 user: prompts.user
 },
 {
-model: this.model,
+model: this.getModelForTask("complex"),
 maxTokens: 8192,
 temperature: 0.2,
 apiKey: this.getApiKey(apiKey),
@@ -4847,7 +5742,7 @@ ${JSON.stringify(tool.requiredFields || [], null, 2)}`;
 user: prompts.user
 },
 {
-model: this.model,
+model: this.getModelForTask("simple"),
 maxTokens: 1500,
 temperature: 0.2,
 apiKey: this.getApiKey(apiKey)
@@ -4908,7 +5803,7 @@ ${JSON.stringify(tool.requiredFields || [], null, 2)}`;
 user: prompts.user
 },
 {
-model: this.model,
+model: this.getModelForTask("complex"),
 maxTokens: 3e3,
 temperature: 0.2,
 apiKey: this.getApiKey(apiKey)
@@ -5279,6 +6174,7 @@ ${sql}
 const errorMsg = error instanceof Error ? error.message : String(error);
 logger.error(`[${this.getProviderName()}] Query execution failed (attempt ${attempts}/${MAX_QUERY_ATTEMPTS}): ${errorMsg}`);
 logCollector?.error(`Query failed (attempt ${attempts}/${MAX_QUERY_ATTEMPTS}): ${errorMsg}`);
+userPromptErrorLogger.logSqlError(sql, error instanceof Error ? error : new Error(errorMsg), Object.keys(params).length > 0 ? Object.values(params) : void 0);
 if (wrappedStreamCallback) {
 wrappedStreamCallback(`\u274C **Query execution failed:**
 ```
@@ -5369,6 +6265,7 @@ Please try rephrasing your request or contact support.
 const errorMsg = error instanceof Error ? error.message : String(error);
 logger.error(`[${this.getProviderName()}] External tool ${externalTool.name} failed (attempt ${attempts}/${MAX_TOOL_ATTEMPTS}): ${errorMsg}`);
 logCollector?.error(`\u2717 ${externalTool.name} failed: ${errorMsg}`);
+userPromptErrorLogger.logToolError(externalTool.name, toolInput, error instanceof Error ? error : new Error(errorMsg));
 if (wrappedStreamCallback) {
 wrappedStreamCallback(`\u274C **${externalTool.name} failed:**
 ```
@@ -5396,7 +6293,7 @@ ${errorMsg}
 tools,
 toolHandler,
 {
-model: this.model,
+model: this.getModelForTask("complex"),
 maxTokens: 4e3,
 temperature: 0.7,
 apiKey: this.getApiKey(apiKey),
@@ -5441,6 +6338,21 @@ ${errorMsg}
 if (category === "general") {
 logger.info(`[${this.getProviderName()}] Skipping component generation for general/conversational question`);
 logCollector?.info("Skipping component generation for general question");
+logger.info(`[${this.getProviderName()}] Generating actions for general question...`);
+const nextQuestions = await this.generateNextQuestions(
+userPrompt,
+null,
+// no component
+void 0,
+// no component data
+apiKey,
+logCollector,
+conversationHistory,
+textResponse
+// pass text response as context
+);
+actions = convertQuestionsToActions(nextQuestions);
+logger.info(`[${this.getProviderName()}] Generated ${actions.length} follow-up actions for general question`);
 } else if (components && components.length > 0) {
 logger.info(`[${this.getProviderName()}] Matching components from text response...`);
 const componentStreamCallback = wrappedStreamCallback && category !== "data_modification" ? (component) => {
@@ -5511,6 +6423,13 @@ ${errorMsg}
 const errorMsg = error instanceof Error ? error.message : String(error);
 logger.error(`[${this.getProviderName()}] Error generating text response: ${errorMsg}`);
 logCollector?.error(`Error generating text response: ${errorMsg}`);
+userPromptErrorLogger.logLlmError(
+this.getProviderName(),
+this.model,
+"generateTextResponse",
+error instanceof Error ? error : new Error(errorMsg),
+{ userPrompt }
+);
 errors.push(errorMsg);
 return {
 success: false,
@@ -5592,10 +6511,18 @@ ${errorMsg}
 logger.info(`[${this.getProviderName()}] \u2713 100% match - returning UI block directly without adaptation`);
 logCollector?.info(`\u2713 Exact match (${(conversationMatch.similarity * 100).toFixed(2)}%) - returning cached result`);
 logCollector?.info(`Total time taken: ${elapsedTime2}ms (${(elapsedTime2 / 1e3).toFixed(2)}s)`);
+if (streamCallback && cachedTextResponse) {
+logger.info(`[${this.getProviderName()}] Streaming cached text response to frontend`);
+streamCallback(cachedTextResponse);
+}
+const cachedActions = conversationMatch.uiBlock?.actions || [];
 return {
 success: true,
 data: {
+text: cachedTextResponse,
 component,
+matchedComponents: component?.props?.config?.components || [],
+actions: cachedActions,
 reasoning: `Exact match from previous conversation (${(conversationMatch.similarity * 100).toFixed(2)}% similarity)`,
 method: `${this.getProviderName()}-semantic-match-exact`,
 semanticSimilarity: conversationMatch.similarity
@@ -5618,10 +6545,18 @@ ${errorMsg}
 logger.info(`[${this.getProviderName()}] Total time taken: ${elapsedTime2}ms (${(elapsedTime2 / 1e3).toFixed(2)}s)`);
 logCollector?.info(`\u2713 UI block adapted successfully`);
 logCollector?.info(`Total time taken: ${elapsedTime2}ms (${(elapsedTime2 / 1e3).toFixed(2)}s)`);
+if (streamCallback && cachedTextResponse) {
+logger.info(`[${this.getProviderName()}] Streaming cached text response to frontend (adapted match)`);
+streamCallback(cachedTextResponse);
+}
+const cachedActions = conversationMatch.uiBlock?.actions || [];
 return {
 success: true,
 data: {
+text: cachedTextResponse,
 component: adaptResult.adaptedComponent,
+matchedComponents: adaptResult.adaptedComponent?.props?.config?.components || [],
+actions: cachedActions,
 reasoning: `Adapted from previous conversation: ${originalPrompt}`,
 method: `${this.getProviderName()}-semantic-match`,
 semanticSimilarity: conversationMatch.similarity,
@@ -5713,6 +6648,11 @@ ${errorMsg}
 logger.error(`[${this.getProviderName()}] Error in handleUserRequest: ${errorMsg}`);
 logger.debug(`[${this.getProviderName()}] Error details:`, error);
 logCollector?.error(`Error processing request: ${errorMsg}`);
+userPromptErrorLogger.logError(
+"handleUserRequest",
+error instanceof Error ? error : new Error(errorMsg),
+{ userPrompt }
+);
 const elapsedTime = Date.now() - startTime;
 logger.info(`[${this.getProviderName()}] Total time taken: ${elapsedTime}ms (${(elapsedTime / 1e3).toFixed(2)}s)`);
 logCollector?.info(`Total time taken: ${elapsedTime}ms (${(elapsedTime / 1e3).toFixed(2)}s)`);
@@ -5729,15 +6669,26 @@ ${errorMsg}
 /**
 * Generate next questions that the user might ask based on the original prompt and generated component
 * This helps provide intelligent suggestions for follow-up queries
+* For general/conversational questions without components, pass textResponse instead
 */
-async generateNextQuestions(originalUserPrompt, component, componentData, apiKey, logCollector, conversationHistory) {
+async generateNextQuestions(originalUserPrompt, component, componentData, apiKey, logCollector, conversationHistory, textResponse) {
 try {
-const component_info = `
+let component_info;
+if (component) {
+component_info = `
 Component Name: ${component.name}
 Component Type: ${component.type}
 Component Description: ${component.description || "No description"}
 Component Props: ${component.props ? JSON.stringify(component.props, null, 2) : "No props"}
 `;
+} else if (textResponse) {
+component_info = `
+Response Type: Text/Conversational Response
+Response Content: ${textResponse.substring(0, 1e3)}${textResponse.length > 1e3 ? "..." : ""}
+`;
+} else {
+component_info = "No component or response context available";
+}
 const component_data = componentData ? `Component Data: ${JSON.stringify(componentData, null, 2)}` : "";
 const prompts = await promptLoader.loadPrompts("actions", {
 ORIGINAL_USER_PROMPT: originalUserPrompt,
@@ -5751,7 +6702,7 @@ ${errorMsg}
 user: prompts.user
 },
 {
-model: this.model,
+model: this.getModelForTask("simple"),
 maxTokens: 1200,
 temperature: 0.7,
 apiKey: this.getApiKey(apiKey)
@@ -5788,6 +6739,9 @@ var GroqLLM = class extends BaseLLM {
 getDefaultModel() {
 return "groq/openai/gpt-oss-120b";
 }
+getDefaultFastModel() {
+return "groq/llama-3.1-8b-instant";
+}
 getDefaultApiKey() {
 return process.env.GROQ_API_KEY;
 }
@@ -5807,6 +6761,9 @@ var AnthropicLLM = class extends BaseLLM {
 getDefaultModel() {
 return "anthropic/claude-sonnet-4-5-20250929";
 }
+getDefaultFastModel() {
+return "anthropic/claude-haiku-4-5-20251001";
+}
 getDefaultApiKey() {
 return process.env.ANTHROPIC_API_KEY;
 }
@@ -5826,6 +6783,9 @@ var GeminiLLM = class extends BaseLLM {
 getDefaultModel() {
 return "gemini/gemini-2.5-flash";
 }
+getDefaultFastModel() {
+return "gemini/gemini-2.0-flash-exp";
+}
 getDefaultApiKey() {
 return process.env.GEMINI_API_KEY;
 }
@@ -5845,6 +6805,9 @@ var OpenAILLM = class extends BaseLLM {
 getDefaultModel() {
 return "openai/gpt-4.1";
 }
+getDefaultFastModel() {
+return "openai/gpt-4o-mini";
+}
 getDefaultApiKey() {
 return process.env.OPENAI_API_KEY;
 }
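Note: each provider now pairs its flagship default with a fast default (Groq: llama-3.1-8b-instant, Anthropic: claude-haiku-4-5, Gemini: gemini-2.0-flash-exp, OpenAI: gpt-4o-mini). Assuming the BaseLLM config shape from the constructor diff above, both tiers should be overridable at construction time; a hypothetical sketch (the diff only exports the singletons, not the classes):

// Hypothetical: construct a provider with explicit model tiers.
const llm = new OpenAILLM({
  model: "openai/gpt-4.1",         // used for "complex" tasks
  fastModel: "openai/gpt-4o-mini", // used for "simple" tasks
  modelStrategy: "balanced",
});
llm.getModelForTask("simple"); // -> "openai/gpt-4o-mini"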
@@ -6351,6 +7314,9 @@ var get_user_request = async (data, components, sendMessage, anthropicApiKey, gr
 const prompt = payload.prompt;
 const SA_RUNTIME = payload.SA_RUNTIME;
 const wsId = userPromptRequest.from.id || "unknown";
+const promptContext = `User Prompt: ${prompt?.substring(0, 50)}${(prompt?.length || 0) > 50 ? "..." : ""}`;
+llmUsageLogger.resetLogFile(promptContext);
+userPromptErrorLogger.resetLogFile(promptContext);
 if (!SA_RUNTIME) {
 errors.push("SA_RUNTIME is required");
 }
@@ -6424,6 +7390,14 @@ var get_user_request = async (data, components, sendMessage, anthropicApiKey, gr
 const uiBlockId = existingUiBlockId;
 if (!userResponse.success) {
 logger.error(`User prompt request failed with errors: ${userResponse.errors.join(", ")}`);
+userPromptErrorLogger.logError("User Response Failed", userResponse.errors.join("\n"), {
+prompt,
+uiBlockId,
+threadId,
+responseData: userResponse.data
+});
+userPromptErrorLogger.writeSummary();
+llmUsageLogger.logSessionSummary(`FAILED: ${prompt?.substring(0, 30)}`);
 return {
 success: false,
 data: userResponse.data,
@@ -6499,6 +7473,7 @@ var get_user_request = async (data, components, sendMessage, anthropicApiKey, gr
 }
 }
 }
+llmUsageLogger.logSessionSummary(prompt?.substring(0, 50));
 return {
 success: userResponse.success,
 data: userResponse.data,
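Note: together with the `resetLogFile` calls at the top of `get_user_request`, the loggers now follow a per-request lifecycle: reset at the start, append per LLM call, summarize at the end (with `writeSummary` and a FAILED session summary on the error path). A sketch of that lifecycle using only the calls shown in the hunks above (internal API; signatures inferred from the diff):

// Per-request logging lifecycle as wired into get_user_request.
llmUsageLogger.resetLogFile("User Prompt: show me revenue by region"); // request start
// ... each LLM call appends an entry via llmUsageLogger.log({ ... }) ...
llmUsageLogger.logSessionSummary("show me revenue by region");         // request end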
@@ -9400,8 +10375,8 @@ function sendDashCompResponse(id, res, sendMessage, clientId) {
 }

 // src/auth/user-manager.ts
-var …
-var …
+var import_fs6 = __toESM(require("fs"));
+var import_path5 = __toESM(require("path"));
 var import_os = __toESM(require("os"));
 init_logger();
 var UserManager = class {
@@ -9415,7 +10390,7 @@ var UserManager = class {
 this.hasChanged = false;
 this.syncInterval = null;
 this.isInitialized = false;
-this.filePath = …
+this.filePath = import_path5.default.join(import_os.default.homedir(), ".superatom", "projects", projectId, "users.json");
 this.syncIntervalMs = syncIntervalMs;
 }
 /**
@@ -9440,20 +10415,20 @@ var UserManager = class {
 */
 async loadUsersFromFile() {
 try {
-const dir = …
-if (!…
+const dir = import_path5.default.dirname(this.filePath);
+if (!import_fs6.default.existsSync(dir)) {
 logger.info(`Creating directory structure: ${dir}`);
-…
+import_fs6.default.mkdirSync(dir, { recursive: true });
 }
-if (!…
+if (!import_fs6.default.existsSync(this.filePath)) {
 logger.info(`Users file does not exist at ${this.filePath}, creating with empty users`);
 const initialData = { users: [] };
-…
+import_fs6.default.writeFileSync(this.filePath, JSON.stringify(initialData, null, 4));
 this.users = [];
 this.hasChanged = false;
 return;
 }
-const fileContent = …
+const fileContent = import_fs6.default.readFileSync(this.filePath, "utf-8");
 const rawData = JSON.parse(fileContent);
 const validatedData = UsersDataSchema.parse(rawData);
 this.users = validatedData.users;
@@ -9472,16 +10447,16 @@ var UserManager = class {
 return;
 }
 try {
-const dir = …
-if (!…
-…
+const dir = import_path5.default.dirname(this.filePath);
+if (!import_fs6.default.existsSync(dir)) {
+import_fs6.default.mkdirSync(dir, { recursive: true });
 }
 const usersToSave = this.users.map((user) => {
 const { wsIds, ...userWithoutWsIds } = user;
 return userWithoutWsIds;
 });
 const data = { users: usersToSave };
-…
+import_fs6.default.writeFileSync(this.filePath, JSON.stringify(data, null, 4));
 this.hasChanged = false;
 logger.debug(`Synced ${this.users.length} users to file (wsIds excluded)`);
 } catch (error) {
@@ -9699,8 +10674,8 @@ var UserManager = class {
 };

 // src/dashboards/dashboard-manager.ts
-var …
-var …
+var import_fs7 = __toESM(require("fs"));
+var import_path6 = __toESM(require("path"));
 var import_os2 = __toESM(require("os"));
 init_logger();
 var DashboardManager = class {
@@ -9710,7 +10685,7 @@ var DashboardManager = class {
 */
 constructor(projectId = "snowflake-dataset") {
 this.projectId = projectId;
-this.dashboardsBasePath = …
+this.dashboardsBasePath = import_path6.default.join(
 import_os2.default.homedir(),
 ".superatom",
 "projects",
@@ -9724,7 +10699,7 @@ var DashboardManager = class {
 * @returns Full path to dashboard data.json file
 */
 getDashboardPath(dashboardId) {
-return …
+return import_path6.default.join(this.dashboardsBasePath, dashboardId, "data.json");
 }
 /**
 * Create a new dashboard
@@ -9734,13 +10709,13 @@ var DashboardManager = class {
 */
 createDashboard(dashboardId, dashboard) {
 const dashboardPath = this.getDashboardPath(dashboardId);
-const dashboardDir = …
-if (…
+const dashboardDir = import_path6.default.dirname(dashboardPath);
+if (import_fs7.default.existsSync(dashboardPath)) {
 throw new Error(`Dashboard '${dashboardId}' already exists`);
 }
 const validated = DSLRendererPropsSchema.parse(dashboard);
-…
-…
+import_fs7.default.mkdirSync(dashboardDir, { recursive: true });
+import_fs7.default.writeFileSync(dashboardPath, JSON.stringify(validated, null, 4));
 logger.info(`Dashboard created: ${dashboardId}`);
 return validated;
 }
@@ -9751,12 +10726,12 @@ var DashboardManager = class {
 */
 getDashboard(dashboardId) {
 const dashboardPath = this.getDashboardPath(dashboardId);
-if (!…
+if (!import_fs7.default.existsSync(dashboardPath)) {
 logger.warn(`Dashboard not found: ${dashboardId}`);
 return null;
 }
 try {
-const fileContent = …
+const fileContent = import_fs7.default.readFileSync(dashboardPath, "utf-8");
 const dashboard = JSON.parse(fileContent);
 const validated = DSLRendererPropsSchema.parse(dashboard);
 return validated;
@@ -9770,16 +10745,16 @@ var DashboardManager = class {
 * @returns Array of dashboard objects with their IDs
 */
 getAllDashboards() {
-if (!…
-…
+if (!import_fs7.default.existsSync(this.dashboardsBasePath)) {
+import_fs7.default.mkdirSync(this.dashboardsBasePath, { recursive: true });
 return [];
 }
 const dashboards = [];
 try {
-const dashboardDirs = …
+const dashboardDirs = import_fs7.default.readdirSync(this.dashboardsBasePath);
 for (const dashboardId of dashboardDirs) {
 const dashboardPath = this.getDashboardPath(dashboardId);
-if (…
+if (import_fs7.default.existsSync(dashboardPath)) {
 const dashboard = this.getDashboard(dashboardId);
 if (dashboard) {
 dashboards.push({ dashboardId, dashboard });
@@ -9801,13 +10776,13 @@ var DashboardManager = class {
 */
 updateDashboard(dashboardId, dashboard) {
 const dashboardPath = this.getDashboardPath(dashboardId);
-if (!…
+if (!import_fs7.default.existsSync(dashboardPath)) {
 logger.warn(`Dashboard not found for update: ${dashboardId}`);
 return null;
 }
 try {
 const validated = DSLRendererPropsSchema.parse(dashboard);
-…
+import_fs7.default.writeFileSync(dashboardPath, JSON.stringify(validated, null, 4));
 logger.info(`Dashboard updated: ${dashboardId}`);
 return validated;
 } catch (error) {
@@ -9822,13 +10797,13 @@ var DashboardManager = class {
 */
 deleteDashboard(dashboardId) {
 const dashboardPath = this.getDashboardPath(dashboardId);
-const dashboardDir = …
-if (!…
+const dashboardDir = import_path6.default.dirname(dashboardPath);
+if (!import_fs7.default.existsSync(dashboardPath)) {
 logger.warn(`Dashboard not found for deletion: ${dashboardId}`);
 return false;
 }
 try {
-…
+import_fs7.default.rmSync(dashboardDir, { recursive: true, force: true });
 logger.info(`Dashboard deleted: ${dashboardId}`);
 return true;
 } catch (error) {
@@ -9843,21 +10818,21 @@ var DashboardManager = class {
 */
 dashboardExists(dashboardId) {
 const dashboardPath = this.getDashboardPath(dashboardId);
-return …
+return import_fs7.default.existsSync(dashboardPath);
 }
 /**
 * Get dashboard count
 * @returns Number of dashboards
 */
 getDashboardCount() {
-if (!…
+if (!import_fs7.default.existsSync(this.dashboardsBasePath)) {
 return 0;
 }
 try {
-const dashboardDirs = …
+const dashboardDirs = import_fs7.default.readdirSync(this.dashboardsBasePath);
 return dashboardDirs.filter((dir) => {
 const dashboardPath = this.getDashboardPath(dir);
-return …
+return import_fs7.default.existsSync(dashboardPath);
 }).length;
 } catch (error) {
 logger.error("Failed to get dashboard count:", error);
@@ -9867,8 +10842,8 @@ var DashboardManager = class {
 };

 // src/reports/report-manager.ts
-var …
-var …
+var import_fs8 = __toESM(require("fs"));
+var import_path7 = __toESM(require("path"));
 var import_os3 = __toESM(require("os"));
 init_logger();
 var ReportManager = class {
@@ -9878,7 +10853,7 @@ var ReportManager = class {
 */
 constructor(projectId = "snowflake-dataset") {
 this.projectId = projectId;
-this.reportsBasePath = …
+this.reportsBasePath = import_path7.default.join(
 import_os3.default.homedir(),
 ".superatom",
 "projects",
@@ -9892,7 +10867,7 @@ var ReportManager = class {
 * @returns Full path to report data.json file
 */
 getReportPath(reportId) {
-return …
+return import_path7.default.join(this.reportsBasePath, reportId, "data.json");
 }
 /**
 * Create a new report
@@ -9902,13 +10877,13 @@ var ReportManager = class {
 */
 createReport(reportId, report) {
 const reportPath = this.getReportPath(reportId);
-const reportDir = …
-if (…
+const reportDir = import_path7.default.dirname(reportPath);
+if (import_fs8.default.existsSync(reportPath)) {
 throw new Error(`Report '${reportId}' already exists`);
 }
 const validated = DSLRendererPropsSchema2.parse(report);
-…
-…
+import_fs8.default.mkdirSync(reportDir, { recursive: true });
+import_fs8.default.writeFileSync(reportPath, JSON.stringify(validated, null, 4));
 logger.info(`Report created: ${reportId}`);
 return validated;
 }
@@ -9919,12 +10894,12 @@ var ReportManager = class {
 */
 getReport(reportId) {
 const reportPath = this.getReportPath(reportId);
-if (!…
+if (!import_fs8.default.existsSync(reportPath)) {
 logger.warn(`Report not found: ${reportId}`);
 return null;
 }
 try {
-const fileContent = …
+const fileContent = import_fs8.default.readFileSync(reportPath, "utf-8");
 const report = JSON.parse(fileContent);
 const validated = DSLRendererPropsSchema2.parse(report);
 return validated;
@@ -9938,16 +10913,16 @@ var ReportManager = class {
 * @returns Array of report objects with their IDs
 */
 getAllReports() {
-if (!…
-…
+if (!import_fs8.default.existsSync(this.reportsBasePath)) {
+import_fs8.default.mkdirSync(this.reportsBasePath, { recursive: true });
 return [];
 }
 const reports = [];
 try {
-const reportDirs = …
+const reportDirs = import_fs8.default.readdirSync(this.reportsBasePath);
 for (const reportId of reportDirs) {
 const reportPath = this.getReportPath(reportId);
-if (…
+if (import_fs8.default.existsSync(reportPath)) {
 const report = this.getReport(reportId);
 if (report) {
 reports.push({ reportId, report });
@@ -9969,13 +10944,13 @@ var ReportManager = class {
 */
 updateReport(reportId, report) {
 const reportPath = this.getReportPath(reportId);
-if (!…
+if (!import_fs8.default.existsSync(reportPath)) {
 logger.warn(`Report not found for update: ${reportId}`);
 return null;
 }
 try {
 const validated = DSLRendererPropsSchema2.parse(report);
-…
+import_fs8.default.writeFileSync(reportPath, JSON.stringify(validated, null, 4));
 logger.info(`Report updated: ${reportId}`);
 return validated;
 } catch (error) {
@@ -9990,13 +10965,13 @@ var ReportManager = class {
 */
 deleteReport(reportId) {
 const reportPath = this.getReportPath(reportId);
-const reportDir = …
-if (!…
+const reportDir = import_path7.default.dirname(reportPath);
+if (!import_fs8.default.existsSync(reportPath)) {
 logger.warn(`Report not found for deletion: ${reportId}`);
 return false;
 }
 try {
-…
+import_fs8.default.rmSync(reportDir, { recursive: true, force: true });
 logger.info(`Report deleted: ${reportId}`);
 return true;
 } catch (error) {
@@ -10011,21 +10986,21 @@ var ReportManager = class {
 */
 reportExists(reportId) {
 const reportPath = this.getReportPath(reportId);
-return …
+return import_fs8.default.existsSync(reportPath);
 }
 /**
 * Get report count
 * @returns Number of reports
 */
 getReportCount() {
-if (!…
+if (!import_fs8.default.existsSync(this.reportsBasePath)) {
 return 0;
 }
 try {
-const reportDirs = …
+const reportDirs = import_fs8.default.readdirSync(this.reportsBasePath);
 return reportDirs.filter((dir) => {
 const reportPath = this.getReportPath(dir);
-return …
+return import_fs8.default.existsSync(reportPath);
 }).length;
 } catch (error) {
 logger.error("Failed to get report count:", error);
@@ -10254,7 +11229,9 @@ var SuperatomSDK = class {
 this.openaiApiKey = config.OPENAI_API_KEY || process.env.OPENAI_API_KEY || "";
 this.llmProviders = config.LLM_PROVIDERS || getLLMProviders();
 this.databaseType = config.databaseType || "postgresql";
-…
+this.modelStrategy = config.modelStrategy || "fast";
+this.applyModelStrategy(this.modelStrategy);
+logger.info(`Initializing Superatom SDK v${SDK_VERSION} for project ${this.projectId}, llm providers: ${this.llmProviders.join(", ")}, database type: ${this.databaseType}, model strategy: ${this.modelStrategy}`);
 this.userManager = new UserManager(this.projectId, 5e3);
 this.dashboardManager = new DashboardManager(this.projectId);
 this.reportManager = new ReportManager(this.projectId);
@@ -10633,6 +11610,31 @@ var SuperatomSDK = class {
 getTools() {
 return this.tools;
 }
+/**
+* Apply model strategy to all LLM provider singletons
+* @param strategy - 'best', 'fast', or 'balanced'
+*/
+applyModelStrategy(strategy) {
+anthropicLLM.setModelStrategy(strategy);
+groqLLM.setModelStrategy(strategy);
+geminiLLM.setModelStrategy(strategy);
+openaiLLM.setModelStrategy(strategy);
+logger.info(`Model strategy '${strategy}' applied to all LLM providers`);
+}
+/**
+* Set model strategy at runtime
+* @param strategy - 'best', 'fast', or 'balanced'
+*/
+setModelStrategy(strategy) {
+this.modelStrategy = strategy;
+this.applyModelStrategy(strategy);
+}
+/**
+* Get current model strategy
+*/
+getModelStrategy() {
+return this.modelStrategy;
+}
 };
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
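Note: `SuperatomSDK.setModelStrategy` fans one strategy out to all four provider singletons, so a single call flips the whole SDK between cost and quality tiers. A hypothetical usage sketch (constructor options other than `modelStrategy` omitted; the default is "fast"):

const sdk = new SuperatomSDK({ modelStrategy: "fast" });
// Later, spend more only on complex tasks:
sdk.setModelStrategy("balanced");
sdk.getModelStrategy(); // -> "balanced"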
@@ -10648,9 +11650,15 @@ var SuperatomSDK = class {
 UIBlock,
 UILogCollector,
 UserManager,
+anthropicLLM,
+geminiLLM,
+groqLLM,
 hybridRerank,
+llmUsageLogger,
 logger,
+openaiLLM,
 rerankChromaResults,
-rerankConversationResults
+rerankConversationResults,
+userPromptErrorLogger
 });
 //# sourceMappingURL=index.js.map