@iqai/adk 0.0.5 → 0.0.6
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.mts +44 -141
- package/dist/index.d.ts +44 -141
- package/dist/index.js +510 -432
- package/dist/index.mjs +415 -337
- package/package.json +2 -1
package/dist/index.mjs
CHANGED
@@ -19,10 +19,27 @@ var __copyProps = (to, from, except, desc) => {
 };
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

+// src/helpers/debug.ts
+var isDebugEnabled, debugLog;
+var init_debug = __esm({
+  "src/helpers/debug.ts"() {
+    isDebugEnabled = () => {
+      return process.env.NODE_ENV === "development" || process.env.DEBUG === "true";
+    };
+    debugLog = (message, ...args) => {
+      const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
+      if (isDebugEnabled()) {
+        console.log(`[DEBUG] ${time}: ${message}`, ...args);
+      }
+    };
+  }
+});
+
 // src/tools/base/base-tool.ts
 var BaseTool;
 var init_base_tool = __esm({
   "src/tools/base/base-tool.ts"() {
+    init_debug();
     BaseTool = class {
       /**
        * Name of the tool
@@ -112,11 +129,9 @@ var init_base_tool = __esm({
       while (attempts <= (this.shouldRetryOnFailure ? this.maxRetryAttempts : 0)) {
         try {
           if (attempts > 0) {
-
-
-
-              );
-            }
+            debugLog(
+              `[BaseTool] Retrying tool ${this.name} (attempt ${attempts} of ${this.maxRetryAttempts})...`
+            );
             const delay = Math.min(
               this.baseRetryDelay * 2 ** (attempts - 1) + Math.random() * 1e3,
               this.maxRetryDelay
@@ -459,7 +474,11 @@ var BaseAgent = class {
   }
 };

+// src/agents/llm-agent.ts
+init_debug();
+
 // src/models/llm-registry.ts
+init_debug();
 var LLMRegistry = class _LLMRegistry {
   /**
    * Map of model name regex to LLM class
@@ -516,12 +535,10 @@ var LLMRegistry = class _LLMRegistry {
    * Logs all registered models for debugging
    */
   static logRegisteredModels() {
-
-
-
-
-    }
-  }
+    debugLog(
+      "Registered LLM models:",
+      [..._LLMRegistry.llmRegistry.entries()].map(([regex]) => regex.toString())
+    );
   }
 };

@@ -919,9 +936,7 @@ var Agent = class extends BaseAgent {
    */
   async executeTool(toolCall, context) {
     const { name, arguments: argsString } = toolCall.function;
-
-      console.log(`Executing tool: ${name}`);
-    }
+    debugLog(`Executing tool: ${name}`);
     const tool = this.findTool(name);
     if (!tool) {
       console.warn(`Tool '${name}' not found`);
@@ -939,9 +954,7 @@ var Agent = class extends BaseAgent {
     toolContext.toolName = name;
     toolContext.toolId = toolCall.id;
     const result = await tool.runAsync(args, toolContext);
-
-      console.log(`Tool ${name} execution complete`);
-    }
+    debugLog(`Tool ${name} execution complete`);
     return {
       name,
       result: typeof result === "string" ? result : JSON.stringify(result)
@@ -1110,10 +1123,7 @@ ${relevantInfo.join("\n\n")}`
     let stepCount = 0;
     while (stepCount < this.maxToolExecutionSteps) {
       stepCount++;
-
-        console.log(`
-[Agent] Step ${stepCount}: Thinking...`);
-      }
+      debugLog(`Step ${stepCount}: Thinking...`);
       const llmRequest = new LLMRequest({
         messages: context.messages,
         config: {
@@ -1130,9 +1140,7 @@ ${relevantInfo.join("\n\n")}`
         throw new Error("No response from LLM");
       }
       if (currentResponse.tool_calls && currentResponse.tool_calls.length > 0) {
-
-          console.log("[Agent] Executing tools...");
-        }
+        debugLog(`Tool calls: ${JSON.stringify(currentResponse.tool_calls)}`);
         context.addMessage({
           role: "assistant",
           content: currentResponse.content || "",
@@ -1151,9 +1159,7 @@ ${relevantInfo.join("\n\n")}`
           });
         }
       } else {
-
-          console.log("[Agent] No tool calls, finishing...");
-        }
+        debugLog("[Agent] No tool calls, finishing...");
         context.addMessage({
           role: "assistant",
           content: currentResponse.content || ""
@@ -1191,10 +1197,7 @@ ${relevantInfo.join("\n\n")}`
     let stepCount = 0;
     let hadToolCalls = false;
     while (stepCount < this.maxToolExecutionSteps) {
-
-        console.log(`
-[Agent] Step ${stepCount + 1}: Thinking...`);
-      }
+      debugLog(`[Agent] Step ${stepCount}: Thinking...`);
       const toolDeclarations = this.tools.map((tool) => tool.getDeclaration()).filter((declaration) => declaration !== null);
       const request = {
         messages: context.messages,
@@ -1223,14 +1226,10 @@ ${relevantInfo.join("\n\n")}`
         function_call: finalResponse.function_call
       });
       if (!hadToolCalls) {
-
-          console.log("[Agent] No tool calls, finishing...");
-        }
+        debugLog("[Agent] No tool calls, finishing...");
         break;
       }
-
-        console.log("[Agent] Executing tools...");
-      }
+      debugLog(`[Agent] Step ${stepCount + 1}: Executing tools...`);
       stepCount++;
       if (finalResponse.function_call) {
         const toolCall = {
@@ -1247,11 +1246,9 @@ ${relevantInfo.join("\n\n")}`
           content: JSON.stringify(result.result)
         });
       } else if (finalResponse.tool_calls && finalResponse.tool_calls.length > 0) {
-
-
-
-          );
-        }
+        debugLog(
+          `[Agent] Step ${stepCount + 1}: Executing ${finalResponse.tool_calls.length} tool(s)...`
+        );
         context.messages.pop();
         context.addMessage({
           role: "assistant",
@@ -1276,6 +1273,7 @@ ${relevantInfo.join("\n\n")}`
 };

 // src/agents/sequential-agent.ts
+init_debug();
 var SequentialAgent = class extends BaseAgent {
   /**
    * Constructor for SequentialAgent
@@ -1296,11 +1294,9 @@ var SequentialAgent = class extends BaseAgent {
    * Executes sub-agents sequentially, passing output from one to the next
    */
   async run(options) {
-
-
-
-      );
-    }
+    debugLog(
+      `[SequentialAgent] Running ${this.subAgents.length} sub-agents in sequence`
+    );
     if (this.subAgents.length === 0) {
       return {
         content: "No sub-agents defined for sequential execution.",
@@ -1316,11 +1312,9 @@ var SequentialAgent = class extends BaseAgent {
     let finalResponse = null;
     for (let i = 0; i < this.subAgents.length; i++) {
       const agent = this.subAgents[i];
-
-
-
-        );
-      }
+      debugLog(
+        `[SequentialAgent] Running sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
+      );
       try {
         const response = await agent.run({
           messages: currentMessages,
@@ -1376,11 +1370,9 @@ var SequentialAgent = class extends BaseAgent {
    * Streams responses from each sub-agent in sequence
    */
   async *runStreaming(options) {
-
-
-
-      );
-    }
+    debugLog(
+      `[SequentialAgent] Streaming ${this.subAgents.length} sub-agents in sequence`
+    );
     if (this.subAgents.length === 0) {
       yield {
         content: "No sub-agents defined for sequential execution.",
@@ -1396,11 +1388,9 @@ var SequentialAgent = class extends BaseAgent {
     const currentMessages = [...options.messages];
     for (let i = 0; i < this.subAgents.length; i++) {
       const agent = this.subAgents[i];
-
-
-
-        );
-      }
+      debugLog(
+        `[SequentialAgent] Streaming sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
+      );
       try {
         const streamGenerator = agent.runStreaming({
           messages: currentMessages,
@@ -1453,6 +1443,7 @@ var SequentialAgent = class extends BaseAgent {
 };

 // src/agents/parallel-agent.ts
+init_debug();
 var ParallelAgent = class extends BaseAgent {
   /**
    * Constructor for ParallelAgent
@@ -1473,11 +1464,9 @@ var ParallelAgent = class extends BaseAgent {
    * Executes all sub-agents in parallel
    */
   async run(options) {
-
-
-
-      );
-    }
+    debugLog(
+      `[ParallelAgent] Running ${this.subAgents.length} sub-agents in parallel`
+    );
     if (this.subAgents.length === 0) {
       return {
         content: "No sub-agents defined for parallel execution.",
@@ -1520,11 +1509,9 @@ ${result.content || "No content"}
    * Collects streaming responses from all sub-agents
    */
   async *runStreaming(options) {
-
-
-
-      );
-    }
+    debugLog(
+      `[ParallelAgent] Streaming ${this.subAgents.length} sub-agents in parallel`
+    );
     if (this.subAgents.length === 0) {
       yield {
         content: "No sub-agents defined for parallel execution.",
@@ -1590,6 +1577,7 @@ ${response.content || "No content"}
 };

 // src/agents/loop-agent.ts
+init_debug();
 var LoopAgent = class extends BaseAgent {
   /**
    * Maximum number of iterations to prevent infinite loops
@@ -1630,28 +1618,20 @@ var LoopAgent = class extends BaseAgent {
    */
   async shouldContinue(response, iterationCount, messages, config) {
     if (iterationCount >= this.maxIterations) {
-
-
-
-        );
-      }
+      debugLog(
+        `[LoopAgent] Maximum iterations (${this.maxIterations}) reached. Stopping loop.`
+      );
       return false;
     }
     if (this.conditionCheck) {
       const shouldContinue = await this.conditionCheck(response);
-
-        console.log(
-          `[LoopAgent] Custom condition check result: ${shouldContinue}`
-        );
-      }
+      debugLog(`[LoopAgent] Custom condition check result: ${shouldContinue}`);
       return shouldContinue;
     }
     if (this.conditionAgent) {
-
-
-
-        );
-      }
+      debugLog(
+        `[LoopAgent] Using condition agent ${this.conditionAgent.name} to check loop condition`
+      );
       const conditionMessages = [
         ...messages,
         {
@@ -1670,11 +1650,9 @@ var LoopAgent = class extends BaseAgent {
         });
         const content = conditionResponse.content?.toLowerCase() || "";
         const shouldContinue = content.includes("yes") && !content.includes("no");
-
-
-
-          );
-        }
+        debugLog(
+          `[LoopAgent] Condition agent result: ${shouldContinue ? "Continue loop" : "Stop loop"}`
+        );
         return shouldContinue;
       } catch (error) {
         console.error("[LoopAgent] Error in condition agent:", error);
@@ -1688,11 +1666,9 @@ var LoopAgent = class extends BaseAgent {
    * Executes the sub-agent in a loop until the condition is met
    */
   async run(options) {
-
-
-
-      );
-    }
+    debugLog(
+      `[LoopAgent] Starting loop with max ${this.maxIterations} iterations`
+    );
     if (this.subAgents.length === 0) {
       return {
         content: "No sub-agent defined for loop execution.",
@@ -1706,11 +1682,9 @@ var LoopAgent = class extends BaseAgent {
     let shouldContinueLoop = true;
     while (shouldContinueLoop && iterationCount < this.maxIterations) {
       iterationCount++;
-
-
-
-        );
-      }
+      debugLog(
+        `[LoopAgent] Running iteration ${iterationCount}/${this.maxIterations}`
+      );
       try {
         const response = await subAgent.run({
           messages: currentMessages,
@@ -1758,11 +1732,9 @@ ${lastResponse.content || ""}`,
    * Runs the agent with streaming support
    */
   async *runStreaming(options) {
-
-
-
-      );
-    }
+    debugLog(
+      `[LoopAgent] Starting loop with max ${this.maxIterations} iterations (streaming)`
+    );
     if (this.subAgents.length === 0) {
       yield {
         content: "No sub-agent defined for loop execution.",
@@ -1781,11 +1753,9 @@ ${lastResponse.content || ""}`,
     };
     while (shouldContinueLoop && iterationCount < this.maxIterations) {
       iterationCount++;
-
-
-
-        );
-      }
+      debugLog(
+        `[LoopAgent] Running iteration ${iterationCount}/${this.maxIterations} (streaming)`
+      );
       yield {
         content: `Running iteration ${iterationCount}/${this.maxIterations}...`,
         role: "assistant",
@@ -1809,11 +1779,9 @@ ${lastResponse.content || ""}`,
         }
       }
       if (!lastChunk) {
-
-
-
-          );
-        }
+        debugLog(
+          `[LoopAgent] No complete chunk received from iteration ${iterationCount}`
+        );
         shouldContinueLoop = false;
         continue;
       }
@@ -1839,9 +1807,8 @@ ${lastResponse.content || ""}`,
           };
         }
       } catch (error) {
-
-          `[LoopAgent] Error in loop iteration ${iterationCount}
-          error
+        debugLog(
+          `[LoopAgent] Error in loop iteration ${iterationCount}: ${error instanceof Error ? error.message : String(error)}`
         );
         yield {
           content: `Error in loop iteration ${iterationCount}: ${error instanceof Error ? error.message : String(error)}`,
@@ -1858,6 +1825,7 @@ ${lastResponse.content || ""}`,
 };

 // src/agents/lang-graph-agent.ts
+init_debug();
 var LangGraphAgent = class extends BaseAgent {
   /**
    * Graph nodes (agents and their connections)
@@ -1955,11 +1923,9 @@ var LangGraphAgent = class extends BaseAgent {
       if (targetNode.condition) {
         const shouldExecute = await targetNode.condition(result, context);
         if (!shouldExecute) {
-
-
-
-            );
-          }
+          debugLog(
+            `[LangGraphAgent] Skipping node "${targetName}" due to condition`
+          );
           continue;
         }
       }
@@ -1984,11 +1950,9 @@ var LangGraphAgent = class extends BaseAgent {
     };
     const shouldExecute = await node.condition(mockResponse, mockContext);
     if (!shouldExecute) {
-
-
-
-        );
-      }
+      debugLog(
+        `[LangGraphAgent] Skipping node "${targetName}" due to condition`
+      );
     }
     return { shouldExecute };
   }
@@ -2001,11 +1965,9 @@ var LangGraphAgent = class extends BaseAgent {
       messages: options.messages,
       config: options.config
     });
-
-
-
-      );
-    }
+    debugLog(
+      `[LangGraphAgent] Starting graph execution from root node "${this.rootNode}"`
+    );
     if (this.nodes.size === 0) {
       return {
         content: "No nodes defined in the graph.",
@@ -2025,11 +1987,9 @@ var LangGraphAgent = class extends BaseAgent {
     while (nodesToExecute.length > 0 && stepCount < this.maxSteps) {
       stepCount++;
       const { node, messages } = nodesToExecute.shift();
-
-
-
-        );
-      }
+      debugLog(
+        `[LangGraphAgent] Step ${stepCount}: Executing node "${node.name}"`
+      );
       executedNodes.push(node.name);
       try {
         const result = await node.agent.run({
@@ -2101,11 +2061,9 @@ var LangGraphAgent = class extends BaseAgent {
       messages: options.messages,
       config: options.config
     });
-
-
-
-      );
-    }
+    debugLog(
+      `[LangGraphAgent] Starting graph execution from root node "${this.rootNode}" (streaming)`
+    );
     if (this.nodes.size === 0) {
       yield {
         content: "No nodes defined in the graph.",
@@ -2132,11 +2090,9 @@ var LangGraphAgent = class extends BaseAgent {
     while (nodesToExecute.length > 0 && stepCount < this.maxSteps) {
       stepCount++;
       const { node, messages } = nodesToExecute.shift();
-
-
-
-        );
-      }
+      debugLog(
+        `[LangGraphAgent] Step ${stepCount}: Executing node "${node.name}" (streaming)`
+      );
       executedNodes.push(node.name);
       try {
         const result = await node.agent.run({
@@ -2250,6 +2206,7 @@ function createFunctionTool(func, options) {
 init_function_utils();

 // src/tools/common/google-search.ts
+init_debug();
 init_base_tool();
 var GoogleSearch = class extends BaseTool {
   /**
@@ -2290,9 +2247,7 @@ var GoogleSearch = class extends BaseTool {
    * This is a simplified implementation that doesn't actually search, just returns mock results
    */
   async runAsync(args, _context) {
-
-      console.log(`Executing Google search for: ${args.query}`);
-    }
+    debugLog(`[GoogleSearch] Executing Google search for: ${args.query}`);
     return {
       results: [
         {
@@ -2755,6 +2710,7 @@ var UserInteractionTool = class extends BaseTool {
 };

 // src/tools/common/exit-loop-tool.ts
+init_debug();
 init_base_tool();
 var ExitLoopTool = class extends BaseTool {
   /**
@@ -2784,9 +2740,7 @@ var ExitLoopTool = class extends BaseTool {
    * Execute the exit loop action
    */
   async runAsync(_args, context) {
-
-      console.log("Executing exit loop tool");
-    }
+    debugLog("[ExitLoopTool] Executing exit loop tool");
     if (context.actions) {
       context.actions.escalate = true;
     } else {
@@ -2801,6 +2755,7 @@ var ExitLoopTool = class extends BaseTool {
 };

 // src/tools/common/get-user-choice-tool.ts
+init_debug();
 init_base_tool();
 var GetUserChoiceTool = class extends BaseTool {
   /**
@@ -2845,13 +2800,13 @@ var GetUserChoiceTool = class extends BaseTool {
    * and the actual choice will be provided asynchronously
    */
   async runAsync(args, context) {
-
-
-
-      )
-
-
-    }
+    debugLog(
+      `[GetUserChoiceTool] Executing get_user_choice with options: ${args.options.join(
+        ", "
+      )}`
+    );
+    if (args.question) {
+      debugLog(`[GetUserChoiceTool] Question: ${args.question}`);
     }
     if (context.actions) {
       context.actions.skip_summarization = true;
@@ -2865,6 +2820,7 @@ var GetUserChoiceTool = class extends BaseTool {
 };

 // src/tools/common/transfer-to-agent-tool.ts
+init_debug();
 init_base_tool();
 var TransferToAgentTool = class extends BaseTool {
   /**
@@ -2899,9 +2855,9 @@ var TransferToAgentTool = class extends BaseTool {
    * Execute the transfer to agent action
    */
   async runAsync(args, context) {
-
-
-
+    debugLog(
+      `[TransferToAgentTool] Executing transfer to agent: ${args.agent_name}`
+    );
     if (context.actions) {
       context.actions.transfer_to_agent = args.agent_name;
     } else {
@@ -2916,6 +2872,7 @@ var TransferToAgentTool = class extends BaseTool {
 };

 // src/tools/common/load-memory-tool.ts
+init_debug();
 init_base_tool();
 var LoadMemoryTool = class extends BaseTool {
   /**
@@ -2950,9 +2907,9 @@ var LoadMemoryTool = class extends BaseTool {
    * Execute the memory loading action
    */
   async runAsync(args, context) {
-
-
-
+    debugLog(
+      `[LoadMemoryTool] Executing load_memory with query: ${args.query}`
+    );
     if (!context.memoryService) {
       return {
         error: "Memory service is not available",
@@ -3238,6 +3195,7 @@ var McpClientService = class {
 };

 // src/tools/mcp/create-tool.ts
+init_debug();
 init_base_tool();

 // src/tools/mcp/schema-conversion.ts
@@ -3472,9 +3430,10 @@ var McpToolAdapter = class extends BaseTool {
     }
   }
   async runAsync(args, _context) {
-
-
-
+    debugLog(
+      `[McpToolAdapter] Executing MCP tool ${this.name} with args:`,
+      args
+    );
     try {
       if (typeof this.mcpTool.execute === "function") {
         return await this.mcpTool.execute(args);
@@ -3794,9 +3753,11 @@ var BaseLLMConnection = class {
 };

 // src/models/anthropic-llm.ts
+init_debug();
 import axios from "axios";

 // src/models/anthropic-llm-connection.ts
+init_debug();
 var AnthropicLLMConnection = class extends BaseLLMConnection {
   /**
    * Axios instance for API calls
@@ -3926,19 +3887,14 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
     if (!content?.length) return [];
     const toolUses = [];
     for (const block of content) {
-
-
-
-          block.type
-        );
-      }
+      debugLog(
+        `[AnthropicLLMConnection] Processing content block of type: ${block.type}`
+      );
       if (block.type === "tool_use") {
-
-
-
-
-          );
-        }
+        debugLog(
+          "[AnthropicLLMConnection] Found tool_use block:",
+          JSON.stringify(block, null, 2)
+        );
         toolUses.push({
           id: block.id || "unknown-id",
           name: block.name || "unknown-name",
@@ -3946,14 +3902,14 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
         });
       }
     }
-
-
-
-
-
-
-      )
-
+    debugLog(
+      `[AnthropicLLMConnection] Found ${toolUses.length} tool uses in content`
+    );
+    if (toolUses.length > 0) {
+      debugLog(
+        "[AnthropicLLMConnection] Extracted tool uses:",
+        JSON.stringify(toolUses, null, 2)
+      );
     }
     return toolUses;
   }
@@ -4047,43 +4003,30 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
       }
       const toolUses = this.extractToolUses(apiResponse.content);
       const toolCalls = this.convertToolCalls(toolUses);
-
-
-
-        "Connection - Extracted Tool Uses:",
-        JSON.stringify(toolUses, null, 2)
-      );
-      console.log(
-        "Connection - Converted Tool Calls:",
-        JSON.stringify(toolCalls, null, 2)
-      );
-      }
-      }
+      debugLog(
+        `[AnthropicLLMConnection] - Extracted ${toolUses.length} tool uses in content and converted ${toolCalls?.length || 0} tool calls`
+      );
       const llmResponse = new LLMResponse({
         role: "assistant",
         content,
         tool_calls: toolCalls?.length ? toolCalls : void 0,
        raw_response: apiResponse
       });
-
-
-
-
-
-
-
-
-
-        null,
-        2
-      )
-      );
-      }
+      const logObject = {
+        role: llmResponse.role,
+        content: llmResponse.content?.substring(0, 50) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
+        tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
+      };
+      debugLog(
+        "[AnthropicLLMConnection] Final LLMResponse object:",
+        JSON.stringify(logObject, null, 2)
+      );
       return llmResponse;
     } catch (error) {
-
-
-
+      debugLog(
+        "[AnthropicLLMConnection] Error sending message to Anthropic:",
+        error
+      );
       throw error;
     }
   }
@@ -4235,13 +4178,13 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
     if (!content?.length) return [];
     const toolUses = [];
     for (const block of content) {
-
-
-
+      debugLog(
+        `[AnthropicLLM] Processing content block of type: ${block.type}`
+      );
       if (block.type === "tool_use") {
-
-
-
+        debugLog(
+          `[AnthropicLLM] Found tool_use block: ${JSON.stringify(block, null, 2)}`
+        );
         toolUses.push({
           id: block.id || "unknown-id",
           name: block.name || "unknown-name",
@@ -4249,12 +4192,10 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
         });
       }
     }
-
-
-
-
-      }
-    }
+    debugLog(
+      `[AnthropicLLM] Found ${toolUses.length} tool uses in content`,
+      toolUses
+    );
     return toolUses;
   }
   /**
@@ -4276,16 +4217,14 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
         },
         responseType: stream ? "stream" : "json"
       });
-
-
-
-
-
-
-
-
-      }
-      }
+      debugLog(
+        `[AnthropicLLM] API Response done with ${response.status}:`,
+        response.data
+      );
+      debugLog(
+        "[AnthropicLLM] API Response content:",
+        response.data.content.map((block) => ({ type: block.type }))
+      );
       return response.data;
     } catch (error) {
       console.error("Error calling Anthropic API:", error);
@@ -4311,24 +4250,17 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
       top_p: llmRequest.config.top_p ?? this.defaultParams.top_p,
       tools: tools?.length ? tools : void 0
     };
-
-
-
-
-
-
-      });
-    }
+    debugLog("[AnthropicLLM] API Request:", {
+      model: params.model,
+      messageCount: params.messages.length,
+      systemMessage: params.system ? "present" : "none",
+      tools: params.tools ? params.tools.map((t) => t.name) : "none"
+    });
     if (stream) {
       throw new Error("Streaming is not supported in this implementation");
     }
     const response = await this.callAnthropicAPI(params);
-
-      console.log(
-        "Full Response Content:",
-        JSON.stringify(response.content, null, 2)
-      );
-    }
+    debugLog("[AnthropicLLM] Full Response Content:", response.content);
     let content = "";
     for (const block of response.content) {
       if (block.type === "text") {
@@ -4337,43 +4269,26 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
       }
       const toolUses = this.extractToolUses(response.content);
       const toolCalls = this.convertToolUses(toolUses);
-
-
-        console.log(
-          "Extracted Tool Uses:",
-          JSON.stringify(toolUses, null, 2)
-        );
-        console.log(
-          "Converted Tool Calls:",
-          JSON.stringify(toolCalls, null, 2)
-        );
-      }
-      }
+      debugLog("[AnthropicLLM] Extracted Tool Uses:", toolUses);
+      debugLog("[AnthropicLLM] Converted Tool Calls:", toolCalls);
       const llmResponse = new LLMResponse({
         role: "assistant",
         content,
         tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
         raw_response: response
       });
-
-
-
-
-
-
-
-
-
-        null,
-        2
-      )
-      );
-      }
+      const logObject = {
+        role: llmResponse.role,
+        content: llmResponse.content?.substring(0, 50) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
+        tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
+      };
+      debugLog(
+        "[AnthropicLLM] Final LLMResponse object:",
+        JSON.stringify(logObject, null, 2)
+      );
       yield llmResponse;
     } catch (error) {
-
-      console.error("Error calling Anthropic:", error);
-    }
+      debugLog("[AnthropicLLM] Error:", error);
       throw error;
     }
   }
@@ -4768,6 +4683,7 @@ var GoogleLLM = class extends BaseLLM {
 };

 // src/models/openai-llm.ts
+init_debug();
 import OpenAI from "openai";

 // src/models/openai-llm-connection.ts
@@ -5176,11 +5092,9 @@ var OpenAILLM = class extends BaseLLM {
    * Convert OpenAI streaming chunk to LLMResponse
    */
   convertChunk(chunk) {
-
-
-
-      );
-    }
+    debugLog(
+      `[OpenAILLM]: Converting chunk - delta: ${JSON.stringify(chunk.delta || {})}`
+    );
     const content = chunk.delta?.content;
     const result = new LLMResponse({
       content: content !== void 0 ? content : null,
@@ -5221,32 +5135,24 @@ var OpenAILLM = class extends BaseLLM {
       presence_penalty: llmRequest.config.presence_penalty ?? this.defaultParams.presence_penalty,
       stream: shouldStream
     };
-
-
-
-      );
-    }
+    debugLog(
+      `[OpenAILLM] Request parameters - model: ${params.model}, messages: ${params.messages.length}, functions: ${params.tools ? params.tools.length : 0}, streaming: ${shouldStream}`
+    );
     if (tools && tools.length > 0) {
       params.tools = tools;
     }
     try {
       if (shouldStream) {
-
-          console.log("OpenAI: Starting streaming request");
-        }
+        debugLog("[OpenAILLM] Starting streaming request");
         const streamResponse = await this.client.chat.completions.create(params);
         let partialFunctionCall;
         const partialToolCalls = /* @__PURE__ */ new Map();
         let accumulatedContent = "";
         const asyncIterable = streamResponse;
-
-          console.log("OpenAI: Stream response received, processing chunks");
-        }
+        debugLog("[OpenAILLM] Stream response received, processing chunks");
         for await (const chunk of asyncIterable) {
           if (!chunk.choices || chunk.choices.length === 0) {
-
-            console.log("OpenAI: Empty chunk received, skipping");
-          }
+            debugLog("[OpenAILLM] Empty chunk received, skipping");
             continue;
           }
           const choice = chunk.choices[0];
@@ -5254,14 +5160,12 @@ var OpenAILLM = class extends BaseLLM {
           if (responseChunk.content !== null) {
             accumulatedContent += responseChunk.content;
           }
-
-
-
-
-
-
-            );
-          }
+          debugLog(
+            `[OpenAILLM] Chunk received - delta: "${choice.delta?.content || ""}"`,
+            `responseChunk content: "${responseChunk.content || ""}"`,
+            `is_partial: ${responseChunk.is_partial}`,
+            `accumulated: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
+          );
           if (responseChunk.function_call) {
             if (!partialFunctionCall) {
               partialFunctionCall = {
@@ -5286,37 +5190,27 @@ var OpenAILLM = class extends BaseLLM {
             }
             responseChunk.tool_calls = Array.from(partialToolCalls.values());
           }
-
-            console.log("OpenAI: Yielding chunk to caller");
-          }
+          debugLog("[OpenAILLM] Yielding chunk to caller");
           yield responseChunk;
         }
         if (accumulatedContent.length > 0) {
-
-
-
-            );
-          }
+          debugLog(
+            `[OpenAILLM] Yielding final accumulated content: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
+          );
           yield new LLMResponse({
             content: accumulatedContent,
             role: "assistant",
            is_partial: false
          });
        }
-
-        console.log("OpenAI: Finished processing all stream chunks");
-        }
+        debugLog("[OpenAILLM] Finished processing all stream chunks");
      } else {
-
-        console.log("OpenAI: Making non-streaming request");
-        }
+        debugLog("[OpenAILLM] Making non-streaming request");
         const response = await this.client.chat.completions.create(params);
         if (!response.choices || response.choices.length === 0) {
           throw new Error("No response from OpenAI");
         }
-
-        console.log("OpenAI: Non-streaming response received");
-        }
+        debugLog("[OpenAILLM] Non-streaming response received");
         yield this.convertResponse(response.choices[0]);
       }
     } catch (error) {
@@ -5903,6 +5797,7 @@ var InMemoryMemoryService = class {
 };

 // src/memory/persistent-memory-service.ts
+init_debug();
 import fs2 from "fs";
 import path2 from "path";
 var PersistentMemoryService = class {
@@ -6006,11 +5901,9 @@ var PersistentMemoryService = class {
         }
       }
     }
-
-
-
-      );
-    }
+    debugLog(
+      `Loaded ${this.inMemoryService.getAllSessions().length} sessions from persistent storage`
+    );
   } catch (error) {
     console.error("Error loading memory files:", error);
   }
@@ -6074,9 +5967,9 @@ __export(sessions_exports, {
   PgLiteSessionService: () => PgLiteSessionService,
   PostgresSessionService: () => PostgresSessionService,
   SessionState: () => SessionState,
+  SqliteSessionService: () => SqliteSessionService,
   cloneSession: () => cloneSession,
   generateSessionId: () => generateSessionId,
-  sessionsSchema: () => sessionsSchema2,
   validateSession: () => validateSession
 });

@@ -6519,6 +6412,191 @@ var PgLiteSessionService = class {
   }
 };

+// src/sessions/sqlite-session-service.ts
+import * as fs3 from "fs";
+import * as path3 from "path";
+import { eq as eq3 } from "drizzle-orm";
+import {
+  drizzle as drizzle2
+} from "drizzle-orm/better-sqlite3";
+import { integer, text } from "drizzle-orm/sqlite-core";
+import { sqliteTable } from "drizzle-orm/sqlite-core";
+var sessionsSchema3 = sqliteTable("sessions", {
+  id: text("id").primaryKey(),
+  userId: text("user_id").notNull(),
+  messages: text("messages", { mode: "json" }).default("[]").$type(),
+  metadata: text("metadata", { mode: "json" }).default("{}").$type(),
+  createdAt: integer("created_at", { mode: "timestamp" }).notNull(),
+  updatedAt: integer("updated_at", { mode: "timestamp" }).notNull(),
+  state: text("state", { mode: "json" }).default("{}").$type()
+});
+var SqliteSessionService = class {
+  db;
+  sessionsTable;
+  initialized = false;
+  sqliteInstance;
+  constructor(config) {
+    this.sqliteInstance = config.sqlite;
+    const dbPath = this.sqliteInstance.name;
+    if (dbPath && dbPath !== ":memory:") {
+      const dbDir = path3.dirname(dbPath);
+      if (!fs3.existsSync(dbDir)) {
+        fs3.mkdirSync(dbDir, { recursive: true });
+      }
+    }
+    this.db = drizzle2(config.sqlite, {
+      schema: { sessions: sessionsSchema3 }
+    });
+    this.sessionsTable = sessionsSchema3;
+    if (!config.skipTableCreation) {
+      this.initializeDatabase().catch((error) => {
+        console.error("Failed to initialize SQLite database:", error);
+      });
+    }
+  }
+  /**
+   * Initialize the database by creating required tables if they don't exist
+   */
+  async initializeDatabase() {
+    if (this.initialized) {
+      return;
+    }
+    try {
+      this.sqliteInstance.pragma("journal_mode = WAL");
+      this.sqliteInstance.exec(`
+        CREATE TABLE IF NOT EXISTS sessions (
+          id TEXT PRIMARY KEY,
+          user_id TEXT NOT NULL,
+          messages TEXT DEFAULT '[]',
+          metadata TEXT DEFAULT '{}',
+          created_at INTEGER NOT NULL,
+          updated_at INTEGER NOT NULL,
+          state TEXT DEFAULT '{}'
+        );
+      `);
+      this.sqliteInstance.exec(`
+        CREATE INDEX IF NOT EXISTS idx_sessions_user_id ON sessions(user_id);
+      `);
+      this.initialized = true;
+    } catch (error) {
+      console.error("Error initializing SQLite database:", error);
+      throw error;
+    }
+  }
+  /**
+   * Ensure database is initialized before any operation
+   */
+  async ensureInitialized() {
+    if (!this.initialized) {
+      await this.initializeDatabase();
+    }
+  }
+  generateSessionId() {
+    return `session-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;
+  }
+  async createSession(userId, metadata = {}) {
+    await this.ensureInitialized();
+    const sessionId = this.generateSessionId();
+    const now = /* @__PURE__ */ new Date();
+    const sessionState = new SessionState();
+    const newSessionData = {
+      id: sessionId,
+      userId,
+      messages: [],
+      metadata,
+      createdAt: now,
+      updatedAt: now,
+      state: sessionState.toObject()
+    };
+    const results = await this.db.insert(this.sessionsTable).values(newSessionData).returning();
+    const result = results[0];
+    if (!result) {
+      throw new Error(
+        "Failed to create session, no data returned from insert."
+      );
+    }
+    return {
+      id: result.id,
+      userId: result.userId,
+      messages: Array.isArray(result.messages) ? result.messages : [],
+      metadata: result.metadata || {},
+      state: SessionState.fromObject(result.state || {}),
+      createdAt: result.createdAt,
+      updatedAt: result.updatedAt
+    };
+  }
+  async getSession(sessionId) {
+    await this.ensureInitialized();
+    const results = await this.db.select().from(this.sessionsTable).where(eq3(this.sessionsTable.id, sessionId)).limit(1);
+    const sessionData = results[0];
+    if (!sessionData) {
+      return void 0;
+    }
+    return {
+      id: sessionData.id,
+      userId: sessionData.userId,
+      messages: Array.isArray(sessionData.messages) ? sessionData.messages : [],
+      metadata: sessionData.metadata || {},
+      state: SessionState.fromObject(sessionData.state || {}),
+      createdAt: sessionData.createdAt,
+      updatedAt: sessionData.updatedAt
+    };
+  }
+  async updateSession(session) {
+    await this.ensureInitialized();
+    const updateData = {
+      userId: session.userId,
+      messages: session.messages,
+      metadata: session.metadata,
+      updatedAt: /* @__PURE__ */ new Date(),
+      state: session.state.toObject()
+    };
+    await this.db.update(this.sessionsTable).set(updateData).where(eq3(this.sessionsTable.id, session.id));
+  }
+  async listSessions(userId, options) {
+    await this.ensureInitialized();
+    let query = this.db.select().from(this.sessionsTable).where(eq3(this.sessionsTable.userId, userId));
+    if (options?.limit !== void 0 && options.limit > 0) {
+      query = query.limit(options.limit);
+    }
+    const results = await query;
+    return results.map((sessionData) => ({
+      id: sessionData.id,
+      userId: sessionData.userId,
+      messages: Array.isArray(sessionData.messages) ? sessionData.messages : [],
+      metadata: sessionData.metadata || {},
+      state: SessionState.fromObject(sessionData.state || {}),
+      createdAt: sessionData.createdAt,
+      updatedAt: sessionData.updatedAt
+    }));
+  }
+  async deleteSession(sessionId) {
+    await this.ensureInitialized();
+    await this.db.delete(this.sessionsTable).where(eq3(this.sessionsTable.id, sessionId));
+  }
+  async appendEvent(session, event) {
+    await this.ensureInitialized();
+    if (event.is_partial) {
+      return event;
+    }
+    if (event.actions?.stateDelta) {
+      for (const [key, value] of Object.entries(event.actions.stateDelta)) {
+        if (key.startsWith("_temp_")) {
+          continue;
+        }
+        session.state?.set(key, value);
+      }
+    }
+    if (!session.events) {
+      session.events = [];
+    }
+    session.events.push(event);
+    session.updatedAt = /* @__PURE__ */ new Date();
+    await this.updateSession(session);
+    return event;
+  }
+};
+
 // src/sessions/session-util.ts
 function generateSessionId() {
   return `session-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;
@@ -6886,6 +6964,7 @@ export {
   SequentialAgent,
   SessionState,
   sessions_exports as Sessions,
+  SqliteSessionService,
   StreamingMode,
   ToolContext,
   tools_exports as Tools,
@@ -6902,6 +6981,5 @@ export {
   mcpSchemaToParameters,
   normalizeJsonSchema,
   registerProviders,
-  sessionsSchema2 as sessionsSchema,
   validateSession
 };