@iqai/adk 0.0.5 → 0.0.6

This diff compares two publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
1
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14;var __defProp = Object.defineProperty;
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15;var __defProp = Object.defineProperty;
2
2
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
3
3
  var __getOwnPropNames = Object.getOwnPropertyNames;
4
4
  var __hasOwnProp = Object.prototype.hasOwnProperty;
@@ -19,10 +19,27 @@ var __copyProps = (to, from, except, desc) => {
19
19
  };
20
20
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
21
21
 
22
+ // src/helpers/debug.ts
23
+ var isDebugEnabled, debugLog;
24
+ var init_debug = __esm({
25
+ "src/helpers/debug.ts"() {
26
+ isDebugEnabled = () => {
27
+ return process.env.NODE_ENV === "development" || process.env.DEBUG === "true";
28
+ };
29
+ debugLog = (message, ...args) => {
30
+ const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
31
+ if (isDebugEnabled()) {
32
+ console.log(`[DEBUG] ${time}: ${message}`, ...args);
33
+ }
34
+ };
35
+ }
36
+ });
37
+
22
38
  // src/tools/base/base-tool.ts
23
39
  var BaseTool;
24
40
  var init_base_tool = __esm({
25
41
  "src/tools/base/base-tool.ts"() {
42
+ init_debug();
26
43
  BaseTool = exports.BaseTool = (_class = class {
27
44
  /**
28
45
  * Name of the tool
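
The block added above wires a new shared logging helper into the bundle; nearly every hunk below swaps an inline `if (process.env.DEBUG === "true") { console.log(...) }` guard for a call to it. Reconstructed from the transpiled output (the TypeScript source itself is not part of this diff, so treat this as a sketch), `src/helpers/debug.ts` in 0.0.6 looks roughly like:

// Sketch of src/helpers/debug.ts, reconstructed from the bundled output above.
// Logging is enabled when NODE_ENV is "development" or DEBUG is "true".
export const isDebugEnabled = (): boolean =>
  process.env.NODE_ENV === "development" || process.env.DEBUG === "true";

export const debugLog = (message: string, ...args: unknown[]): void => {
  const time = new Date().toLocaleTimeString();
  if (isDebugEnabled()) {
    console.log(`[DEBUG] ${time}: ${message}`, ...args);
  }
};

Note that 0.0.6 also enables this logging when NODE_ENV is "development", whereas 0.0.5 only ever checked DEBUG === "true".
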
@@ -112,11 +129,9 @@ var init_base_tool = __esm({
112
129
  while (attempts <= (this.shouldRetryOnFailure ? this.maxRetryAttempts : 0)) {
113
130
  try {
114
131
  if (attempts > 0) {
115
- if (process.env.DEBUG === "true") {
116
- console.log(
117
- `Retrying tool ${this.name} (attempt ${attempts} of ${this.maxRetryAttempts})...`
118
- );
119
- }
132
+ debugLog(
133
+ `[BaseTool] Retrying tool ${this.name} (attempt ${attempts} of ${this.maxRetryAttempts})...`
134
+ );
120
135
  const delay = Math.min(
121
136
  this.baseRetryDelay * 2 ** (attempts - 1) + Math.random() * 1e3,
122
137
  this.maxRetryDelay
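
This hunk only changes how the retry is reported: the inline DEBUG guard becomes a `debugLog` call with a `[BaseTool]` prefix. The retry delay itself is untouched; as the surrounding context lines show, it is exponential backoff with up to one second of random jitter, capped at `maxRetryDelay`. A standalone restatement of that formula (illustration only, not code from the package):

// Illustration of the backoff visible in the context lines above:
// base * 2^(attempt - 1) + jitter, capped at maxRetryDelay.
function retryDelayMs(
  attempt: number, // 1-based retry attempt
  baseRetryDelay: number, // milliseconds
  maxRetryDelay: number, // milliseconds
): number {
  return Math.min(
    baseRetryDelay * 2 ** (attempt - 1) + Math.random() * 1000,
    maxRetryDelay,
  );
}

// With hypothetical values baseRetryDelay = 500 ms and maxRetryDelay = 5000 ms,
// attempt 1 waits ~500-1500 ms, attempt 2 ~1000-2000 ms, attempt 3 ~2000-3000 ms,
// and later attempts are clamped to 5000 ms.
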
@@ -459,7 +474,11 @@ var BaseAgent = class {
459
474
  }
460
475
  };
461
476
 
477
+ // src/agents/llm-agent.ts
478
+ init_debug();
479
+
462
480
  // src/models/llm-registry.ts
481
+ init_debug();
463
482
  var LLMRegistry = (_class3 = class _LLMRegistry {
464
483
  /**
465
484
  * Map of model name regex to LLM class
@@ -516,12 +535,10 @@ var LLMRegistry = (_class3 = class _LLMRegistry {
516
535
  * Logs all registered models for debugging
517
536
  */
518
537
  static logRegisteredModels() {
519
- if (process.env.DEBUG === "true") {
520
- console.log("Registered LLM models:");
521
- for (const [regex, llmClass] of _LLMRegistry.llmRegistry.entries()) {
522
- console.log(` - Pattern: ${regex.toString()}`);
523
- }
524
- }
538
+ debugLog(
539
+ "Registered LLM models:",
540
+ [..._LLMRegistry.llmRegistry.entries()].map(([regex]) => regex.toString())
541
+ );
525
542
  }
526
543
  }, _class3.__initStatic(), _class3);
527
544
 
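
Behavioral note on this hunk: `logRegisteredModels` used to emit one `console.log` line per registered pattern; it now makes a single `debugLog` call and passes the pattern list as the second argument. A source-level sketch of the new method (reconstructed from the bundle; the import path, class shape, and actual TypeScript may differ):

// Sketch only; inferred from the transpiled output above.
import { debugLog } from "../helpers/debug";

class LLMRegistrySketch {
  private static llmRegistry = new Map<RegExp, unknown>();

  static logRegisteredModels(): void {
    debugLog(
      "Registered LLM models:",
      [...LLMRegistrySketch.llmRegistry.entries()].map(([regex]) =>
        regex.toString(),
      ),
    );
  }
}
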
@@ -919,9 +936,7 @@ var Agent = class extends BaseAgent {
919
936
  */
920
937
  async executeTool(toolCall, context) {
921
938
  const { name, arguments: argsString } = toolCall.function;
922
- if (process.env.DEBUG === "true") {
923
- console.log(`Executing tool: ${name}`);
924
- }
939
+ debugLog(`Executing tool: ${name}`);
925
940
  const tool = this.findTool(name);
926
941
  if (!tool) {
927
942
  console.warn(`Tool '${name}' not found`);
@@ -939,9 +954,7 @@ var Agent = class extends BaseAgent {
939
954
  toolContext.toolName = name;
940
955
  toolContext.toolId = toolCall.id;
941
956
  const result = await tool.runAsync(args, toolContext);
942
- if (process.env.DEBUG === "true") {
943
- console.log(`Tool ${name} execution complete`);
944
- }
957
+ debugLog(`Tool ${name} execution complete`);
945
958
  return {
946
959
  name,
947
960
  result: typeof result === "string" ? result : JSON.stringify(result)
@@ -1110,10 +1123,7 @@ ${relevantInfo.join("\n\n")}`
1110
1123
  let stepCount = 0;
1111
1124
  while (stepCount < this.maxToolExecutionSteps) {
1112
1125
  stepCount++;
1113
- if (process.env.DEBUG === "true") {
1114
- console.log(`
1115
- [Agent] Step ${stepCount}: Thinking...`);
1116
- }
1126
+ debugLog(`Step ${stepCount}: Thinking...`);
1117
1127
  const llmRequest = new LLMRequest({
1118
1128
  messages: context.messages,
1119
1129
  config: {
@@ -1130,9 +1140,7 @@ ${relevantInfo.join("\n\n")}`
1130
1140
  throw new Error("No response from LLM");
1131
1141
  }
1132
1142
  if (currentResponse.tool_calls && currentResponse.tool_calls.length > 0) {
1133
- if (process.env.DEBUG === "true") {
1134
- console.log("[Agent] Executing tools...");
1135
- }
1143
+ debugLog(`Tool calls: ${JSON.stringify(currentResponse.tool_calls)}`);
1136
1144
  context.addMessage({
1137
1145
  role: "assistant",
1138
1146
  content: currentResponse.content || "",
@@ -1151,9 +1159,7 @@ ${relevantInfo.join("\n\n")}`
1151
1159
  });
1152
1160
  }
1153
1161
  } else {
1154
- if (process.env.DEBUG === "true") {
1155
- console.log("[Agent] No tool calls, finishing...");
1156
- }
1162
+ debugLog("[Agent] No tool calls, finishing...");
1157
1163
  context.addMessage({
1158
1164
  role: "assistant",
1159
1165
  content: currentResponse.content || ""
@@ -1191,10 +1197,7 @@ ${relevantInfo.join("\n\n")}`
1191
1197
  let stepCount = 0;
1192
1198
  let hadToolCalls = false;
1193
1199
  while (stepCount < this.maxToolExecutionSteps) {
1194
- if (process.env.DEBUG === "true") {
1195
- console.log(`
1196
- [Agent] Step ${stepCount + 1}: Thinking...`);
1197
- }
1200
+ debugLog(`[Agent] Step ${stepCount}: Thinking...`);
1198
1201
  const toolDeclarations = this.tools.map((tool) => tool.getDeclaration()).filter((declaration) => declaration !== null);
1199
1202
  const request = {
1200
1203
  messages: context.messages,
@@ -1223,14 +1226,10 @@ ${relevantInfo.join("\n\n")}`
1223
1226
  function_call: finalResponse.function_call
1224
1227
  });
1225
1228
  if (!hadToolCalls) {
1226
- if (process.env.DEBUG === "true") {
1227
- console.log("[Agent] No tool calls, finishing...");
1228
- }
1229
+ debugLog("[Agent] No tool calls, finishing...");
1229
1230
  break;
1230
1231
  }
1231
- if (process.env.DEBUG === "true") {
1232
- console.log("[Agent] Executing tools...");
1233
- }
1232
+ debugLog(`[Agent] Step ${stepCount + 1}: Executing tools...`);
1234
1233
  stepCount++;
1235
1234
  if (finalResponse.function_call) {
1236
1235
  const toolCall = {
@@ -1247,11 +1246,9 @@ ${relevantInfo.join("\n\n")}`
1247
1246
  content: JSON.stringify(result.result)
1248
1247
  });
1249
1248
  } else if (finalResponse.tool_calls && finalResponse.tool_calls.length > 0) {
1250
- if (process.env.DEBUG === "true") {
1251
- console.log(
1252
- `[Agent] Executing ${finalResponse.tool_calls.length} tool(s)...`
1253
- );
1254
- }
1249
+ debugLog(
1250
+ `[Agent] Step ${stepCount + 1}: Executing ${finalResponse.tool_calls.length} tool(s)...`
1251
+ );
1255
1252
  context.messages.pop();
1256
1253
  context.addMessage({
1257
1254
  role: "assistant",
@@ -1276,6 +1273,7 @@ ${relevantInfo.join("\n\n")}`
1276
1273
  };
1277
1274
 
1278
1275
  // src/agents/sequential-agent.ts
1276
+ init_debug();
1279
1277
  var SequentialAgent = class extends BaseAgent {
1280
1278
  /**
1281
1279
  * Constructor for SequentialAgent
@@ -1296,11 +1294,9 @@ var SequentialAgent = class extends BaseAgent {
1296
1294
  * Executes sub-agents sequentially, passing output from one to the next
1297
1295
  */
1298
1296
  async run(options) {
1299
- if (process.env.DEBUG === "true") {
1300
- console.log(
1301
- `[SequentialAgent] Running ${this.subAgents.length} sub-agents in sequence`
1302
- );
1303
- }
1297
+ debugLog(
1298
+ `[SequentialAgent] Running ${this.subAgents.length} sub-agents in sequence`
1299
+ );
1304
1300
  if (this.subAgents.length === 0) {
1305
1301
  return {
1306
1302
  content: "No sub-agents defined for sequential execution.",
@@ -1316,11 +1312,9 @@ var SequentialAgent = class extends BaseAgent {
1316
1312
  let finalResponse = null;
1317
1313
  for (let i = 0; i < this.subAgents.length; i++) {
1318
1314
  const agent = this.subAgents[i];
1319
- if (process.env.DEBUG === "true") {
1320
- console.log(
1321
- `[SequentialAgent] Running sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
1322
- );
1323
- }
1315
+ debugLog(
1316
+ `[SequentialAgent] Running sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
1317
+ );
1324
1318
  try {
1325
1319
  const response = await agent.run({
1326
1320
  messages: currentMessages,
@@ -1376,11 +1370,9 @@ var SequentialAgent = class extends BaseAgent {
1376
1370
  * Streams responses from each sub-agent in sequence
1377
1371
  */
1378
1372
  async *runStreaming(options) {
1379
- if (process.env.DEBUG === "true") {
1380
- console.log(
1381
- `[SequentialAgent] Streaming ${this.subAgents.length} sub-agents in sequence`
1382
- );
1383
- }
1373
+ debugLog(
1374
+ `[SequentialAgent] Streaming ${this.subAgents.length} sub-agents in sequence`
1375
+ );
1384
1376
  if (this.subAgents.length === 0) {
1385
1377
  yield {
1386
1378
  content: "No sub-agents defined for sequential execution.",
@@ -1396,11 +1388,9 @@ var SequentialAgent = class extends BaseAgent {
1396
1388
  const currentMessages = [...options.messages];
1397
1389
  for (let i = 0; i < this.subAgents.length; i++) {
1398
1390
  const agent = this.subAgents[i];
1399
- if (process.env.DEBUG === "true") {
1400
- console.log(
1401
- `[SequentialAgent] Streaming sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
1402
- );
1403
- }
1391
+ debugLog(
1392
+ `[SequentialAgent] Streaming sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
1393
+ );
1404
1394
  try {
1405
1395
  const streamGenerator = agent.runStreaming({
1406
1396
  messages: currentMessages,
@@ -1453,6 +1443,7 @@ var SequentialAgent = class extends BaseAgent {
1453
1443
  };
1454
1444
 
1455
1445
  // src/agents/parallel-agent.ts
1446
+ init_debug();
1456
1447
  var ParallelAgent = class extends BaseAgent {
1457
1448
  /**
1458
1449
  * Constructor for ParallelAgent
@@ -1473,11 +1464,9 @@ var ParallelAgent = class extends BaseAgent {
1473
1464
  * Executes all sub-agents in parallel
1474
1465
  */
1475
1466
  async run(options) {
1476
- if (process.env.DEBUG === "true") {
1477
- console.log(
1478
- `[ParallelAgent] Running ${this.subAgents.length} sub-agents in parallel`
1479
- );
1480
- }
1467
+ debugLog(
1468
+ `[ParallelAgent] Running ${this.subAgents.length} sub-agents in parallel`
1469
+ );
1481
1470
  if (this.subAgents.length === 0) {
1482
1471
  return {
1483
1472
  content: "No sub-agents defined for parallel execution.",
@@ -1520,11 +1509,9 @@ ${result.content || "No content"}
1520
1509
  * Collects streaming responses from all sub-agents
1521
1510
  */
1522
1511
  async *runStreaming(options) {
1523
- if (process.env.DEBUG === "true") {
1524
- console.log(
1525
- `[ParallelAgent] Streaming ${this.subAgents.length} sub-agents in parallel`
1526
- );
1527
- }
1512
+ debugLog(
1513
+ `[ParallelAgent] Streaming ${this.subAgents.length} sub-agents in parallel`
1514
+ );
1528
1515
  if (this.subAgents.length === 0) {
1529
1516
  yield {
1530
1517
  content: "No sub-agents defined for parallel execution.",
@@ -1590,6 +1577,7 @@ ${response.content || "No content"}
1590
1577
  };
1591
1578
 
1592
1579
  // src/agents/loop-agent.ts
1580
+ init_debug();
1593
1581
  var LoopAgent = class extends BaseAgent {
1594
1582
  /**
1595
1583
  * Maximum number of iterations to prevent infinite loops
@@ -1630,28 +1618,20 @@ var LoopAgent = class extends BaseAgent {
1630
1618
  */
1631
1619
  async shouldContinue(response, iterationCount, messages, config) {
1632
1620
  if (iterationCount >= this.maxIterations) {
1633
- if (process.env.DEBUG === "true") {
1634
- console.log(
1635
- `[LoopAgent] Maximum iterations (${this.maxIterations}) reached. Stopping loop.`
1636
- );
1637
- }
1621
+ debugLog(
1622
+ `[LoopAgent] Maximum iterations (${this.maxIterations}) reached. Stopping loop.`
1623
+ );
1638
1624
  return false;
1639
1625
  }
1640
1626
  if (this.conditionCheck) {
1641
1627
  const shouldContinue = await this.conditionCheck(response);
1642
- if (process.env.DEBUG === "true") {
1643
- console.log(
1644
- `[LoopAgent] Custom condition check result: ${shouldContinue}`
1645
- );
1646
- }
1628
+ debugLog(`[LoopAgent] Custom condition check result: ${shouldContinue}`);
1647
1629
  return shouldContinue;
1648
1630
  }
1649
1631
  if (this.conditionAgent) {
1650
- if (process.env.DEBUG === "true") {
1651
- console.log(
1652
- `[LoopAgent] Using condition agent ${this.conditionAgent.name} to check loop condition`
1653
- );
1654
- }
1632
+ debugLog(
1633
+ `[LoopAgent] Using condition agent ${this.conditionAgent.name} to check loop condition`
1634
+ );
1655
1635
  const conditionMessages = [
1656
1636
  ...messages,
1657
1637
  {
@@ -1670,11 +1650,9 @@ var LoopAgent = class extends BaseAgent {
1670
1650
  });
1671
1651
  const content = _optionalChain([conditionResponse, 'access', _21 => _21.content, 'optionalAccess', _22 => _22.toLowerCase, 'call', _23 => _23()]) || "";
1672
1652
  const shouldContinue = content.includes("yes") && !content.includes("no");
1673
- if (process.env.DEBUG === "true") {
1674
- console.log(
1675
- `[LoopAgent] Condition agent result: ${shouldContinue ? "Continue loop" : "Stop loop"}`
1676
- );
1677
- }
1653
+ debugLog(
1654
+ `[LoopAgent] Condition agent result: ${shouldContinue ? "Continue loop" : "Stop loop"}`
1655
+ );
1678
1656
  return shouldContinue;
1679
1657
  } catch (error) {
1680
1658
  console.error("[LoopAgent] Error in condition agent:", error);
@@ -1688,11 +1666,9 @@ var LoopAgent = class extends BaseAgent {
1688
1666
  * Executes the sub-agent in a loop until the condition is met
1689
1667
  */
1690
1668
  async run(options) {
1691
- if (process.env.DEBUG === "true") {
1692
- console.log(
1693
- `[LoopAgent] Starting loop with max ${this.maxIterations} iterations`
1694
- );
1695
- }
1669
+ debugLog(
1670
+ `[LoopAgent] Starting loop with max ${this.maxIterations} iterations`
1671
+ );
1696
1672
  if (this.subAgents.length === 0) {
1697
1673
  return {
1698
1674
  content: "No sub-agent defined for loop execution.",
@@ -1706,11 +1682,9 @@ var LoopAgent = class extends BaseAgent {
1706
1682
  let shouldContinueLoop = true;
1707
1683
  while (shouldContinueLoop && iterationCount < this.maxIterations) {
1708
1684
  iterationCount++;
1709
- if (process.env.DEBUG === "true") {
1710
- console.log(
1711
- `[LoopAgent] Running iteration ${iterationCount}/${this.maxIterations}`
1712
- );
1713
- }
1685
+ debugLog(
1686
+ `[LoopAgent] Running iteration ${iterationCount}/${this.maxIterations}`
1687
+ );
1714
1688
  try {
1715
1689
  const response = await subAgent.run({
1716
1690
  messages: currentMessages,
@@ -1758,11 +1732,9 @@ ${lastResponse.content || ""}`,
1758
1732
  * Runs the agent with streaming support
1759
1733
  */
1760
1734
  async *runStreaming(options) {
1761
- if (process.env.DEBUG === "true") {
1762
- console.log(
1763
- `[LoopAgent] Starting loop with max ${this.maxIterations} iterations (streaming)`
1764
- );
1765
- }
1735
+ debugLog(
1736
+ `[LoopAgent] Starting loop with max ${this.maxIterations} iterations (streaming)`
1737
+ );
1766
1738
  if (this.subAgents.length === 0) {
1767
1739
  yield {
1768
1740
  content: "No sub-agent defined for loop execution.",
@@ -1781,11 +1753,9 @@ ${lastResponse.content || ""}`,
1781
1753
  };
1782
1754
  while (shouldContinueLoop && iterationCount < this.maxIterations) {
1783
1755
  iterationCount++;
1784
- if (process.env.DEBUG === "true") {
1785
- console.log(
1786
- `[LoopAgent] Running iteration ${iterationCount}/${this.maxIterations} (streaming)`
1787
- );
1788
- }
1756
+ debugLog(
1757
+ `[LoopAgent] Running iteration ${iterationCount}/${this.maxIterations} (streaming)`
1758
+ );
1789
1759
  yield {
1790
1760
  content: `Running iteration ${iterationCount}/${this.maxIterations}...`,
1791
1761
  role: "assistant",
@@ -1809,11 +1779,9 @@ ${lastResponse.content || ""}`,
1809
1779
  }
1810
1780
  }
1811
1781
  if (!lastChunk) {
1812
- if (process.env.DEBUG === "true") {
1813
- console.warn(
1814
- `[LoopAgent] No complete chunk received from iteration ${iterationCount}`
1815
- );
1816
- }
1782
+ debugLog(
1783
+ `[LoopAgent] No complete chunk received from iteration ${iterationCount}`
1784
+ );
1817
1785
  shouldContinueLoop = false;
1818
1786
  continue;
1819
1787
  }
@@ -1839,9 +1807,8 @@ ${lastResponse.content || ""}`,
1839
1807
  };
1840
1808
  }
1841
1809
  } catch (error) {
1842
- console.error(
1843
- `[LoopAgent] Error in loop iteration ${iterationCount}:`,
1844
- error
1810
+ debugLog(
1811
+ `[LoopAgent] Error in loop iteration ${iterationCount}: ${error instanceof Error ? error.message : String(error)}`
1845
1812
  );
1846
1813
  yield {
1847
1814
  content: `Error in loop iteration ${iterationCount}: ${error instanceof Error ? error.message : String(error)}`,
@@ -1858,6 +1825,7 @@ ${lastResponse.content || ""}`,
1858
1825
  };
1859
1826
 
1860
1827
  // src/agents/lang-graph-agent.ts
1828
+ init_debug();
1861
1829
  var LangGraphAgent = (_class6 = class extends BaseAgent {
1862
1830
  /**
1863
1831
  * Graph nodes (agents and their connections)
@@ -1955,11 +1923,9 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
1955
1923
  if (targetNode.condition) {
1956
1924
  const shouldExecute = await targetNode.condition(result, context);
1957
1925
  if (!shouldExecute) {
1958
- if (process.env.DEBUG === "true") {
1959
- console.log(
1960
- `[LangGraphAgent] Skipping node "${targetName}" due to condition`
1961
- );
1962
- }
1926
+ debugLog(
1927
+ `[LangGraphAgent] Skipping node "${targetName}" due to condition`
1928
+ );
1963
1929
  continue;
1964
1930
  }
1965
1931
  }
@@ -1984,11 +1950,9 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
1984
1950
  };
1985
1951
  const shouldExecute = await node.condition(mockResponse, mockContext);
1986
1952
  if (!shouldExecute) {
1987
- if (process.env.DEBUG === "true") {
1988
- console.log(
1989
- `[LangGraphAgent] Skipping node "${targetName}" due to condition`
1990
- );
1991
- }
1953
+ debugLog(
1954
+ `[LangGraphAgent] Skipping node "${targetName}" due to condition`
1955
+ );
1992
1956
  }
1993
1957
  return { shouldExecute };
1994
1958
  }
@@ -2001,11 +1965,9 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
2001
1965
  messages: options.messages,
2002
1966
  config: options.config
2003
1967
  });
2004
- if (process.env.DEBUG === "true") {
2005
- console.log(
2006
- `[LangGraphAgent] Starting graph execution from root node "${this.rootNode}"`
2007
- );
2008
- }
1968
+ debugLog(
1969
+ `[LangGraphAgent] Starting graph execution from root node "${this.rootNode}"`
1970
+ );
2009
1971
  if (this.nodes.size === 0) {
2010
1972
  return {
2011
1973
  content: "No nodes defined in the graph.",
@@ -2025,11 +1987,9 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
2025
1987
  while (nodesToExecute.length > 0 && stepCount < this.maxSteps) {
2026
1988
  stepCount++;
2027
1989
  const { node, messages } = nodesToExecute.shift();
2028
- if (process.env.DEBUG === "true") {
2029
- console.log(
2030
- `[LangGraphAgent] Step ${stepCount}: Executing node "${node.name}"`
2031
- );
2032
- }
1990
+ debugLog(
1991
+ `[LangGraphAgent] Step ${stepCount}: Executing node "${node.name}"`
1992
+ );
2033
1993
  executedNodes.push(node.name);
2034
1994
  try {
2035
1995
  const result = await node.agent.run({
@@ -2101,11 +2061,9 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
2101
2061
  messages: options.messages,
2102
2062
  config: options.config
2103
2063
  });
2104
- if (process.env.DEBUG === "true") {
2105
- console.log(
2106
- `[LangGraphAgent] Starting graph execution from root node "${this.rootNode}" (streaming)`
2107
- );
2108
- }
2064
+ debugLog(
2065
+ `[LangGraphAgent] Starting graph execution from root node "${this.rootNode}" (streaming)`
2066
+ );
2109
2067
  if (this.nodes.size === 0) {
2110
2068
  yield {
2111
2069
  content: "No nodes defined in the graph.",
@@ -2132,11 +2090,9 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
2132
2090
  while (nodesToExecute.length > 0 && stepCount < this.maxSteps) {
2133
2091
  stepCount++;
2134
2092
  const { node, messages } = nodesToExecute.shift();
2135
- if (process.env.DEBUG === "true") {
2136
- console.log(
2137
- `[LangGraphAgent] Step ${stepCount}: Executing node "${node.name}" (streaming)`
2138
- );
2139
- }
2093
+ debugLog(
2094
+ `[LangGraphAgent] Step ${stepCount}: Executing node "${node.name}" (streaming)`
2095
+ );
2140
2096
  executedNodes.push(node.name);
2141
2097
  try {
2142
2098
  const result = await node.agent.run({
@@ -2250,6 +2206,7 @@ function createFunctionTool(func, options) {
2250
2206
  init_function_utils();
2251
2207
 
2252
2208
  // src/tools/common/google-search.ts
2209
+ init_debug();
2253
2210
  init_base_tool();
2254
2211
  var GoogleSearch = class extends BaseTool {
2255
2212
  /**
@@ -2290,9 +2247,7 @@ var GoogleSearch = class extends BaseTool {
2290
2247
  * This is a simplified implementation that doesn't actually search, just returns mock results
2291
2248
  */
2292
2249
  async runAsync(args, _context) {
2293
- if (process.env.DEBUG === "true") {
2294
- console.log(`Executing Google search for: ${args.query}`);
2295
- }
2250
+ debugLog(`[GoogleSearch] Executing Google search for: ${args.query}`);
2296
2251
  return {
2297
2252
  results: [
2298
2253
  {
@@ -2432,7 +2387,7 @@ var HttpRequestTool = class extends BaseTool {
2432
2387
  // src/tools/common/file-operations-tool.ts
2433
2388
  init_base_tool();
2434
2389
  var _promises = require('fs/promises'); var _promises2 = _interopRequireDefault(_promises);
2435
- var _path = require('path'); var _path2 = _interopRequireDefault(_path);
2390
+ var _path = require('path'); var path3 = _interopRequireWildcard(_path);
2436
2391
  var FileOperationsTool = class extends BaseTool {
2437
2392
 
2438
2393
  constructor(options) {
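
Aside from the renamed binding, the change in this hunk is the interop helper: node's path module now goes through the `_interopRequireWildcard` helper added at the top of the bundle instead of `_interopRequireDefault`, and the call sites below switch from `_path2.default.*` to `path3.default.*`. In source terms this most plausibly corresponds to a namespace import replacing a default import (an inference from how Sucrase-style transpilers emit these helpers, not something the diff states):

// Hypothetical source-level view of the change in
// src/tools/common/file-operations-tool.ts; the TypeScript source is not part
// of this diff, so treat the import forms as an inference.
//
// 0.0.5 (lowered via _interopRequireDefault):
//   import path from "path";
//
// 0.0.6 (lowered via _interopRequireWildcard):
import * as path from "path";

// Runtime behavior is unchanged either way: the wildcard helper copies the
// CommonJS module's properties and also assigns `.default`, which is why the
// bundle can keep calling path3.default.resolve() and friends below.
console.log(path.resolve(process.cwd(), "package.json"));
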
@@ -2528,14 +2483,14 @@ var FileOperationsTool = class extends BaseTool {
2528
2483
  * Resolve a file path relative to the base path
2529
2484
  */
2530
2485
  resolvePath(filepath) {
2531
- return _path2.default.isAbsolute(filepath) ? filepath : _path2.default.resolve(this.basePath, filepath);
2486
+ return path3.default.isAbsolute(filepath) ? filepath : path3.default.resolve(this.basePath, filepath);
2532
2487
  }
2533
2488
  /**
2534
2489
  * Validate that a path is within the base path for security
2535
2490
  */
2536
2491
  validatePath(filepath) {
2537
- const normalizedPath = _path2.default.normalize(filepath);
2538
- const normalizedBasePath = _path2.default.normalize(this.basePath);
2492
+ const normalizedPath = path3.default.normalize(filepath);
2493
+ const normalizedBasePath = path3.default.normalize(this.basePath);
2539
2494
  if (!normalizedPath.startsWith(normalizedBasePath)) {
2540
2495
  throw new Error(
2541
2496
  `Access denied: Can't access paths outside the base directory`
@@ -2564,7 +2519,7 @@ var FileOperationsTool = class extends BaseTool {
2564
2519
  */
2565
2520
  async writeFile(filepath, content, encoding) {
2566
2521
  try {
2567
- const dir = _path2.default.dirname(filepath);
2522
+ const dir = path3.default.dirname(filepath);
2568
2523
  await _promises2.default.mkdir(dir, { recursive: true });
2569
2524
  await _promises2.default.writeFile(filepath, content, { encoding });
2570
2525
  return {
@@ -2582,7 +2537,7 @@ var FileOperationsTool = class extends BaseTool {
2582
2537
  */
2583
2538
  async appendFile(filepath, content, encoding) {
2584
2539
  try {
2585
- const dir = _path2.default.dirname(filepath);
2540
+ const dir = path3.default.dirname(filepath);
2586
2541
  await _promises2.default.mkdir(dir, { recursive: true });
2587
2542
  await _promises2.default.appendFile(filepath, content, { encoding });
2588
2543
  return {
@@ -2636,7 +2591,7 @@ var FileOperationsTool = class extends BaseTool {
2636
2591
  const entries = await _promises2.default.readdir(dirpath, { withFileTypes: true });
2637
2592
  const results = await Promise.all(
2638
2593
  entries.map(async (entry) => {
2639
- const entryPath = _path2.default.join(dirpath, entry.name);
2594
+ const entryPath = path3.default.join(dirpath, entry.name);
2640
2595
  const stats = await _promises2.default.stat(entryPath);
2641
2596
  return {
2642
2597
  name: entry.name,
@@ -2755,6 +2710,7 @@ var UserInteractionTool = class extends BaseTool {
2755
2710
  };
2756
2711
 
2757
2712
  // src/tools/common/exit-loop-tool.ts
2713
+ init_debug();
2758
2714
  init_base_tool();
2759
2715
  var ExitLoopTool = class extends BaseTool {
2760
2716
  /**
@@ -2784,9 +2740,7 @@ var ExitLoopTool = class extends BaseTool {
2784
2740
  * Execute the exit loop action
2785
2741
  */
2786
2742
  async runAsync(_args, context) {
2787
- if (process.env.DEBUG === "true") {
2788
- console.log("Executing exit loop tool");
2789
- }
2743
+ debugLog("[ExitLoopTool] Executing exit loop tool");
2790
2744
  if (context.actions) {
2791
2745
  context.actions.escalate = true;
2792
2746
  } else {
@@ -2801,6 +2755,7 @@ var ExitLoopTool = class extends BaseTool {
2801
2755
  };
2802
2756
 
2803
2757
  // src/tools/common/get-user-choice-tool.ts
2758
+ init_debug();
2804
2759
  init_base_tool();
2805
2760
  var GetUserChoiceTool = class extends BaseTool {
2806
2761
  /**
@@ -2845,13 +2800,13 @@ var GetUserChoiceTool = class extends BaseTool {
2845
2800
  * and the actual choice will be provided asynchronously
2846
2801
  */
2847
2802
  async runAsync(args, context) {
2848
- if (process.env.DEBUG === "true") {
2849
- console.log(
2850
- `Executing get_user_choice with options: ${args.options.join(", ")}`
2851
- );
2852
- if (args.question) {
2853
- console.log(`Question: ${args.question}`);
2854
- }
2803
+ debugLog(
2804
+ `[GetUserChoiceTool] Executing get_user_choice with options: ${args.options.join(
2805
+ ", "
2806
+ )}`
2807
+ );
2808
+ if (args.question) {
2809
+ debugLog(`[GetUserChoiceTool] Question: ${args.question}`);
2855
2810
  }
2856
2811
  if (context.actions) {
2857
2812
  context.actions.skip_summarization = true;
@@ -2865,6 +2820,7 @@ var GetUserChoiceTool = class extends BaseTool {
2865
2820
  };
2866
2821
 
2867
2822
  // src/tools/common/transfer-to-agent-tool.ts
2823
+ init_debug();
2868
2824
  init_base_tool();
2869
2825
  var TransferToAgentTool = class extends BaseTool {
2870
2826
  /**
@@ -2899,9 +2855,9 @@ var TransferToAgentTool = class extends BaseTool {
2899
2855
  * Execute the transfer to agent action
2900
2856
  */
2901
2857
  async runAsync(args, context) {
2902
- if (process.env.DEBUG === "true") {
2903
- console.log(`Executing transfer to agent: ${args.agent_name}`);
2904
- }
2858
+ debugLog(
2859
+ `[TransferToAgentTool] Executing transfer to agent: ${args.agent_name}`
2860
+ );
2905
2861
  if (context.actions) {
2906
2862
  context.actions.transfer_to_agent = args.agent_name;
2907
2863
  } else {
@@ -2916,6 +2872,7 @@ var TransferToAgentTool = class extends BaseTool {
2916
2872
  };
2917
2873
 
2918
2874
  // src/tools/common/load-memory-tool.ts
2875
+ init_debug();
2919
2876
  init_base_tool();
2920
2877
  var LoadMemoryTool = class extends BaseTool {
2921
2878
  /**
@@ -2950,9 +2907,9 @@ var LoadMemoryTool = class extends BaseTool {
2950
2907
  * Execute the memory loading action
2951
2908
  */
2952
2909
  async runAsync(args, context) {
2953
- if (process.env.DEBUG === "true") {
2954
- console.log(`Executing load_memory with query: ${args.query}`);
2955
- }
2910
+ debugLog(
2911
+ `[LoadMemoryTool] Executing load_memory with query: ${args.query}`
2912
+ );
2956
2913
  if (!context.memoryService) {
2957
2914
  return {
2958
2915
  error: "Memory service is not available",
@@ -3238,6 +3195,7 @@ var McpClientService = (_class7 = class {
3238
3195
  }, _class7);
3239
3196
 
3240
3197
  // src/tools/mcp/create-tool.ts
3198
+ init_debug();
3241
3199
  init_base_tool();
3242
3200
 
3243
3201
  // src/tools/mcp/schema-conversion.ts
@@ -3472,9 +3430,10 @@ var McpToolAdapter = (_class8 = class extends BaseTool {
3472
3430
  }
3473
3431
  }
3474
3432
  async runAsync(args, _context) {
3475
- if (process.env.DEBUG === "true") {
3476
- console.log(`Executing MCP tool ${this.name} with args:`, args);
3477
- }
3433
+ debugLog(
3434
+ `[McpToolAdapter] Executing MCP tool ${this.name} with args:`,
3435
+ args
3436
+ );
3478
3437
  try {
3479
3438
  if (typeof this.mcpTool.execute === "function") {
3480
3439
  return await this.mcpTool.execute(args);
@@ -3794,9 +3753,11 @@ var BaseLLMConnection = (_class10 = class {constructor() { _class10.prototype.__
3794
3753
  }, _class10);
3795
3754
 
3796
3755
  // src/models/anthropic-llm.ts
3756
+ init_debug();
3797
3757
  var _axios = require('axios'); var _axios2 = _interopRequireDefault(_axios);
3798
3758
 
3799
3759
  // src/models/anthropic-llm-connection.ts
3760
+ init_debug();
3800
3761
  var AnthropicLLMConnection = class extends BaseLLMConnection {
3801
3762
  /**
3802
3763
  * Axios instance for API calls
@@ -3926,19 +3887,14 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
3926
3887
  if (!_optionalChain([content, 'optionalAccess', _49 => _49.length])) return [];
3927
3888
  const toolUses = [];
3928
3889
  for (const block of content) {
3929
- if (process.env.DEBUG === "true") {
3930
- console.log(
3931
- "Connection - Processing content block of type:",
3932
- block.type
3933
- );
3934
- }
3890
+ debugLog(
3891
+ `[AnthropicLLMConnection] Processing content block of type: ${block.type}`
3892
+ );
3935
3893
  if (block.type === "tool_use") {
3936
- if (process.env.DEBUG === "true") {
3937
- console.log(
3938
- "Connection - Found tool_use block:",
3939
- JSON.stringify(block, null, 2)
3940
- );
3941
- }
3894
+ debugLog(
3895
+ "[AnthropicLLMConnection] Found tool_use block:",
3896
+ JSON.stringify(block, null, 2)
3897
+ );
3942
3898
  toolUses.push({
3943
3899
  id: block.id || "unknown-id",
3944
3900
  name: block.name || "unknown-name",
@@ -3946,14 +3902,14 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
3946
3902
  });
3947
3903
  }
3948
3904
  }
3949
- if (process.env.DEBUG === "true") {
3950
- console.log(`Connection - Found ${toolUses.length} tool uses in content`);
3951
- if (toolUses.length > 0) {
3952
- console.log(
3953
- "Connection - Extracted tool uses:",
3954
- JSON.stringify(toolUses, null, 2)
3955
- );
3956
- }
3905
+ debugLog(
3906
+ `[AnthropicLLMConnection] Found ${toolUses.length} tool uses in content`
3907
+ );
3908
+ if (toolUses.length > 0) {
3909
+ debugLog(
3910
+ "[AnthropicLLMConnection] Extracted tool uses:",
3911
+ JSON.stringify(toolUses, null, 2)
3912
+ );
3957
3913
  }
3958
3914
  return toolUses;
3959
3915
  }
@@ -4047,43 +4003,30 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4047
4003
  }
4048
4004
  const toolUses = this.extractToolUses(apiResponse.content);
4049
4005
  const toolCalls = this.convertToolCalls(toolUses);
4050
- if (process.env.DEBUG === "true") {
4051
- if (toolUses.length > 0) {
4052
- console.log(
4053
- "Connection - Extracted Tool Uses:",
4054
- JSON.stringify(toolUses, null, 2)
4055
- );
4056
- console.log(
4057
- "Connection - Converted Tool Calls:",
4058
- JSON.stringify(toolCalls, null, 2)
4059
- );
4060
- }
4061
- }
4006
+ debugLog(
4007
+ `[AnthropicLLMConnection] - Extracted ${toolUses.length} tool uses in content and converted ${_optionalChain([toolCalls, 'optionalAccess', _50 => _50.length]) || 0} tool calls`
4008
+ );
4062
4009
  const llmResponse = new LLMResponse({
4063
4010
  role: "assistant",
4064
4011
  content,
4065
- tool_calls: _optionalChain([toolCalls, 'optionalAccess', _50 => _50.length]) ? toolCalls : void 0,
4012
+ tool_calls: _optionalChain([toolCalls, 'optionalAccess', _51 => _51.length]) ? toolCalls : void 0,
4066
4013
  raw_response: apiResponse
4067
4014
  });
4068
- if (process.env.DEBUG === "true") {
4069
- console.log(
4070
- "Connection - Final LLMResponse object:",
4071
- JSON.stringify(
4072
- {
4073
- role: llmResponse.role,
4074
- content: _optionalChain([llmResponse, 'access', _51 => _51.content, 'optionalAccess', _52 => _52.substring, 'call', _53 => _53(0, 50)]) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
4075
- tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
4076
- },
4077
- null,
4078
- 2
4079
- )
4080
- );
4081
- }
4015
+ const logObject = {
4016
+ role: llmResponse.role,
4017
+ content: _optionalChain([llmResponse, 'access', _52 => _52.content, 'optionalAccess', _53 => _53.substring, 'call', _54 => _54(0, 50)]) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
4018
+ tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
4019
+ };
4020
+ debugLog(
4021
+ "[AnthropicLLMConnection] Final LLMResponse object:",
4022
+ JSON.stringify(logObject, null, 2)
4023
+ );
4082
4024
  return llmResponse;
4083
4025
  } catch (error) {
4084
- if (process.env.DEBUG === "true") {
4085
- console.error("Error sending message to Anthropic:", error);
4086
- }
4026
+ debugLog(
4027
+ "[AnthropicLLMConnection] Error sending message to Anthropic:",
4028
+ error
4029
+ );
4087
4030
  throw error;
4088
4031
  }
4089
4032
  }
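
Instead of serializing the whole response inside a DEBUG guard, the connection now builds a small `logObject` summary (content truncated to 50 characters, tool calls reported only as a count) and hands it to `debugLog`; the same pattern reappears further down in AnthropicLLM. A sketch of that summary step (property names follow the bundle above; the helper name and the `?? ""` fallback are additions for the sketch, since the bundle concatenates the possibly-undefined value directly):

// Sketch of the response-summary logging introduced in this hunk.
interface LLMResponseLike {
  role: string;
  content: string | null;
  tool_calls?: unknown[];
}

function summarizeForLog(llmResponse: LLMResponseLike) {
  return {
    role: llmResponse.role,
    content:
      (llmResponse.content?.substring(0, 50) ?? "") +
      (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
    tool_calls: llmResponse.tool_calls
      ? `[${llmResponse.tool_calls.length} calls]`
      : "undefined",
  };
}

// Usage mirroring the bundle:
// debugLog("[AnthropicLLMConnection] Final LLMResponse object:",
//   JSON.stringify(summarizeForLog(response), null, 2));
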
@@ -4108,17 +4051,17 @@ var AnthropicLLM = class extends BaseLLM {
4108
4051
  */
4109
4052
  constructor(model, config) {
4110
4053
  super(model);
4111
- this.apiKey = _optionalChain([config, 'optionalAccess', _54 => _54.apiKey]) || process.env.ANTHROPIC_API_KEY || "";
4112
- this.baseURL = _optionalChain([config, 'optionalAccess', _55 => _55.baseURL]) || "https://api.anthropic.com/v1";
4054
+ this.apiKey = _optionalChain([config, 'optionalAccess', _55 => _55.apiKey]) || process.env.ANTHROPIC_API_KEY || "";
4055
+ this.baseURL = _optionalChain([config, 'optionalAccess', _56 => _56.baseURL]) || "https://api.anthropic.com/v1";
4113
4056
  if (!this.apiKey) {
4114
4057
  throw new Error(
4115
4058
  "Anthropic API key is required. Provide it in config or set ANTHROPIC_API_KEY environment variable."
4116
4059
  );
4117
4060
  }
4118
4061
  this.defaultParams = {
4119
- temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _56 => _56.defaultParams, 'optionalAccess', _57 => _57.temperature]), () => ( 0.7)),
4120
- top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _58 => _58.defaultParams, 'optionalAccess', _59 => _59.top_p]), () => ( 1)),
4121
- max_tokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _60 => _60.defaultParams, 'optionalAccess', _61 => _61.max_tokens]), () => ( 1024))
4062
+ temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _57 => _57.defaultParams, 'optionalAccess', _58 => _58.temperature]), () => ( 0.7)),
4063
+ top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _59 => _59.defaultParams, 'optionalAccess', _60 => _60.top_p]), () => ( 1)),
4064
+ max_tokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _61 => _61.defaultParams, 'optionalAccess', _62 => _62.max_tokens]), () => ( 1024))
4122
4065
  };
4123
4066
  }
4124
4067
  /**
@@ -4204,7 +4147,7 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4204
4147
  * Convert ADK function declarations to Anthropic tool format
4205
4148
  */
4206
4149
  convertFunctionsToTools(functions) {
4207
- if (!_optionalChain([functions, 'optionalAccess', _62 => _62.length])) {
4150
+ if (!_optionalChain([functions, 'optionalAccess', _63 => _63.length])) {
4208
4151
  return [];
4209
4152
  }
4210
4153
  return functions.map((func) => ({
@@ -4217,7 +4160,7 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4217
4160
  * Convert Anthropic tool calls to ADK tool calls
4218
4161
  */
4219
4162
  convertToolUses(toolUses) {
4220
- if (!_optionalChain([toolUses, 'optionalAccess', _63 => _63.length])) {
4163
+ if (!_optionalChain([toolUses, 'optionalAccess', _64 => _64.length])) {
4221
4164
  return [];
4222
4165
  }
4223
4166
  return toolUses.map((toolUse) => ({
@@ -4232,16 +4175,16 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4232
4175
  * Extract tool uses from response content
4233
4176
  */
4234
4177
  extractToolUses(content) {
4235
- if (!_optionalChain([content, 'optionalAccess', _64 => _64.length])) return [];
4178
+ if (!_optionalChain([content, 'optionalAccess', _65 => _65.length])) return [];
4236
4179
  const toolUses = [];
4237
4180
  for (const block of content) {
4238
- if (process.env.DEBUG === "true") {
4239
- console.log("Processing content block of type:", block.type);
4240
- }
4181
+ debugLog(
4182
+ `[AnthropicLLM] Processing content block of type: ${block.type}`
4183
+ );
4241
4184
  if (block.type === "tool_use") {
4242
- if (process.env.DEBUG === "true") {
4243
- console.log("Found tool_use block:", JSON.stringify(block, null, 2));
4244
- }
4185
+ debugLog(
4186
+ `[AnthropicLLM] Found tool_use block: ${JSON.stringify(block, null, 2)}`
4187
+ );
4245
4188
  toolUses.push({
4246
4189
  id: block.id || "unknown-id",
4247
4190
  name: block.name || "unknown-name",
@@ -4249,12 +4192,10 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4249
4192
  });
4250
4193
  }
4251
4194
  }
4252
- if (process.env.DEBUG === "true") {
4253
- console.log(`Found ${toolUses.length} tool uses in content`);
4254
- if (toolUses.length > 0) {
4255
- console.log("Extracted tool uses:", JSON.stringify(toolUses, null, 2));
4256
- }
4257
- }
4195
+ debugLog(
4196
+ `[AnthropicLLM] Found ${toolUses.length} tool uses in content`,
4197
+ toolUses
4198
+ );
4258
4199
  return toolUses;
4259
4200
  }
4260
4201
  /**
@@ -4276,16 +4217,14 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4276
4217
  },
4277
4218
  responseType: stream ? "stream" : "json"
4278
4219
  });
4279
- if (process.env.DEBUG === "true") {
4280
- console.log("Anthropic API Response Status:", response.status);
4281
- if (!stream) {
4282
- console.log("Response Data Structure:", Object.keys(response.data));
4283
- console.log(
4284
- "Response Content Structure:",
4285
- response.data.content.map((block) => ({ type: block.type }))
4286
- );
4287
- }
4288
- }
4220
+ debugLog(
4221
+ `[AnthropicLLM] API Response done with ${response.status}:`,
4222
+ response.data
4223
+ );
4224
+ debugLog(
4225
+ "[AnthropicLLM] API Response content:",
4226
+ response.data.content.map((block) => ({ type: block.type }))
4227
+ );
4289
4228
  return response.data;
4290
4229
  } catch (error) {
4291
4230
  console.error("Error calling Anthropic API:", error);
@@ -4309,26 +4248,19 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4309
4248
  temperature: _nullishCoalesce(llmRequest.config.temperature, () => ( this.defaultParams.temperature)),
4310
4249
  max_tokens: _nullishCoalesce(llmRequest.config.max_tokens, () => ( this.defaultParams.max_tokens)),
4311
4250
  top_p: _nullishCoalesce(llmRequest.config.top_p, () => ( this.defaultParams.top_p)),
4312
- tools: _optionalChain([tools, 'optionalAccess', _65 => _65.length]) ? tools : void 0
4251
+ tools: _optionalChain([tools, 'optionalAccess', _66 => _66.length]) ? tools : void 0
4313
4252
  };
4314
- if (process.env.DEBUG === "true") {
4315
- console.log("Anthropic API Request:", {
4316
- model: params.model,
4317
- messageCount: params.messages.length,
4318
- systemMessage: params.system ? "present" : "none",
4319
- tools: params.tools ? params.tools.map((t) => t.name) : "none"
4320
- });
4321
- }
4253
+ debugLog("[AnthropicLLM] API Request:", {
4254
+ model: params.model,
4255
+ messageCount: params.messages.length,
4256
+ systemMessage: params.system ? "present" : "none",
4257
+ tools: params.tools ? params.tools.map((t) => t.name) : "none"
4258
+ });
4322
4259
  if (stream) {
4323
4260
  throw new Error("Streaming is not supported in this implementation");
4324
4261
  }
4325
4262
  const response = await this.callAnthropicAPI(params);
4326
- if (process.env.DEBUG === "true") {
4327
- console.log(
4328
- "Full Response Content:",
4329
- JSON.stringify(response.content, null, 2)
4330
- );
4331
- }
4263
+ debugLog("[AnthropicLLM] Full Response Content:", response.content);
4332
4264
  let content = "";
4333
4265
  for (const block of response.content) {
4334
4266
  if (block.type === "text") {
@@ -4337,43 +4269,26 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4337
4269
  }
4338
4270
  const toolUses = this.extractToolUses(response.content);
4339
4271
  const toolCalls = this.convertToolUses(toolUses);
4340
- if (process.env.DEBUG === "true") {
4341
- if (toolUses.length > 0) {
4342
- console.log(
4343
- "Extracted Tool Uses:",
4344
- JSON.stringify(toolUses, null, 2)
4345
- );
4346
- console.log(
4347
- "Converted Tool Calls:",
4348
- JSON.stringify(toolCalls, null, 2)
4349
- );
4350
- }
4351
- }
4272
+ debugLog("[AnthropicLLM] Extracted Tool Uses:", toolUses);
4273
+ debugLog("[AnthropicLLM] Converted Tool Calls:", toolCalls);
4352
4274
  const llmResponse = new LLMResponse({
4353
4275
  role: "assistant",
4354
4276
  content,
4355
4277
  tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
4356
4278
  raw_response: response
4357
4279
  });
4358
- if (process.env.DEBUG === "true") {
4359
- console.log(
4360
- "Final LLMResponse object:",
4361
- JSON.stringify(
4362
- {
4363
- role: llmResponse.role,
4364
- content: _optionalChain([llmResponse, 'access', _66 => _66.content, 'optionalAccess', _67 => _67.substring, 'call', _68 => _68(0, 50)]) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
4365
- tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
4366
- },
4367
- null,
4368
- 2
4369
- )
4370
- );
4371
- }
4280
+ const logObject = {
4281
+ role: llmResponse.role,
4282
+ content: _optionalChain([llmResponse, 'access', _67 => _67.content, 'optionalAccess', _68 => _68.substring, 'call', _69 => _69(0, 50)]) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
4283
+ tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
4284
+ };
4285
+ debugLog(
4286
+ "[AnthropicLLM] Final LLMResponse object:",
4287
+ JSON.stringify(logObject, null, 2)
4288
+ );
4372
4289
  yield llmResponse;
4373
4290
  } catch (error) {
4374
- if (process.env.DEBUG === "true") {
4375
- console.error("Error calling Anthropic:", error);
4376
- }
4291
+ debugLog("[AnthropicLLM] Error:", error);
4377
4292
  throw error;
4378
4293
  }
4379
4294
  }
@@ -4417,9 +4332,9 @@ var GoogleLLM = class extends BaseLLM {
4417
4332
  constructor(model, config) {
4418
4333
  super(model);
4419
4334
  const apiKey = process.env.GOOGLE_API_KEY;
4420
- const projectId = _optionalChain([config, 'optionalAccess', _69 => _69.projectId]) || process.env.GOOGLE_CLOUD_PROJECT;
4421
- const location = _optionalChain([config, 'optionalAccess', _70 => _70.location]) || process.env.GOOGLE_CLOUD_LOCATION;
4422
- const useVertexAI = _optionalChain([process, 'access', _71 => _71.env, 'access', _72 => _72.USE_VERTEX_AI, 'optionalAccess', _73 => _73.toLowerCase, 'call', _74 => _74()]) === "true";
4335
+ const projectId = _optionalChain([config, 'optionalAccess', _70 => _70.projectId]) || process.env.GOOGLE_CLOUD_PROJECT;
4336
+ const location = _optionalChain([config, 'optionalAccess', _71 => _71.location]) || process.env.GOOGLE_CLOUD_LOCATION;
4337
+ const useVertexAI = _optionalChain([process, 'access', _72 => _72.env, 'access', _73 => _73.USE_VERTEX_AI, 'optionalAccess', _74 => _74.toLowerCase, 'call', _75 => _75()]) === "true";
4423
4338
  if (!useVertexAI && !apiKey) {
4424
4339
  throw new Error(
4425
4340
  "Google API Key is required. Provide via config or GOOGLE_API_KEY env var."
@@ -4444,9 +4359,9 @@ var GoogleLLM = class extends BaseLLM {
4444
4359
  }
4445
4360
  this.ai = new (0, _genai.GoogleGenAI)(options);
4446
4361
  this.defaultParams = {
4447
- temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _75 => _75.defaultParams, 'optionalAccess', _76 => _76.temperature]), () => ( 0.7)),
4448
- topP: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _77 => _77.defaultParams, 'optionalAccess', _78 => _78.top_p]), () => ( 1)),
4449
- maxOutputTokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _79 => _79.defaultParams, 'optionalAccess', _80 => _80.maxOutputTokens]), () => ( 1024))
4362
+ temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _76 => _76.defaultParams, 'optionalAccess', _77 => _77.temperature]), () => ( 0.7)),
4363
+ topP: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _78 => _78.defaultParams, 'optionalAccess', _79 => _79.top_p]), () => ( 1)),
4364
+ maxOutputTokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _80 => _80.defaultParams, 'optionalAccess', _81 => _81.maxOutputTokens]), () => ( 1024))
4450
4365
  };
4451
4366
  }
4452
4367
  /**
@@ -4584,7 +4499,7 @@ var GoogleLLM = class extends BaseLLM {
4584
4499
  );
4585
4500
  parts.push({ text: "" });
4586
4501
  }
4587
- if (googleRole === "function" && (parts.length !== 1 || !_optionalChain([parts, 'access', _81 => _81[0], 'optionalAccess', _82 => _82.functionResponse]))) {
4502
+ if (googleRole === "function" && (parts.length !== 1 || !_optionalChain([parts, 'access', _82 => _82[0], 'optionalAccess', _83 => _83.functionResponse]))) {
4588
4503
  console.error(
4589
4504
  `[GoogleLLM] convertMessage - Invalid parts for 'function' role. Expected 1 functionResponse part. Got:`,
4590
4505
  JSON.stringify(parts),
@@ -4692,13 +4607,13 @@ var GoogleLLM = class extends BaseLLM {
4692
4607
  role: "assistant",
4693
4608
  content: null
4694
4609
  });
4695
- if (typeof _optionalChain([response, 'optionalAccess', _83 => _83.candidates, 'optionalAccess', _84 => _84[0], 'optionalAccess', _85 => _85.content, 'optionalAccess', _86 => _86.parts, 'optionalAccess', _87 => _87[0], 'optionalAccess', _88 => _88.text]) === "string") {
4610
+ if (typeof _optionalChain([response, 'optionalAccess', _84 => _84.candidates, 'optionalAccess', _85 => _85[0], 'optionalAccess', _86 => _86.content, 'optionalAccess', _87 => _87.parts, 'optionalAccess', _88 => _88[0], 'optionalAccess', _89 => _89.text]) === "string") {
4696
4611
  result.content = response.candidates[0].content.parts[0].text;
4697
4612
  }
4698
- if (_optionalChain([response, 'optionalAccess', _89 => _89.candidates, 'optionalAccess', _90 => _90[0], 'optionalAccess', _91 => _91.content, 'optionalAccess', _92 => _92.parts, 'optionalAccess', _93 => _93[0], 'optionalAccess', _94 => _94.text])) {
4613
+ if (_optionalChain([response, 'optionalAccess', _90 => _90.candidates, 'optionalAccess', _91 => _91[0], 'optionalAccess', _92 => _92.content, 'optionalAccess', _93 => _93.parts, 'optionalAccess', _94 => _94[0], 'optionalAccess', _95 => _95.text])) {
4699
4614
  result.content = response.candidates[0].content.parts[0].text;
4700
4615
  }
4701
- if (_optionalChain([response, 'optionalAccess', _95 => _95.candidates, 'optionalAccess', _96 => _96[0], 'optionalAccess', _97 => _97.content, 'optionalAccess', _98 => _98.parts, 'optionalAccess', _99 => _99[0], 'optionalAccess', _100 => _100.functionCall])) {
4616
+ if (_optionalChain([response, 'optionalAccess', _96 => _96.candidates, 'optionalAccess', _97 => _97[0], 'optionalAccess', _98 => _98.content, 'optionalAccess', _99 => _99.parts, 'optionalAccess', _100 => _100[0], 'optionalAccess', _101 => _101.functionCall])) {
4702
4617
  const functionCall = response.candidates[0].content.parts[0].functionCall;
4703
4618
  result.function_call = {
4704
4619
  name: functionCall.name,
@@ -4745,10 +4660,10 @@ var GoogleLLM = class extends BaseLLM {
4745
4660
  if (stream) {
4746
4661
  const streamingResult = await this.ai.models.generateContentStream(requestOptions);
4747
4662
  for await (const chunk of streamingResult) {
4748
- if (!_optionalChain([chunk, 'access', _101 => _101.candidates, 'optionalAccess', _102 => _102[0], 'optionalAccess', _103 => _103.content, 'optionalAccess', _104 => _104.parts, 'optionalAccess', _105 => _105[0], 'optionalAccess', _106 => _106.text])) {
4663
+ if (!_optionalChain([chunk, 'access', _102 => _102.candidates, 'optionalAccess', _103 => _103[0], 'optionalAccess', _104 => _104.content, 'optionalAccess', _105 => _105.parts, 'optionalAccess', _106 => _106[0], 'optionalAccess', _107 => _107.text])) {
4749
4664
  continue;
4750
4665
  }
4751
- const partialText = _optionalChain([chunk, 'access', _107 => _107.candidates, 'access', _108 => _108[0], 'optionalAccess', _109 => _109.content, 'optionalAccess', _110 => _110.parts, 'access', _111 => _111[0], 'optionalAccess', _112 => _112.text]) || "";
4666
+ const partialText = _optionalChain([chunk, 'access', _108 => _108.candidates, 'access', _109 => _109[0], 'optionalAccess', _110 => _110.content, 'optionalAccess', _111 => _111.parts, 'access', _112 => _112[0], 'optionalAccess', _113 => _113.text]) || "";
4752
4667
  const partialResponse = new LLMResponse({
4753
4668
  content: partialText,
4754
4669
  role: "assistant",
@@ -4768,6 +4683,7 @@ var GoogleLLM = class extends BaseLLM {
4768
4683
  };
4769
4684
 
4770
4685
  // src/models/openai-llm.ts
4686
+ init_debug();
4771
4687
  var _openai = require('openai'); var _openai2 = _interopRequireDefault(_openai);
4772
4688
 
4773
4689
  // src/models/openai-llm-connection.ts
@@ -4888,10 +4804,10 @@ var OpenAILLMConnection = (_class11 = class extends BaseLLMConnection {
4888
4804
  for await (const chunk of stream) {
4889
4805
  if (chunk.choices.length === 0) continue;
4890
4806
  const delta = chunk.choices[0].delta;
4891
- if (_optionalChain([delta, 'optionalAccess', _113 => _113.content])) {
4807
+ if (_optionalChain([delta, 'optionalAccess', _114 => _114.content])) {
4892
4808
  responseContent += delta.content;
4893
4809
  }
4894
- if (_optionalChain([delta, 'optionalAccess', _114 => _114.function_call])) {
4810
+ if (_optionalChain([delta, 'optionalAccess', _115 => _115.function_call])) {
4895
4811
  if (!functionCall) {
4896
4812
  functionCall = {
4897
4813
  name: delta.function_call.name || "",
@@ -4902,7 +4818,7 @@ var OpenAILLMConnection = (_class11 = class extends BaseLLMConnection {
4902
4818
  functionCall.arguments += delta.function_call.arguments || "";
4903
4819
  }
4904
4820
  }
4905
- if (_optionalChain([delta, 'optionalAccess', _115 => _115.tool_calls])) {
4821
+ if (_optionalChain([delta, 'optionalAccess', _116 => _116.tool_calls])) {
4906
4822
  for (const toolDelta of delta.tool_calls) {
4907
4823
  const id = toolDelta.id || "";
4908
4824
  let tool = toolCalls.find((t) => t.id === id);
@@ -4910,20 +4826,20 @@ var OpenAILLMConnection = (_class11 = class extends BaseLLMConnection {
4910
4826
  tool = {
4911
4827
  id,
4912
4828
  function: {
4913
- name: _optionalChain([toolDelta, 'access', _116 => _116.function, 'optionalAccess', _117 => _117.name]) || "",
4914
- arguments: _optionalChain([toolDelta, 'access', _118 => _118.function, 'optionalAccess', _119 => _119.arguments]) || ""
4829
+ name: _optionalChain([toolDelta, 'access', _117 => _117.function, 'optionalAccess', _118 => _118.name]) || "",
4830
+ arguments: _optionalChain([toolDelta, 'access', _119 => _119.function, 'optionalAccess', _120 => _120.arguments]) || ""
4915
4831
  }
4916
4832
  };
4917
4833
  toolCalls.push(tool);
4918
4834
  } else {
4919
- tool.function.name += _optionalChain([toolDelta, 'access', _120 => _120.function, 'optionalAccess', _121 => _121.name]) || "";
4920
- tool.function.arguments += _optionalChain([toolDelta, 'access', _122 => _122.function, 'optionalAccess', _123 => _123.arguments]) || "";
4835
+ tool.function.name += _optionalChain([toolDelta, 'access', _121 => _121.function, 'optionalAccess', _122 => _122.name]) || "";
4836
+ tool.function.arguments += _optionalChain([toolDelta, 'access', _123 => _123.function, 'optionalAccess', _124 => _124.arguments]) || "";
4921
4837
  }
4922
4838
  }
4923
4839
  }
4924
4840
  if (this.responseCallback) {
4925
4841
  const response = new LLMResponse({
4926
- content: _optionalChain([delta, 'optionalAccess', _124 => _124.content]) || null,
4842
+ content: _optionalChain([delta, 'optionalAccess', _125 => _125.content]) || null,
4927
4843
  role: "assistant",
4928
4844
  function_call: functionCall,
4929
4845
  tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
@@ -5033,16 +4949,16 @@ var OpenAILLM = class extends BaseLLM {
5033
4949
  constructor(model, config) {
5034
4950
  super(model);
5035
4951
  this.client = new (0, _openai2.default)({
5036
- apiKey: _optionalChain([config, 'optionalAccess', _125 => _125.apiKey]) || process.env.OPENAI_API_KEY,
5037
- baseURL: _optionalChain([config, 'optionalAccess', _126 => _126.baseURL]),
5038
- organization: _optionalChain([config, 'optionalAccess', _127 => _127.organization])
4952
+ apiKey: _optionalChain([config, 'optionalAccess', _126 => _126.apiKey]) || process.env.OPENAI_API_KEY,
4953
+ baseURL: _optionalChain([config, 'optionalAccess', _127 => _127.baseURL]),
4954
+ organization: _optionalChain([config, 'optionalAccess', _128 => _128.organization])
5039
4955
  });
5040
4956
  this.defaultParams = {
5041
- temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _128 => _128.defaultParams, 'optionalAccess', _129 => _129.temperature]), () => ( 0.7)),
5042
- top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _130 => _130.defaultParams, 'optionalAccess', _131 => _131.top_p]), () => ( 1)),
5043
- max_tokens: _optionalChain([config, 'optionalAccess', _132 => _132.defaultParams, 'optionalAccess', _133 => _133.max_tokens]),
5044
- frequency_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _134 => _134.defaultParams, 'optionalAccess', _135 => _135.frequency_penalty]), () => ( 0)),
5045
- presence_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _136 => _136.defaultParams, 'optionalAccess', _137 => _137.presence_penalty]), () => ( 0))
4957
+ temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _129 => _129.defaultParams, 'optionalAccess', _130 => _130.temperature]), () => ( 0.7)),
4958
+ top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _131 => _131.defaultParams, 'optionalAccess', _132 => _132.top_p]), () => ( 1)),
4959
+ max_tokens: _optionalChain([config, 'optionalAccess', _133 => _133.defaultParams, 'optionalAccess', _134 => _134.max_tokens]),
4960
+ frequency_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _135 => _135.defaultParams, 'optionalAccess', _136 => _136.frequency_penalty]), () => ( 0)),
4961
+ presence_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _137 => _137.defaultParams, 'optionalAccess', _138 => _138.presence_penalty]), () => ( 0))
5046
4962
  };
5047
4963
  }
5048
4964
  /**
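Editor's note: the `+` lines in the constructor hunk above show how `OpenAILLM` reads its configuration in 0.0.6: `apiKey` falls back to `process.env.OPENAI_API_KEY`, `baseURL` and `organization` are optional pass-throughs to the OpenAI client, and `defaultParams` defaults to temperature 0.7, top_p 1, frequency_penalty 0 and presence_penalty 0. The sketch below is illustrative only; the constructor shape `new OpenAILLM(model, config)` and the option names are read off the compiled output, while the model id and the chosen values are placeholders.

```ts
// Illustrative sketch, not official docs: option names are taken from the compiled
// constructor above; the model id and the values passed here are placeholders.
import { OpenAILLM } from "@iqai/adk";

const llm = new OpenAILLM("gpt-4o-mini", {
  apiKey: process.env.OPENAI_API_KEY, // the bundled fallback when omitted
  defaultParams: {
    temperature: 0.7, // matches the bundled defaults shown above
    top_p: 1,
    frequency_penalty: 0,
    presence_penalty: 0,
  },
});
```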
@@ -5152,16 +5068,16 @@ var OpenAILLM = class extends BaseLLM {
5152
5068
  */
5153
5069
  convertResponse(response) {
5154
5070
  const result = new LLMResponse({
5155
- content: _optionalChain([response, 'access', _138 => _138.message, 'optionalAccess', _139 => _139.content]) || null,
5156
- role: _optionalChain([response, 'access', _140 => _140.message, 'optionalAccess', _141 => _141.role]) || "assistant"
5071
+ content: _optionalChain([response, 'access', _139 => _139.message, 'optionalAccess', _140 => _140.content]) || null,
5072
+ role: _optionalChain([response, 'access', _141 => _141.message, 'optionalAccess', _142 => _142.role]) || "assistant"
5157
5073
  });
5158
- if (_optionalChain([response, 'access', _142 => _142.message, 'optionalAccess', _143 => _143.function_call])) {
5074
+ if (_optionalChain([response, 'access', _143 => _143.message, 'optionalAccess', _144 => _144.function_call])) {
5159
5075
  result.function_call = {
5160
5076
  name: response.message.function_call.name,
5161
5077
  arguments: response.message.function_call.arguments
5162
5078
  };
5163
5079
  }
5164
- if (_optionalChain([response, 'access', _144 => _144.message, 'optionalAccess', _145 => _145.tool_calls])) {
5080
+ if (_optionalChain([response, 'access', _145 => _145.message, 'optionalAccess', _146 => _146.tool_calls])) {
5165
5081
  result.tool_calls = response.message.tool_calls.map((tool) => ({
5166
5082
  id: tool.id,
5167
5083
  function: {
@@ -5176,29 +5092,27 @@ var OpenAILLM = class extends BaseLLM {
5176
5092
  * Convert OpenAI streaming chunk to LLMResponse
5177
5093
  */
5178
5094
  convertChunk(chunk) {
5179
- if (process.env.DEBUG === "true") {
5180
- console.log(
5181
- `OpenAI: Converting chunk - delta: ${JSON.stringify(chunk.delta || {})}`
5182
- );
5183
- }
5184
- const content = _optionalChain([chunk, 'access', _146 => _146.delta, 'optionalAccess', _147 => _147.content]);
5095
+ debugLog(
5096
+ `[OpenAILLM]: Converting chunk - delta: ${JSON.stringify(chunk.delta || {})}`
5097
+ );
5098
+ const content = _optionalChain([chunk, 'access', _147 => _147.delta, 'optionalAccess', _148 => _148.content]);
5185
5099
  const result = new LLMResponse({
5186
5100
  content: content !== void 0 ? content : null,
5187
- role: _optionalChain([chunk, 'access', _148 => _148.delta, 'optionalAccess', _149 => _149.role]) || "assistant",
5101
+ role: _optionalChain([chunk, 'access', _149 => _149.delta, 'optionalAccess', _150 => _150.role]) || "assistant",
5188
5102
  is_partial: true
5189
5103
  });
5190
- if (_optionalChain([chunk, 'access', _150 => _150.delta, 'optionalAccess', _151 => _151.function_call])) {
5104
+ if (_optionalChain([chunk, 'access', _151 => _151.delta, 'optionalAccess', _152 => _152.function_call])) {
5191
5105
  result.function_call = {
5192
5106
  name: chunk.delta.function_call.name || "",
5193
5107
  arguments: chunk.delta.function_call.arguments || ""
5194
5108
  };
5195
5109
  }
5196
- if (_optionalChain([chunk, 'access', _152 => _152.delta, 'optionalAccess', _153 => _153.tool_calls])) {
5110
+ if (_optionalChain([chunk, 'access', _153 => _153.delta, 'optionalAccess', _154 => _154.tool_calls])) {
5197
5111
  result.tool_calls = chunk.delta.tool_calls.map((tool) => ({
5198
5112
  id: tool.id || "",
5199
5113
  function: {
5200
- name: _optionalChain([tool, 'access', _154 => _154.function, 'optionalAccess', _155 => _155.name]) || "",
5201
- arguments: _optionalChain([tool, 'access', _156 => _156.function, 'optionalAccess', _157 => _157.arguments]) || ""
5114
+ name: _optionalChain([tool, 'access', _155 => _155.function, 'optionalAccess', _156 => _156.name]) || "",
5115
+ arguments: _optionalChain([tool, 'access', _157 => _157.function, 'optionalAccess', _158 => _158.arguments]) || ""
5202
5116
  }
5203
5117
  }));
5204
5118
  }
@@ -5221,32 +5135,24 @@ var OpenAILLM = class extends BaseLLM {
5221
5135
  presence_penalty: _nullishCoalesce(llmRequest.config.presence_penalty, () => ( this.defaultParams.presence_penalty)),
5222
5136
  stream: shouldStream
5223
5137
  };
5224
- if (process.env.DEBUG === "true") {
5225
- console.log(
5226
- `OpenAI: Streaming mode ${shouldStream ? "enabled" : "disabled"}`
5227
- );
5228
- }
5138
+ debugLog(
5139
+ `[OpenAILLM] Request parameters - model: ${params.model}, messages: ${params.messages.length}, functions: ${params.tools ? params.tools.length : 0}, streaming: ${shouldStream}`
5140
+ );
5229
5141
  if (tools && tools.length > 0) {
5230
5142
  params.tools = tools;
5231
5143
  }
5232
5144
  try {
5233
5145
  if (shouldStream) {
5234
- if (process.env.DEBUG === "true") {
5235
- console.log("OpenAI: Starting streaming request");
5236
- }
5146
+ debugLog("[OpenAILLM] Starting streaming request");
5237
5147
  const streamResponse = await this.client.chat.completions.create(params);
5238
5148
  let partialFunctionCall;
5239
5149
  const partialToolCalls = /* @__PURE__ */ new Map();
5240
5150
  let accumulatedContent = "";
5241
5151
  const asyncIterable = streamResponse;
5242
- if (process.env.DEBUG === "true") {
5243
- console.log("OpenAI: Stream response received, processing chunks");
5244
- }
5152
+ debugLog("[OpenAILLM] Stream response received, processing chunks");
5245
5153
  for await (const chunk of asyncIterable) {
5246
5154
  if (!chunk.choices || chunk.choices.length === 0) {
5247
- if (process.env.DEBUG === "true") {
5248
- console.log("OpenAI: Empty chunk received, skipping");
5249
- }
5155
+ debugLog("[OpenAILLM] Empty chunk received, skipping");
5250
5156
  continue;
5251
5157
  }
5252
5158
  const choice = chunk.choices[0];
@@ -5254,14 +5160,12 @@ var OpenAILLM = class extends BaseLLM {
5254
5160
  if (responseChunk.content !== null) {
5255
5161
  accumulatedContent += responseChunk.content;
5256
5162
  }
5257
- if (process.env.DEBUG === "true") {
5258
- console.log(
5259
- `OpenAI: Chunk received - delta: "${_optionalChain([choice, 'access', _158 => _158.delta, 'optionalAccess', _159 => _159.content]) || ""}"`,
5260
- `responseChunk content: "${responseChunk.content || ""}"`,
5261
- `is_partial: ${responseChunk.is_partial}`,
5262
- `accumulated: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
5263
- );
5264
- }
5163
+ debugLog(
5164
+ `[OpenAILLM] Chunk received - delta: "${_optionalChain([choice, 'access', _159 => _159.delta, 'optionalAccess', _160 => _160.content]) || ""}"`,
5165
+ `responseChunk content: "${responseChunk.content || ""}"`,
5166
+ `is_partial: ${responseChunk.is_partial}`,
5167
+ `accumulated: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
5168
+ );
5265
5169
  if (responseChunk.function_call) {
5266
5170
  if (!partialFunctionCall) {
5267
5171
  partialFunctionCall = {
@@ -5286,37 +5190,27 @@ var OpenAILLM = class extends BaseLLM {
5286
5190
  }
5287
5191
  responseChunk.tool_calls = Array.from(partialToolCalls.values());
5288
5192
  }
5289
- if (process.env.DEBUG === "true") {
5290
- console.log("OpenAI: Yielding chunk to caller");
5291
- }
5193
+ debugLog("[OpenAILLM] Yielding chunk to caller");
5292
5194
  yield responseChunk;
5293
5195
  }
5294
5196
  if (accumulatedContent.length > 0) {
5295
- if (process.env.DEBUG === "true") {
5296
- console.log(
5297
- `OpenAI: Yielding final accumulated content: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
5298
- );
5299
- }
5197
+ debugLog(
5198
+ `[OpenAILLM] Yielding final accumulated content: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
5199
+ );
5300
5200
  yield new LLMResponse({
5301
5201
  content: accumulatedContent,
5302
5202
  role: "assistant",
5303
5203
  is_partial: false
5304
5204
  });
5305
5205
  }
5306
- if (process.env.DEBUG === "true") {
5307
- console.log("OpenAI: Finished processing all stream chunks");
5308
- }
5206
+ debugLog("[OpenAILLM] Finished processing all stream chunks");
5309
5207
  } else {
5310
- if (process.env.DEBUG === "true") {
5311
- console.log("OpenAI: Making non-streaming request");
5312
- }
5208
+ debugLog("[OpenAILLM] Making non-streaming request");
5313
5209
  const response = await this.client.chat.completions.create(params);
5314
5210
  if (!response.choices || response.choices.length === 0) {
5315
5211
  throw new Error("No response from OpenAI");
5316
5212
  }
5317
- if (process.env.DEBUG === "true") {
5318
- console.log("OpenAI: Non-streaming response received");
5319
- }
5213
+ debugLog("[OpenAILLM] Non-streaming response received");
5320
5214
  yield this.convertResponse(response.choices[0]);
5321
5215
  }
5322
5216
  } catch (error) {
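Editor's note: throughout this hunk the removed `if (process.env.DEBUG === "true") { console.log(...) }` guards are collapsed into single `debugLog(...)` calls tagged `[OpenAILLM]`, so the streaming path no longer repeats the environment check at every log site. The snippet below is a minimal sketch of opting back into that output: the removed guards keyed off `DEBUG === "true"`, and the assumption (not verified here) is that the shared helper still honors the same flag.

```ts
// Minimal sketch: re-enable the verbose [OpenAILLM] streaming logs by setting the
// same flag the removed inline guards checked. Assumption: debugLog keeps honoring it.
process.env.DEBUG = "true";
```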
@@ -5533,7 +5427,7 @@ var OAuth2Credential = class extends AuthCredential {
5533
5427
  "Cannot refresh token: no refresh token or refresh function"
5534
5428
  );
5535
5429
  }
5536
- const result = await _optionalChain([this, 'access', _160 => _160.refreshFunction, 'optionalCall', _161 => _161(this.refreshToken)]);
5430
+ const result = await _optionalChain([this, 'access', _161 => _161.refreshFunction, 'optionalCall', _162 => _162(this.refreshToken)]);
5537
5431
  if (!result) {
5538
5432
  throw new Error("Failed to refresh token");
5539
5433
  }
@@ -5587,7 +5481,7 @@ var AuthHandler = class {
5587
5481
  * Gets the authentication token
5588
5482
  */
5589
5483
  getToken() {
5590
- return _optionalChain([this, 'access', _162 => _162.credential, 'optionalAccess', _163 => _163.getToken, 'call', _164 => _164()]);
5484
+ return _optionalChain([this, 'access', _163 => _163.credential, 'optionalAccess', _164 => _164.getToken, 'call', _165 => _165()]);
5591
5485
  }
5592
5486
  /**
5593
5487
  * Gets headers for HTTP requests
@@ -5602,7 +5496,7 @@ var AuthHandler = class {
5602
5496
  * Refreshes the token if necessary
5603
5497
  */
5604
5498
  async refreshToken() {
5605
- if (_optionalChain([this, 'access', _165 => _165.credential, 'optionalAccess', _166 => _166.canRefresh, 'call', _167 => _167()])) {
5499
+ if (_optionalChain([this, 'access', _166 => _166.credential, 'optionalAccess', _167 => _167.canRefresh, 'call', _168 => _168()])) {
5606
5500
  await this.credential.refresh();
5607
5501
  }
5608
5502
  }
@@ -5828,7 +5722,7 @@ var InMemoryMemoryService = class {
5828
5722
  };
5829
5723
  const normalizedQuery = query.toLowerCase().trim();
5830
5724
  const queryTerms = normalizedQuery.split(/\s+/);
5831
- const sessionsToSearch = _optionalChain([options, 'optionalAccess', _168 => _168.sessionId]) ? this.sessions.has(options.sessionId) ? [this.sessions.get(options.sessionId)] : [] : Array.from(this.sessions.values());
5725
+ const sessionsToSearch = _optionalChain([options, 'optionalAccess', _169 => _169.sessionId]) ? this.sessions.has(options.sessionId) ? [this.sessions.get(options.sessionId)] : [] : Array.from(this.sessions.values());
5832
5726
  for (const session of sessionsToSearch) {
5833
5727
  const matchedEvents = [];
5834
5728
  const scores = [];
@@ -5854,7 +5748,7 @@ var InMemoryMemoryService = class {
5854
5748
  }
5855
5749
  }
5856
5750
  const score = queryTerms.length > 0 ? termMatches / queryTerms.length : 0;
5857
- if (_optionalChain([options, 'optionalAccess', _169 => _169.threshold]) !== void 0 && score < options.threshold) {
5751
+ if (_optionalChain([options, 'optionalAccess', _170 => _170.threshold]) !== void 0 && score < options.threshold) {
5858
5752
  continue;
5859
5753
  }
5860
5754
  if (score > 0) {
@@ -5874,7 +5768,7 @@ var InMemoryMemoryService = class {
5874
5768
  response.memories.sort(
5875
5769
  (a, b) => (_nullishCoalesce(b.relevanceScore, () => ( 0))) - (_nullishCoalesce(a.relevanceScore, () => ( 0)))
5876
5770
  );
5877
- if (_optionalChain([options, 'optionalAccess', _170 => _170.limit]) !== void 0 && options.limit > 0) {
5771
+ if (_optionalChain([options, 'optionalAccess', _171 => _171.limit]) !== void 0 && options.limit > 0) {
5878
5772
  response.memories = response.memories.slice(0, options.limit);
5879
5773
  }
5880
5774
  return response;
@@ -5903,7 +5797,8 @@ var InMemoryMemoryService = class {
5903
5797
  };
5904
5798
 
5905
5799
  // src/memory/persistent-memory-service.ts
5906
- var _fs = require('fs'); var _fs2 = _interopRequireDefault(_fs);
5800
+ init_debug();
5801
+ var _fs = require('fs'); var fs3 = _interopRequireWildcard(_fs);
5907
5802
 
5908
5803
  var PersistentMemoryService = class {
5909
5804
  /**
@@ -5925,8 +5820,8 @@ var PersistentMemoryService = class {
5925
5820
  this.inMemoryService = new InMemoryMemoryService();
5926
5821
  this.storageDir = config.storageDir;
5927
5822
  this.filePrefix = config.filePrefix || "memory";
5928
- if (config.createDir && !_fs2.default.existsSync(this.storageDir)) {
5929
- _fs2.default.mkdirSync(this.storageDir, { recursive: true });
5823
+ if (config.createDir && !fs3.default.existsSync(this.storageDir)) {
5824
+ fs3.default.mkdirSync(this.storageDir, { recursive: true });
5930
5825
  }
5931
5826
  this.loadMemoryFiles();
5932
5827
  }
@@ -5963,7 +5858,7 @@ var PersistentMemoryService = class {
5963
5858
  persistedAt: /* @__PURE__ */ new Date()
5964
5859
  }
5965
5860
  };
5966
- await _fs2.default.promises.writeFile(
5861
+ await fs3.default.promises.writeFile(
5967
5862
  filePath,
5968
5863
  JSON.stringify(sessionData, null, 2),
5969
5864
  "utf-8"
@@ -5981,22 +5876,22 @@ var PersistentMemoryService = class {
5981
5876
  * @returns The file path
5982
5877
  */
5983
5878
  getSessionFilePath(sessionId) {
5984
- return _path2.default.join(this.storageDir, `${this.filePrefix}-${sessionId}.json`);
5879
+ return path3.default.join(this.storageDir, `${this.filePrefix}-${sessionId}.json`);
5985
5880
  }
5986
5881
  /**
5987
5882
  * Loads all memory files from disk
5988
5883
  */
5989
5884
  loadMemoryFiles() {
5990
5885
  try {
5991
- if (!_fs2.default.existsSync(this.storageDir)) {
5886
+ if (!fs3.default.existsSync(this.storageDir)) {
5992
5887
  return;
5993
5888
  }
5994
- const files = _fs2.default.readdirSync(this.storageDir);
5889
+ const files = fs3.default.readdirSync(this.storageDir);
5995
5890
  for (const file of files) {
5996
5891
  if (file.startsWith(this.filePrefix) && file.endsWith(".json")) {
5997
5892
  try {
5998
- const filePath = _path2.default.join(this.storageDir, file);
5999
- const content = _fs2.default.readFileSync(filePath, "utf-8");
5893
+ const filePath = path3.default.join(this.storageDir, file);
5894
+ const content = fs3.default.readFileSync(filePath, "utf-8");
6000
5895
  const session = JSON.parse(content);
6001
5896
  session.createdAt = new Date(session.createdAt);
6002
5897
  session.updatedAt = new Date(session.updatedAt);
@@ -6006,11 +5901,9 @@ var PersistentMemoryService = class {
6006
5901
  }
6007
5902
  }
6008
5903
  }
6009
- if (process.env.DEBUG === "true") {
6010
- console.log(
6011
- `Loaded ${this.inMemoryService.getAllSessions().length} sessions from persistent storage`
6012
- );
6013
- }
5904
+ debugLog(
5905
+ `Loaded ${this.inMemoryService.getAllSessions().length} sessions from persistent storage`
5906
+ );
6014
5907
  } catch (error) {
6015
5908
  console.error("Error loading memory files:", error);
6016
5909
  }
@@ -6037,8 +5930,8 @@ var PersistentMemoryService = class {
6037
5930
  async deleteSession(sessionId) {
6038
5931
  const filePath = this.getSessionFilePath(sessionId);
6039
5932
  try {
6040
- if (_fs2.default.existsSync(filePath)) {
6041
- await _fs2.default.promises.unlink(filePath);
5933
+ if (fs3.default.existsSync(filePath)) {
5934
+ await fs3.default.promises.unlink(filePath);
6042
5935
  }
6043
5936
  this.inMemoryService.getSession(sessionId);
6044
5937
  } catch (error) {
@@ -6074,9 +5967,9 @@ __export(sessions_exports, {
6074
5967
  PgLiteSessionService: () => PgLiteSessionService,
6075
5968
  PostgresSessionService: () => PostgresSessionService,
6076
5969
  SessionState: () => SessionState,
5970
+ SqliteSessionService: () => SqliteSessionService,
6077
5971
  cloneSession: () => cloneSession,
6078
5972
  generateSessionId: () => generateSessionId,
6079
- sessionsSchema: () => sessionsSchema2,
6080
5973
  validateSession: () => validateSession
6081
5974
  });
6082
5975
 
@@ -6140,17 +6033,17 @@ var InMemorySessionService = class {
6140
6033
  let sessions = Array.from(this.sessions.values()).filter(
6141
6034
  (session) => session.userId === userId
6142
6035
  );
6143
- if (_optionalChain([options, 'optionalAccess', _171 => _171.createdAfter])) {
6036
+ if (_optionalChain([options, 'optionalAccess', _172 => _172.createdAfter])) {
6144
6037
  sessions = sessions.filter(
6145
6038
  (session) => session.createdAt >= options.createdAfter
6146
6039
  );
6147
6040
  }
6148
- if (_optionalChain([options, 'optionalAccess', _172 => _172.updatedAfter])) {
6041
+ if (_optionalChain([options, 'optionalAccess', _173 => _173.updatedAfter])) {
6149
6042
  sessions = sessions.filter(
6150
6043
  (session) => session.updatedAt >= options.updatedAfter
6151
6044
  );
6152
6045
  }
6153
- if (_optionalChain([options, 'optionalAccess', _173 => _173.metadataFilter])) {
6046
+ if (_optionalChain([options, 'optionalAccess', _174 => _174.metadataFilter])) {
6154
6047
  sessions = sessions.filter((session) => {
6155
6048
  for (const [key, value] of Object.entries(options.metadataFilter)) {
6156
6049
  if (session.metadata[key] !== value) {
@@ -6161,7 +6054,7 @@ var InMemorySessionService = class {
6161
6054
  });
6162
6055
  }
6163
6056
  sessions.sort((a, b) => b.updatedAt.getTime() - a.updatedAt.getTime());
6164
- if (_optionalChain([options, 'optionalAccess', _174 => _174.limit]) !== void 0 && options.limit > 0) {
6057
+ if (_optionalChain([options, 'optionalAccess', _175 => _175.limit]) !== void 0 && options.limit > 0) {
6165
6058
  sessions = sessions.slice(0, options.limit);
6166
6059
  }
6167
6060
  return sessions;
@@ -6196,7 +6089,7 @@ var InMemorySessionService = class {
6196
6089
  if (event.is_partial) {
6197
6090
  return event;
6198
6091
  }
6199
- if (_optionalChain([event, 'access', _175 => _175.actions, 'optionalAccess', _176 => _176.stateDelta])) {
6092
+ if (_optionalChain([event, 'access', _176 => _176.actions, 'optionalAccess', _177 => _177.stateDelta])) {
6200
6093
  for (const [key, value] of Object.entries(event.actions.stateDelta)) {
6201
6094
  if (key.startsWith("_temp_")) {
6202
6095
  continue;
@@ -6302,7 +6195,7 @@ var PostgresSessionService = class {
6302
6195
  }
6303
6196
  async listSessions(userId, options) {
6304
6197
  let query = this.db.select().from(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.userId, userId));
6305
- if (_optionalChain([options, 'optionalAccess', _177 => _177.limit]) !== void 0 && options.limit > 0) {
6198
+ if (_optionalChain([options, 'optionalAccess', _178 => _178.limit]) !== void 0 && options.limit > 0) {
6306
6199
  query = query.limit(options.limit);
6307
6200
  }
6308
6201
  const results = await query;
@@ -6329,12 +6222,12 @@ var PostgresSessionService = class {
6329
6222
  if (event.is_partial) {
6330
6223
  return event;
6331
6224
  }
6332
- if (_optionalChain([event, 'access', _178 => _178.actions, 'optionalAccess', _179 => _179.stateDelta])) {
6225
+ if (_optionalChain([event, 'access', _179 => _179.actions, 'optionalAccess', _180 => _180.stateDelta])) {
6333
6226
  for (const [key, value] of Object.entries(event.actions.stateDelta)) {
6334
6227
  if (key.startsWith("_temp_")) {
6335
6228
  continue;
6336
6229
  }
6337
- _optionalChain([session, 'access', _180 => _180.state, 'optionalAccess', _181 => _181.set, 'call', _182 => _182(key, value)]);
6230
+ _optionalChain([session, 'access', _181 => _181.state, 'optionalAccess', _182 => _182.set, 'call', _183 => _183(key, value)]);
6338
6231
  }
6339
6232
  }
6340
6233
  if (!session.events) {
@@ -6478,7 +6371,7 @@ var PgLiteSessionService = (_class12 = class {
6478
6371
  async listSessions(userId, options) {
6479
6372
  await this.ensureInitialized();
6480
6373
  let query = this.db.select().from(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.userId, userId));
6481
- if (_optionalChain([options, 'optionalAccess', _183 => _183.limit]) !== void 0 && options.limit > 0) {
6374
+ if (_optionalChain([options, 'optionalAccess', _184 => _184.limit]) !== void 0 && options.limit > 0) {
6482
6375
  query = query.limit(options.limit);
6483
6376
  }
6484
6377
  const results = await query;
@@ -6501,12 +6394,12 @@ var PgLiteSessionService = (_class12 = class {
6501
6394
  if (event.is_partial) {
6502
6395
  return event;
6503
6396
  }
6504
- if (_optionalChain([event, 'access', _184 => _184.actions, 'optionalAccess', _185 => _185.stateDelta])) {
6397
+ if (_optionalChain([event, 'access', _185 => _185.actions, 'optionalAccess', _186 => _186.stateDelta])) {
6505
6398
  for (const [key, value] of Object.entries(event.actions.stateDelta)) {
6506
6399
  if (key.startsWith("_temp_")) {
6507
6400
  continue;
6508
6401
  }
6509
- _optionalChain([session, 'access', _186 => _186.state, 'optionalAccess', _187 => _187.set, 'call', _188 => _188(key, value)]);
6402
+ _optionalChain([session, 'access', _187 => _187.state, 'optionalAccess', _188 => _188.set, 'call', _189 => _189(key, value)]);
6510
6403
  }
6511
6404
  }
6512
6405
  if (!session.events) {
@@ -6519,6 +6412,191 @@ var PgLiteSessionService = (_class12 = class {
6519
6412
  }
6520
6413
  }, _class12);
6521
6414
 
6415
+ // src/sessions/sqlite-session-service.ts
6416
+
6417
+
6418
+
6419
+
6420
+
6421
+ var _bettersqlite3 = require('drizzle-orm/better-sqlite3');
6422
+ var _sqlitecore = require('drizzle-orm/sqlite-core');
6423
+
6424
+ var sessionsSchema3 = _sqlitecore.sqliteTable.call(void 0, "sessions", {
6425
+ id: _sqlitecore.text.call(void 0, "id").primaryKey(),
6426
+ userId: _sqlitecore.text.call(void 0, "user_id").notNull(),
6427
+ messages: _sqlitecore.text.call(void 0, "messages", { mode: "json" }).default("[]").$type(),
6428
+ metadata: _sqlitecore.text.call(void 0, "metadata", { mode: "json" }).default("{}").$type(),
6429
+ createdAt: _sqlitecore.integer.call(void 0, "created_at", { mode: "timestamp" }).notNull(),
6430
+ updatedAt: _sqlitecore.integer.call(void 0, "updated_at", { mode: "timestamp" }).notNull(),
6431
+ state: _sqlitecore.text.call(void 0, "state", { mode: "json" }).default("{}").$type()
6432
+ });
6433
+ var SqliteSessionService = (_class13 = class {
6434
+
6435
+
6436
+ __init20() {this.initialized = false}
6437
+
6438
+ constructor(config) {;_class13.prototype.__init20.call(this);
6439
+ this.sqliteInstance = config.sqlite;
6440
+ const dbPath = this.sqliteInstance.name;
6441
+ if (dbPath && dbPath !== ":memory:") {
6442
+ const dbDir = path3.dirname(dbPath);
6443
+ if (!fs3.existsSync(dbDir)) {
6444
+ fs3.mkdirSync(dbDir, { recursive: true });
6445
+ }
6446
+ }
6447
+ this.db = _bettersqlite3.drizzle.call(void 0, config.sqlite, {
6448
+ schema: { sessions: sessionsSchema3 }
6449
+ });
6450
+ this.sessionsTable = sessionsSchema3;
6451
+ if (!config.skipTableCreation) {
6452
+ this.initializeDatabase().catch((error) => {
6453
+ console.error("Failed to initialize SQLite database:", error);
6454
+ });
6455
+ }
6456
+ }
6457
+ /**
6458
+ * Initialize the database by creating required tables if they don't exist
6459
+ */
6460
+ async initializeDatabase() {
6461
+ if (this.initialized) {
6462
+ return;
6463
+ }
6464
+ try {
6465
+ this.sqliteInstance.pragma("journal_mode = WAL");
6466
+ this.sqliteInstance.exec(`
6467
+ CREATE TABLE IF NOT EXISTS sessions (
6468
+ id TEXT PRIMARY KEY,
6469
+ user_id TEXT NOT NULL,
6470
+ messages TEXT DEFAULT '[]',
6471
+ metadata TEXT DEFAULT '{}',
6472
+ created_at INTEGER NOT NULL,
6473
+ updated_at INTEGER NOT NULL,
6474
+ state TEXT DEFAULT '{}'
6475
+ );
6476
+ `);
6477
+ this.sqliteInstance.exec(`
6478
+ CREATE INDEX IF NOT EXISTS idx_sessions_user_id ON sessions(user_id);
6479
+ `);
6480
+ this.initialized = true;
6481
+ } catch (error) {
6482
+ console.error("Error initializing SQLite database:", error);
6483
+ throw error;
6484
+ }
6485
+ }
6486
+ /**
6487
+ * Ensure database is initialized before any operation
6488
+ */
6489
+ async ensureInitialized() {
6490
+ if (!this.initialized) {
6491
+ await this.initializeDatabase();
6492
+ }
6493
+ }
6494
+ generateSessionId() {
6495
+ return `session-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;
6496
+ }
6497
+ async createSession(userId, metadata = {}) {
6498
+ await this.ensureInitialized();
6499
+ const sessionId = this.generateSessionId();
6500
+ const now = /* @__PURE__ */ new Date();
6501
+ const sessionState = new SessionState();
6502
+ const newSessionData = {
6503
+ id: sessionId,
6504
+ userId,
6505
+ messages: [],
6506
+ metadata,
6507
+ createdAt: now,
6508
+ updatedAt: now,
6509
+ state: sessionState.toObject()
6510
+ };
6511
+ const results = await this.db.insert(this.sessionsTable).values(newSessionData).returning();
6512
+ const result = results[0];
6513
+ if (!result) {
6514
+ throw new Error(
6515
+ "Failed to create session, no data returned from insert."
6516
+ );
6517
+ }
6518
+ return {
6519
+ id: result.id,
6520
+ userId: result.userId,
6521
+ messages: Array.isArray(result.messages) ? result.messages : [],
6522
+ metadata: result.metadata || {},
6523
+ state: SessionState.fromObject(result.state || {}),
6524
+ createdAt: result.createdAt,
6525
+ updatedAt: result.updatedAt
6526
+ };
6527
+ }
6528
+ async getSession(sessionId) {
6529
+ await this.ensureInitialized();
6530
+ const results = await this.db.select().from(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.id, sessionId)).limit(1);
6531
+ const sessionData = results[0];
6532
+ if (!sessionData) {
6533
+ return void 0;
6534
+ }
6535
+ return {
6536
+ id: sessionData.id,
6537
+ userId: sessionData.userId,
6538
+ messages: Array.isArray(sessionData.messages) ? sessionData.messages : [],
6539
+ metadata: sessionData.metadata || {},
6540
+ state: SessionState.fromObject(sessionData.state || {}),
6541
+ createdAt: sessionData.createdAt,
6542
+ updatedAt: sessionData.updatedAt
6543
+ };
6544
+ }
6545
+ async updateSession(session) {
6546
+ await this.ensureInitialized();
6547
+ const updateData = {
6548
+ userId: session.userId,
6549
+ messages: session.messages,
6550
+ metadata: session.metadata,
6551
+ updatedAt: /* @__PURE__ */ new Date(),
6552
+ state: session.state.toObject()
6553
+ };
6554
+ await this.db.update(this.sessionsTable).set(updateData).where(_drizzleorm.eq.call(void 0, this.sessionsTable.id, session.id));
6555
+ }
6556
+ async listSessions(userId, options) {
6557
+ await this.ensureInitialized();
6558
+ let query = this.db.select().from(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.userId, userId));
6559
+ if (_optionalChain([options, 'optionalAccess', _190 => _190.limit]) !== void 0 && options.limit > 0) {
6560
+ query = query.limit(options.limit);
6561
+ }
6562
+ const results = await query;
6563
+ return results.map((sessionData) => ({
6564
+ id: sessionData.id,
6565
+ userId: sessionData.userId,
6566
+ messages: Array.isArray(sessionData.messages) ? sessionData.messages : [],
6567
+ metadata: sessionData.metadata || {},
6568
+ state: SessionState.fromObject(sessionData.state || {}),
6569
+ createdAt: sessionData.createdAt,
6570
+ updatedAt: sessionData.updatedAt
6571
+ }));
6572
+ }
6573
+ async deleteSession(sessionId) {
6574
+ await this.ensureInitialized();
6575
+ await this.db.delete(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.id, sessionId));
6576
+ }
6577
+ async appendEvent(session, event) {
6578
+ await this.ensureInitialized();
6579
+ if (event.is_partial) {
6580
+ return event;
6581
+ }
6582
+ if (_optionalChain([event, 'access', _191 => _191.actions, 'optionalAccess', _192 => _192.stateDelta])) {
6583
+ for (const [key, value] of Object.entries(event.actions.stateDelta)) {
6584
+ if (key.startsWith("_temp_")) {
6585
+ continue;
6586
+ }
6587
+ _optionalChain([session, 'access', _193 => _193.state, 'optionalAccess', _194 => _194.set, 'call', _195 => _195(key, value)]);
6588
+ }
6589
+ }
6590
+ if (!session.events) {
6591
+ session.events = [];
6592
+ }
6593
+ session.events.push(event);
6594
+ session.updatedAt = /* @__PURE__ */ new Date();
6595
+ await this.updateSession(session);
6596
+ return event;
6597
+ }
6598
+ }, _class13);
6599
+
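Editor's note: the added block above is the new `src/sessions/sqlite-session-service.ts` module. `SqliteSessionService` wraps a `better-sqlite3` handle with `drizzle-orm/better-sqlite3`, enables WAL mode, and lazily creates a `sessions` table plus a `user_id` index unless `skipTableCreation` is passed. The usage sketch below is not official documentation: the constructor option names (`sqlite`, `skipTableCreation`) and the method names are read directly from the compiled class, while the `better-sqlite3` import, the database path, and the metadata shape are assumptions.

```ts
// Usage sketch under stated assumptions, not an official example.
// Option names (sqlite, skipTableCreation) and methods mirror the compiled class above.
import Database from "better-sqlite3";
import { SqliteSessionService } from "@iqai/adk";

async function main() {
  // ":memory:" is also handled by the constructor; a file path persists sessions across runs.
  const sqlite = new Database("./sessions.db");
  const service = new SqliteSessionService({ sqlite });

  const session = await service.createSession("user-123", { channel: "cli" });
  console.log("created", session.id, session.createdAt);

  const mine = await service.listSessions("user-123", { limit: 10 });
  console.log("sessions for user-123:", mine.length);

  await service.deleteSession(session.id);
}

main().catch(console.error);
```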
6522
6600
  // src/sessions/session-util.ts
6523
6601
  function generateSessionId() {
6524
6602
  return `session-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;
@@ -6553,7 +6631,7 @@ function cloneSession(session) {
6553
6631
  var _uuid = require('uuid');
6554
6632
 
6555
6633
  // src/events/event-actions.ts
6556
- var EventActions = (_class13 = class {
6634
+ var EventActions = (_class14 = class {
6557
6635
  /**
6558
6636
  * If true, it won't call model to summarize function response.
6559
6637
  * Only used for function_response event.
@@ -6562,12 +6640,12 @@ var EventActions = (_class13 = class {
6562
6640
  /**
6563
6641
  * Indicates that the event is updating the state with the given delta.
6564
6642
  */
6565
- __init20() {this.stateDelta = {}}
6643
+ __init21() {this.stateDelta = {}}
6566
6644
  /**
6567
6645
  * Indicates that the event is updating an artifact. key is the filename,
6568
6646
  * value is the version.
6569
6647
  */
6570
- __init21() {this.artifactDelta = {}}
6648
+ __init22() {this.artifactDelta = {}}
6571
6649
  /**
6572
6650
  * If set, the event transfers to the specified agent.
6573
6651
  */
@@ -6579,21 +6657,21 @@ var EventActions = (_class13 = class {
6579
6657
  /**
6580
6658
  * Constructor for EventActions
6581
6659
  */
6582
- constructor(options = {}) {;_class13.prototype.__init20.call(this);_class13.prototype.__init21.call(this);
6660
+ constructor(options = {}) {;_class14.prototype.__init21.call(this);_class14.prototype.__init22.call(this);
6583
6661
  this.skipSummarization = options.skipSummarization;
6584
6662
  this.stateDelta = options.stateDelta || {};
6585
6663
  this.artifactDelta = options.artifactDelta || {};
6586
6664
  this.transferToAgent = options.transferToAgent;
6587
6665
  this.escalate = options.escalate;
6588
6666
  }
6589
- }, _class13);
6667
+ }, _class14);
6590
6668
 
6591
6669
  // src/events/event.ts
6592
- var Event = (_class14 = class _Event extends LLMResponse {
6670
+ var Event = (_class15 = class _Event extends LLMResponse {
6593
6671
  /**
6594
6672
  * The invocation ID of the event.
6595
6673
  */
6596
- __init22() {this.invocationId = ""}
6674
+ __init23() {this.invocationId = ""}
6597
6675
  /**
6598
6676
  * 'user' or the name of the agent, indicating who appended the event to the session.
6599
6677
  */
@@ -6601,7 +6679,7 @@ var Event = (_class14 = class _Event extends LLMResponse {
6601
6679
  /**
6602
6680
  * The actions taken by the agent.
6603
6681
  */
6604
- __init23() {this.actions = new EventActions()}
6682
+ __init24() {this.actions = new EventActions()}
6605
6683
  /**
6606
6684
  * Set of ids of the long running function calls.
6607
6685
  * Agent client will know from this field about which function call is long running.
@@ -6619,7 +6697,7 @@ var Event = (_class14 = class _Event extends LLMResponse {
6619
6697
  /**
6620
6698
  * The unique identifier of the event.
6621
6699
  */
6622
- __init24() {this.id = ""}
6700
+ __init25() {this.id = ""}
6623
6701
  /**
6624
6702
  * The timestamp of the event.
6625
6703
  */
@@ -6649,7 +6727,7 @@ var Event = (_class14 = class _Event extends LLMResponse {
6649
6727
  role,
6650
6728
  is_partial: partial,
6651
6729
  raw_response
6652
- });_class14.prototype.__init22.call(this);_class14.prototype.__init23.call(this);_class14.prototype.__init24.call(this);;
6730
+ });_class15.prototype.__init23.call(this);_class15.prototype.__init24.call(this);_class15.prototype.__init25.call(this);;
6653
6731
  this.invocationId = invocationId;
6654
6732
  this.author = author;
6655
6733
  this.actions = actions;
@@ -6683,7 +6761,7 @@ var Event = (_class14 = class _Event extends LLMResponse {
6683
6761
  static newId() {
6684
6762
  return _uuid.v4.call(void 0, ).substring(0, 8);
6685
6763
  }
6686
- }, _class14);
6764
+ }, _class15);
6687
6765
 
6688
6766
  // src/runners.ts
6689
6767
  var Runner = class {
@@ -6904,4 +6982,4 @@ var VERSION = "0.1.0";
6904
6982
 
6905
6983
 
6906
6984
 
6907
- exports.Agent = Agent; exports.Agents = agents_exports; exports.AnthropicLLM = AnthropicLLM; exports.AnthropicLLMConnection = AnthropicLLMConnection; exports.ApiKeyCredential = ApiKeyCredential; exports.ApiKeyScheme = ApiKeyScheme; exports.AuthConfig = AuthConfig; exports.AuthCredential = AuthCredential; exports.AuthCredentialType = AuthCredentialType; exports.AuthHandler = AuthHandler; exports.AuthScheme = AuthScheme; exports.AuthSchemeType = AuthSchemeType; exports.BaseAgent = BaseAgent; exports.BaseLLM = BaseLLM; exports.BaseLLMConnection = BaseLLMConnection; exports.BaseTool = BaseTool; exports.BasicAuthCredential = BasicAuthCredential; exports.BearerTokenCredential = BearerTokenCredential; exports.ExitLoopTool = ExitLoopTool; exports.FileOperationsTool = FileOperationsTool; exports.FunctionTool = FunctionTool; exports.GetUserChoiceTool = GetUserChoiceTool; exports.GoogleLLM = GoogleLLM; exports.GoogleSearch = GoogleSearch; exports.HttpRequestTool = HttpRequestTool; exports.HttpScheme = HttpScheme; exports.InMemoryMemoryService = InMemoryMemoryService; exports.InMemoryRunner = InMemoryRunner; exports.InMemorySessionService = InMemorySessionService; exports.InvocationContext = InvocationContext; exports.LLMRegistry = LLMRegistry; exports.LLMRequest = LLMRequest; exports.LLMResponse = LLMResponse; exports.LangGraphAgent = LangGraphAgent; exports.LoadMemoryTool = LoadMemoryTool; exports.LoopAgent = LoopAgent; exports.McpError = McpError; exports.McpErrorType = McpErrorType; exports.McpToolset = McpToolset; exports.Memory = memory_exports; exports.Models = models_exports; exports.OAuth2Credential = OAuth2Credential; exports.OAuth2Scheme = OAuth2Scheme; exports.OpenAILLM = OpenAILLM; exports.OpenAILLMConnection = OpenAILLMConnection; exports.OpenIdConnectScheme = OpenIdConnectScheme; exports.ParallelAgent = ParallelAgent; exports.PersistentMemoryService = PersistentMemoryService; exports.PgLiteSessionService = PgLiteSessionService; exports.PostgresSessionService = PostgresSessionService; exports.RunConfig = RunConfig; exports.Runner = Runner; exports.SequentialAgent = SequentialAgent; exports.SessionState = SessionState; exports.Sessions = sessions_exports; exports.StreamingMode = StreamingMode; exports.ToolContext = ToolContext; exports.Tools = tools_exports; exports.TransferToAgentTool = TransferToAgentTool; exports.UserInteractionTool = UserInteractionTool; exports.VERSION = VERSION; exports.adkToMcpToolType = adkToMcpToolType; exports.buildFunctionDeclaration = buildFunctionDeclaration; exports.cloneSession = cloneSession; exports.createFunctionTool = createFunctionTool; exports.generateSessionId = generateSessionId; exports.getMcpTools = getMcpTools; exports.jsonSchemaToDeclaration = jsonSchemaToDeclaration; exports.mcpSchemaToParameters = mcpSchemaToParameters; exports.normalizeJsonSchema = normalizeJsonSchema; exports.registerProviders = registerProviders; exports.sessionsSchema = sessionsSchema2; exports.validateSession = validateSession;
6985
+ exports.Agent = Agent; exports.Agents = agents_exports; exports.AnthropicLLM = AnthropicLLM; exports.AnthropicLLMConnection = AnthropicLLMConnection; exports.ApiKeyCredential = ApiKeyCredential; exports.ApiKeyScheme = ApiKeyScheme; exports.AuthConfig = AuthConfig; exports.AuthCredential = AuthCredential; exports.AuthCredentialType = AuthCredentialType; exports.AuthHandler = AuthHandler; exports.AuthScheme = AuthScheme; exports.AuthSchemeType = AuthSchemeType; exports.BaseAgent = BaseAgent; exports.BaseLLM = BaseLLM; exports.BaseLLMConnection = BaseLLMConnection; exports.BaseTool = BaseTool; exports.BasicAuthCredential = BasicAuthCredential; exports.BearerTokenCredential = BearerTokenCredential; exports.ExitLoopTool = ExitLoopTool; exports.FileOperationsTool = FileOperationsTool; exports.FunctionTool = FunctionTool; exports.GetUserChoiceTool = GetUserChoiceTool; exports.GoogleLLM = GoogleLLM; exports.GoogleSearch = GoogleSearch; exports.HttpRequestTool = HttpRequestTool; exports.HttpScheme = HttpScheme; exports.InMemoryMemoryService = InMemoryMemoryService; exports.InMemoryRunner = InMemoryRunner; exports.InMemorySessionService = InMemorySessionService; exports.InvocationContext = InvocationContext; exports.LLMRegistry = LLMRegistry; exports.LLMRequest = LLMRequest; exports.LLMResponse = LLMResponse; exports.LangGraphAgent = LangGraphAgent; exports.LoadMemoryTool = LoadMemoryTool; exports.LoopAgent = LoopAgent; exports.McpError = McpError; exports.McpErrorType = McpErrorType; exports.McpToolset = McpToolset; exports.Memory = memory_exports; exports.Models = models_exports; exports.OAuth2Credential = OAuth2Credential; exports.OAuth2Scheme = OAuth2Scheme; exports.OpenAILLM = OpenAILLM; exports.OpenAILLMConnection = OpenAILLMConnection; exports.OpenIdConnectScheme = OpenIdConnectScheme; exports.ParallelAgent = ParallelAgent; exports.PersistentMemoryService = PersistentMemoryService; exports.PgLiteSessionService = PgLiteSessionService; exports.PostgresSessionService = PostgresSessionService; exports.RunConfig = RunConfig; exports.Runner = Runner; exports.SequentialAgent = SequentialAgent; exports.SessionState = SessionState; exports.Sessions = sessions_exports; exports.SqliteSessionService = SqliteSessionService; exports.StreamingMode = StreamingMode; exports.ToolContext = ToolContext; exports.Tools = tools_exports; exports.TransferToAgentTool = TransferToAgentTool; exports.UserInteractionTool = UserInteractionTool; exports.VERSION = VERSION; exports.adkToMcpToolType = adkToMcpToolType; exports.buildFunctionDeclaration = buildFunctionDeclaration; exports.cloneSession = cloneSession; exports.createFunctionTool = createFunctionTool; exports.generateSessionId = generateSessionId; exports.getMcpTools = getMcpTools; exports.jsonSchemaToDeclaration = jsonSchemaToDeclaration; exports.mcpSchemaToParameters = mcpSchemaToParameters; exports.normalizeJsonSchema = normalizeJsonSchema; exports.registerProviders = registerProviders; exports.validateSession = validateSession;
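Editor's note on the public surface of `dist/index.js`: `SqliteSessionService` is now exported from the package root and, as the earlier `__export(sessions_exports, ...)` hunk shows, from the `Sessions` namespace as well, while `sessionsSchema` is dropped from both, so code importing it from the root entry point will no longer find it in 0.0.6. A small hedged check of the two access paths:

```ts
// Both names below appear in the new exports line; the equality check is the
// expected outcome, since the namespace getter points at the same class.
import { SqliteSessionService, Sessions } from "@iqai/adk";

console.log(SqliteSessionService === Sessions.SqliteSessionService); // expected: true
```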