@iqai/adk 0.0.4 → 0.0.6

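Most of the churn in dist/index.js comes from two source-level changes: the scattered if (process.env.DEBUG === "true") { console.log(...) } blocks are replaced by a shared debugLog helper from the new src/helpers/debug.ts module, and the bundle is recompiled with a class-field transform that hoists field initializers into __initN methods attached to _classN references. Below is a minimal sketch of the new helper, reconstructed from the compiled output further down; the exported names and logic are visible in the bundle, while the exact source formatting and type annotations are assumptions.

// Hedged reconstruction of src/helpers/debug.ts (names and behavior match the
// compiled bundle; formatting and type annotations are assumptions).
export const isDebugEnabled = (): boolean =>
  process.env.NODE_ENV === "development" || process.env.DEBUG === "true";

export const debugLog = (message: string, ...args: unknown[]): void => {
  // Timestamp every debug message and only emit when debugging is enabled.
  const time = new Date().toLocaleTimeString();
  if (isDebugEnabled()) {
    console.log(`[DEBUG] ${time}: ${message}`, ...args);
  }
};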
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
1
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var __defProp = Object.defineProperty;
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15;var __defProp = Object.defineProperty;
2
2
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
3
3
  var __getOwnPropNames = Object.getOwnPropertyNames;
4
4
  var __hasOwnProp = Object.prototype.hasOwnProperty;
@@ -19,24 +19,60 @@ var __copyProps = (to, from, except, desc) => {
19
19
  };
20
20
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
21
21
 
22
+ // src/helpers/debug.ts
23
+ var isDebugEnabled, debugLog;
24
+ var init_debug = __esm({
25
+ "src/helpers/debug.ts"() {
26
+ isDebugEnabled = () => {
27
+ return process.env.NODE_ENV === "development" || process.env.DEBUG === "true";
28
+ };
29
+ debugLog = (message, ...args) => {
30
+ const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
31
+ if (isDebugEnabled()) {
32
+ console.log(`[DEBUG] ${time}: ${message}`, ...args);
33
+ }
34
+ };
35
+ }
36
+ });
37
+
22
38
  // src/tools/base/base-tool.ts
23
39
  var BaseTool;
24
40
  var init_base_tool = __esm({
25
41
  "src/tools/base/base-tool.ts"() {
26
- "use strict";
27
- BaseTool = exports.BaseTool = class {
42
+ init_debug();
43
+ BaseTool = exports.BaseTool = (_class = class {
44
+ /**
45
+ * Name of the tool
46
+ */
47
+
48
+ /**
49
+ * Description of the tool
50
+ */
51
+
52
+ /**
53
+ * Whether the tool is a long running operation
54
+ */
55
+
56
+ /**
57
+ * Whether the tool execution should be retried on failure
58
+ */
59
+
60
+ /**
61
+ * Maximum retry attempts
62
+ */
63
+
64
+ /**
65
+ * Base delay for retry in ms (will be used with exponential backoff)
66
+ */
67
+ __init2() {this.baseRetryDelay = 1e3}
68
+ /**
69
+ * Maximum delay for retry in ms
70
+ */
71
+ __init3() {this.maxRetryDelay = 1e4}
28
72
  /**
29
73
  * Constructor for BaseTool
30
74
  */
31
- constructor(config) {
32
- /**
33
- * Base delay for retry in ms (will be used with exponential backoff)
34
- */
35
- this.baseRetryDelay = 1e3;
36
- /**
37
- * Maximum delay for retry in ms
38
- */
39
- this.maxRetryDelay = 1e4;
75
+ constructor(config) {;_class.prototype.__init2.call(this);_class.prototype.__init3.call(this);
40
76
  this.name = config.name;
41
77
  this.description = config.description;
42
78
  this.isLongRunning = config.isLongRunning || false;
@@ -93,11 +129,9 @@ var init_base_tool = __esm({
93
129
  while (attempts <= (this.shouldRetryOnFailure ? this.maxRetryAttempts : 0)) {
94
130
  try {
95
131
  if (attempts > 0) {
96
- if (process.env.DEBUG === "true") {
97
- console.log(
98
- `Retrying tool ${this.name} (attempt ${attempts} of ${this.maxRetryAttempts})...`
99
- );
100
- }
132
+ debugLog(
133
+ `[BaseTool] Retrying tool ${this.name} (attempt ${attempts} of ${this.maxRetryAttempts})...`
134
+ );
101
135
  const delay = Math.min(
102
136
  this.baseRetryDelay * 2 ** (attempts - 1) + Math.random() * 1e3,
103
137
  this.maxRetryDelay
@@ -118,7 +152,7 @@ var init_base_tool = __esm({
118
152
  tool: this.name
119
153
  };
120
154
  }
121
- };
155
+ }, _class);
122
156
  }
123
157
  });
124
158
 
@@ -231,7 +265,6 @@ function extractJSDocParams(funcStr) {
231
265
  }
232
266
  var init_function_utils = __esm({
233
267
  "src/tools/function/function-utils.ts"() {
234
- "use strict";
235
268
  }
236
269
  });
237
270
 
@@ -243,10 +276,11 @@ __export(function_tool_exports, {
243
276
  var FunctionTool;
244
277
  var init_function_tool = __esm({
245
278
  "src/tools/function/function-tool.ts"() {
246
- "use strict";
247
279
  init_base_tool();
248
280
  init_function_utils();
249
- FunctionTool = exports.FunctionTool = class extends BaseTool {
281
+ FunctionTool = exports.FunctionTool = (_class2 = class extends BaseTool {
282
+
283
+ __init4() {this.mandatoryArgs = []}
250
284
  /**
251
285
  * Creates a new FunctionTool wrapping the provided function.
252
286
  *
@@ -262,8 +296,7 @@ var init_function_tool = __esm({
262
296
  isLongRunning: _optionalChain([options, 'optionalAccess', _8 => _8.isLongRunning]) || false,
263
297
  shouldRetryOnFailure: _optionalChain([options, 'optionalAccess', _9 => _9.shouldRetryOnFailure]) || false,
264
298
  maxRetryAttempts: _optionalChain([options, 'optionalAccess', _10 => _10.maxRetryAttempts]) || 3
265
- });
266
- this.mandatoryArgs = [];
299
+ });_class2.prototype.__init4.call(this);;
267
300
  this.func = func;
268
301
  this.mandatoryArgs = this.getMandatoryArgs(func);
269
302
  }
@@ -339,7 +372,7 @@ You could retry calling this tool, but it is IMPORTANT for you to provide all th
339
372
  getMissingMandatoryArgs(args) {
340
373
  return this.mandatoryArgs.filter((arg) => !(arg in args));
341
374
  }
342
- };
375
+ }, _class2);
343
376
  }
344
377
  });
345
378
 
@@ -359,6 +392,25 @@ __export(agents_exports, {
359
392
 
360
393
  // src/agents/base-agent.ts
361
394
  var BaseAgent = class {
395
+ /**
396
+ * The agent's name
397
+ * Agent name must be a unique identifier within the agent tree
398
+ */
399
+
400
+ /**
401
+ * Description about the agent's capability
402
+ * The LLM uses this to determine whether to delegate control to the agent
403
+ */
404
+
405
+ /**
406
+ * The parent agent of this agent
407
+ * Note that an agent can ONLY be added as sub-agent once
408
+ */
409
+
410
+ /**
411
+ * The sub-agents of this agent
412
+ */
413
+
362
414
  /**
363
415
  * Constructs a new BaseAgent
364
416
  */
@@ -422,8 +474,16 @@ var BaseAgent = class {
422
474
  }
423
475
  };
424
476
 
477
+ // src/agents/llm-agent.ts
478
+ init_debug();
479
+
425
480
  // src/models/llm-registry.ts
426
- var _LLMRegistry = class _LLMRegistry {
481
+ init_debug();
482
+ var LLMRegistry = (_class3 = class _LLMRegistry {
483
+ /**
484
+ * Map of model name regex to LLM class
485
+ */
486
+ static __initStatic() {this.llmRegistry = /* @__PURE__ */ new Map()}
427
487
  /**
428
488
  * Creates a new LLM instance
429
489
  *
@@ -475,22 +535,23 @@ var _LLMRegistry = class _LLMRegistry {
475
535
  * Logs all registered models for debugging
476
536
  */
477
537
  static logRegisteredModels() {
478
- if (process.env.DEBUG === "true") {
479
- console.log("Registered LLM models:");
480
- for (const [regex, llmClass] of _LLMRegistry.llmRegistry.entries()) {
481
- console.log(` - Pattern: ${regex.toString()}`);
482
- }
483
- }
538
+ debugLog(
539
+ "Registered LLM models:",
540
+ [..._LLMRegistry.llmRegistry.entries()].map(([regex]) => regex.toString())
541
+ );
484
542
  }
485
- };
486
- /**
487
- * Map of model name regex to LLM class
488
- */
489
- _LLMRegistry.llmRegistry = /* @__PURE__ */ new Map();
490
- var LLMRegistry = _LLMRegistry;
543
+ }, _class3.__initStatic(), _class3);
491
544
 
492
545
  // src/models/llm-request.ts
493
546
  var LLMRequest = class {
547
+ /**
548
+ * The conversation history
549
+ */
550
+
551
+ /**
552
+ * LLM configuration parameters
553
+ */
554
+
494
555
  constructor(data) {
495
556
  this.messages = data.messages;
496
557
  this.config = data.config || {};
@@ -498,19 +559,35 @@ var LLMRequest = class {
498
559
  };
499
560
 
500
561
  // src/tools/tool-context.ts
501
- var ToolContext = class {
562
+ var ToolContext = (_class4 = class {
563
+ /**
564
+ * The parent invocation context
565
+ */
566
+
567
+ /**
568
+ * Authentication handler for the tool
569
+ */
570
+
571
+ /**
572
+ * Additional parameters for the tool
573
+ */
574
+
575
+ /**
576
+ * Tool name
577
+ */
578
+ __init5() {this.toolName = ""}
579
+ /**
580
+ * Tool ID
581
+ */
582
+ __init6() {this.toolId = ""}
583
+ /**
584
+ * Variables stored in the context
585
+ */
586
+
502
587
  /**
503
588
  * Constructor for ToolContext
504
589
  */
505
- constructor(options) {
506
- /**
507
- * Tool name
508
- */
509
- this.toolName = "";
510
- /**
511
- * Tool ID
512
- */
513
- this.toolId = "";
590
+ constructor(options) {;_class4.prototype.__init5.call(this);_class4.prototype.__init6.call(this);
514
591
  this.invocationContext = options.invocationContext;
515
592
  this.auth = options.auth;
516
593
  this.parameters = options.parameters || {};
@@ -576,7 +653,7 @@ var ToolContext = class {
576
653
  async searchMemory(query, options) {
577
654
  return this.invocationContext.searchMemory(query, options);
578
655
  }
579
- };
656
+ }, _class4);
580
657
 
581
658
  // src/agents/run-config.ts
582
659
  var StreamingMode = /* @__PURE__ */ ((StreamingMode2) => {
@@ -586,6 +663,36 @@ var StreamingMode = /* @__PURE__ */ ((StreamingMode2) => {
586
663
  return StreamingMode2;
587
664
  })(StreamingMode || {});
588
665
  var RunConfig = class {
666
+ /**
667
+ * Speech configuration for the live agent
668
+ */
669
+
670
+ /**
671
+ * The output modalities
672
+ */
673
+
674
+ /**
675
+ * Whether to save input blobs as artifacts
676
+ */
677
+
678
+ /**
679
+ * Whether to support Compositional Function Calling
680
+ * Only applicable for StreamingMode.SSE
681
+ */
682
+
683
+ /**
684
+ * Streaming mode
685
+ */
686
+
687
+ /**
688
+ * Whether to stream the response
689
+ * This is derived from streamingMode and used by LLM implementations
690
+ */
691
+
692
+ /**
693
+ * Output audio transcription configuration
694
+ */
695
+
589
696
  constructor(config) {
590
697
  this.speechConfig = _optionalChain([config, 'optionalAccess', _11 => _11.speechConfig]);
591
698
  this.responseModalities = _optionalChain([config, 'optionalAccess', _12 => _12.responseModalities]);
@@ -598,15 +705,51 @@ var RunConfig = class {
598
705
  };
599
706
 
600
707
  // src/agents/invocation-context.ts
601
- var InvocationContext = class _InvocationContext {
708
+ var InvocationContext = (_class5 = class _InvocationContext {
709
+ /**
710
+ * Unique session ID for the current conversation
711
+ */
712
+
713
+ /**
714
+ * Current conversation history
715
+ */
716
+
717
+ /**
718
+ * Run configuration
719
+ */
720
+
721
+ /**
722
+ * User identifier associated with the session
723
+ */
724
+
725
+ /**
726
+ * Application name (for multi-app environments)
727
+ */
728
+
729
+ /**
730
+ * Memory service for long-term storage
731
+ */
732
+
733
+ /**
734
+ * Session service for session management
735
+ */
736
+
737
+ /**
738
+ * Additional context metadata
739
+ */
740
+
741
+ /**
742
+ * Variables stored in the context
743
+ */
744
+
745
+ /**
746
+ * In-memory storage for node execution results
747
+ */
748
+ __init7() {this.memory = /* @__PURE__ */ new Map()}
602
749
  /**
603
750
  * Constructor for InvocationContext
604
751
  */
605
- constructor(options = {}) {
606
- /**
607
- * In-memory storage for node execution results
608
- */
609
- this.memory = /* @__PURE__ */ new Map();
752
+ constructor(options = {}) {;_class5.prototype.__init7.call(this);
610
753
  this.sessionId = options.sessionId || this.generateSessionId();
611
754
  this.messages = options.messages || [];
612
755
  this.config = options.config || new RunConfig();
@@ -709,10 +852,58 @@ var InvocationContext = class _InvocationContext {
709
852
  };
710
853
  return await this.memoryService.searchMemory(query, searchOptions);
711
854
  }
712
- };
855
+ }, _class5);
713
856
 
714
857
  // src/agents/llm-agent.ts
715
858
  var Agent = class extends BaseAgent {
859
+ /**
860
+ * The LLM model to use
861
+ */
862
+
863
+ /**
864
+ * The LLM instance
865
+ */
866
+
867
+ /**
868
+ * Instructions for the agent
869
+ */
870
+
871
+ /**
872
+ * Tools available to the agent
873
+ */
874
+
875
+ /**
876
+ * Maximum number of tool execution steps to prevent infinite loops
877
+ */
878
+
879
+ /**
880
+ * Memory service for long-term storage and retrieval
881
+ */
882
+
883
+ /**
884
+ * Session service for managing conversations
885
+ */
886
+
887
+ /**
888
+ * User ID for the session
889
+ */
890
+
891
+ /**
892
+ * Application name
893
+ */
894
+
895
+ /**
896
+ * Whether to automatically augment prompts with relevant memory
897
+ */
898
+
899
+ /**
900
+ * The maximum number of memory items to include in augmentation
901
+ */
902
+
903
+ /**
904
+ * The minimum relevance score for memory augmentation (0-1)
905
+ */
906
+
716
907
  /**
717
908
  * Constructor for Agent
718
909
  */
@@ -745,9 +936,7 @@ var Agent = class extends BaseAgent {
745
936
  */
746
937
  async executeTool(toolCall, context) {
747
938
  const { name, arguments: argsString } = toolCall.function;
748
- if (process.env.DEBUG === "true") {
749
- console.log(`Executing tool: ${name}`);
750
- }
939
+ debugLog(`Executing tool: ${name}`);
751
940
  const tool = this.findTool(name);
752
941
  if (!tool) {
753
942
  console.warn(`Tool '${name}' not found`);
@@ -765,9 +954,7 @@ var Agent = class extends BaseAgent {
765
954
  toolContext.toolName = name;
766
955
  toolContext.toolId = toolCall.id;
767
956
  const result = await tool.runAsync(args, toolContext);
768
- if (process.env.DEBUG === "true") {
769
- console.log(`Tool ${name} execution complete`);
770
- }
957
+ debugLog(`Tool ${name} execution complete`);
771
958
  return {
772
959
  name,
773
960
  result: typeof result === "string" ? result : JSON.stringify(result)
@@ -936,10 +1123,7 @@ ${relevantInfo.join("\n\n")}`
936
1123
  let stepCount = 0;
937
1124
  while (stepCount < this.maxToolExecutionSteps) {
938
1125
  stepCount++;
939
- if (process.env.DEBUG === "true") {
940
- console.log(`
941
- [Agent] Step ${stepCount}: Thinking...`);
942
- }
1126
+ debugLog(`Step ${stepCount}: Thinking...`);
943
1127
  const llmRequest = new LLMRequest({
944
1128
  messages: context.messages,
945
1129
  config: {
@@ -956,9 +1140,7 @@ ${relevantInfo.join("\n\n")}`
956
1140
  throw new Error("No response from LLM");
957
1141
  }
958
1142
  if (currentResponse.tool_calls && currentResponse.tool_calls.length > 0) {
959
- if (process.env.DEBUG === "true") {
960
- console.log("[Agent] Executing tools...");
961
- }
1143
+ debugLog(`Tool calls: ${JSON.stringify(currentResponse.tool_calls)}`);
962
1144
  context.addMessage({
963
1145
  role: "assistant",
964
1146
  content: currentResponse.content || "",
@@ -977,9 +1159,7 @@ ${relevantInfo.join("\n\n")}`
977
1159
  });
978
1160
  }
979
1161
  } else {
980
- if (process.env.DEBUG === "true") {
981
- console.log("[Agent] No tool calls, finishing...");
982
- }
1162
+ debugLog("[Agent] No tool calls, finishing...");
983
1163
  context.addMessage({
984
1164
  role: "assistant",
985
1165
  content: currentResponse.content || ""
@@ -1017,10 +1197,7 @@ ${relevantInfo.join("\n\n")}`
1017
1197
  let stepCount = 0;
1018
1198
  let hadToolCalls = false;
1019
1199
  while (stepCount < this.maxToolExecutionSteps) {
1020
- if (process.env.DEBUG === "true") {
1021
- console.log(`
1022
- [Agent] Step ${stepCount + 1}: Thinking...`);
1023
- }
1200
+ debugLog(`[Agent] Step ${stepCount}: Thinking...`);
1024
1201
  const toolDeclarations = this.tools.map((tool) => tool.getDeclaration()).filter((declaration) => declaration !== null);
1025
1202
  const request = {
1026
1203
  messages: context.messages,
@@ -1049,14 +1226,10 @@ ${relevantInfo.join("\n\n")}`
1049
1226
  function_call: finalResponse.function_call
1050
1227
  });
1051
1228
  if (!hadToolCalls) {
1052
- if (process.env.DEBUG === "true") {
1053
- console.log("[Agent] No tool calls, finishing...");
1054
- }
1229
+ debugLog("[Agent] No tool calls, finishing...");
1055
1230
  break;
1056
1231
  }
1057
- if (process.env.DEBUG === "true") {
1058
- console.log("[Agent] Executing tools...");
1059
- }
1232
+ debugLog(`[Agent] Step ${stepCount + 1}: Executing tools...`);
1060
1233
  stepCount++;
1061
1234
  if (finalResponse.function_call) {
1062
1235
  const toolCall = {
@@ -1073,11 +1246,9 @@ ${relevantInfo.join("\n\n")}`
1073
1246
  content: JSON.stringify(result.result)
1074
1247
  });
1075
1248
  } else if (finalResponse.tool_calls && finalResponse.tool_calls.length > 0) {
1076
- if (process.env.DEBUG === "true") {
1077
- console.log(
1078
- `[Agent] Executing ${finalResponse.tool_calls.length} tool(s)...`
1079
- );
1080
- }
1249
+ debugLog(
1250
+ `[Agent] Step ${stepCount + 1}: Executing ${finalResponse.tool_calls.length} tool(s)...`
1251
+ );
1081
1252
  context.messages.pop();
1082
1253
  context.addMessage({
1083
1254
  role: "assistant",
@@ -1102,6 +1273,7 @@ ${relevantInfo.join("\n\n")}`
1102
1273
  };
1103
1274
 
1104
1275
  // src/agents/sequential-agent.ts
1276
+ init_debug();
1105
1277
  var SequentialAgent = class extends BaseAgent {
1106
1278
  /**
1107
1279
  * Constructor for SequentialAgent
@@ -1122,11 +1294,9 @@ var SequentialAgent = class extends BaseAgent {
1122
1294
  * Executes sub-agents sequentially, passing output from one to the next
1123
1295
  */
1124
1296
  async run(options) {
1125
- if (process.env.DEBUG === "true") {
1126
- console.log(
1127
- `[SequentialAgent] Running ${this.subAgents.length} sub-agents in sequence`
1128
- );
1129
- }
1297
+ debugLog(
1298
+ `[SequentialAgent] Running ${this.subAgents.length} sub-agents in sequence`
1299
+ );
1130
1300
  if (this.subAgents.length === 0) {
1131
1301
  return {
1132
1302
  content: "No sub-agents defined for sequential execution.",
@@ -1142,11 +1312,9 @@ var SequentialAgent = class extends BaseAgent {
1142
1312
  let finalResponse = null;
1143
1313
  for (let i = 0; i < this.subAgents.length; i++) {
1144
1314
  const agent = this.subAgents[i];
1145
- if (process.env.DEBUG === "true") {
1146
- console.log(
1147
- `[SequentialAgent] Running sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
1148
- );
1149
- }
1315
+ debugLog(
1316
+ `[SequentialAgent] Running sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
1317
+ );
1150
1318
  try {
1151
1319
  const response = await agent.run({
1152
1320
  messages: currentMessages,
@@ -1202,11 +1370,9 @@ var SequentialAgent = class extends BaseAgent {
1202
1370
  * Streams responses from each sub-agent in sequence
1203
1371
  */
1204
1372
  async *runStreaming(options) {
1205
- if (process.env.DEBUG === "true") {
1206
- console.log(
1207
- `[SequentialAgent] Streaming ${this.subAgents.length} sub-agents in sequence`
1208
- );
1209
- }
1373
+ debugLog(
1374
+ `[SequentialAgent] Streaming ${this.subAgents.length} sub-agents in sequence`
1375
+ );
1210
1376
  if (this.subAgents.length === 0) {
1211
1377
  yield {
1212
1378
  content: "No sub-agents defined for sequential execution.",
@@ -1222,11 +1388,9 @@ var SequentialAgent = class extends BaseAgent {
1222
1388
  const currentMessages = [...options.messages];
1223
1389
  for (let i = 0; i < this.subAgents.length; i++) {
1224
1390
  const agent = this.subAgents[i];
1225
- if (process.env.DEBUG === "true") {
1226
- console.log(
1227
- `[SequentialAgent] Streaming sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
1228
- );
1229
- }
1391
+ debugLog(
1392
+ `[SequentialAgent] Streaming sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
1393
+ );
1230
1394
  try {
1231
1395
  const streamGenerator = agent.runStreaming({
1232
1396
  messages: currentMessages,
@@ -1279,6 +1443,7 @@ var SequentialAgent = class extends BaseAgent {
1279
1443
  };
1280
1444
 
1281
1445
  // src/agents/parallel-agent.ts
1446
+ init_debug();
1282
1447
  var ParallelAgent = class extends BaseAgent {
1283
1448
  /**
1284
1449
  * Constructor for ParallelAgent
@@ -1299,11 +1464,9 @@ var ParallelAgent = class extends BaseAgent {
1299
1464
  * Executes all sub-agents in parallel
1300
1465
  */
1301
1466
  async run(options) {
1302
- if (process.env.DEBUG === "true") {
1303
- console.log(
1304
- `[ParallelAgent] Running ${this.subAgents.length} sub-agents in parallel`
1305
- );
1306
- }
1467
+ debugLog(
1468
+ `[ParallelAgent] Running ${this.subAgents.length} sub-agents in parallel`
1469
+ );
1307
1470
  if (this.subAgents.length === 0) {
1308
1471
  return {
1309
1472
  content: "No sub-agents defined for parallel execution.",
@@ -1346,11 +1509,9 @@ ${result.content || "No content"}
1346
1509
  * Collects streaming responses from all sub-agents
1347
1510
  */
1348
1511
  async *runStreaming(options) {
1349
- if (process.env.DEBUG === "true") {
1350
- console.log(
1351
- `[ParallelAgent] Streaming ${this.subAgents.length} sub-agents in parallel`
1352
- );
1353
- }
1512
+ debugLog(
1513
+ `[ParallelAgent] Streaming ${this.subAgents.length} sub-agents in parallel`
1514
+ );
1354
1515
  if (this.subAgents.length === 0) {
1355
1516
  yield {
1356
1517
  content: "No sub-agents defined for parallel execution.",
@@ -1416,7 +1577,20 @@ ${response.content || "No content"}
1416
1577
  };
1417
1578
 
1418
1579
  // src/agents/loop-agent.ts
1580
+ init_debug();
1419
1581
  var LoopAgent = class extends BaseAgent {
1582
+ /**
1583
+ * Maximum number of iterations to prevent infinite loops
1584
+ */
1585
+
1586
+ /**
1587
+ * Agent that decides whether to continue the loop
1588
+ */
1589
+
1590
+ /**
1591
+ * Custom condition check function
1592
+ */
1593
+
1420
1594
  /**
1421
1595
  * Constructor for LoopAgent
1422
1596
  */
@@ -1444,28 +1618,20 @@ var LoopAgent = class extends BaseAgent {
1444
1618
  */
1445
1619
  async shouldContinue(response, iterationCount, messages, config) {
1446
1620
  if (iterationCount >= this.maxIterations) {
1447
- if (process.env.DEBUG === "true") {
1448
- console.log(
1449
- `[LoopAgent] Maximum iterations (${this.maxIterations}) reached. Stopping loop.`
1450
- );
1451
- }
1621
+ debugLog(
1622
+ `[LoopAgent] Maximum iterations (${this.maxIterations}) reached. Stopping loop.`
1623
+ );
1452
1624
  return false;
1453
1625
  }
1454
1626
  if (this.conditionCheck) {
1455
1627
  const shouldContinue = await this.conditionCheck(response);
1456
- if (process.env.DEBUG === "true") {
1457
- console.log(
1458
- `[LoopAgent] Custom condition check result: ${shouldContinue}`
1459
- );
1460
- }
1628
+ debugLog(`[LoopAgent] Custom condition check result: ${shouldContinue}`);
1461
1629
  return shouldContinue;
1462
1630
  }
1463
1631
  if (this.conditionAgent) {
1464
- if (process.env.DEBUG === "true") {
1465
- console.log(
1466
- `[LoopAgent] Using condition agent ${this.conditionAgent.name} to check loop condition`
1467
- );
1468
- }
1632
+ debugLog(
1633
+ `[LoopAgent] Using condition agent ${this.conditionAgent.name} to check loop condition`
1634
+ );
1469
1635
  const conditionMessages = [
1470
1636
  ...messages,
1471
1637
  {
@@ -1484,11 +1650,9 @@ var LoopAgent = class extends BaseAgent {
1484
1650
  });
1485
1651
  const content = _optionalChain([conditionResponse, 'access', _21 => _21.content, 'optionalAccess', _22 => _22.toLowerCase, 'call', _23 => _23()]) || "";
1486
1652
  const shouldContinue = content.includes("yes") && !content.includes("no");
1487
- if (process.env.DEBUG === "true") {
1488
- console.log(
1489
- `[LoopAgent] Condition agent result: ${shouldContinue ? "Continue loop" : "Stop loop"}`
1490
- );
1491
- }
1653
+ debugLog(
1654
+ `[LoopAgent] Condition agent result: ${shouldContinue ? "Continue loop" : "Stop loop"}`
1655
+ );
1492
1656
  return shouldContinue;
1493
1657
  } catch (error) {
1494
1658
  console.error("[LoopAgent] Error in condition agent:", error);
@@ -1502,11 +1666,9 @@ var LoopAgent = class extends BaseAgent {
1502
1666
  * Executes the sub-agent in a loop until the condition is met
1503
1667
  */
1504
1668
  async run(options) {
1505
- if (process.env.DEBUG === "true") {
1506
- console.log(
1507
- `[LoopAgent] Starting loop with max ${this.maxIterations} iterations`
1508
- );
1509
- }
1669
+ debugLog(
1670
+ `[LoopAgent] Starting loop with max ${this.maxIterations} iterations`
1671
+ );
1510
1672
  if (this.subAgents.length === 0) {
1511
1673
  return {
1512
1674
  content: "No sub-agent defined for loop execution.",
@@ -1520,11 +1682,9 @@ var LoopAgent = class extends BaseAgent {
1520
1682
  let shouldContinueLoop = true;
1521
1683
  while (shouldContinueLoop && iterationCount < this.maxIterations) {
1522
1684
  iterationCount++;
1523
- if (process.env.DEBUG === "true") {
1524
- console.log(
1525
- `[LoopAgent] Running iteration ${iterationCount}/${this.maxIterations}`
1526
- );
1527
- }
1685
+ debugLog(
1686
+ `[LoopAgent] Running iteration ${iterationCount}/${this.maxIterations}`
1687
+ );
1528
1688
  try {
1529
1689
  const response = await subAgent.run({
1530
1690
  messages: currentMessages,
@@ -1572,11 +1732,9 @@ ${lastResponse.content || ""}`,
1572
1732
  * Runs the agent with streaming support
1573
1733
  */
1574
1734
  async *runStreaming(options) {
1575
- if (process.env.DEBUG === "true") {
1576
- console.log(
1577
- `[LoopAgent] Starting loop with max ${this.maxIterations} iterations (streaming)`
1578
- );
1579
- }
1735
+ debugLog(
1736
+ `[LoopAgent] Starting loop with max ${this.maxIterations} iterations (streaming)`
1737
+ );
1580
1738
  if (this.subAgents.length === 0) {
1581
1739
  yield {
1582
1740
  content: "No sub-agent defined for loop execution.",
@@ -1595,11 +1753,9 @@ ${lastResponse.content || ""}`,
1595
1753
  };
1596
1754
  while (shouldContinueLoop && iterationCount < this.maxIterations) {
1597
1755
  iterationCount++;
1598
- if (process.env.DEBUG === "true") {
1599
- console.log(
1600
- `[LoopAgent] Running iteration ${iterationCount}/${this.maxIterations} (streaming)`
1601
- );
1602
- }
1756
+ debugLog(
1757
+ `[LoopAgent] Running iteration ${iterationCount}/${this.maxIterations} (streaming)`
1758
+ );
1603
1759
  yield {
1604
1760
  content: `Running iteration ${iterationCount}/${this.maxIterations}...`,
1605
1761
  role: "assistant",
@@ -1623,11 +1779,9 @@ ${lastResponse.content || ""}`,
1623
1779
  }
1624
1780
  }
1625
1781
  if (!lastChunk) {
1626
- if (process.env.DEBUG === "true") {
1627
- console.warn(
1628
- `[LoopAgent] No complete chunk received from iteration ${iterationCount}`
1629
- );
1630
- }
1782
+ debugLog(
1783
+ `[LoopAgent] No complete chunk received from iteration ${iterationCount}`
1784
+ );
1631
1785
  shouldContinueLoop = false;
1632
1786
  continue;
1633
1787
  }
@@ -1653,9 +1807,8 @@ ${lastResponse.content || ""}`,
1653
1807
  };
1654
1808
  }
1655
1809
  } catch (error) {
1656
- console.error(
1657
- `[LoopAgent] Error in loop iteration ${iterationCount}:`,
1658
- error
1810
+ debugLog(
1811
+ `[LoopAgent] Error in loop iteration ${iterationCount}: ${error instanceof Error ? error.message : String(error)}`
1659
1812
  );
1660
1813
  yield {
1661
1814
  content: `Error in loop iteration ${iterationCount}: ${error instanceof Error ? error.message : String(error)}`,
@@ -1672,7 +1825,24 @@ ${lastResponse.content || ""}`,
1672
1825
  };
1673
1826
 
1674
1827
  // src/agents/lang-graph-agent.ts
1675
- var LangGraphAgent = class extends BaseAgent {
1828
+ init_debug();
1829
+ var LangGraphAgent = (_class6 = class extends BaseAgent {
1830
+ /**
1831
+ * Graph nodes (agents and their connections)
1832
+ */
1833
+
1834
+ /**
1835
+ * Root node to start execution from
1836
+ */
1837
+
1838
+ /**
1839
+ * Maximum number of steps to prevent infinite loops
1840
+ */
1841
+
1842
+ /**
1843
+ * Results from node executions
1844
+ */
1845
+ __init8() {this.results = []}
1676
1846
  /**
1677
1847
  * Constructor for LangGraphAgent
1678
1848
  */
@@ -1680,11 +1850,7 @@ var LangGraphAgent = class extends BaseAgent {
1680
1850
  super({
1681
1851
  name: config.name,
1682
1852
  description: config.description
1683
- });
1684
- /**
1685
- * Results from node executions
1686
- */
1687
- this.results = [];
1853
+ });_class6.prototype.__init8.call(this);;
1688
1854
  this.nodes = /* @__PURE__ */ new Map();
1689
1855
  for (const node of config.nodes) {
1690
1856
  if (this.nodes.has(node.name)) {
@@ -1757,11 +1923,9 @@ var LangGraphAgent = class extends BaseAgent {
1757
1923
  if (targetNode.condition) {
1758
1924
  const shouldExecute = await targetNode.condition(result, context);
1759
1925
  if (!shouldExecute) {
1760
- if (process.env.DEBUG === "true") {
1761
- console.log(
1762
- `[LangGraphAgent] Skipping node "${targetName}" due to condition`
1763
- );
1764
- }
1926
+ debugLog(
1927
+ `[LangGraphAgent] Skipping node "${targetName}" due to condition`
1928
+ );
1765
1929
  continue;
1766
1930
  }
1767
1931
  }
@@ -1786,11 +1950,9 @@ var LangGraphAgent = class extends BaseAgent {
1786
1950
  };
1787
1951
  const shouldExecute = await node.condition(mockResponse, mockContext);
1788
1952
  if (!shouldExecute) {
1789
- if (process.env.DEBUG === "true") {
1790
- console.log(
1791
- `[LangGraphAgent] Skipping node "${targetName}" due to condition`
1792
- );
1793
- }
1953
+ debugLog(
1954
+ `[LangGraphAgent] Skipping node "${targetName}" due to condition`
1955
+ );
1794
1956
  }
1795
1957
  return { shouldExecute };
1796
1958
  }
@@ -1803,11 +1965,9 @@ var LangGraphAgent = class extends BaseAgent {
1803
1965
  messages: options.messages,
1804
1966
  config: options.config
1805
1967
  });
1806
- if (process.env.DEBUG === "true") {
1807
- console.log(
1808
- `[LangGraphAgent] Starting graph execution from root node "${this.rootNode}"`
1809
- );
1810
- }
1968
+ debugLog(
1969
+ `[LangGraphAgent] Starting graph execution from root node "${this.rootNode}"`
1970
+ );
1811
1971
  if (this.nodes.size === 0) {
1812
1972
  return {
1813
1973
  content: "No nodes defined in the graph.",
@@ -1827,11 +1987,9 @@ var LangGraphAgent = class extends BaseAgent {
1827
1987
  while (nodesToExecute.length > 0 && stepCount < this.maxSteps) {
1828
1988
  stepCount++;
1829
1989
  const { node, messages } = nodesToExecute.shift();
1830
- if (process.env.DEBUG === "true") {
1831
- console.log(
1832
- `[LangGraphAgent] Step ${stepCount}: Executing node "${node.name}"`
1833
- );
1834
- }
1990
+ debugLog(
1991
+ `[LangGraphAgent] Step ${stepCount}: Executing node "${node.name}"`
1992
+ );
1835
1993
  executedNodes.push(node.name);
1836
1994
  try {
1837
1995
  const result = await node.agent.run({
@@ -1903,11 +2061,9 @@ var LangGraphAgent = class extends BaseAgent {
1903
2061
  messages: options.messages,
1904
2062
  config: options.config
1905
2063
  });
1906
- if (process.env.DEBUG === "true") {
1907
- console.log(
1908
- `[LangGraphAgent] Starting graph execution from root node "${this.rootNode}" (streaming)`
1909
- );
1910
- }
2064
+ debugLog(
2065
+ `[LangGraphAgent] Starting graph execution from root node "${this.rootNode}" (streaming)`
2066
+ );
1911
2067
  if (this.nodes.size === 0) {
1912
2068
  yield {
1913
2069
  content: "No nodes defined in the graph.",
@@ -1934,11 +2090,9 @@ var LangGraphAgent = class extends BaseAgent {
1934
2090
  while (nodesToExecute.length > 0 && stepCount < this.maxSteps) {
1935
2091
  stepCount++;
1936
2092
  const { node, messages } = nodesToExecute.shift();
1937
- if (process.env.DEBUG === "true") {
1938
- console.log(
1939
- `[LangGraphAgent] Step ${stepCount}: Executing node "${node.name}" (streaming)`
1940
- );
1941
- }
2093
+ debugLog(
2094
+ `[LangGraphAgent] Step ${stepCount}: Executing node "${node.name}" (streaming)`
2095
+ );
1942
2096
  executedNodes.push(node.name);
1943
2097
  try {
1944
2098
  const result = await node.agent.run({
@@ -2010,7 +2164,7 @@ Node output: ${this.extractTextContent(result.content)}` : ""}`,
2010
2164
  };
2011
2165
  }
2012
2166
  }
2013
- };
2167
+ }, _class6);
2014
2168
 
2015
2169
  // src/tools/index.ts
2016
2170
  var tools_exports = {};
@@ -2052,6 +2206,7 @@ function createFunctionTool(func, options) {
2052
2206
  init_function_utils();
2053
2207
 
2054
2208
  // src/tools/common/google-search.ts
2209
+ init_debug();
2055
2210
  init_base_tool();
2056
2211
  var GoogleSearch = class extends BaseTool {
2057
2212
  /**
@@ -2092,9 +2247,7 @@ var GoogleSearch = class extends BaseTool {
2092
2247
  * This is a simplified implementation that doesn't actually search, just returns mock results
2093
2248
  */
2094
2249
  async runAsync(args, _context) {
2095
- if (process.env.DEBUG === "true") {
2096
- console.log(`Executing Google search for: ${args.query}`);
2097
- }
2250
+ debugLog(`[GoogleSearch] Executing Google search for: ${args.query}`);
2098
2251
  return {
2099
2252
  results: [
2100
2253
  {
@@ -2234,8 +2387,9 @@ var HttpRequestTool = class extends BaseTool {
2234
2387
  // src/tools/common/file-operations-tool.ts
2235
2388
  init_base_tool();
2236
2389
  var _promises = require('fs/promises'); var _promises2 = _interopRequireDefault(_promises);
2237
- var _path = require('path'); var _path2 = _interopRequireDefault(_path);
2390
+ var _path = require('path'); var path3 = _interopRequireWildcard(_path);
2238
2391
  var FileOperationsTool = class extends BaseTool {
2392
+
2239
2393
  constructor(options) {
2240
2394
  super({
2241
2395
  name: "file_operations",
@@ -2329,14 +2483,14 @@ var FileOperationsTool = class extends BaseTool {
2329
2483
  * Resolve a file path relative to the base path
2330
2484
  */
2331
2485
  resolvePath(filepath) {
2332
- return _path2.default.isAbsolute(filepath) ? filepath : _path2.default.resolve(this.basePath, filepath);
2486
+ return path3.default.isAbsolute(filepath) ? filepath : path3.default.resolve(this.basePath, filepath);
2333
2487
  }
2334
2488
  /**
2335
2489
  * Validate that a path is within the base path for security
2336
2490
  */
2337
2491
  validatePath(filepath) {
2338
- const normalizedPath = _path2.default.normalize(filepath);
2339
- const normalizedBasePath = _path2.default.normalize(this.basePath);
2492
+ const normalizedPath = path3.default.normalize(filepath);
2493
+ const normalizedBasePath = path3.default.normalize(this.basePath);
2340
2494
  if (!normalizedPath.startsWith(normalizedBasePath)) {
2341
2495
  throw new Error(
2342
2496
  `Access denied: Can't access paths outside the base directory`
@@ -2365,7 +2519,7 @@ var FileOperationsTool = class extends BaseTool {
2365
2519
  */
2366
2520
  async writeFile(filepath, content, encoding) {
2367
2521
  try {
2368
- const dir = _path2.default.dirname(filepath);
2522
+ const dir = path3.default.dirname(filepath);
2369
2523
  await _promises2.default.mkdir(dir, { recursive: true });
2370
2524
  await _promises2.default.writeFile(filepath, content, { encoding });
2371
2525
  return {
@@ -2383,7 +2537,7 @@ var FileOperationsTool = class extends BaseTool {
2383
2537
  */
2384
2538
  async appendFile(filepath, content, encoding) {
2385
2539
  try {
2386
- const dir = _path2.default.dirname(filepath);
2540
+ const dir = path3.default.dirname(filepath);
2387
2541
  await _promises2.default.mkdir(dir, { recursive: true });
2388
2542
  await _promises2.default.appendFile(filepath, content, { encoding });
2389
2543
  return {
@@ -2437,7 +2591,7 @@ var FileOperationsTool = class extends BaseTool {
2437
2591
  const entries = await _promises2.default.readdir(dirpath, { withFileTypes: true });
2438
2592
  const results = await Promise.all(
2439
2593
  entries.map(async (entry) => {
2440
- const entryPath = _path2.default.join(dirpath, entry.name);
2594
+ const entryPath = path3.default.join(dirpath, entry.name);
2441
2595
  const stats = await _promises2.default.stat(entryPath);
2442
2596
  return {
2443
2597
  name: entry.name,
@@ -2556,6 +2710,7 @@ var UserInteractionTool = class extends BaseTool {
2556
2710
  };
2557
2711
 
2558
2712
  // src/tools/common/exit-loop-tool.ts
2713
+ init_debug();
2559
2714
  init_base_tool();
2560
2715
  var ExitLoopTool = class extends BaseTool {
2561
2716
  /**
@@ -2585,9 +2740,7 @@ var ExitLoopTool = class extends BaseTool {
2585
2740
  * Execute the exit loop action
2586
2741
  */
2587
2742
  async runAsync(_args, context) {
2588
- if (process.env.DEBUG === "true") {
2589
- console.log("Executing exit loop tool");
2590
- }
2743
+ debugLog("[ExitLoopTool] Executing exit loop tool");
2591
2744
  if (context.actions) {
2592
2745
  context.actions.escalate = true;
2593
2746
  } else {
@@ -2602,6 +2755,7 @@ var ExitLoopTool = class extends BaseTool {
2602
2755
  };
2603
2756
 
2604
2757
  // src/tools/common/get-user-choice-tool.ts
2758
+ init_debug();
2605
2759
  init_base_tool();
2606
2760
  var GetUserChoiceTool = class extends BaseTool {
2607
2761
  /**
@@ -2646,13 +2800,13 @@ var GetUserChoiceTool = class extends BaseTool {
2646
2800
  * and the actual choice will be provided asynchronously
2647
2801
  */
2648
2802
  async runAsync(args, context) {
2649
- if (process.env.DEBUG === "true") {
2650
- console.log(
2651
- `Executing get_user_choice with options: ${args.options.join(", ")}`
2652
- );
2653
- if (args.question) {
2654
- console.log(`Question: ${args.question}`);
2655
- }
2803
+ debugLog(
2804
+ `[GetUserChoiceTool] Executing get_user_choice with options: ${args.options.join(
2805
+ ", "
2806
+ )}`
2807
+ );
2808
+ if (args.question) {
2809
+ debugLog(`[GetUserChoiceTool] Question: ${args.question}`);
2656
2810
  }
2657
2811
  if (context.actions) {
2658
2812
  context.actions.skip_summarization = true;
@@ -2666,6 +2820,7 @@ var GetUserChoiceTool = class extends BaseTool {
2666
2820
  };
2667
2821
 
2668
2822
  // src/tools/common/transfer-to-agent-tool.ts
2823
+ init_debug();
2669
2824
  init_base_tool();
2670
2825
  var TransferToAgentTool = class extends BaseTool {
2671
2826
  /**
@@ -2700,9 +2855,9 @@ var TransferToAgentTool = class extends BaseTool {
2700
2855
  * Execute the transfer to agent action
2701
2856
  */
2702
2857
  async runAsync(args, context) {
2703
- if (process.env.DEBUG === "true") {
2704
- console.log(`Executing transfer to agent: ${args.agent_name}`);
2705
- }
2858
+ debugLog(
2859
+ `[TransferToAgentTool] Executing transfer to agent: ${args.agent_name}`
2860
+ );
2706
2861
  if (context.actions) {
2707
2862
  context.actions.transfer_to_agent = args.agent_name;
2708
2863
  } else {
@@ -2717,6 +2872,7 @@ var TransferToAgentTool = class extends BaseTool {
2717
2872
  };
2718
2873
 
2719
2874
  // src/tools/common/load-memory-tool.ts
2875
+ init_debug();
2720
2876
  init_base_tool();
2721
2877
  var LoadMemoryTool = class extends BaseTool {
2722
2878
  /**
@@ -2751,9 +2907,9 @@ var LoadMemoryTool = class extends BaseTool {
2751
2907
  * Execute the memory loading action
2752
2908
  */
2753
2909
  async runAsync(args, context) {
2754
- if (process.env.DEBUG === "true") {
2755
- console.log(`Executing load_memory with query: ${args.query}`);
2756
- }
2910
+ debugLog(
2911
+ `[LoadMemoryTool] Executing load_memory with query: ${args.query}`
2912
+ );
2757
2913
  if (!context.memoryService) {
2758
2914
  return {
2759
2915
  error: "Memory service is not available",
@@ -2791,6 +2947,8 @@ var McpErrorType = /* @__PURE__ */ ((McpErrorType2) => {
2791
2947
  return McpErrorType2;
2792
2948
  })(McpErrorType || {});
2793
2949
  var McpError = class extends Error {
2950
+
2951
+
2794
2952
  constructor(message, type, originalError) {
2795
2953
  super(message);
2796
2954
  this.name = "McpError";
@@ -2828,11 +2986,12 @@ function withRetry(fn, instance, reinitMethod, maxRetries = 1) {
2828
2986
  }
2829
2987
 
2830
2988
  // src/tools/mcp/client.ts
2831
- var McpClientService = class {
2832
- constructor(config) {
2833
- this.client = null;
2834
- this.transport = null;
2835
- this.isClosing = false;
2989
+ var McpClientService = (_class7 = class {
2990
+
2991
+ __init9() {this.client = null}
2992
+ __init10() {this.transport = null}
2993
+ __init11() {this.isClosing = false}
2994
+ constructor(config) {;_class7.prototype.__init9.call(this);_class7.prototype.__init10.call(this);_class7.prototype.__init11.call(this);
2836
2995
  this.config = config;
2837
2996
  }
2838
2997
  /**
@@ -3033,9 +3192,10 @@ var McpClientService = class {
3033
3192
  isConnected() {
3034
3193
  return !!this.client && !this.isClosing;
3035
3194
  }
3036
- };
3195
+ }, _class7);
3037
3196
 
3038
3197
  // src/tools/mcp/create-tool.ts
3198
+ init_debug();
3039
3199
  init_base_tool();
3040
3200
 
3041
3201
  // src/tools/mcp/schema-conversion.ts
@@ -3234,7 +3394,10 @@ async function createTool(mcpTool, client) {
3234
3394
  throw error;
3235
3395
  }
3236
3396
  }
3237
- var McpToolAdapter = class extends BaseTool {
3397
+ var McpToolAdapter = (_class8 = class extends BaseTool {
3398
+
3399
+
3400
+ __init12() {this.clientService = null}
3238
3401
  constructor(mcpTool, client) {
3239
3402
  const metadata = mcpTool.metadata || {};
3240
3403
  super({
@@ -3243,8 +3406,7 @@ var McpToolAdapter = class extends BaseTool {
3243
3406
  isLongRunning: _nullishCoalesce(metadata.isLongRunning, () => ( false)),
3244
3407
  shouldRetryOnFailure: _nullishCoalesce(metadata.shouldRetryOnFailure, () => ( false)),
3245
3408
  maxRetryAttempts: _nullishCoalesce(metadata.maxRetryAttempts, () => ( 3))
3246
- });
3247
- this.clientService = null;
3409
+ });_class8.prototype.__init12.call(this);;
3248
3410
  this.mcpTool = mcpTool;
3249
3411
  this.client = client;
3250
3412
  if (client.reinitialize && typeof client.reinitialize === "function") {
@@ -3268,9 +3430,10 @@ var McpToolAdapter = class extends BaseTool {
3268
3430
  }
3269
3431
  }
3270
3432
  async runAsync(args, _context) {
3271
- if (process.env.DEBUG === "true") {
3272
- console.log(`Executing MCP tool ${this.name} with args:`, args);
3273
- }
3433
+ debugLog(
3434
+ `[McpToolAdapter] Executing MCP tool ${this.name} with args:`,
3435
+ args
3436
+ );
3274
3437
  try {
3275
3438
  if (typeof this.mcpTool.execute === "function") {
3276
3439
  return await this.mcpTool.execute(args);
@@ -3319,15 +3482,16 @@ var McpToolAdapter = class extends BaseTool {
3319
3482
  throw error;
3320
3483
  }
3321
3484
  }
3322
- };
3485
+ }, _class8);
3323
3486
 
3324
3487
  // src/tools/mcp/index.ts
3325
- var McpToolset = class {
3326
- constructor(config, toolFilter = null) {
3327
- this.clientService = null;
3328
- this.toolFilter = null;
3329
- this.tools = [];
3330
- this.isClosing = false;
3488
+ var McpToolset = (_class9 = class {
3489
+
3490
+ __init13() {this.clientService = null}
3491
+ __init14() {this.toolFilter = null}
3492
+ __init15() {this.tools = []}
3493
+ __init16() {this.isClosing = false}
3494
+ constructor(config, toolFilter = null) {;_class9.prototype.__init13.call(this);_class9.prototype.__init14.call(this);_class9.prototype.__init15.call(this);_class9.prototype.__init16.call(this);
3331
3495
  this.config = config;
3332
3496
  this.toolFilter = toolFilter;
3333
3497
  this.clientService = new McpClientService(config);
@@ -3462,7 +3626,7 @@ var McpToolset = class {
3462
3626
  async dispose() {
3463
3627
  await this.close();
3464
3628
  }
3465
- };
3629
+ }, _class9);
3466
3630
  async function getMcpTools(config, toolFilter) {
3467
3631
  const toolset = new McpToolset(config, toolFilter);
3468
3632
  try {
@@ -3505,6 +3669,30 @@ __export(models_exports, {
3505
3669
 
3506
3670
  // src/models/llm-response.ts
3507
3671
  var LLMResponse = class {
3672
+ /**
3673
+ * Content of the response
3674
+ */
3675
+
3676
+ /**
3677
+ * Function calls in the response
3678
+ */
3679
+
3680
+ /**
3681
+ * Tool calls in the response
3682
+ */
3683
+
3684
+ /**
3685
+ * Role of the message (usually 'assistant')
3686
+ */
3687
+
3688
+ /**
3689
+ * Whether this is a partial response in a stream
3690
+ */
3691
+
3692
+ /**
3693
+ * Raw provider response
3694
+ */
3695
+
3508
3696
  constructor(data) {
3509
3697
  this.content = data.content;
3510
3698
  this.function_call = data.function_call;
@@ -3517,6 +3705,10 @@ var LLMResponse = class {
3517
3705
 
3518
3706
  // src/models/base-llm.ts
3519
3707
  var BaseLLM = class {
3708
+ /**
3709
+ * The name of the LLM model
3710
+ */
3711
+
3520
3712
  /**
3521
3713
  * Constructor for BaseLLM
3522
3714
  */
@@ -3541,13 +3733,11 @@ var BaseLLM = class {
3541
3733
  };
3542
3734
 
3543
3735
  // src/models/base-llm-connection.ts
3544
- var BaseLLMConnection = class {
3545
- constructor() {
3546
- /**
3547
- * Whether the connection is active
3548
- */
3549
- this._isActive = true;
3550
- }
3736
+ var BaseLLMConnection = (_class10 = class {constructor() { _class10.prototype.__init17.call(this); }
3737
+ /**
3738
+ * Whether the connection is active
3739
+ */
3740
+ __init17() {this._isActive = true}
3551
3741
  /**
3552
3742
  * Gets whether the connection is active
3553
3743
  */
@@ -3560,13 +3750,41 @@ var BaseLLMConnection = class {
3560
3750
  close() {
3561
3751
  this._isActive = false;
3562
3752
  }
3563
- };
3753
+ }, _class10);
3564
3754
 
3565
3755
  // src/models/anthropic-llm.ts
3756
+ init_debug();
3566
3757
  var _axios = require('axios'); var _axios2 = _interopRequireDefault(_axios);
3567
3758
 
3568
3759
  // src/models/anthropic-llm-connection.ts
3760
+ init_debug();
3569
3761
  var AnthropicLLMConnection = class extends BaseLLMConnection {
3762
+ /**
3763
+ * Axios instance for API calls
3764
+ */
3765
+
3766
+ /**
3767
+ * Current model to use
3768
+ */
3769
+
3770
+ /**
3771
+ * Current messages in the conversation
3772
+ */
3773
+
3774
+ /**
3775
+ * System message if present
3776
+ */
3777
+
3778
+ /**
3779
+ * Default parameters for requests
3780
+ */
3781
+
3782
+ /**
3783
+ * Callbacks for handling responses, errors, and connection end
3784
+ */
3785
+
3786
+
3787
+
3570
3788
  /**
3571
3789
  * Constructor
3572
3790
  */
@@ -3669,19 +3887,14 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
3669
3887
  if (!_optionalChain([content, 'optionalAccess', _49 => _49.length])) return [];
3670
3888
  const toolUses = [];
3671
3889
  for (const block of content) {
3672
- if (process.env.DEBUG === "true") {
3673
- console.log(
3674
- "Connection - Processing content block of type:",
3675
- block.type
3676
- );
3677
- }
3890
+ debugLog(
3891
+ `[AnthropicLLMConnection] Processing content block of type: ${block.type}`
3892
+ );
3678
3893
  if (block.type === "tool_use") {
3679
- if (process.env.DEBUG === "true") {
3680
- console.log(
3681
- "Connection - Found tool_use block:",
3682
- JSON.stringify(block, null, 2)
3683
- );
3684
- }
3894
+ debugLog(
3895
+ "[AnthropicLLMConnection] Found tool_use block:",
3896
+ JSON.stringify(block, null, 2)
3897
+ );
3685
3898
  toolUses.push({
3686
3899
  id: block.id || "unknown-id",
3687
3900
  name: block.name || "unknown-name",
@@ -3689,14 +3902,14 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
3689
3902
  });
3690
3903
  }
3691
3904
  }
3692
- if (process.env.DEBUG === "true") {
3693
- console.log(`Connection - Found ${toolUses.length} tool uses in content`);
3694
- if (toolUses.length > 0) {
3695
- console.log(
3696
- "Connection - Extracted tool uses:",
3697
- JSON.stringify(toolUses, null, 2)
3698
- );
3699
- }
3905
+ debugLog(
3906
+ `[AnthropicLLMConnection] Found ${toolUses.length} tool uses in content`
3907
+ );
3908
+ if (toolUses.length > 0) {
3909
+ debugLog(
3910
+ "[AnthropicLLMConnection] Extracted tool uses:",
3911
+ JSON.stringify(toolUses, null, 2)
3912
+ );
3700
3913
  }
3701
3914
  return toolUses;
3702
3915
  }
@@ -3790,43 +4003,30 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
3790
4003
  }
3791
4004
  const toolUses = this.extractToolUses(apiResponse.content);
3792
4005
  const toolCalls = this.convertToolCalls(toolUses);
3793
- if (process.env.DEBUG === "true") {
3794
- if (toolUses.length > 0) {
3795
- console.log(
3796
- "Connection - Extracted Tool Uses:",
3797
- JSON.stringify(toolUses, null, 2)
3798
- );
3799
- console.log(
3800
- "Connection - Converted Tool Calls:",
3801
- JSON.stringify(toolCalls, null, 2)
3802
- );
3803
- }
3804
- }
4006
+ debugLog(
4007
+ `[AnthropicLLMConnection] - Extracted ${toolUses.length} tool uses in content and converted ${_optionalChain([toolCalls, 'optionalAccess', _50 => _50.length]) || 0} tool calls`
4008
+ );
3805
4009
  const llmResponse = new LLMResponse({
3806
4010
  role: "assistant",
3807
4011
  content,
3808
- tool_calls: _optionalChain([toolCalls, 'optionalAccess', _50 => _50.length]) ? toolCalls : void 0,
4012
+ tool_calls: _optionalChain([toolCalls, 'optionalAccess', _51 => _51.length]) ? toolCalls : void 0,
3809
4013
  raw_response: apiResponse
3810
4014
  });
3811
- if (process.env.DEBUG === "true") {
3812
- console.log(
3813
- "Connection - Final LLMResponse object:",
3814
- JSON.stringify(
3815
- {
3816
- role: llmResponse.role,
3817
- content: _optionalChain([llmResponse, 'access', _51 => _51.content, 'optionalAccess', _52 => _52.substring, 'call', _53 => _53(0, 50)]) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
3818
- tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
3819
- },
3820
- null,
3821
- 2
3822
- )
3823
- );
3824
- }
4015
+ const logObject = {
4016
+ role: llmResponse.role,
4017
+ content: _optionalChain([llmResponse, 'access', _52 => _52.content, 'optionalAccess', _53 => _53.substring, 'call', _54 => _54(0, 50)]) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
4018
+ tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
4019
+ };
4020
+ debugLog(
4021
+ "[AnthropicLLMConnection] Final LLMResponse object:",
4022
+ JSON.stringify(logObject, null, 2)
4023
+ );
3825
4024
  return llmResponse;
3826
4025
  } catch (error) {
3827
- if (process.env.DEBUG === "true") {
3828
- console.error("Error sending message to Anthropic:", error);
3829
- }
4026
+ debugLog(
4027
+ "[AnthropicLLMConnection] Error sending message to Anthropic:",
4028
+ error
4029
+ );
3830
4030
  throw error;
3831
4031
  }
3832
4032
  }
@@ -3834,22 +4034,34 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
3834
4034
 
3835
4035
  // src/models/anthropic-llm.ts
3836
4036
  var AnthropicLLM = class extends BaseLLM {
4037
+ /**
4038
+ * Anthropic API key
4039
+ */
4040
+
4041
+ /**
4042
+ * Anthropic API base URL
4043
+ */
4044
+
4045
+ /**
4046
+ * Default parameters for requests
4047
+ */
4048
+
3837
4049
  /**
3838
4050
  * Constructor for AnthropicLLM
3839
4051
  */
3840
4052
  constructor(model, config) {
3841
4053
  super(model);
3842
- this.apiKey = _optionalChain([config, 'optionalAccess', _54 => _54.apiKey]) || process.env.ANTHROPIC_API_KEY || "";
3843
- this.baseURL = _optionalChain([config, 'optionalAccess', _55 => _55.baseURL]) || "https://api.anthropic.com/v1";
4054
+ this.apiKey = _optionalChain([config, 'optionalAccess', _55 => _55.apiKey]) || process.env.ANTHROPIC_API_KEY || "";
4055
+ this.baseURL = _optionalChain([config, 'optionalAccess', _56 => _56.baseURL]) || "https://api.anthropic.com/v1";
3844
4056
  if (!this.apiKey) {
3845
4057
  throw new Error(
3846
4058
  "Anthropic API key is required. Provide it in config or set ANTHROPIC_API_KEY environment variable."
3847
4059
  );
3848
4060
  }
3849
4061
  this.defaultParams = {
3850
- temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _56 => _56.defaultParams, 'optionalAccess', _57 => _57.temperature]), () => ( 0.7)),
3851
- top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _58 => _58.defaultParams, 'optionalAccess', _59 => _59.top_p]), () => ( 1)),
3852
- max_tokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _60 => _60.defaultParams, 'optionalAccess', _61 => _61.max_tokens]), () => ( 1024))
4062
+ temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _57 => _57.defaultParams, 'optionalAccess', _58 => _58.temperature]), () => ( 0.7)),
4063
+ top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _59 => _59.defaultParams, 'optionalAccess', _60 => _60.top_p]), () => ( 1)),
4064
+ max_tokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _61 => _61.defaultParams, 'optionalAccess', _62 => _62.max_tokens]), () => ( 1024))
3853
4065
  };
3854
4066
  }
3855
4067
  /**
@@ -3935,7 +4147,7 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
3935
4147
  * Convert ADK function declarations to Anthropic tool format
3936
4148
  */
3937
4149
  convertFunctionsToTools(functions) {
3938
- if (!_optionalChain([functions, 'optionalAccess', _62 => _62.length])) {
4150
+ if (!_optionalChain([functions, 'optionalAccess', _63 => _63.length])) {
3939
4151
  return [];
3940
4152
  }
3941
4153
  return functions.map((func) => ({
@@ -3948,7 +4160,7 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
3948
4160
  * Convert Anthropic tool calls to ADK tool calls
3949
4161
  */
3950
4162
  convertToolUses(toolUses) {
3951
- if (!_optionalChain([toolUses, 'optionalAccess', _63 => _63.length])) {
4163
+ if (!_optionalChain([toolUses, 'optionalAccess', _64 => _64.length])) {
3952
4164
  return [];
3953
4165
  }
3954
4166
  return toolUses.map((toolUse) => ({
@@ -3963,16 +4175,16 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
3963
4175
  * Extract tool uses from response content
3964
4176
  */
3965
4177
  extractToolUses(content) {
3966
- if (!_optionalChain([content, 'optionalAccess', _64 => _64.length])) return [];
4178
+ if (!_optionalChain([content, 'optionalAccess', _65 => _65.length])) return [];
3967
4179
  const toolUses = [];
3968
4180
  for (const block of content) {
3969
- if (process.env.DEBUG === "true") {
3970
- console.log("Processing content block of type:", block.type);
3971
- }
4181
+ debugLog(
4182
+ `[AnthropicLLM] Processing content block of type: ${block.type}`
4183
+ );
3972
4184
  if (block.type === "tool_use") {
3973
- if (process.env.DEBUG === "true") {
3974
- console.log("Found tool_use block:", JSON.stringify(block, null, 2));
3975
- }
4185
+ debugLog(
4186
+ `[AnthropicLLM] Found tool_use block: ${JSON.stringify(block, null, 2)}`
4187
+ );
3976
4188
  toolUses.push({
3977
4189
  id: block.id || "unknown-id",
3978
4190
  name: block.name || "unknown-name",
@@ -3980,12 +4192,10 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
3980
4192
  });
3981
4193
  }
3982
4194
  }
3983
- if (process.env.DEBUG === "true") {
3984
- console.log(`Found ${toolUses.length} tool uses in content`);
3985
- if (toolUses.length > 0) {
3986
- console.log("Extracted tool uses:", JSON.stringify(toolUses, null, 2));
3987
- }
3988
- }
4195
+ debugLog(
4196
+ `[AnthropicLLM] Found ${toolUses.length} tool uses in content`,
4197
+ toolUses
4198
+ );
3989
4199
  return toolUses;
3990
4200
  }
3991
4201
  /**
@@ -4007,16 +4217,14 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4007
4217
  },
4008
4218
  responseType: stream ? "stream" : "json"
4009
4219
  });
4010
- if (process.env.DEBUG === "true") {
4011
- console.log("Anthropic API Response Status:", response.status);
4012
- if (!stream) {
4013
- console.log("Response Data Structure:", Object.keys(response.data));
4014
- console.log(
4015
- "Response Content Structure:",
4016
- response.data.content.map((block) => ({ type: block.type }))
4017
- );
4018
- }
4019
- }
4220
+ debugLog(
4221
+ `[AnthropicLLM] API Response done with ${response.status}:`,
4222
+ response.data
4223
+ );
4224
+ debugLog(
4225
+ "[AnthropicLLM] API Response content:",
4226
+ response.data.content.map((block) => ({ type: block.type }))
4227
+ );
4020
4228
  return response.data;
4021
4229
  } catch (error) {
4022
4230
  console.error("Error calling Anthropic API:", error);
@@ -4040,26 +4248,19 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4040
4248
  temperature: _nullishCoalesce(llmRequest.config.temperature, () => ( this.defaultParams.temperature)),
4041
4249
  max_tokens: _nullishCoalesce(llmRequest.config.max_tokens, () => ( this.defaultParams.max_tokens)),
4042
4250
  top_p: _nullishCoalesce(llmRequest.config.top_p, () => ( this.defaultParams.top_p)),
4043
- tools: _optionalChain([tools, 'optionalAccess', _65 => _65.length]) ? tools : void 0
4251
+ tools: _optionalChain([tools, 'optionalAccess', _66 => _66.length]) ? tools : void 0
4044
4252
  };
4045
- if (process.env.DEBUG === "true") {
4046
- console.log("Anthropic API Request:", {
4047
- model: params.model,
4048
- messageCount: params.messages.length,
4049
- systemMessage: params.system ? "present" : "none",
4050
- tools: params.tools ? params.tools.map((t) => t.name) : "none"
4051
- });
4052
- }
4253
+ debugLog("[AnthropicLLM] API Request:", {
4254
+ model: params.model,
4255
+ messageCount: params.messages.length,
4256
+ systemMessage: params.system ? "present" : "none",
4257
+ tools: params.tools ? params.tools.map((t) => t.name) : "none"
4258
+ });
4053
4259
  if (stream) {
4054
4260
  throw new Error("Streaming is not supported in this implementation");
4055
4261
  }
4056
4262
  const response = await this.callAnthropicAPI(params);
4057
- if (process.env.DEBUG === "true") {
4058
- console.log(
4059
- "Full Response Content:",
4060
- JSON.stringify(response.content, null, 2)
4061
- );
4062
- }
4263
+ debugLog("[AnthropicLLM] Full Response Content:", response.content);
4063
4264
  let content = "";
4064
4265
  for (const block of response.content) {
4065
4266
  if (block.type === "text") {
@@ -4068,43 +4269,26 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4068
4269
  }
4069
4270
  const toolUses = this.extractToolUses(response.content);
4070
4271
  const toolCalls = this.convertToolUses(toolUses);
4071
- if (process.env.DEBUG === "true") {
4072
- if (toolUses.length > 0) {
4073
- console.log(
4074
- "Extracted Tool Uses:",
4075
- JSON.stringify(toolUses, null, 2)
4076
- );
4077
- console.log(
4078
- "Converted Tool Calls:",
4079
- JSON.stringify(toolCalls, null, 2)
4080
- );
4081
- }
4082
- }
4272
+ debugLog("[AnthropicLLM] Extracted Tool Uses:", toolUses);
4273
+ debugLog("[AnthropicLLM] Converted Tool Calls:", toolCalls);
4083
4274
  const llmResponse = new LLMResponse({
4084
4275
  role: "assistant",
4085
4276
  content,
4086
4277
  tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
4087
4278
  raw_response: response
4088
4279
  });
4089
- if (process.env.DEBUG === "true") {
4090
- console.log(
4091
- "Final LLMResponse object:",
4092
- JSON.stringify(
4093
- {
4094
- role: llmResponse.role,
4095
- content: _optionalChain([llmResponse, 'access', _66 => _66.content, 'optionalAccess', _67 => _67.substring, 'call', _68 => _68(0, 50)]) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
4096
- tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
4097
- },
4098
- null,
4099
- 2
4100
- )
4101
- );
4102
- }
4280
+ const logObject = {
4281
+ role: llmResponse.role,
4282
+ content: _optionalChain([llmResponse, 'access', _67 => _67.content, 'optionalAccess', _68 => _68.substring, 'call', _69 => _69(0, 50)]) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
4283
+ tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
4284
+ };
4285
+ debugLog(
4286
+ "[AnthropicLLM] Final LLMResponse object:",
4287
+ JSON.stringify(logObject, null, 2)
4288
+ );
4103
4289
  yield llmResponse;
4104
4290
  } catch (error) {
4105
- if (process.env.DEBUG === "true") {
4106
- console.error("Error calling Anthropic:", error);
4107
- }
4291
+ debugLog("[AnthropicLLM] Error:", error);
4108
4292
  throw error;
4109
4293
  }
4110
4294
  }
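The hunks above swap the inline `process.env.DEBUG === "true"` guards and raw `console.log` calls for the shared `debugLog` helper, so provider logging now goes through a single gate. A minimal sketch of toggling that output from an application, assuming the helper stays keyed to the same `DEBUG` flag the removed guards checked:

```ts
// Sketch only: turn the library's debug output on for a local run.
// Assumes debugLog remains gated on DEBUG === "true", as the inline
// checks it replaces were; otherwise consult the package docs.
process.env.DEBUG = "true";

// From this point, calls such as debugLog("[AnthropicLLM] API Request:", params)
// inside the bundle print timestamped lines; with DEBUG unset they stay silent.
```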
@@ -4134,15 +4318,23 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
4134
4318
 
4135
4319
  var _genai = require('@google/genai');
4136
4320
  var GoogleLLM = class extends BaseLLM {
4321
+ /**
4322
+ * Generative model instance
4323
+ */
4324
+
4325
+ /**
4326
+ * Default parameters for requests
4327
+ */
4328
+
4137
4329
  /**
4138
4330
  * Constructor for GoogleLLM
4139
4331
  */
4140
4332
  constructor(model, config) {
4141
4333
  super(model);
4142
4334
  const apiKey = process.env.GOOGLE_API_KEY;
4143
- const projectId = _optionalChain([config, 'optionalAccess', _69 => _69.projectId]) || process.env.GOOGLE_CLOUD_PROJECT;
4144
- const location = _optionalChain([config, 'optionalAccess', _70 => _70.location]) || process.env.GOOGLE_CLOUD_LOCATION;
4145
- const useVertexAI = _optionalChain([process, 'access', _71 => _71.env, 'access', _72 => _72.USE_VERTEX_AI, 'optionalAccess', _73 => _73.toLowerCase, 'call', _74 => _74()]) === "true";
4335
+ const projectId = _optionalChain([config, 'optionalAccess', _70 => _70.projectId]) || process.env.GOOGLE_CLOUD_PROJECT;
4336
+ const location = _optionalChain([config, 'optionalAccess', _71 => _71.location]) || process.env.GOOGLE_CLOUD_LOCATION;
4337
+ const useVertexAI = _optionalChain([process, 'access', _72 => _72.env, 'access', _73 => _73.USE_VERTEX_AI, 'optionalAccess', _74 => _74.toLowerCase, 'call', _75 => _75()]) === "true";
4146
4338
  if (!useVertexAI && !apiKey) {
4147
4339
  throw new Error(
4148
4340
  "Google API Key is required. Provide via config or GOOGLE_API_KEY env var."
@@ -4167,9 +4359,9 @@ var GoogleLLM = class extends BaseLLM {
4167
4359
  }
4168
4360
  this.ai = new (0, _genai.GoogleGenAI)(options);
4169
4361
  this.defaultParams = {
4170
- temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _75 => _75.defaultParams, 'optionalAccess', _76 => _76.temperature]), () => ( 0.7)),
4171
- topP: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _77 => _77.defaultParams, 'optionalAccess', _78 => _78.top_p]), () => ( 1)),
4172
- maxOutputTokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _79 => _79.defaultParams, 'optionalAccess', _80 => _80.maxOutputTokens]), () => ( 1024))
4362
+ temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _76 => _76.defaultParams, 'optionalAccess', _77 => _77.temperature]), () => ( 0.7)),
4363
+ topP: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _78 => _78.defaultParams, 'optionalAccess', _79 => _79.top_p]), () => ( 1)),
4364
+ maxOutputTokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _80 => _80.defaultParams, 'optionalAccess', _81 => _81.maxOutputTokens]), () => ( 1024))
4173
4365
  };
4174
4366
  }
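The constructor above accepts either a plain API key or Vertex AI project settings, with environment-variable fallbacks, and seeds `defaultParams` from `config.defaultParams`. A hedged sketch of both paths, using only the config fields and variables read in this hunk; the model name and the root import are illustrative assumptions:

```ts
import { GoogleLLM } from "@iqai/adk"; // assumes the class is re-exported from the package root

// Path 1: API key (GOOGLE_API_KEY is required when not using Vertex AI)
process.env.GOOGLE_API_KEY = "<api-key>";
const viaKey = new GoogleLLM("gemini-1.5-flash", {
  defaultParams: { temperature: 0.7, top_p: 1, maxOutputTokens: 1024 },
});

// Path 2: Vertex AI (USE_VERTEX_AI === "true" skips the API-key requirement)
process.env.USE_VERTEX_AI = "true";
const viaVertex = new GoogleLLM("gemini-1.5-flash", {
  projectId: "my-gcp-project", // falls back to GOOGLE_CLOUD_PROJECT
  location: "us-central1",     // falls back to GOOGLE_CLOUD_LOCATION
});
```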
4175
4367
  /**
@@ -4307,7 +4499,7 @@ var GoogleLLM = class extends BaseLLM {
4307
4499
  );
4308
4500
  parts.push({ text: "" });
4309
4501
  }
4310
- if (googleRole === "function" && (parts.length !== 1 || !_optionalChain([parts, 'access', _81 => _81[0], 'optionalAccess', _82 => _82.functionResponse]))) {
4502
+ if (googleRole === "function" && (parts.length !== 1 || !_optionalChain([parts, 'access', _82 => _82[0], 'optionalAccess', _83 => _83.functionResponse]))) {
4311
4503
  console.error(
4312
4504
  `[GoogleLLM] convertMessage - Invalid parts for 'function' role. Expected 1 functionResponse part. Got:`,
4313
4505
  JSON.stringify(parts),
@@ -4415,13 +4607,13 @@ var GoogleLLM = class extends BaseLLM {
4415
4607
  role: "assistant",
4416
4608
  content: null
4417
4609
  });
4418
- if (typeof _optionalChain([response, 'optionalAccess', _83 => _83.candidates, 'optionalAccess', _84 => _84[0], 'optionalAccess', _85 => _85.content, 'optionalAccess', _86 => _86.parts, 'optionalAccess', _87 => _87[0], 'optionalAccess', _88 => _88.text]) === "string") {
4610
+ if (typeof _optionalChain([response, 'optionalAccess', _84 => _84.candidates, 'optionalAccess', _85 => _85[0], 'optionalAccess', _86 => _86.content, 'optionalAccess', _87 => _87.parts, 'optionalAccess', _88 => _88[0], 'optionalAccess', _89 => _89.text]) === "string") {
4419
4611
  result.content = response.candidates[0].content.parts[0].text;
4420
4612
  }
4421
- if (_optionalChain([response, 'optionalAccess', _89 => _89.candidates, 'optionalAccess', _90 => _90[0], 'optionalAccess', _91 => _91.content, 'optionalAccess', _92 => _92.parts, 'optionalAccess', _93 => _93[0], 'optionalAccess', _94 => _94.text])) {
4613
+ if (_optionalChain([response, 'optionalAccess', _90 => _90.candidates, 'optionalAccess', _91 => _91[0], 'optionalAccess', _92 => _92.content, 'optionalAccess', _93 => _93.parts, 'optionalAccess', _94 => _94[0], 'optionalAccess', _95 => _95.text])) {
4422
4614
  result.content = response.candidates[0].content.parts[0].text;
4423
4615
  }
4424
- if (_optionalChain([response, 'optionalAccess', _95 => _95.candidates, 'optionalAccess', _96 => _96[0], 'optionalAccess', _97 => _97.content, 'optionalAccess', _98 => _98.parts, 'optionalAccess', _99 => _99[0], 'optionalAccess', _100 => _100.functionCall])) {
4616
+ if (_optionalChain([response, 'optionalAccess', _96 => _96.candidates, 'optionalAccess', _97 => _97[0], 'optionalAccess', _98 => _98.content, 'optionalAccess', _99 => _99.parts, 'optionalAccess', _100 => _100[0], 'optionalAccess', _101 => _101.functionCall])) {
4425
4617
  const functionCall = response.candidates[0].content.parts[0].functionCall;
4426
4618
  result.function_call = {
4427
4619
  name: functionCall.name,
@@ -4468,10 +4660,10 @@ var GoogleLLM = class extends BaseLLM {
4468
4660
  if (stream) {
4469
4661
  const streamingResult = await this.ai.models.generateContentStream(requestOptions);
4470
4662
  for await (const chunk of streamingResult) {
4471
- if (!_optionalChain([chunk, 'access', _101 => _101.candidates, 'optionalAccess', _102 => _102[0], 'optionalAccess', _103 => _103.content, 'optionalAccess', _104 => _104.parts, 'optionalAccess', _105 => _105[0], 'optionalAccess', _106 => _106.text])) {
4663
+ if (!_optionalChain([chunk, 'access', _102 => _102.candidates, 'optionalAccess', _103 => _103[0], 'optionalAccess', _104 => _104.content, 'optionalAccess', _105 => _105.parts, 'optionalAccess', _106 => _106[0], 'optionalAccess', _107 => _107.text])) {
4472
4664
  continue;
4473
4665
  }
4474
- const partialText = _optionalChain([chunk, 'access', _107 => _107.candidates, 'access', _108 => _108[0], 'optionalAccess', _109 => _109.content, 'optionalAccess', _110 => _110.parts, 'access', _111 => _111[0], 'optionalAccess', _112 => _112.text]) || "";
4666
+ const partialText = _optionalChain([chunk, 'access', _108 => _108.candidates, 'access', _109 => _109[0], 'optionalAccess', _110 => _110.content, 'optionalAccess', _111 => _111.parts, 'access', _112 => _112[0], 'optionalAccess', _113 => _113.text]) || "";
4475
4667
  const partialResponse = new LLMResponse({
4476
4668
  content: partialText,
4477
4669
  role: "assistant",
@@ -4491,19 +4683,48 @@ var GoogleLLM = class extends BaseLLM {
4491
4683
  };
4492
4684
 
4493
4685
  // src/models/openai-llm.ts
4686
+ init_debug();
4494
4687
  var _openai = require('openai'); var _openai2 = _interopRequireDefault(_openai);
4495
4688
 
4496
4689
  // src/models/openai-llm-connection.ts
4497
- var OpenAILLMConnection = class extends BaseLLMConnection {
4690
+ var OpenAILLMConnection = (_class11 = class extends BaseLLMConnection {
4691
+ /**
4692
+ * OpenAI client
4693
+ */
4694
+
4695
+ /**
4696
+ * The model name
4697
+ */
4698
+
4699
+ /**
4700
+ * The initial request
4701
+ */
4702
+
4703
+ /**
4704
+ * Default parameters
4705
+ */
4706
+
4707
+ /**
4708
+ * Response callback
4709
+ */
4710
+
4711
+ /**
4712
+ * Error callback
4713
+ */
4714
+
4715
+ /**
4716
+ * End callback
4717
+ */
4718
+
4719
+ /**
4720
+ * Ongoing chat history
4721
+ */
4722
+ __init18() {this.messages = []}
4498
4723
  /**
4499
4724
  * Constructor for OpenAILLMConnection
4500
4725
  */
4501
4726
  constructor(client, model, initialRequest, defaultParams) {
4502
- super();
4503
- /**
4504
- * Ongoing chat history
4505
- */
4506
- this.messages = [];
4727
+ super();_class11.prototype.__init18.call(this);;
4507
4728
  this.client = client;
4508
4729
  this.model = model;
4509
4730
  this.initialRequest = initialRequest;
@@ -4583,10 +4804,10 @@ var OpenAILLMConnection = class extends BaseLLMConnection {
4583
4804
  for await (const chunk of stream) {
4584
4805
  if (chunk.choices.length === 0) continue;
4585
4806
  const delta = chunk.choices[0].delta;
4586
- if (_optionalChain([delta, 'optionalAccess', _113 => _113.content])) {
4807
+ if (_optionalChain([delta, 'optionalAccess', _114 => _114.content])) {
4587
4808
  responseContent += delta.content;
4588
4809
  }
4589
- if (_optionalChain([delta, 'optionalAccess', _114 => _114.function_call])) {
4810
+ if (_optionalChain([delta, 'optionalAccess', _115 => _115.function_call])) {
4590
4811
  if (!functionCall) {
4591
4812
  functionCall = {
4592
4813
  name: delta.function_call.name || "",
@@ -4597,7 +4818,7 @@ var OpenAILLMConnection = class extends BaseLLMConnection {
4597
4818
  functionCall.arguments += delta.function_call.arguments || "";
4598
4819
  }
4599
4820
  }
4600
- if (_optionalChain([delta, 'optionalAccess', _115 => _115.tool_calls])) {
4821
+ if (_optionalChain([delta, 'optionalAccess', _116 => _116.tool_calls])) {
4601
4822
  for (const toolDelta of delta.tool_calls) {
4602
4823
  const id = toolDelta.id || "";
4603
4824
  let tool = toolCalls.find((t) => t.id === id);
@@ -4605,20 +4826,20 @@ var OpenAILLMConnection = class extends BaseLLMConnection {
4605
4826
  tool = {
4606
4827
  id,
4607
4828
  function: {
4608
- name: _optionalChain([toolDelta, 'access', _116 => _116.function, 'optionalAccess', _117 => _117.name]) || "",
4609
- arguments: _optionalChain([toolDelta, 'access', _118 => _118.function, 'optionalAccess', _119 => _119.arguments]) || ""
4829
+ name: _optionalChain([toolDelta, 'access', _117 => _117.function, 'optionalAccess', _118 => _118.name]) || "",
4830
+ arguments: _optionalChain([toolDelta, 'access', _119 => _119.function, 'optionalAccess', _120 => _120.arguments]) || ""
4610
4831
  }
4611
4832
  };
4612
4833
  toolCalls.push(tool);
4613
4834
  } else {
4614
- tool.function.name += _optionalChain([toolDelta, 'access', _120 => _120.function, 'optionalAccess', _121 => _121.name]) || "";
4615
- tool.function.arguments += _optionalChain([toolDelta, 'access', _122 => _122.function, 'optionalAccess', _123 => _123.arguments]) || "";
4835
+ tool.function.name += _optionalChain([toolDelta, 'access', _121 => _121.function, 'optionalAccess', _122 => _122.name]) || "";
4836
+ tool.function.arguments += _optionalChain([toolDelta, 'access', _123 => _123.function, 'optionalAccess', _124 => _124.arguments]) || "";
4616
4837
  }
4617
4838
  }
4618
4839
  }
4619
4840
  if (this.responseCallback) {
4620
4841
  const response = new LLMResponse({
4621
- content: _optionalChain([delta, 'optionalAccess', _124 => _124.content]) || null,
4842
+ content: _optionalChain([delta, 'optionalAccess', _125 => _125.content]) || null,
4622
4843
  role: "assistant",
4623
4844
  function_call: functionCall,
4624
4845
  tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
@@ -4710,26 +4931,34 @@ var OpenAILLMConnection = class extends BaseLLMConnection {
4710
4931
  onEnd(callback) {
4711
4932
  this.endCallback = callback;
4712
4933
  }
4713
- };
4934
+ }, _class11);
4714
4935
 
4715
4936
  // src/models/openai-llm.ts
4716
4937
  var OpenAILLM = class extends BaseLLM {
4938
+ /**
4939
+ * OpenAI client instance
4940
+ */
4941
+
4942
+ /**
4943
+ * Default parameters for requests
4944
+ */
4945
+
4717
4946
  /**
4718
4947
  * Constructor for OpenAILLM
4719
4948
  */
4720
4949
  constructor(model, config) {
4721
4950
  super(model);
4722
4951
  this.client = new (0, _openai2.default)({
4723
- apiKey: _optionalChain([config, 'optionalAccess', _125 => _125.apiKey]) || process.env.OPENAI_API_KEY,
4724
- baseURL: _optionalChain([config, 'optionalAccess', _126 => _126.baseURL]),
4725
- organization: _optionalChain([config, 'optionalAccess', _127 => _127.organization])
4952
+ apiKey: _optionalChain([config, 'optionalAccess', _126 => _126.apiKey]) || process.env.OPENAI_API_KEY,
4953
+ baseURL: _optionalChain([config, 'optionalAccess', _127 => _127.baseURL]),
4954
+ organization: _optionalChain([config, 'optionalAccess', _128 => _128.organization])
4726
4955
  });
4727
4956
  this.defaultParams = {
4728
- temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _128 => _128.defaultParams, 'optionalAccess', _129 => _129.temperature]), () => ( 0.7)),
4729
- top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _130 => _130.defaultParams, 'optionalAccess', _131 => _131.top_p]), () => ( 1)),
4730
- max_tokens: _optionalChain([config, 'optionalAccess', _132 => _132.defaultParams, 'optionalAccess', _133 => _133.max_tokens]),
4731
- frequency_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _134 => _134.defaultParams, 'optionalAccess', _135 => _135.frequency_penalty]), () => ( 0)),
4732
- presence_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _136 => _136.defaultParams, 'optionalAccess', _137 => _137.presence_penalty]), () => ( 0))
4957
+ temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _129 => _129.defaultParams, 'optionalAccess', _130 => _130.temperature]), () => ( 0.7)),
4958
+ top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _131 => _131.defaultParams, 'optionalAccess', _132 => _132.top_p]), () => ( 1)),
4959
+ max_tokens: _optionalChain([config, 'optionalAccess', _133 => _133.defaultParams, 'optionalAccess', _134 => _134.max_tokens]),
4960
+ frequency_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _135 => _135.defaultParams, 'optionalAccess', _136 => _136.frequency_penalty]), () => ( 0)),
4961
+ presence_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _137 => _137.defaultParams, 'optionalAccess', _138 => _138.presence_penalty]), () => ( 0))
4733
4962
  };
4734
4963
  }
4735
4964
  /**
@@ -4839,16 +5068,16 @@ var OpenAILLM = class extends BaseLLM {
4839
5068
  */
4840
5069
  convertResponse(response) {
4841
5070
  const result = new LLMResponse({
4842
- content: _optionalChain([response, 'access', _138 => _138.message, 'optionalAccess', _139 => _139.content]) || null,
4843
- role: _optionalChain([response, 'access', _140 => _140.message, 'optionalAccess', _141 => _141.role]) || "assistant"
5071
+ content: _optionalChain([response, 'access', _139 => _139.message, 'optionalAccess', _140 => _140.content]) || null,
5072
+ role: _optionalChain([response, 'access', _141 => _141.message, 'optionalAccess', _142 => _142.role]) || "assistant"
4844
5073
  });
4845
- if (_optionalChain([response, 'access', _142 => _142.message, 'optionalAccess', _143 => _143.function_call])) {
5074
+ if (_optionalChain([response, 'access', _143 => _143.message, 'optionalAccess', _144 => _144.function_call])) {
4846
5075
  result.function_call = {
4847
5076
  name: response.message.function_call.name,
4848
5077
  arguments: response.message.function_call.arguments
4849
5078
  };
4850
5079
  }
4851
- if (_optionalChain([response, 'access', _144 => _144.message, 'optionalAccess', _145 => _145.tool_calls])) {
5080
+ if (_optionalChain([response, 'access', _145 => _145.message, 'optionalAccess', _146 => _146.tool_calls])) {
4852
5081
  result.tool_calls = response.message.tool_calls.map((tool) => ({
4853
5082
  id: tool.id,
4854
5083
  function: {
@@ -4863,29 +5092,27 @@ var OpenAILLM = class extends BaseLLM {
4863
5092
  * Convert OpenAI streaming chunk to LLMResponse
4864
5093
  */
4865
5094
  convertChunk(chunk) {
4866
- if (process.env.DEBUG === "true") {
4867
- console.log(
4868
- `OpenAI: Converting chunk - delta: ${JSON.stringify(chunk.delta || {})}`
4869
- );
4870
- }
4871
- const content = _optionalChain([chunk, 'access', _146 => _146.delta, 'optionalAccess', _147 => _147.content]);
5095
+ debugLog(
5096
+ `[OpenAILLM]: Converting chunk - delta: ${JSON.stringify(chunk.delta || {})}`
5097
+ );
5098
+ const content = _optionalChain([chunk, 'access', _147 => _147.delta, 'optionalAccess', _148 => _148.content]);
4872
5099
  const result = new LLMResponse({
4873
5100
  content: content !== void 0 ? content : null,
4874
- role: _optionalChain([chunk, 'access', _148 => _148.delta, 'optionalAccess', _149 => _149.role]) || "assistant",
5101
+ role: _optionalChain([chunk, 'access', _149 => _149.delta, 'optionalAccess', _150 => _150.role]) || "assistant",
4875
5102
  is_partial: true
4876
5103
  });
4877
- if (_optionalChain([chunk, 'access', _150 => _150.delta, 'optionalAccess', _151 => _151.function_call])) {
5104
+ if (_optionalChain([chunk, 'access', _151 => _151.delta, 'optionalAccess', _152 => _152.function_call])) {
4878
5105
  result.function_call = {
4879
5106
  name: chunk.delta.function_call.name || "",
4880
5107
  arguments: chunk.delta.function_call.arguments || ""
4881
5108
  };
4882
5109
  }
4883
- if (_optionalChain([chunk, 'access', _152 => _152.delta, 'optionalAccess', _153 => _153.tool_calls])) {
5110
+ if (_optionalChain([chunk, 'access', _153 => _153.delta, 'optionalAccess', _154 => _154.tool_calls])) {
4884
5111
  result.tool_calls = chunk.delta.tool_calls.map((tool) => ({
4885
5112
  id: tool.id || "",
4886
5113
  function: {
4887
- name: _optionalChain([tool, 'access', _154 => _154.function, 'optionalAccess', _155 => _155.name]) || "",
4888
- arguments: _optionalChain([tool, 'access', _156 => _156.function, 'optionalAccess', _157 => _157.arguments]) || ""
5114
+ name: _optionalChain([tool, 'access', _155 => _155.function, 'optionalAccess', _156 => _156.name]) || "",
5115
+ arguments: _optionalChain([tool, 'access', _157 => _157.function, 'optionalAccess', _158 => _158.arguments]) || ""
4889
5116
  }
4890
5117
  }));
4891
5118
  }
@@ -4908,32 +5135,24 @@ var OpenAILLM = class extends BaseLLM {
4908
5135
  presence_penalty: _nullishCoalesce(llmRequest.config.presence_penalty, () => ( this.defaultParams.presence_penalty)),
4909
5136
  stream: shouldStream
4910
5137
  };
4911
- if (process.env.DEBUG === "true") {
4912
- console.log(
4913
- `OpenAI: Streaming mode ${shouldStream ? "enabled" : "disabled"}`
4914
- );
4915
- }
5138
+ debugLog(
5139
+ `[OpenAILLM] Request parameters - model: ${params.model}, messages: ${params.messages.length}, functions: ${params.tools ? params.tools.length : 0}, streaming: ${shouldStream}`
5140
+ );
4916
5141
  if (tools && tools.length > 0) {
4917
5142
  params.tools = tools;
4918
5143
  }
4919
5144
  try {
4920
5145
  if (shouldStream) {
4921
- if (process.env.DEBUG === "true") {
4922
- console.log("OpenAI: Starting streaming request");
4923
- }
5146
+ debugLog("[OpenAILLM] Starting streaming request");
4924
5147
  const streamResponse = await this.client.chat.completions.create(params);
4925
5148
  let partialFunctionCall;
4926
5149
  const partialToolCalls = /* @__PURE__ */ new Map();
4927
5150
  let accumulatedContent = "";
4928
5151
  const asyncIterable = streamResponse;
4929
- if (process.env.DEBUG === "true") {
4930
- console.log("OpenAI: Stream response received, processing chunks");
4931
- }
5152
+ debugLog("[OpenAILLM] Stream response received, processing chunks");
4932
5153
  for await (const chunk of asyncIterable) {
4933
5154
  if (!chunk.choices || chunk.choices.length === 0) {
4934
- if (process.env.DEBUG === "true") {
4935
- console.log("OpenAI: Empty chunk received, skipping");
4936
- }
5155
+ debugLog("[OpenAILLM] Empty chunk received, skipping");
4937
5156
  continue;
4938
5157
  }
4939
5158
  const choice = chunk.choices[0];
@@ -4941,14 +5160,12 @@ var OpenAILLM = class extends BaseLLM {
4941
5160
  if (responseChunk.content !== null) {
4942
5161
  accumulatedContent += responseChunk.content;
4943
5162
  }
4944
- if (process.env.DEBUG === "true") {
4945
- console.log(
4946
- `OpenAI: Chunk received - delta: "${_optionalChain([choice, 'access', _158 => _158.delta, 'optionalAccess', _159 => _159.content]) || ""}"`,
4947
- `responseChunk content: "${responseChunk.content || ""}"`,
4948
- `is_partial: ${responseChunk.is_partial}`,
4949
- `accumulated: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
4950
- );
4951
- }
5163
+ debugLog(
5164
+ `[OpenAILLM] Chunk received - delta: "${_optionalChain([choice, 'access', _159 => _159.delta, 'optionalAccess', _160 => _160.content]) || ""}"`,
5165
+ `responseChunk content: "${responseChunk.content || ""}"`,
5166
+ `is_partial: ${responseChunk.is_partial}`,
5167
+ `accumulated: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
5168
+ );
4952
5169
  if (responseChunk.function_call) {
4953
5170
  if (!partialFunctionCall) {
4954
5171
  partialFunctionCall = {
@@ -4973,37 +5190,27 @@ var OpenAILLM = class extends BaseLLM {
4973
5190
  }
4974
5191
  responseChunk.tool_calls = Array.from(partialToolCalls.values());
4975
5192
  }
4976
- if (process.env.DEBUG === "true") {
4977
- console.log("OpenAI: Yielding chunk to caller");
4978
- }
5193
+ debugLog("[OpenAILLM] Yielding chunk to caller");
4979
5194
  yield responseChunk;
4980
5195
  }
4981
5196
  if (accumulatedContent.length > 0) {
4982
- if (process.env.DEBUG === "true") {
4983
- console.log(
4984
- `OpenAI: Yielding final accumulated content: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
4985
- );
4986
- }
5197
+ debugLog(
5198
+ `[OpenAILLM] Yielding final accumulated content: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
5199
+ );
4987
5200
  yield new LLMResponse({
4988
5201
  content: accumulatedContent,
4989
5202
  role: "assistant",
4990
5203
  is_partial: false
4991
5204
  });
4992
5205
  }
4993
- if (process.env.DEBUG === "true") {
4994
- console.log("OpenAI: Finished processing all stream chunks");
4995
- }
5206
+ debugLog("[OpenAILLM] Finished processing all stream chunks");
4996
5207
  } else {
4997
- if (process.env.DEBUG === "true") {
4998
- console.log("OpenAI: Making non-streaming request");
4999
- }
5208
+ debugLog("[OpenAILLM] Making non-streaming request");
5000
5209
  const response = await this.client.chat.completions.create(params);
5001
5210
  if (!response.choices || response.choices.length === 0) {
5002
5211
  throw new Error("No response from OpenAI");
5003
5212
  }
5004
- if (process.env.DEBUG === "true") {
5005
- console.log("OpenAI: Non-streaming response received");
5006
- }
5213
+ debugLog("[OpenAILLM] Non-streaming response received");
5007
5214
  yield this.convertResponse(response.choices[0]);
5008
5215
  }
5009
5216
  } catch (error) {
@@ -5042,6 +5249,10 @@ var AuthCredentialType = /* @__PURE__ */ ((AuthCredentialType2) => {
5042
5249
  return AuthCredentialType2;
5043
5250
  })(AuthCredentialType || {});
5044
5251
  var AuthCredential = class {
5252
+ /**
5253
+ * Type of credential
5254
+ */
5255
+
5045
5256
  /**
5046
5257
  * Constructor for AuthCredential
5047
5258
  */
@@ -5062,6 +5273,10 @@ var AuthCredential = class {
5062
5273
  }
5063
5274
  };
5064
5275
  var ApiKeyCredential = class extends AuthCredential {
5276
+ /**
5277
+ * The API key
5278
+ */
5279
+
5065
5280
  /**
5066
5281
  * Constructor for ApiKeyCredential
5067
5282
  */
@@ -5087,6 +5302,14 @@ var ApiKeyCredential = class extends AuthCredential {
5087
5302
  }
5088
5303
  };
5089
5304
  var BasicAuthCredential = class extends AuthCredential {
5305
+ /**
5306
+ * The username
5307
+ */
5308
+
5309
+ /**
5310
+ * The password
5311
+ */
5312
+
5090
5313
  /**
5091
5314
  * Constructor for BasicAuthCredential
5092
5315
  */
@@ -5111,6 +5334,10 @@ var BasicAuthCredential = class extends AuthCredential {
5111
5334
  }
5112
5335
  };
5113
5336
  var BearerTokenCredential = class extends AuthCredential {
5337
+ /**
5338
+ * The bearer token
5339
+ */
5340
+
5114
5341
  /**
5115
5342
  * Constructor for BearerTokenCredential
5116
5343
  */
@@ -5134,6 +5361,22 @@ var BearerTokenCredential = class extends AuthCredential {
5134
5361
  }
5135
5362
  };
5136
5363
  var OAuth2Credential = class extends AuthCredential {
5364
+ /**
5365
+ * The access token
5366
+ */
5367
+
5368
+ /**
5369
+ * The refresh token
5370
+ */
5371
+
5372
+ /**
5373
+ * When the token expires
5374
+ */
5375
+
5376
+ /**
5377
+ * Function to refresh the token
5378
+ */
5379
+
5137
5380
  /**
5138
5381
  * Constructor for OAuth2Credential
5139
5382
  */
@@ -5184,7 +5427,7 @@ var OAuth2Credential = class extends AuthCredential {
5184
5427
  "Cannot refresh token: no refresh token or refresh function"
5185
5428
  );
5186
5429
  }
5187
- const result = await _optionalChain([this, 'access', _160 => _160.refreshFunction, 'optionalCall', _161 => _161(this.refreshToken)]);
5430
+ const result = await _optionalChain([this, 'access', _161 => _161.refreshFunction, 'optionalCall', _162 => _162(this.refreshToken)]);
5188
5431
  if (!result) {
5189
5432
  throw new Error("Failed to refresh token");
5190
5433
  }
@@ -5200,6 +5443,14 @@ var OAuth2Credential = class extends AuthCredential {
5200
5443
 
5201
5444
  // src/auth/auth-config.ts
5202
5445
  var AuthConfig = class {
5446
+ /**
5447
+ * The authentication scheme
5448
+ */
5449
+
5450
+ /**
5451
+ * Additional context properties
5452
+ */
5453
+
5203
5454
  /**
5204
5455
  * Constructor for AuthConfig
5205
5456
  */
@@ -5211,6 +5462,14 @@ var AuthConfig = class {
5211
5462
 
5212
5463
  // src/auth/auth-handler.ts
5213
5464
  var AuthHandler = class {
5465
+ /**
5466
+ * The authentication configuration
5467
+ */
5468
+
5469
+ /**
5470
+ * The authentication credential
5471
+ */
5472
+
5214
5473
  /**
5215
5474
  * Constructor for AuthHandler
5216
5475
  */
@@ -5222,7 +5481,7 @@ var AuthHandler = class {
5222
5481
  * Gets the authentication token
5223
5482
  */
5224
5483
  getToken() {
5225
- return _optionalChain([this, 'access', _162 => _162.credential, 'optionalAccess', _163 => _163.getToken, 'call', _164 => _164()]);
5484
+ return _optionalChain([this, 'access', _163 => _163.credential, 'optionalAccess', _164 => _164.getToken, 'call', _165 => _165()]);
5226
5485
  }
5227
5486
  /**
5228
5487
  * Gets headers for HTTP requests
@@ -5237,7 +5496,7 @@ var AuthHandler = class {
5237
5496
  * Refreshes the token if necessary
5238
5497
  */
5239
5498
  async refreshToken() {
5240
- if (_optionalChain([this, 'access', _165 => _165.credential, 'optionalAccess', _166 => _166.canRefresh, 'call', _167 => _167()])) {
5499
+ if (_optionalChain([this, 'access', _166 => _166.credential, 'optionalAccess', _167 => _167.canRefresh, 'call', _168 => _168()])) {
5241
5500
  await this.credential.refresh();
5242
5501
  }
5243
5502
  }
@@ -5252,11 +5511,27 @@ var AuthSchemeType = /* @__PURE__ */ ((AuthSchemeType2) => {
5252
5511
  return AuthSchemeType2;
5253
5512
  })(AuthSchemeType || {});
5254
5513
  var AuthScheme = class {
5514
+ /**
5515
+ * The type of authentication scheme
5516
+ */
5517
+
5255
5518
  constructor(type) {
5256
5519
  this.type = type;
5257
5520
  }
5258
5521
  };
5259
5522
  var ApiKeyScheme = class extends AuthScheme {
5523
+ /**
5524
+ * Where the API key is sent
5525
+ */
5526
+
5527
+ /**
5528
+ * Name of the parameter
5529
+ */
5530
+
5531
+ /**
5532
+ * Description of the API key
5533
+ */
5534
+
5260
5535
  /**
5261
5536
  * Constructor for ApiKeyScheme
5262
5537
  */
@@ -5268,6 +5543,18 @@ var ApiKeyScheme = class extends AuthScheme {
5268
5543
  }
5269
5544
  };
5270
5545
  var HttpScheme = class extends AuthScheme {
5546
+ /**
5547
+ * The HTTP authentication scheme
5548
+ */
5549
+
5550
+ /**
5551
+ * Bearer format when scheme is 'bearer'
5552
+ */
5553
+
5554
+ /**
5555
+ * Description of the scheme
5556
+ */
5557
+
5271
5558
  /**
5272
5559
  * Constructor for HttpScheme
5273
5560
  */
@@ -5279,6 +5566,14 @@ var HttpScheme = class extends AuthScheme {
5279
5566
  }
5280
5567
  };
5281
5568
  var OAuth2Scheme = class extends AuthScheme {
5569
+ /**
5570
+ * OAuth flows
5571
+ */
5572
+
5573
+ /**
5574
+ * Description of the scheme
5575
+ */
5576
+
5282
5577
  /**
5283
5578
  * Constructor for OAuth2Scheme
5284
5579
  */
@@ -5289,6 +5584,14 @@ var OAuth2Scheme = class extends AuthScheme {
5289
5584
  }
5290
5585
  };
5291
5586
  var OpenIdConnectScheme = class extends AuthScheme {
5587
+ /**
5588
+ * OpenID Connect URL
5589
+ */
5590
+
5591
+ /**
5592
+ * Description of the scheme
5593
+ */
5594
+
5292
5595
  /**
5293
5596
  * Constructor for OpenIdConnectScheme
5294
5597
  */
@@ -5301,6 +5604,8 @@ var OpenIdConnectScheme = class extends AuthScheme {
5301
5604
 
5302
5605
  // src/sessions/state.ts
5303
5606
  var SessionState = class _SessionState {
5607
+
5608
+
5304
5609
  constructor() {
5305
5610
  this.state = /* @__PURE__ */ new Map();
5306
5611
  this.dirty = /* @__PURE__ */ new Set();
@@ -5388,6 +5693,10 @@ __export(memory_exports, {
5388
5693
 
5389
5694
  // src/memory/in-memory-memory-service.ts
5390
5695
  var InMemoryMemoryService = class {
5696
+ /**
5697
+ * Map of sessions by ID
5698
+ */
5699
+
5391
5700
  /**
5392
5701
  * Constructor for InMemoryMemoryService
5393
5702
  */
@@ -5413,7 +5722,7 @@ var InMemoryMemoryService = class {
5413
5722
  };
5414
5723
  const normalizedQuery = query.toLowerCase().trim();
5415
5724
  const queryTerms = normalizedQuery.split(/\s+/);
5416
- const sessionsToSearch = _optionalChain([options, 'optionalAccess', _168 => _168.sessionId]) ? this.sessions.has(options.sessionId) ? [this.sessions.get(options.sessionId)] : [] : Array.from(this.sessions.values());
5725
+ const sessionsToSearch = _optionalChain([options, 'optionalAccess', _169 => _169.sessionId]) ? this.sessions.has(options.sessionId) ? [this.sessions.get(options.sessionId)] : [] : Array.from(this.sessions.values());
5417
5726
  for (const session of sessionsToSearch) {
5418
5727
  const matchedEvents = [];
5419
5728
  const scores = [];
@@ -5439,7 +5748,7 @@ var InMemoryMemoryService = class {
5439
5748
  }
5440
5749
  }
5441
5750
  const score = queryTerms.length > 0 ? termMatches / queryTerms.length : 0;
5442
- if (_optionalChain([options, 'optionalAccess', _169 => _169.threshold]) !== void 0 && score < options.threshold) {
5751
+ if (_optionalChain([options, 'optionalAccess', _170 => _170.threshold]) !== void 0 && score < options.threshold) {
5443
5752
  continue;
5444
5753
  }
5445
5754
  if (score > 0) {
@@ -5459,7 +5768,7 @@ var InMemoryMemoryService = class {
5459
5768
  response.memories.sort(
5460
5769
  (a, b) => (_nullishCoalesce(b.relevanceScore, () => ( 0))) - (_nullishCoalesce(a.relevanceScore, () => ( 0)))
5461
5770
  );
5462
- if (_optionalChain([options, 'optionalAccess', _170 => _170.limit]) !== void 0 && options.limit > 0) {
5771
+ if (_optionalChain([options, 'optionalAccess', _171 => _171.limit]) !== void 0 && options.limit > 0) {
5463
5772
  response.memories = response.memories.slice(0, options.limit);
5464
5773
  }
5465
5774
  return response;
@@ -5488,9 +5797,22 @@ var InMemoryMemoryService = class {
5488
5797
  };
5489
5798
 
5490
5799
  // src/memory/persistent-memory-service.ts
5491
- var _fs = require('fs'); var _fs2 = _interopRequireDefault(_fs);
5800
+ init_debug();
5801
+ var _fs = require('fs'); var fs3 = _interopRequireWildcard(_fs);
5492
5802
 
5493
5803
  var PersistentMemoryService = class {
5804
+ /**
5805
+ * In-memory service used for search operations
5806
+ */
5807
+
5808
+ /**
5809
+ * Directory where memory files will be stored
5810
+ */
5811
+
5812
+ /**
5813
+ * File prefix for memory files
5814
+ */
5815
+
5494
5816
  /**
5495
5817
  * Constructor for PersistentMemoryService
5496
5818
  */
@@ -5498,8 +5820,8 @@ var PersistentMemoryService = class {
5498
5820
  this.inMemoryService = new InMemoryMemoryService();
5499
5821
  this.storageDir = config.storageDir;
5500
5822
  this.filePrefix = config.filePrefix || "memory";
5501
- if (config.createDir && !_fs2.default.existsSync(this.storageDir)) {
5502
- _fs2.default.mkdirSync(this.storageDir, { recursive: true });
5823
+ if (config.createDir && !fs3.default.existsSync(this.storageDir)) {
5824
+ fs3.default.mkdirSync(this.storageDir, { recursive: true });
5503
5825
  }
5504
5826
  this.loadMemoryFiles();
5505
5827
  }
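The constructor above wraps an `InMemoryMemoryService`, persists sessions as JSON files under `storageDir` with a `filePrefix`, and can create the directory when `createDir` is set. A hedged construction sketch using only those config fields; the import path is an assumption:

```ts
import { PersistentMemoryService } from "@iqai/adk"; // assumes a root re-export

// Files are written as `${filePrefix}-${sessionId}.json` inside storageDir,
// and any existing files are loaded back into memory on construction.
const memory = new PersistentMemoryService({
  storageDir: "./.adk-memory",
  filePrefix: "memory",   // the constructor's default when omitted
  createDir: true,        // create the directory if it does not exist yet
});
```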
@@ -5536,7 +5858,7 @@ var PersistentMemoryService = class {
5536
5858
  persistedAt: /* @__PURE__ */ new Date()
5537
5859
  }
5538
5860
  };
5539
- await _fs2.default.promises.writeFile(
5861
+ await fs3.default.promises.writeFile(
5540
5862
  filePath,
5541
5863
  JSON.stringify(sessionData, null, 2),
5542
5864
  "utf-8"
@@ -5554,22 +5876,22 @@ var PersistentMemoryService = class {
5554
5876
  * @returns The file path
5555
5877
  */
5556
5878
  getSessionFilePath(sessionId) {
5557
- return _path2.default.join(this.storageDir, `${this.filePrefix}-${sessionId}.json`);
5879
+ return path3.default.join(this.storageDir, `${this.filePrefix}-${sessionId}.json`);
5558
5880
  }
5559
5881
  /**
5560
5882
  * Loads all memory files from disk
5561
5883
  */
5562
5884
  loadMemoryFiles() {
5563
5885
  try {
5564
- if (!_fs2.default.existsSync(this.storageDir)) {
5886
+ if (!fs3.default.existsSync(this.storageDir)) {
5565
5887
  return;
5566
5888
  }
5567
- const files = _fs2.default.readdirSync(this.storageDir);
5889
+ const files = fs3.default.readdirSync(this.storageDir);
5568
5890
  for (const file of files) {
5569
5891
  if (file.startsWith(this.filePrefix) && file.endsWith(".json")) {
5570
5892
  try {
5571
- const filePath = _path2.default.join(this.storageDir, file);
5572
- const content = _fs2.default.readFileSync(filePath, "utf-8");
5893
+ const filePath = path3.default.join(this.storageDir, file);
5894
+ const content = fs3.default.readFileSync(filePath, "utf-8");
5573
5895
  const session = JSON.parse(content);
5574
5896
  session.createdAt = new Date(session.createdAt);
5575
5897
  session.updatedAt = new Date(session.updatedAt);
@@ -5579,11 +5901,9 @@ var PersistentMemoryService = class {
5579
5901
  }
5580
5902
  }
5581
5903
  }
5582
- if (process.env.DEBUG === "true") {
5583
- console.log(
5584
- `Loaded ${this.inMemoryService.getAllSessions().length} sessions from persistent storage`
5585
- );
5586
- }
5904
+ debugLog(
5905
+ `Loaded ${this.inMemoryService.getAllSessions().length} sessions from persistent storage`
5906
+ );
5587
5907
  } catch (error) {
5588
5908
  console.error("Error loading memory files:", error);
5589
5909
  }
@@ -5610,8 +5930,8 @@ var PersistentMemoryService = class {
5610
5930
  async deleteSession(sessionId) {
5611
5931
  const filePath = this.getSessionFilePath(sessionId);
5612
5932
  try {
5613
- if (_fs2.default.existsSync(filePath)) {
5614
- await _fs2.default.promises.unlink(filePath);
5933
+ if (fs3.default.existsSync(filePath)) {
5934
+ await fs3.default.promises.unlink(filePath);
5615
5935
  }
5616
5936
  this.inMemoryService.getSession(sessionId);
5617
5937
  } catch (error) {
@@ -5647,6 +5967,7 @@ __export(sessions_exports, {
5647
5967
  PgLiteSessionService: () => PgLiteSessionService,
5648
5968
  PostgresSessionService: () => PostgresSessionService,
5649
5969
  SessionState: () => SessionState,
5970
+ SqliteSessionService: () => SqliteSessionService,
5650
5971
  cloneSession: () => cloneSession,
5651
5972
  generateSessionId: () => generateSessionId,
5652
5973
  validateSession: () => validateSession
@@ -5654,6 +5975,10 @@ __export(sessions_exports, {
5654
5975
 
5655
5976
  // src/sessions/in-memory-session-service.ts
5656
5977
  var InMemorySessionService = class {
5978
+ /**
5979
+ * Map of sessions by ID
5980
+ */
5981
+
5657
5982
  /**
5658
5983
  * Constructor for InMemorySessionService
5659
5984
  */
@@ -5708,17 +6033,17 @@ var InMemorySessionService = class {
5708
6033
  let sessions = Array.from(this.sessions.values()).filter(
5709
6034
  (session) => session.userId === userId
5710
6035
  );
5711
- if (_optionalChain([options, 'optionalAccess', _171 => _171.createdAfter])) {
6036
+ if (_optionalChain([options, 'optionalAccess', _172 => _172.createdAfter])) {
5712
6037
  sessions = sessions.filter(
5713
6038
  (session) => session.createdAt >= options.createdAfter
5714
6039
  );
5715
6040
  }
5716
- if (_optionalChain([options, 'optionalAccess', _172 => _172.updatedAfter])) {
6041
+ if (_optionalChain([options, 'optionalAccess', _173 => _173.updatedAfter])) {
5717
6042
  sessions = sessions.filter(
5718
6043
  (session) => session.updatedAt >= options.updatedAfter
5719
6044
  );
5720
6045
  }
5721
- if (_optionalChain([options, 'optionalAccess', _173 => _173.metadataFilter])) {
6046
+ if (_optionalChain([options, 'optionalAccess', _174 => _174.metadataFilter])) {
5722
6047
  sessions = sessions.filter((session) => {
5723
6048
  for (const [key, value] of Object.entries(options.metadataFilter)) {
5724
6049
  if (session.metadata[key] !== value) {
@@ -5729,7 +6054,7 @@ var InMemorySessionService = class {
5729
6054
  });
5730
6055
  }
5731
6056
  sessions.sort((a, b) => b.updatedAt.getTime() - a.updatedAt.getTime());
5732
- if (_optionalChain([options, 'optionalAccess', _174 => _174.limit]) !== void 0 && options.limit > 0) {
6057
+ if (_optionalChain([options, 'optionalAccess', _175 => _175.limit]) !== void 0 && options.limit > 0) {
5733
6058
  sessions = sessions.slice(0, options.limit);
5734
6059
  }
5735
6060
  return sessions;
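The filtering above accepts optional `createdAfter`, `updatedAfter`, `metadataFilter`, and `limit` options, sorts by `updatedAt` descending, and applies `limit` last. A sketch of a call exercising those options against any session service exposing this `listSessions(userId, options)` shape; the metadata key is illustrative:

```ts
// `sessionService` stands for an InMemorySessionService (or compatible) instance.
const recent = await sessionService.listSessions("user-123", {
  createdAfter: new Date(Date.now() - 7 * 24 * 60 * 60 * 1000), // last 7 days
  metadataFilter: { channel: "web" }, // every key/value must match session.metadata
  limit: 10,                          // applied after sorting by updatedAt (newest first)
});
```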
@@ -5764,7 +6089,7 @@ var InMemorySessionService = class {
5764
6089
  if (event.is_partial) {
5765
6090
  return event;
5766
6091
  }
5767
- if (_optionalChain([event, 'access', _175 => _175.actions, 'optionalAccess', _176 => _176.stateDelta])) {
6092
+ if (_optionalChain([event, 'access', _176 => _176.actions, 'optionalAccess', _177 => _177.stateDelta])) {
5768
6093
  for (const [key, value] of Object.entries(event.actions.stateDelta)) {
5769
6094
  if (key.startsWith("_temp_")) {
5770
6095
  continue;
@@ -5799,6 +6124,8 @@ var sessionsSchema = _pgcore.pgTable.call(void 0, "sessions", {
5799
6124
  // Store serialized SessionState as JSONB
5800
6125
  });
5801
6126
  var PostgresSessionService = class {
6127
+
6128
+
5802
6129
  constructor(config) {
5803
6130
  this.db = config.db;
5804
6131
  this.sessionsTable = config.sessionsTable || sessionsSchema;
@@ -5868,7 +6195,7 @@ var PostgresSessionService = class {
5868
6195
  }
5869
6196
  async listSessions(userId, options) {
5870
6197
  let query = this.db.select().from(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.userId, userId));
5871
- if (_optionalChain([options, 'optionalAccess', _177 => _177.limit]) !== void 0 && options.limit > 0) {
6198
+ if (_optionalChain([options, 'optionalAccess', _178 => _178.limit]) !== void 0 && options.limit > 0) {
5872
6199
  query = query.limit(options.limit);
5873
6200
  }
5874
6201
  const results = await query;
@@ -5895,12 +6222,12 @@ var PostgresSessionService = class {
5895
6222
  if (event.is_partial) {
5896
6223
  return event;
5897
6224
  }
5898
- if (_optionalChain([event, 'access', _178 => _178.actions, 'optionalAccess', _179 => _179.stateDelta])) {
6225
+ if (_optionalChain([event, 'access', _179 => _179.actions, 'optionalAccess', _180 => _180.stateDelta])) {
5899
6226
  for (const [key, value] of Object.entries(event.actions.stateDelta)) {
5900
6227
  if (key.startsWith("_temp_")) {
5901
6228
  continue;
5902
6229
  }
5903
- _optionalChain([session, 'access', _180 => _180.state, 'optionalAccess', _181 => _181.set, 'call', _182 => _182(key, value)]);
6230
+ _optionalChain([session, 'access', _181 => _181.state, 'optionalAccess', _182 => _182.set, 'call', _183 => _183(key, value)]);
5904
6231
  }
5905
6232
  }
5906
6233
  if (!session.events) {
@@ -5921,22 +6248,25 @@ var PostgresSessionService = class {
5921
6248
  // src/sessions/pglite-session-service.ts
5922
6249
 
5923
6250
 
6251
+ var _pglite = require('drizzle-orm/pglite');
5924
6252
  var sessionsSchema2 = _pgcore.pgTable.call(void 0, "sessions", {
5925
6253
  id: _pgcore.varchar.call(void 0, "id", { length: 255 }).primaryKey(),
5926
6254
  userId: _pgcore.varchar.call(void 0, "user_id", { length: 255 }).notNull(),
5927
6255
  messages: _pgcore.jsonb.call(void 0, "messages").default("[]").$type(),
5928
- // Store Message array as JSONB
5929
6256
  metadata: _pgcore.jsonb.call(void 0, "metadata").default("{}").$type(),
5930
6257
  createdAt: _pgcore.timestamp.call(void 0, "created_at", { withTimezone: true }).defaultNow().notNull(),
5931
6258
  updatedAt: _pgcore.timestamp.call(void 0, "updated_at", { withTimezone: true }).defaultNow().notNull(),
5932
6259
  state: _pgcore.jsonb.call(void 0, "state").default("{}").$type()
5933
- // Store serialized SessionState as JSONB
5934
6260
  });
5935
- var PgLiteSessionService = class {
5936
- constructor(config) {
5937
- this.initialized = false;
5938
- this.db = config.db;
5939
- this.sessionsTable = config.sessionsTable || sessionsSchema2;
6261
+ var PgLiteSessionService = (_class12 = class {
6262
+
6263
+
6264
+ __init19() {this.initialized = false}
6265
+ constructor(config) {;_class12.prototype.__init19.call(this);
6266
+ this.db = _pglite.drizzle.call(void 0, config.pglite, {
6267
+ schema: { sessions: sessionsSchema2 }
6268
+ });
6269
+ this.sessionsTable = sessionsSchema2;
5940
6270
  if (!config.skipTableCreation) {
5941
6271
  this.initializeDatabase().catch((error) => {
5942
6272
  console.error("Failed to initialize PgLite database:", error);
@@ -5991,9 +6321,7 @@ var PgLiteSessionService = class {
5991
6321
  metadata,
5992
6322
  createdAt: now,
5993
6323
  updatedAt: now,
5994
- // Drizzle's defaultNow() on schema handles this, but explicit is fine
5995
6324
  state: sessionState.toObject()
5996
- // Serialize SessionState
5997
6325
  };
5998
6326
  const results = await this.db.insert(this.sessionsTable).values(newSessionData).returning();
5999
6327
  const result = results[0];
@@ -6008,7 +6336,6 @@ var PgLiteSessionService = class {
6008
6336
  messages: Array.isArray(result.messages) ? result.messages : [],
6009
6337
  metadata: result.metadata || {},
6010
6338
  state: SessionState.fromObject(result.state || {}),
6011
- // Ensure dates are Date objects if Drizzle returns strings for some drivers/configs
6012
6339
  createdAt: new Date(result.createdAt),
6013
6340
  updatedAt: new Date(result.updatedAt)
6014
6341
  };
@@ -6036,7 +6363,6 @@ var PgLiteSessionService = class {
6036
6363
  userId: session.userId,
6037
6364
  messages: session.messages,
6038
6365
  metadata: session.metadata,
6039
- // createdAt should typically not be updated after creation
6040
6366
  updatedAt: /* @__PURE__ */ new Date(),
6041
6367
  state: session.state.toObject()
6042
6368
  };
@@ -6045,7 +6371,7 @@ var PgLiteSessionService = class {
6045
6371
  async listSessions(userId, options) {
6046
6372
  await this.ensureInitialized();
6047
6373
  let query = this.db.select().from(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.userId, userId));
6048
- if (_optionalChain([options, 'optionalAccess', _183 => _183.limit]) !== void 0 && options.limit > 0) {
6374
+ if (_optionalChain([options, 'optionalAccess', _184 => _184.limit]) !== void 0 && options.limit > 0) {
6049
6375
  query = query.limit(options.limit);
6050
6376
  }
6051
6377
  const results = await query;
@@ -6063,23 +6389,202 @@ var PgLiteSessionService = class {
6063
6389
  await this.ensureInitialized();
6064
6390
  await this.db.delete(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.id, sessionId));
6065
6391
  }
6392
+ async appendEvent(session, event) {
6393
+ await this.ensureInitialized();
6394
+ if (event.is_partial) {
6395
+ return event;
6396
+ }
6397
+ if (_optionalChain([event, 'access', _185 => _185.actions, 'optionalAccess', _186 => _186.stateDelta])) {
6398
+ for (const [key, value] of Object.entries(event.actions.stateDelta)) {
6399
+ if (key.startsWith("_temp_")) {
6400
+ continue;
6401
+ }
6402
+ _optionalChain([session, 'access', _187 => _187.state, 'optionalAccess', _188 => _188.set, 'call', _189 => _189(key, value)]);
6403
+ }
6404
+ }
6405
+ if (!session.events) {
6406
+ session.events = [];
6407
+ }
6408
+ session.events.push(event);
6409
+ session.updatedAt = /* @__PURE__ */ new Date();
6410
+ await this.updateSession(session);
6411
+ return event;
6412
+ }
6413
+ }, _class12);
6414
+
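The change above makes `PgLiteSessionService` own its Drizzle setup: instead of receiving a prepared `db` and `sessionsTable`, it now takes a PGlite instance directly (`config.pglite`), wires up `drizzle-orm/pglite` itself, and creates the `sessions` table unless `skipTableCreation` is set. A hedged sketch of the new construction path; the `@electric-sql/pglite` import is an assumption inferred from the adapter being used:

```ts
import { PGlite } from "@electric-sql/pglite";     // assumed source of the pglite instance
import { PgLiteSessionService } from "@iqai/adk";  // assumes a root re-export

const pglite = new PGlite("./.adk-data/pglite");   // or new PGlite() for an in-memory database
const sessions = new PgLiteSessionService({
  pglite,                   // the service builds its own drizzle() wrapper around this
  skipTableCreation: false, // let the service create the sessions table on first use
});
```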
6415
+ // src/sessions/sqlite-session-service.ts
6416
+
6417
+
6418
+
6419
+
6420
+
6421
+ var _bettersqlite3 = require('drizzle-orm/better-sqlite3');
6422
+ var _sqlitecore = require('drizzle-orm/sqlite-core');
6423
+
6424
+ var sessionsSchema3 = _sqlitecore.sqliteTable.call(void 0, "sessions", {
6425
+ id: _sqlitecore.text.call(void 0, "id").primaryKey(),
6426
+ userId: _sqlitecore.text.call(void 0, "user_id").notNull(),
6427
+ messages: _sqlitecore.text.call(void 0, "messages", { mode: "json" }).default("[]").$type(),
6428
+ metadata: _sqlitecore.text.call(void 0, "metadata", { mode: "json" }).default("{}").$type(),
6429
+ createdAt: _sqlitecore.integer.call(void 0, "created_at", { mode: "timestamp" }).notNull(),
6430
+ updatedAt: _sqlitecore.integer.call(void 0, "updated_at", { mode: "timestamp" }).notNull(),
6431
+ state: _sqlitecore.text.call(void 0, "state", { mode: "json" }).default("{}").$type()
6432
+ });
6433
+ var SqliteSessionService = (_class13 = class {
6434
+
6435
+
6436
+ __init20() {this.initialized = false}
6437
+
6438
+ constructor(config) {;_class13.prototype.__init20.call(this);
6439
+ this.sqliteInstance = config.sqlite;
6440
+ const dbPath = this.sqliteInstance.name;
6441
+ if (dbPath && dbPath !== ":memory:") {
6442
+ const dbDir = path3.dirname(dbPath);
6443
+ if (!fs3.existsSync(dbDir)) {
6444
+ fs3.mkdirSync(dbDir, { recursive: true });
6445
+ }
6446
+ }
6447
+ this.db = _bettersqlite3.drizzle.call(void 0, config.sqlite, {
6448
+ schema: { sessions: sessionsSchema3 }
6449
+ });
6450
+ this.sessionsTable = sessionsSchema3;
6451
+ if (!config.skipTableCreation) {
6452
+ this.initializeDatabase().catch((error) => {
6453
+ console.error("Failed to initialize SQLite database:", error);
6454
+ });
6455
+ }
6456
+ }
6066
6457
  /**
6067
- * Appends an event to a session object
6068
- * @param session The session to append the event to
6069
- * @param event The event to append
6070
- * @returns The appended event
6458
+ * Initialize the database by creating required tables if they don't exist
6459
+ */
6460
+ async initializeDatabase() {
6461
+ if (this.initialized) {
6462
+ return;
6463
+ }
6464
+ try {
6465
+ this.sqliteInstance.pragma("journal_mode = WAL");
6466
+ this.sqliteInstance.exec(`
6467
+ CREATE TABLE IF NOT EXISTS sessions (
6468
+ id TEXT PRIMARY KEY,
6469
+ user_id TEXT NOT NULL,
6470
+ messages TEXT DEFAULT '[]',
6471
+ metadata TEXT DEFAULT '{}',
6472
+ created_at INTEGER NOT NULL,
6473
+ updated_at INTEGER NOT NULL,
6474
+ state TEXT DEFAULT '{}'
6475
+ );
6476
+ `);
6477
+ this.sqliteInstance.exec(`
6478
+ CREATE INDEX IF NOT EXISTS idx_sessions_user_id ON sessions(user_id);
6479
+ `);
6480
+ this.initialized = true;
6481
+ } catch (error) {
6482
+ console.error("Error initializing SQLite database:", error);
6483
+ throw error;
6484
+ }
6485
+ }
6486
+ /**
6487
+ * Ensure database is initialized before any operation
6071
6488
  */
6489
+ async ensureInitialized() {
6490
+ if (!this.initialized) {
6491
+ await this.initializeDatabase();
6492
+ }
6493
+ }
6494
+ generateSessionId() {
6495
+ return `session-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;
6496
+ }
6497
+ async createSession(userId, metadata = {}) {
6498
+ await this.ensureInitialized();
6499
+ const sessionId = this.generateSessionId();
6500
+ const now = /* @__PURE__ */ new Date();
6501
+ const sessionState = new SessionState();
6502
+ const newSessionData = {
6503
+ id: sessionId,
6504
+ userId,
6505
+ messages: [],
6506
+ metadata,
6507
+ createdAt: now,
6508
+ updatedAt: now,
6509
+ state: sessionState.toObject()
6510
+ };
6511
+ const results = await this.db.insert(this.sessionsTable).values(newSessionData).returning();
6512
+ const result = results[0];
6513
+ if (!result) {
6514
+ throw new Error(
6515
+ "Failed to create session, no data returned from insert."
6516
+ );
6517
+ }
6518
+ return {
6519
+ id: result.id,
6520
+ userId: result.userId,
6521
+ messages: Array.isArray(result.messages) ? result.messages : [],
6522
+ metadata: result.metadata || {},
6523
+ state: SessionState.fromObject(result.state || {}),
6524
+ createdAt: result.createdAt,
6525
+ updatedAt: result.updatedAt
6526
+ };
6527
+ }
6528
+ async getSession(sessionId) {
6529
+ await this.ensureInitialized();
6530
+ const results = await this.db.select().from(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.id, sessionId)).limit(1);
6531
+ const sessionData = results[0];
6532
+ if (!sessionData) {
6533
+ return void 0;
6534
+ }
6535
+ return {
6536
+ id: sessionData.id,
6537
+ userId: sessionData.userId,
6538
+ messages: Array.isArray(sessionData.messages) ? sessionData.messages : [],
6539
+ metadata: sessionData.metadata || {},
6540
+ state: SessionState.fromObject(sessionData.state || {}),
6541
+ createdAt: sessionData.createdAt,
6542
+ updatedAt: sessionData.updatedAt
6543
+ };
6544
+ }
6545
+ async updateSession(session) {
6546
+ await this.ensureInitialized();
6547
+ const updateData = {
6548
+ userId: session.userId,
6549
+ messages: session.messages,
6550
+ metadata: session.metadata,
6551
+ updatedAt: /* @__PURE__ */ new Date(),
6552
+ state: session.state.toObject()
6553
+ };
6554
+ await this.db.update(this.sessionsTable).set(updateData).where(_drizzleorm.eq.call(void 0, this.sessionsTable.id, session.id));
6555
+ }
6556
+ async listSessions(userId, options) {
6557
+ await this.ensureInitialized();
6558
+ let query = this.db.select().from(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.userId, userId));
6559
+ if (_optionalChain([options, 'optionalAccess', _190 => _190.limit]) !== void 0 && options.limit > 0) {
6560
+ query = query.limit(options.limit);
6561
+ }
6562
+ const results = await query;
6563
+ return results.map((sessionData) => ({
6564
+ id: sessionData.id,
6565
+ userId: sessionData.userId,
6566
+ messages: Array.isArray(sessionData.messages) ? sessionData.messages : [],
6567
+ metadata: sessionData.metadata || {},
6568
+ state: SessionState.fromObject(sessionData.state || {}),
6569
+ createdAt: sessionData.createdAt,
6570
+ updatedAt: sessionData.updatedAt
6571
+ }));
6572
+ }
6573
+ async deleteSession(sessionId) {
6574
+ await this.ensureInitialized();
6575
+ await this.db.delete(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.id, sessionId));
6576
+ }
6072
6577
  async appendEvent(session, event) {
6073
6578
  await this.ensureInitialized();
6074
6579
  if (event.is_partial) {
6075
6580
  return event;
6076
6581
  }
6077
- if (_optionalChain([event, 'access', _184 => _184.actions, 'optionalAccess', _185 => _185.stateDelta])) {
6582
+ if (_optionalChain([event, 'access', _191 => _191.actions, 'optionalAccess', _192 => _192.stateDelta])) {
6078
6583
  for (const [key, value] of Object.entries(event.actions.stateDelta)) {
6079
6584
  if (key.startsWith("_temp_")) {
6080
6585
  continue;
6081
6586
  }
6082
- _optionalChain([session, 'access', _186 => _186.state, 'optionalAccess', _187 => _187.set, 'call', _188 => _188(key, value)]);
6587
+ _optionalChain([session, 'access', _193 => _193.state, 'optionalAccess', _194 => _194.set, 'call', _195 => _195(key, value)]);
6083
6588
  }
6084
6589
  }
6085
6590
  if (!session.events) {
@@ -6090,12 +6595,7 @@ var PgLiteSessionService = class {
6090
6595
  await this.updateSession(session);
6091
6596
  return event;
6092
6597
  }
6093
- // TODO: Consider if table creation/migration logic is needed here or handled externally (e.g., drizzle-kit migrations)
6094
- // TODO: Implement methods corresponding to Python's append_event, list_events,
6095
- // get_app_state, update_app_state, get_user_state, update_user_state
6096
- // if full parity with Python's DatabaseSessionService is desired.
6097
- // This would require defining corresponding Drizzle schemas for Events, AppState, UserState.
6098
- };
6598
+ }, _class13);
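`SqliteSessionService` is the main new addition in this release (it is also added to the sessions exports above). It wraps a synchronous SQLite handle with `drizzle-orm/better-sqlite3`, creates the `sessions` table and a `user_id` index unless `skipTableCreation` is set, and mirrors the PgLite service's surface (`createSession`, `getSession`, `updateSession`, `listSessions`, `deleteSession`, `appendEvent`). A hedged usage sketch; the `better-sqlite3` import is an assumption inferred from the adapter and the `.pragma`/`.exec` calls above:

```ts
import Database from "better-sqlite3";             // assumed; matches the drizzle adapter used
import { SqliteSessionService } from "@iqai/adk";  // assumes a root re-export

const service = new SqliteSessionService({
  sqlite: new Database("sessions.db"),             // ":memory:" also works
});

const session = await service.createSession("user-123", { channel: "cli" });
session.state.set("step", 1);                      // SessionState update, persisted below
await service.updateSession(session);

const reloaded = await service.getSession(session.id);
console.log(reloaded?.metadata, reloaded?.state.toObject());
```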
6099
6599
 
6100
6600
  // src/sessions/session-util.ts
6101
6601
  function generateSessionId() {
@@ -6131,30 +6631,77 @@ function cloneSession(session) {
6131
6631
  var _uuid = require('uuid');
6132
6632
 
6133
6633
  // src/events/event-actions.ts
6134
- var EventActions = class {
6634
+ var EventActions = (_class14 = class {
6635
+ /**
6636
+ * If true, it won't call model to summarize function response.
6637
+ * Only used for function_response event.
6638
+ */
6639
+
6640
+ /**
6641
+ * Indicates that the event is updating the state with the given delta.
6642
+ */
6643
+ __init21() {this.stateDelta = {}}
6644
+ /**
6645
+ * Indicates that the event is updating an artifact. key is the filename,
6646
+ * value is the version.
6647
+ */
6648
+ __init22() {this.artifactDelta = {}}
6649
+ /**
6650
+ * If set, the event transfers to the specified agent.
6651
+ */
6652
+
6653
+ /**
6654
+ * The agent is escalating to a higher level agent.
6655
+ */
6656
+
6135
6657
  /**
6136
6658
  * Constructor for EventActions
6137
6659
  */
6138
- constructor(options = {}) {
6139
- /**
6140
- * Indicates that the event is updating the state with the given delta.
6141
- */
6142
- this.stateDelta = {};
6143
- /**
6144
- * Indicates that the event is updating an artifact. key is the filename,
6145
- * value is the version.
6146
- */
6147
- this.artifactDelta = {};
6660
+ constructor(options = {}) {;_class14.prototype.__init21.call(this);_class14.prototype.__init22.call(this);
6148
6661
  this.skipSummarization = options.skipSummarization;
6149
6662
  this.stateDelta = options.stateDelta || {};
6150
6663
  this.artifactDelta = options.artifactDelta || {};
6151
6664
  this.transferToAgent = options.transferToAgent;
6152
6665
  this.escalate = options.escalate;
6153
6666
  }
6154
- };
6667
+ }, _class14);
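The EventActions constructor above reads five options and defaults stateDelta and artifactDelta to empty objects via the __init21/__init22 field initializers. A minimal construction sketch (the agent name is hypothetical):

  const actions = new EventActions({
    skipSummarization: true,            // skip model summarization of a function_response event
    stateDelta: { counter: 1 },         // state keys merged into the session on append
    artifactDelta: { "report.md": 2 },  // filename -> version
    transferToAgent: "research_agent",  // hypothetical agent name
    escalate: false,                    // not escalating to a higher-level agent
  });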
6155
6668
 
6156
6669
  // src/events/event.ts
6157
- var Event = class _Event extends LLMResponse {
6670
+ var Event = (_class15 = class _Event extends LLMResponse {
6671
+ /**
6672
+ * The invocation ID of the event.
6673
+ */
6674
+ __init23() {this.invocationId = ""}
6675
+ /**
6676
+ * 'user' or the name of the agent, indicating who appended the event to the session.
6677
+ */
6678
+
6679
+ /**
6680
+ * The actions taken by the agent.
6681
+ */
6682
+ __init24() {this.actions = new EventActions()}
6683
+ /**
6684
+ * Set of ids of the long running function calls.
6685
+ * Agent client will know from this field about which function call is long running.
6686
+ * Only valid for function call event.
6687
+ */
6688
+
6689
+ /**
6690
+ * The branch of the event.
6691
+ * The format is like agent_1.agent_2.agent_3, where agent_1 is the parent of
6692
+ * agent_2, and agent_2 is the parent of agent_3.
6693
+ * Branch is used when multiple sub-agent shouldn't see their peer agents'
6694
+ * conversation history.
6695
+ */
6696
+
6697
+ /**
6698
+ * The unique identifier of the event.
6699
+ */
6700
+ __init25() {this.id = ""}
6701
+ /**
6702
+ * The timestamp of the event.
6703
+ */
6704
+
6158
6705
  /**
6159
6706
  * Constructor for Event
6160
6707
  */
@@ -6180,19 +6727,7 @@ var Event = class _Event extends LLMResponse {
6180
6727
  role,
6181
6728
  is_partial: partial,
6182
6729
  raw_response
6183
- });
6184
- /**
6185
- * The invocation ID of the event.
6186
- */
6187
- this.invocationId = "";
6188
- /**
6189
- * The actions taken by the agent.
6190
- */
6191
- this.actions = new EventActions();
6192
- /**
6193
- * The unique identifier of the event.
6194
- */
6195
- this.id = "";
6730
+ });_class15.prototype.__init23.call(this);_class15.prototype.__init24.call(this);_class15.prototype.__init25.call(this);;
6196
6731
  this.invocationId = invocationId;
6197
6732
  this.author = author;
6198
6733
  this.actions = actions;
@@ -6226,10 +6761,26 @@ var Event = class _Event extends LLMResponse {
6226
6761
  static newId() {
6227
6762
  return _uuid.v4.call(void 0, ).substring(0, 8);
6228
6763
  }
6229
- };
6764
+ }, _class15);
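Event now uses the same field-initializer pattern: invocationId and id default to empty strings and actions defaults to a fresh EventActions, while the unchanged static newId() returns the first 8 characters of a UUID v4. A hedged sketch; the constructor option names are inferred from the assignments in this hunk and may not match the real signature:

  const event = new Event({
    invocationId: Event.newId(),   // e.g. "3f9c2a1b"
    author: "user",                // 'user' or the name of the agent that appended the event
    actions: new EventActions(),
  });
  event.branch = "root_agent.research_agent"; // hypothetical ancestry path, parent.child format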
6230
6765
 
6231
6766
  // src/runners.ts
6232
6767
  var Runner = class {
6768
+ /**
6769
+ * The app name of the runner.
6770
+ */
6771
+
6772
+ /**
6773
+ * The root agent to run.
6774
+ */
6775
+
6776
+ /**
6777
+ * The session service for the runner.
6778
+ */
6779
+
6780
+ /**
6781
+ * The memory service for the runner.
6782
+ */
6783
+
6233
6784
  /**
6234
6785
  * Initializes the Runner.
6235
6786
  */
@@ -6430,4 +6981,5 @@ var VERSION = "0.1.0";
6430
6981
 
6431
6982
 
6432
6983
 
6433
- exports.Agent = Agent; exports.Agents = agents_exports; exports.AnthropicLLM = AnthropicLLM; exports.AnthropicLLMConnection = AnthropicLLMConnection; exports.ApiKeyCredential = ApiKeyCredential; exports.ApiKeyScheme = ApiKeyScheme; exports.AuthConfig = AuthConfig; exports.AuthCredential = AuthCredential; exports.AuthCredentialType = AuthCredentialType; exports.AuthHandler = AuthHandler; exports.AuthScheme = AuthScheme; exports.AuthSchemeType = AuthSchemeType; exports.BaseAgent = BaseAgent; exports.BaseLLM = BaseLLM; exports.BaseLLMConnection = BaseLLMConnection; exports.BaseTool = BaseTool; exports.BasicAuthCredential = BasicAuthCredential; exports.BearerTokenCredential = BearerTokenCredential; exports.ExitLoopTool = ExitLoopTool; exports.FileOperationsTool = FileOperationsTool; exports.FunctionTool = FunctionTool; exports.GetUserChoiceTool = GetUserChoiceTool; exports.GoogleLLM = GoogleLLM; exports.GoogleSearch = GoogleSearch; exports.HttpRequestTool = HttpRequestTool; exports.HttpScheme = HttpScheme; exports.InMemoryMemoryService = InMemoryMemoryService; exports.InMemoryRunner = InMemoryRunner; exports.InMemorySessionService = InMemorySessionService; exports.InvocationContext = InvocationContext; exports.LLMRegistry = LLMRegistry; exports.LLMRequest = LLMRequest; exports.LLMResponse = LLMResponse; exports.LangGraphAgent = LangGraphAgent; exports.LoadMemoryTool = LoadMemoryTool; exports.LoopAgent = LoopAgent; exports.McpError = McpError; exports.McpErrorType = McpErrorType; exports.McpToolset = McpToolset; exports.Memory = memory_exports; exports.Models = models_exports; exports.OAuth2Credential = OAuth2Credential; exports.OAuth2Scheme = OAuth2Scheme; exports.OpenAILLM = OpenAILLM; exports.OpenAILLMConnection = OpenAILLMConnection; exports.OpenIdConnectScheme = OpenIdConnectScheme; exports.ParallelAgent = ParallelAgent; exports.PersistentMemoryService = PersistentMemoryService; exports.PgLiteSessionService = PgLiteSessionService; exports.PostgresSessionService = PostgresSessionService; exports.RunConfig = RunConfig; exports.Runner = Runner; exports.SequentialAgent = SequentialAgent; exports.SessionState = SessionState; exports.Sessions = sessions_exports; exports.StreamingMode = StreamingMode; exports.ToolContext = ToolContext; exports.Tools = tools_exports; exports.TransferToAgentTool = TransferToAgentTool; exports.UserInteractionTool = UserInteractionTool; exports.VERSION = VERSION; exports.adkToMcpToolType = adkToMcpToolType; exports.buildFunctionDeclaration = buildFunctionDeclaration; exports.cloneSession = cloneSession; exports.createFunctionTool = createFunctionTool; exports.generateSessionId = generateSessionId; exports.getMcpTools = getMcpTools; exports.jsonSchemaToDeclaration = jsonSchemaToDeclaration; exports.mcpSchemaToParameters = mcpSchemaToParameters; exports.normalizeJsonSchema = normalizeJsonSchema; exports.registerProviders = registerProviders; exports.validateSession = validateSession;
6984
+
6985
+ exports.Agent = Agent; exports.Agents = agents_exports; exports.AnthropicLLM = AnthropicLLM; exports.AnthropicLLMConnection = AnthropicLLMConnection; exports.ApiKeyCredential = ApiKeyCredential; exports.ApiKeyScheme = ApiKeyScheme; exports.AuthConfig = AuthConfig; exports.AuthCredential = AuthCredential; exports.AuthCredentialType = AuthCredentialType; exports.AuthHandler = AuthHandler; exports.AuthScheme = AuthScheme; exports.AuthSchemeType = AuthSchemeType; exports.BaseAgent = BaseAgent; exports.BaseLLM = BaseLLM; exports.BaseLLMConnection = BaseLLMConnection; exports.BaseTool = BaseTool; exports.BasicAuthCredential = BasicAuthCredential; exports.BearerTokenCredential = BearerTokenCredential; exports.ExitLoopTool = ExitLoopTool; exports.FileOperationsTool = FileOperationsTool; exports.FunctionTool = FunctionTool; exports.GetUserChoiceTool = GetUserChoiceTool; exports.GoogleLLM = GoogleLLM; exports.GoogleSearch = GoogleSearch; exports.HttpRequestTool = HttpRequestTool; exports.HttpScheme = HttpScheme; exports.InMemoryMemoryService = InMemoryMemoryService; exports.InMemoryRunner = InMemoryRunner; exports.InMemorySessionService = InMemorySessionService; exports.InvocationContext = InvocationContext; exports.LLMRegistry = LLMRegistry; exports.LLMRequest = LLMRequest; exports.LLMResponse = LLMResponse; exports.LangGraphAgent = LangGraphAgent; exports.LoadMemoryTool = LoadMemoryTool; exports.LoopAgent = LoopAgent; exports.McpError = McpError; exports.McpErrorType = McpErrorType; exports.McpToolset = McpToolset; exports.Memory = memory_exports; exports.Models = models_exports; exports.OAuth2Credential = OAuth2Credential; exports.OAuth2Scheme = OAuth2Scheme; exports.OpenAILLM = OpenAILLM; exports.OpenAILLMConnection = OpenAILLMConnection; exports.OpenIdConnectScheme = OpenIdConnectScheme; exports.ParallelAgent = ParallelAgent; exports.PersistentMemoryService = PersistentMemoryService; exports.PgLiteSessionService = PgLiteSessionService; exports.PostgresSessionService = PostgresSessionService; exports.RunConfig = RunConfig; exports.Runner = Runner; exports.SequentialAgent = SequentialAgent; exports.SessionState = SessionState; exports.Sessions = sessions_exports; exports.SqliteSessionService = SqliteSessionService; exports.StreamingMode = StreamingMode; exports.ToolContext = ToolContext; exports.Tools = tools_exports; exports.TransferToAgentTool = TransferToAgentTool; exports.UserInteractionTool = UserInteractionTool; exports.VERSION = VERSION; exports.adkToMcpToolType = adkToMcpToolType; exports.buildFunctionDeclaration = buildFunctionDeclaration; exports.cloneSession = cloneSession; exports.createFunctionTool = createFunctionTool; exports.generateSessionId = generateSessionId; exports.getMcpTools = getMcpTools; exports.jsonSchemaToDeclaration = jsonSchemaToDeclaration; exports.mcpSchemaToParameters = mcpSchemaToParameters; exports.normalizeJsonSchema = normalizeJsonSchema; exports.registerProviders = registerProviders; exports.validateSession = validateSession;
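The re-emitted export line adds SqliteSessionService alongside the existing PgLiteSessionService and PostgresSessionService. A minimal import sketch (constructor arguments for the new service are not shown in this diff):

  import { SqliteSessionService } from "@iqai/adk";
  // Presumably exposes the same session-service surface added above
  // (updateSession, listSessions, deleteSession); verify against the package docs.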