@iqai/adk 0.0.6 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +2 -8
- package/README.md +177 -0
- package/dist/index.d.mts +33 -4
- package/dist/index.d.ts +33 -4
- package/dist/index.js +440 -364
- package/dist/index.mjs +257 -181
- package/package.json +2 -2
package/dist/index.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15;var __defProp = Object.defineProperty;
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15; var _class16; var _class17; var _class18; var _class19; var _class20; var _class21; var _class22; var _class23; var _class24; var _class25; var _class26; var _class27; var _class28; var _class29;var __defProp = Object.defineProperty;
|
|
2
2
|
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
3
3
|
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
4
4
|
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
@@ -19,28 +19,48 @@ var __copyProps = (to, from, except, desc) => {
|
|
|
19
19
|
};
|
|
20
20
|
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
21
21
|
|
|
22
|
-
// src/helpers/
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
22
|
+
// src/helpers/logger.ts
|
|
23
|
+
function isDebugEnabled() {
|
|
24
|
+
return process.env.NODE_ENV === "development" || process.env.DEBUG === "true";
|
|
25
|
+
}
|
|
26
|
+
var Logger;
|
|
27
|
+
var init_logger = __esm({
|
|
28
|
+
"src/helpers/logger.ts"() {
|
|
29
|
+
Logger = (_class = class {
|
|
30
|
+
|
|
31
|
+
__init2() {this.isDebugEnabled = isDebugEnabled()}
|
|
32
|
+
constructor({ name }) {;_class.prototype.__init2.call(this);
|
|
33
|
+
this.name = name;
|
|
34
|
+
}
|
|
35
|
+
debug(message, ...args) {
|
|
36
|
+
const time = (/* @__PURE__ */ new Date()).toISOString();
|
|
37
|
+
if (this.isDebugEnabled) {
|
|
38
|
+
console.log(`[${time}] \u{1F41B} [DEBUG] \u2728 [${this.name}] ${message}`, ...args);
|
|
39
|
+
}
|
|
33
40
|
}
|
|
34
|
-
|
|
41
|
+
info(message, ...args) {
|
|
42
|
+
const time = (/* @__PURE__ */ new Date()).toISOString();
|
|
43
|
+
console.info(`[${time}] \u2139\uFE0F [INFO] \u2728 [${this.name}] ${message}`, ...args);
|
|
44
|
+
}
|
|
45
|
+
warn(message, ...args) {
|
|
46
|
+
const time = (/* @__PURE__ */ new Date()).toISOString();
|
|
47
|
+
console.warn(`[${time}] \u{1F6A7} [WARN] \u2728 [${this.name}] ${message}`, ...args);
|
|
48
|
+
}
|
|
49
|
+
error(message, ...args) {
|
|
50
|
+
const time = (/* @__PURE__ */ new Date()).toISOString();
|
|
51
|
+
console.error(`[${time}] \u274C [ERROR] \u2728 [${this.name}] ${message}`, ...args);
|
|
52
|
+
}
|
|
53
|
+
}, _class);
|
|
35
54
|
}
|
|
36
55
|
});
|
|
37
56
|
|
|
38
57
|
// src/tools/base/base-tool.ts
|
|
39
|
-
var BaseTool;
|
|
58
|
+
var logger2, BaseTool;
|
|
40
59
|
var init_base_tool = __esm({
|
|
41
60
|
"src/tools/base/base-tool.ts"() {
|
|
42
|
-
|
|
43
|
-
|
|
61
|
+
init_logger();
|
|
62
|
+
logger2 = new Logger({ name: "BaseTool" });
|
|
63
|
+
BaseTool = exports.BaseTool = (_class2 = class {
|
|
44
64
|
/**
|
|
45
65
|
* Name of the tool
|
|
46
66
|
*/
|
|
@@ -64,15 +84,15 @@ var init_base_tool = __esm({
|
|
|
64
84
|
/**
|
|
65
85
|
* Base delay for retry in ms (will be used with exponential backoff)
|
|
66
86
|
*/
|
|
67
|
-
|
|
87
|
+
__init3() {this.baseRetryDelay = 1e3}
|
|
68
88
|
/**
|
|
69
89
|
* Maximum delay for retry in ms
|
|
70
90
|
*/
|
|
71
|
-
|
|
91
|
+
__init4() {this.maxRetryDelay = 1e4}
|
|
72
92
|
/**
|
|
73
93
|
* Constructor for BaseTool
|
|
74
94
|
*/
|
|
75
|
-
constructor(config) {;
|
|
95
|
+
constructor(config) {;_class2.prototype.__init3.call(this);_class2.prototype.__init4.call(this);
|
|
76
96
|
this.name = config.name;
|
|
77
97
|
this.description = config.description;
|
|
78
98
|
this.isLongRunning = config.isLongRunning || false;
|
|
@@ -129,8 +149,8 @@ var init_base_tool = __esm({
|
|
|
129
149
|
while (attempts <= (this.shouldRetryOnFailure ? this.maxRetryAttempts : 0)) {
|
|
130
150
|
try {
|
|
131
151
|
if (attempts > 0) {
|
|
132
|
-
|
|
133
|
-
`
|
|
152
|
+
logger2.debug(
|
|
153
|
+
`Retrying tool ${this.name} (attempt ${attempts} of ${this.maxRetryAttempts})...`
|
|
134
154
|
);
|
|
135
155
|
const delay = Math.min(
|
|
136
156
|
this.baseRetryDelay * 2 ** (attempts - 1) + Math.random() * 1e3,
|
|
@@ -152,7 +172,7 @@ var init_base_tool = __esm({
|
|
|
152
172
|
tool: this.name
|
|
153
173
|
};
|
|
154
174
|
}
|
|
155
|
-
},
|
|
175
|
+
}, _class2);
|
|
156
176
|
}
|
|
157
177
|
});
|
|
158
178
|
|
|
@@ -278,9 +298,9 @@ var init_function_tool = __esm({
|
|
|
278
298
|
"src/tools/function/function-tool.ts"() {
|
|
279
299
|
init_base_tool();
|
|
280
300
|
init_function_utils();
|
|
281
|
-
FunctionTool = exports.FunctionTool = (
|
|
301
|
+
FunctionTool = exports.FunctionTool = (_class3 = class extends BaseTool {
|
|
282
302
|
|
|
283
|
-
|
|
303
|
+
__init5() {this.mandatoryArgs = []}
|
|
284
304
|
/**
|
|
285
305
|
* Creates a new FunctionTool wrapping the provided function.
|
|
286
306
|
*
|
|
@@ -296,7 +316,7 @@ var init_function_tool = __esm({
|
|
|
296
316
|
isLongRunning: _optionalChain([options, 'optionalAccess', _8 => _8.isLongRunning]) || false,
|
|
297
317
|
shouldRetryOnFailure: _optionalChain([options, 'optionalAccess', _9 => _9.shouldRetryOnFailure]) || false,
|
|
298
318
|
maxRetryAttempts: _optionalChain([options, 'optionalAccess', _10 => _10.maxRetryAttempts]) || 3
|
|
299
|
-
});
|
|
319
|
+
});_class3.prototype.__init5.call(this);;
|
|
300
320
|
this.func = func;
|
|
301
321
|
this.mandatoryArgs = this.getMandatoryArgs(func);
|
|
302
322
|
}
|
|
@@ -372,7 +392,7 @@ You could retry calling this tool, but it is IMPORTANT for you to provide all th
|
|
|
372
392
|
getMissingMandatoryArgs(args) {
|
|
373
393
|
return this.mandatoryArgs.filter((arg) => !(arg in args));
|
|
374
394
|
}
|
|
375
|
-
},
|
|
395
|
+
}, _class3);
|
|
376
396
|
}
|
|
377
397
|
});
|
|
378
398
|
|
|
@@ -475,11 +495,12 @@ var BaseAgent = class {
|
|
|
475
495
|
};
|
|
476
496
|
|
|
477
497
|
// src/agents/llm-agent.ts
|
|
478
|
-
|
|
498
|
+
init_logger();
|
|
479
499
|
|
|
480
500
|
// src/models/llm-registry.ts
|
|
481
|
-
|
|
482
|
-
var
|
|
501
|
+
init_logger();
|
|
502
|
+
var logger = new Logger({ name: "LLMRegistry" });
|
|
503
|
+
var LLMRegistry = (_class4 = class _LLMRegistry {
|
|
483
504
|
/**
|
|
484
505
|
* Map of model name regex to LLM class
|
|
485
506
|
*/
|
|
@@ -535,12 +556,12 @@ var LLMRegistry = (_class3 = class _LLMRegistry {
|
|
|
535
556
|
* Logs all registered models for debugging
|
|
536
557
|
*/
|
|
537
558
|
static logRegisteredModels() {
|
|
538
|
-
|
|
559
|
+
logger.debug(
|
|
539
560
|
"Registered LLM models:",
|
|
540
561
|
[..._LLMRegistry.llmRegistry.entries()].map(([regex]) => regex.toString())
|
|
541
562
|
);
|
|
542
563
|
}
|
|
543
|
-
},
|
|
564
|
+
}, _class4.__initStatic(), _class4);
|
|
544
565
|
|
|
545
566
|
// src/models/llm-request.ts
|
|
546
567
|
var LLMRequest = class {
|
|
@@ -559,7 +580,7 @@ var LLMRequest = class {
|
|
|
559
580
|
};
|
|
560
581
|
|
|
561
582
|
// src/tools/tool-context.ts
|
|
562
|
-
var ToolContext = (
|
|
583
|
+
var ToolContext = (_class5 = class {
|
|
563
584
|
/**
|
|
564
585
|
* The parent invocation context
|
|
565
586
|
*/
|
|
@@ -575,11 +596,11 @@ var ToolContext = (_class4 = class {
|
|
|
575
596
|
/**
|
|
576
597
|
* Tool name
|
|
577
598
|
*/
|
|
578
|
-
|
|
599
|
+
__init6() {this.toolName = ""}
|
|
579
600
|
/**
|
|
580
601
|
* Tool ID
|
|
581
602
|
*/
|
|
582
|
-
|
|
603
|
+
__init7() {this.toolId = ""}
|
|
583
604
|
/**
|
|
584
605
|
* Variables stored in the context
|
|
585
606
|
*/
|
|
@@ -587,7 +608,7 @@ var ToolContext = (_class4 = class {
|
|
|
587
608
|
/**
|
|
588
609
|
* Constructor for ToolContext
|
|
589
610
|
*/
|
|
590
|
-
constructor(options) {;
|
|
611
|
+
constructor(options) {;_class5.prototype.__init6.call(this);_class5.prototype.__init7.call(this);
|
|
591
612
|
this.invocationContext = options.invocationContext;
|
|
592
613
|
this.auth = options.auth;
|
|
593
614
|
this.parameters = options.parameters || {};
|
|
@@ -653,7 +674,7 @@ var ToolContext = (_class4 = class {
|
|
|
653
674
|
async searchMemory(query, options) {
|
|
654
675
|
return this.invocationContext.searchMemory(query, options);
|
|
655
676
|
}
|
|
656
|
-
},
|
|
677
|
+
}, _class5);
|
|
657
678
|
|
|
658
679
|
// src/agents/run-config.ts
|
|
659
680
|
var StreamingMode = /* @__PURE__ */ ((StreamingMode2) => {
|
|
@@ -705,7 +726,7 @@ var RunConfig = class {
|
|
|
705
726
|
};
|
|
706
727
|
|
|
707
728
|
// src/agents/invocation-context.ts
|
|
708
|
-
var InvocationContext = (
|
|
729
|
+
var InvocationContext = (_class6 = class _InvocationContext {
|
|
709
730
|
/**
|
|
710
731
|
* Unique session ID for the current conversation
|
|
711
732
|
*/
|
|
@@ -745,11 +766,11 @@ var InvocationContext = (_class5 = class _InvocationContext {
|
|
|
745
766
|
/**
|
|
746
767
|
* In-memory storage for node execution results
|
|
747
768
|
*/
|
|
748
|
-
|
|
769
|
+
__init8() {this.memory = /* @__PURE__ */ new Map()}
|
|
749
770
|
/**
|
|
750
771
|
* Constructor for InvocationContext
|
|
751
772
|
*/
|
|
752
|
-
constructor(options = {}) {;
|
|
773
|
+
constructor(options = {}) {;_class6.prototype.__init8.call(this);
|
|
753
774
|
this.sessionId = options.sessionId || this.generateSessionId();
|
|
754
775
|
this.messages = options.messages || [];
|
|
755
776
|
this.config = options.config || new RunConfig();
|
|
@@ -852,10 +873,10 @@ var InvocationContext = (_class5 = class _InvocationContext {
|
|
|
852
873
|
};
|
|
853
874
|
return await this.memoryService.searchMemory(query, searchOptions);
|
|
854
875
|
}
|
|
855
|
-
},
|
|
876
|
+
}, _class6);
|
|
856
877
|
|
|
857
878
|
// src/agents/llm-agent.ts
|
|
858
|
-
var Agent = class extends BaseAgent {
|
|
879
|
+
var Agent = (_class7 = class extends BaseAgent {
|
|
859
880
|
/**
|
|
860
881
|
* The LLM model to use
|
|
861
882
|
*/
|
|
@@ -904,6 +925,7 @@ var Agent = class extends BaseAgent {
|
|
|
904
925
|
* The minimum relevance score for memory augmentation (0-1)
|
|
905
926
|
*/
|
|
906
927
|
|
|
928
|
+
__init9() {this.logger = new Logger({ name: "LlmAgent" })}
|
|
907
929
|
/**
|
|
908
930
|
* Constructor for Agent
|
|
909
931
|
*/
|
|
@@ -911,7 +933,7 @@ var Agent = class extends BaseAgent {
|
|
|
911
933
|
super({
|
|
912
934
|
name: config.name,
|
|
913
935
|
description: config.description
|
|
914
|
-
});
|
|
936
|
+
});_class7.prototype.__init9.call(this);;
|
|
915
937
|
this.model = config.model;
|
|
916
938
|
this.instructions = config.instructions;
|
|
917
939
|
this.tools = config.tools || [];
|
|
@@ -936,7 +958,7 @@ var Agent = class extends BaseAgent {
|
|
|
936
958
|
*/
|
|
937
959
|
async executeTool(toolCall, context) {
|
|
938
960
|
const { name, arguments: argsString } = toolCall.function;
|
|
939
|
-
|
|
961
|
+
this.logger.debug(`Executing tool: ${name}`);
|
|
940
962
|
const tool = this.findTool(name);
|
|
941
963
|
if (!tool) {
|
|
942
964
|
console.warn(`Tool '${name}' not found`);
|
|
@@ -954,7 +976,7 @@ var Agent = class extends BaseAgent {
|
|
|
954
976
|
toolContext.toolName = name;
|
|
955
977
|
toolContext.toolId = toolCall.id;
|
|
956
978
|
const result = await tool.runAsync(args, toolContext);
|
|
957
|
-
|
|
979
|
+
this.logger.debug(`Tool ${name} execution complete`);
|
|
958
980
|
return {
|
|
959
981
|
name,
|
|
960
982
|
result: typeof result === "string" ? result : JSON.stringify(result)
|
|
@@ -1123,7 +1145,7 @@ ${relevantInfo.join("\n\n")}`
|
|
|
1123
1145
|
let stepCount = 0;
|
|
1124
1146
|
while (stepCount < this.maxToolExecutionSteps) {
|
|
1125
1147
|
stepCount++;
|
|
1126
|
-
|
|
1148
|
+
this.logger.debug(`Step ${stepCount}: Thinking...`);
|
|
1127
1149
|
const llmRequest = new LLMRequest({
|
|
1128
1150
|
messages: context.messages,
|
|
1129
1151
|
config: {
|
|
@@ -1140,7 +1162,9 @@ ${relevantInfo.join("\n\n")}`
|
|
|
1140
1162
|
throw new Error("No response from LLM");
|
|
1141
1163
|
}
|
|
1142
1164
|
if (currentResponse.tool_calls && currentResponse.tool_calls.length > 0) {
|
|
1143
|
-
|
|
1165
|
+
this.logger.debug(
|
|
1166
|
+
`Tool calls: ${JSON.stringify(currentResponse.tool_calls)}`
|
|
1167
|
+
);
|
|
1144
1168
|
context.addMessage({
|
|
1145
1169
|
role: "assistant",
|
|
1146
1170
|
content: currentResponse.content || "",
|
|
@@ -1159,7 +1183,7 @@ ${relevantInfo.join("\n\n")}`
|
|
|
1159
1183
|
});
|
|
1160
1184
|
}
|
|
1161
1185
|
} else {
|
|
1162
|
-
|
|
1186
|
+
this.logger.debug("No tool calls, finishing...");
|
|
1163
1187
|
context.addMessage({
|
|
1164
1188
|
role: "assistant",
|
|
1165
1189
|
content: currentResponse.content || ""
|
|
@@ -1197,7 +1221,7 @@ ${relevantInfo.join("\n\n")}`
|
|
|
1197
1221
|
let stepCount = 0;
|
|
1198
1222
|
let hadToolCalls = false;
|
|
1199
1223
|
while (stepCount < this.maxToolExecutionSteps) {
|
|
1200
|
-
|
|
1224
|
+
this.logger.debug(`Step ${stepCount}: Thinking...`);
|
|
1201
1225
|
const toolDeclarations = this.tools.map((tool) => tool.getDeclaration()).filter((declaration) => declaration !== null);
|
|
1202
1226
|
const request = {
|
|
1203
1227
|
messages: context.messages,
|
|
@@ -1226,10 +1250,10 @@ ${relevantInfo.join("\n\n")}`
|
|
|
1226
1250
|
function_call: finalResponse.function_call
|
|
1227
1251
|
});
|
|
1228
1252
|
if (!hadToolCalls) {
|
|
1229
|
-
|
|
1253
|
+
this.logger.debug("No tool calls, finishing...");
|
|
1230
1254
|
break;
|
|
1231
1255
|
}
|
|
1232
|
-
|
|
1256
|
+
this.logger.debug(`Step ${stepCount + 1}: Executing tools...`);
|
|
1233
1257
|
stepCount++;
|
|
1234
1258
|
if (finalResponse.function_call) {
|
|
1235
1259
|
const toolCall = {
|
|
@@ -1246,8 +1270,8 @@ ${relevantInfo.join("\n\n")}`
|
|
|
1246
1270
|
content: JSON.stringify(result.result)
|
|
1247
1271
|
});
|
|
1248
1272
|
} else if (finalResponse.tool_calls && finalResponse.tool_calls.length > 0) {
|
|
1249
|
-
|
|
1250
|
-
`
|
|
1273
|
+
this.logger.debug(
|
|
1274
|
+
`Step ${stepCount + 1}: Executing ${finalResponse.tool_calls.length} tool(s)...`
|
|
1251
1275
|
);
|
|
1252
1276
|
context.messages.pop();
|
|
1253
1277
|
context.addMessage({
|
|
@@ -1270,11 +1294,12 @@ ${relevantInfo.join("\n\n")}`
|
|
|
1270
1294
|
}
|
|
1271
1295
|
await this.saveToMemory(context);
|
|
1272
1296
|
}
|
|
1273
|
-
};
|
|
1297
|
+
}, _class7);
|
|
1274
1298
|
|
|
1275
1299
|
// src/agents/sequential-agent.ts
|
|
1276
|
-
|
|
1277
|
-
var SequentialAgent = class extends BaseAgent {
|
|
1300
|
+
init_logger();
|
|
1301
|
+
var SequentialAgent = (_class8 = class extends BaseAgent {
|
|
1302
|
+
__init10() {this.logger = new Logger({ name: "SequentialAgent" })}
|
|
1278
1303
|
/**
|
|
1279
1304
|
* Constructor for SequentialAgent
|
|
1280
1305
|
*/
|
|
@@ -1282,7 +1307,7 @@ var SequentialAgent = class extends BaseAgent {
|
|
|
1282
1307
|
super({
|
|
1283
1308
|
name: config.name,
|
|
1284
1309
|
description: config.description
|
|
1285
|
-
});
|
|
1310
|
+
});_class8.prototype.__init10.call(this);;
|
|
1286
1311
|
if (config.agents && config.agents.length > 0) {
|
|
1287
1312
|
for (const agent of config.agents) {
|
|
1288
1313
|
this.addSubAgent(agent);
|
|
@@ -1294,8 +1319,8 @@ var SequentialAgent = class extends BaseAgent {
|
|
|
1294
1319
|
* Executes sub-agents sequentially, passing output from one to the next
|
|
1295
1320
|
*/
|
|
1296
1321
|
async run(options) {
|
|
1297
|
-
|
|
1298
|
-
`
|
|
1322
|
+
this.logger.debug(
|
|
1323
|
+
`Running ${this.subAgents.length} sub-agents in sequence`
|
|
1299
1324
|
);
|
|
1300
1325
|
if (this.subAgents.length === 0) {
|
|
1301
1326
|
return {
|
|
@@ -1312,8 +1337,8 @@ var SequentialAgent = class extends BaseAgent {
|
|
|
1312
1337
|
let finalResponse = null;
|
|
1313
1338
|
for (let i = 0; i < this.subAgents.length; i++) {
|
|
1314
1339
|
const agent = this.subAgents[i];
|
|
1315
|
-
|
|
1316
|
-
`
|
|
1340
|
+
this.logger.debug(
|
|
1341
|
+
`Running sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
|
|
1317
1342
|
);
|
|
1318
1343
|
try {
|
|
1319
1344
|
const response = await agent.run({
|
|
@@ -1329,10 +1354,7 @@ var SequentialAgent = class extends BaseAgent {
|
|
|
1329
1354
|
});
|
|
1330
1355
|
}
|
|
1331
1356
|
} catch (error) {
|
|
1332
|
-
console.error(
|
|
1333
|
-
`[SequentialAgent] Error in sub-agent ${agent.name}:`,
|
|
1334
|
-
error
|
|
1335
|
-
);
|
|
1357
|
+
console.error(`Error in sub-agent ${agent.name}:`, error);
|
|
1336
1358
|
return {
|
|
1337
1359
|
content: `Error in sub-agent ${agent.name}: ${error instanceof Error ? error.message : String(error)}`,
|
|
1338
1360
|
role: "assistant",
|
|
@@ -1370,8 +1392,8 @@ var SequentialAgent = class extends BaseAgent {
|
|
|
1370
1392
|
* Streams responses from each sub-agent in sequence
|
|
1371
1393
|
*/
|
|
1372
1394
|
async *runStreaming(options) {
|
|
1373
|
-
|
|
1374
|
-
`
|
|
1395
|
+
this.logger.debug(
|
|
1396
|
+
`Streaming ${this.subAgents.length} sub-agents in sequence`
|
|
1375
1397
|
);
|
|
1376
1398
|
if (this.subAgents.length === 0) {
|
|
1377
1399
|
yield {
|
|
@@ -1388,8 +1410,8 @@ var SequentialAgent = class extends BaseAgent {
|
|
|
1388
1410
|
const currentMessages = [...options.messages];
|
|
1389
1411
|
for (let i = 0; i < this.subAgents.length; i++) {
|
|
1390
1412
|
const agent = this.subAgents[i];
|
|
1391
|
-
|
|
1392
|
-
`
|
|
1413
|
+
this.logger.debug(
|
|
1414
|
+
`Streaming sub-agent ${i + 1}/${this.subAgents.length}: ${agent.name}`
|
|
1393
1415
|
);
|
|
1394
1416
|
try {
|
|
1395
1417
|
const streamGenerator = agent.runStreaming({
|
|
@@ -1422,10 +1444,7 @@ var SequentialAgent = class extends BaseAgent {
|
|
|
1422
1444
|
});
|
|
1423
1445
|
}
|
|
1424
1446
|
} catch (error) {
|
|
1425
|
-
console.error(
|
|
1426
|
-
`[SequentialAgent] Error in streaming sub-agent ${agent.name}:`,
|
|
1427
|
-
error
|
|
1428
|
-
);
|
|
1447
|
+
console.error(`Error in streaming sub-agent ${agent.name}:`, error);
|
|
1429
1448
|
yield {
|
|
1430
1449
|
content: `Error in sub-agent ${agent.name}: ${error instanceof Error ? error.message : String(error)}`,
|
|
1431
1450
|
role: "assistant",
|
|
@@ -1440,11 +1459,12 @@ var SequentialAgent = class extends BaseAgent {
|
|
|
1440
1459
|
}
|
|
1441
1460
|
}
|
|
1442
1461
|
}
|
|
1443
|
-
};
|
|
1462
|
+
}, _class8);
|
|
1444
1463
|
|
|
1445
1464
|
// src/agents/parallel-agent.ts
|
|
1446
|
-
|
|
1447
|
-
var ParallelAgent = class extends BaseAgent {
|
|
1465
|
+
init_logger();
|
|
1466
|
+
var ParallelAgent = (_class9 = class extends BaseAgent {
|
|
1467
|
+
__init11() {this.logger = new Logger({ name: "ParallelAgent" })}
|
|
1448
1468
|
/**
|
|
1449
1469
|
* Constructor for ParallelAgent
|
|
1450
1470
|
*/
|
|
@@ -1452,7 +1472,7 @@ var ParallelAgent = class extends BaseAgent {
|
|
|
1452
1472
|
super({
|
|
1453
1473
|
name: config.name,
|
|
1454
1474
|
description: config.description
|
|
1455
|
-
});
|
|
1475
|
+
});_class9.prototype.__init11.call(this);;
|
|
1456
1476
|
if (config.agents && config.agents.length > 0) {
|
|
1457
1477
|
for (const agent of config.agents) {
|
|
1458
1478
|
this.addSubAgent(agent);
|
|
@@ -1464,8 +1484,8 @@ var ParallelAgent = class extends BaseAgent {
|
|
|
1464
1484
|
* Executes all sub-agents in parallel
|
|
1465
1485
|
*/
|
|
1466
1486
|
async run(options) {
|
|
1467
|
-
|
|
1468
|
-
`
|
|
1487
|
+
this.logger.debug(
|
|
1488
|
+
`Running ${this.subAgents.length} sub-agents in parallel`
|
|
1469
1489
|
);
|
|
1470
1490
|
if (this.subAgents.length === 0) {
|
|
1471
1491
|
return {
|
|
@@ -1478,10 +1498,7 @@ var ParallelAgent = class extends BaseAgent {
|
|
|
1478
1498
|
messages: options.messages,
|
|
1479
1499
|
config: options.config
|
|
1480
1500
|
}).catch((error) => {
|
|
1481
|
-
console.error(
|
|
1482
|
-
`[ParallelAgent] Error in sub-agent ${agent.name}:`,
|
|
1483
|
-
error
|
|
1484
|
-
);
|
|
1501
|
+
console.error(`Error in sub-agent ${agent.name}:`, error);
|
|
1485
1502
|
return {
|
|
1486
1503
|
content: `Error in sub-agent ${agent.name}: ${error instanceof Error ? error.message : String(error)}`,
|
|
1487
1504
|
role: "assistant"
|
|
@@ -1509,8 +1526,8 @@ ${result.content || "No content"}
|
|
|
1509
1526
|
* Collects streaming responses from all sub-agents
|
|
1510
1527
|
*/
|
|
1511
1528
|
async *runStreaming(options) {
|
|
1512
|
-
|
|
1513
|
-
`
|
|
1529
|
+
this.logger.debug(
|
|
1530
|
+
`Streaming ${this.subAgents.length} sub-agents in parallel`
|
|
1514
1531
|
);
|
|
1515
1532
|
if (this.subAgents.length === 0) {
|
|
1516
1533
|
yield {
|
|
@@ -1524,10 +1541,7 @@ ${result.content || "No content"}
|
|
|
1524
1541
|
messages: options.messages,
|
|
1525
1542
|
config: options.config
|
|
1526
1543
|
}).catch((error) => {
|
|
1527
|
-
console.error(
|
|
1528
|
-
`[ParallelAgent] Error in sub-agent ${agent.name}:`,
|
|
1529
|
-
error
|
|
1530
|
-
);
|
|
1544
|
+
console.error(`Error in sub-agent ${agent.name}:`, error);
|
|
1531
1545
|
return {
|
|
1532
1546
|
content: `Error in sub-agent ${agent.name}: ${error instanceof Error ? error.message : String(error)}`,
|
|
1533
1547
|
role: "assistant"
|
|
@@ -1574,11 +1588,11 @@ ${response.content || "No content"}
|
|
|
1574
1588
|
};
|
|
1575
1589
|
}
|
|
1576
1590
|
}
|
|
1577
|
-
};
|
|
1591
|
+
}, _class9);
|
|
1578
1592
|
|
|
1579
1593
|
// src/agents/loop-agent.ts
|
|
1580
|
-
|
|
1581
|
-
var LoopAgent = class extends BaseAgent {
|
|
1594
|
+
init_logger();
|
|
1595
|
+
var LoopAgent = (_class10 = class extends BaseAgent {
|
|
1582
1596
|
/**
|
|
1583
1597
|
* Maximum number of iterations to prevent infinite loops
|
|
1584
1598
|
*/
|
|
@@ -1591,6 +1605,7 @@ var LoopAgent = class extends BaseAgent {
|
|
|
1591
1605
|
* Custom condition check function
|
|
1592
1606
|
*/
|
|
1593
1607
|
|
|
1608
|
+
__init12() {this.logger = new Logger({ name: "LoopAgent" })}
|
|
1594
1609
|
/**
|
|
1595
1610
|
* Constructor for LoopAgent
|
|
1596
1611
|
*/
|
|
@@ -1598,7 +1613,7 @@ var LoopAgent = class extends BaseAgent {
|
|
|
1598
1613
|
super({
|
|
1599
1614
|
name: config.name,
|
|
1600
1615
|
description: config.description
|
|
1601
|
-
});
|
|
1616
|
+
});_class10.prototype.__init12.call(this);;
|
|
1602
1617
|
this.maxIterations = config.maxIterations || 10;
|
|
1603
1618
|
this.conditionAgent = config.conditionAgent;
|
|
1604
1619
|
this.conditionCheck = config.conditionCheck;
|
|
@@ -1618,19 +1633,19 @@ var LoopAgent = class extends BaseAgent {
|
|
|
1618
1633
|
*/
|
|
1619
1634
|
async shouldContinue(response, iterationCount, messages, config) {
|
|
1620
1635
|
if (iterationCount >= this.maxIterations) {
|
|
1621
|
-
|
|
1622
|
-
`
|
|
1636
|
+
this.logger.debug(
|
|
1637
|
+
`Maximum iterations (${this.maxIterations}) reached. Stopping loop.`
|
|
1623
1638
|
);
|
|
1624
1639
|
return false;
|
|
1625
1640
|
}
|
|
1626
1641
|
if (this.conditionCheck) {
|
|
1627
1642
|
const shouldContinue = await this.conditionCheck(response);
|
|
1628
|
-
|
|
1643
|
+
this.logger.debug(`Custom condition check result: ${shouldContinue}`);
|
|
1629
1644
|
return shouldContinue;
|
|
1630
1645
|
}
|
|
1631
1646
|
if (this.conditionAgent) {
|
|
1632
|
-
|
|
1633
|
-
`
|
|
1647
|
+
this.logger.debug(
|
|
1648
|
+
`Using condition agent ${this.conditionAgent.name} to check loop condition`
|
|
1634
1649
|
);
|
|
1635
1650
|
const conditionMessages = [
|
|
1636
1651
|
...messages,
|
|
@@ -1650,12 +1665,12 @@ var LoopAgent = class extends BaseAgent {
|
|
|
1650
1665
|
});
|
|
1651
1666
|
const content = _optionalChain([conditionResponse, 'access', _21 => _21.content, 'optionalAccess', _22 => _22.toLowerCase, 'call', _23 => _23()]) || "";
|
|
1652
1667
|
const shouldContinue = content.includes("yes") && !content.includes("no");
|
|
1653
|
-
|
|
1654
|
-
`
|
|
1668
|
+
this.logger.debug(
|
|
1669
|
+
`Condition agent result: ${shouldContinue ? "Continue loop" : "Stop loop"}`
|
|
1655
1670
|
);
|
|
1656
1671
|
return shouldContinue;
|
|
1657
1672
|
} catch (error) {
|
|
1658
|
-
console.error("
|
|
1673
|
+
console.error("Error in condition agent:", error);
|
|
1659
1674
|
return false;
|
|
1660
1675
|
}
|
|
1661
1676
|
}
|
|
@@ -1666,8 +1681,8 @@ var LoopAgent = class extends BaseAgent {
|
|
|
1666
1681
|
* Executes the sub-agent in a loop until the condition is met
|
|
1667
1682
|
*/
|
|
1668
1683
|
async run(options) {
|
|
1669
|
-
|
|
1670
|
-
`
|
|
1684
|
+
this.logger.debug(
|
|
1685
|
+
`Starting loop with max ${this.maxIterations} iterations`
|
|
1671
1686
|
);
|
|
1672
1687
|
if (this.subAgents.length === 0) {
|
|
1673
1688
|
return {
|
|
@@ -1682,8 +1697,8 @@ var LoopAgent = class extends BaseAgent {
|
|
|
1682
1697
|
let shouldContinueLoop = true;
|
|
1683
1698
|
while (shouldContinueLoop && iterationCount < this.maxIterations) {
|
|
1684
1699
|
iterationCount++;
|
|
1685
|
-
|
|
1686
|
-
`
|
|
1700
|
+
this.logger.debug(
|
|
1701
|
+
`Running iteration ${iterationCount}/${this.maxIterations}`
|
|
1687
1702
|
);
|
|
1688
1703
|
try {
|
|
1689
1704
|
const response = await subAgent.run({
|
|
@@ -1708,10 +1723,7 @@ var LoopAgent = class extends BaseAgent {
|
|
|
1708
1723
|
});
|
|
1709
1724
|
}
|
|
1710
1725
|
} catch (error) {
|
|
1711
|
-
console.error(
|
|
1712
|
-
`[LoopAgent] Error in loop iteration ${iterationCount}:`,
|
|
1713
|
-
error
|
|
1714
|
-
);
|
|
1726
|
+
console.error(`Error in loop iteration ${iterationCount}:`, error);
|
|
1715
1727
|
break;
|
|
1716
1728
|
}
|
|
1717
1729
|
}
|
|
@@ -1732,8 +1744,8 @@ ${lastResponse.content || ""}`,
|
|
|
1732
1744
|
* Runs the agent with streaming support
|
|
1733
1745
|
*/
|
|
1734
1746
|
async *runStreaming(options) {
|
|
1735
|
-
|
|
1736
|
-
`
|
|
1747
|
+
this.logger.debug(
|
|
1748
|
+
`Starting loop with max ${this.maxIterations} iterations (streaming)`
|
|
1737
1749
|
);
|
|
1738
1750
|
if (this.subAgents.length === 0) {
|
|
1739
1751
|
yield {
|
|
@@ -1753,8 +1765,8 @@ ${lastResponse.content || ""}`,
|
|
|
1753
1765
|
};
|
|
1754
1766
|
while (shouldContinueLoop && iterationCount < this.maxIterations) {
|
|
1755
1767
|
iterationCount++;
|
|
1756
|
-
|
|
1757
|
-
`
|
|
1768
|
+
this.logger.debug(
|
|
1769
|
+
`Running iteration ${iterationCount}/${this.maxIterations} (streaming)`
|
|
1758
1770
|
);
|
|
1759
1771
|
yield {
|
|
1760
1772
|
content: `Running iteration ${iterationCount}/${this.maxIterations}...`,
|
|
@@ -1779,8 +1791,8 @@ ${lastResponse.content || ""}`,
|
|
|
1779
1791
|
}
|
|
1780
1792
|
}
|
|
1781
1793
|
if (!lastChunk) {
|
|
1782
|
-
|
|
1783
|
-
`
|
|
1794
|
+
this.logger.debug(
|
|
1795
|
+
`No complete chunk received from iteration ${iterationCount}`
|
|
1784
1796
|
);
|
|
1785
1797
|
shouldContinueLoop = false;
|
|
1786
1798
|
continue;
|
|
@@ -1807,8 +1819,8 @@ ${lastResponse.content || ""}`,
|
|
|
1807
1819
|
};
|
|
1808
1820
|
}
|
|
1809
1821
|
} catch (error) {
|
|
1810
|
-
|
|
1811
|
-
`
|
|
1822
|
+
this.logger.debug(
|
|
1823
|
+
`Error in loop iteration ${iterationCount}: ${error instanceof Error ? error.message : String(error)}`
|
|
1812
1824
|
);
|
|
1813
1825
|
yield {
|
|
1814
1826
|
content: `Error in loop iteration ${iterationCount}: ${error instanceof Error ? error.message : String(error)}`,
|
|
@@ -1822,11 +1834,11 @@ ${lastResponse.content || ""}`,
|
|
|
1822
1834
|
role: "assistant"
|
|
1823
1835
|
};
|
|
1824
1836
|
}
|
|
1825
|
-
};
|
|
1837
|
+
}, _class10);
|
|
1826
1838
|
|
|
1827
1839
|
// src/agents/lang-graph-agent.ts
|
|
1828
|
-
|
|
1829
|
-
var LangGraphAgent = (
|
|
1840
|
+
init_logger();
|
|
1841
|
+
var LangGraphAgent = (_class11 = class extends BaseAgent {
|
|
1830
1842
|
/**
|
|
1831
1843
|
* Graph nodes (agents and their connections)
|
|
1832
1844
|
*/
|
|
@@ -1842,7 +1854,8 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
|
|
|
1842
1854
|
/**
|
|
1843
1855
|
* Results from node executions
|
|
1844
1856
|
*/
|
|
1845
|
-
|
|
1857
|
+
__init13() {this.results = []}
|
|
1858
|
+
__init14() {this.logger = new Logger({ name: "LangGraphAgent" })}
|
|
1846
1859
|
/**
|
|
1847
1860
|
* Constructor for LangGraphAgent
|
|
1848
1861
|
*/
|
|
@@ -1850,7 +1863,7 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
|
|
|
1850
1863
|
super({
|
|
1851
1864
|
name: config.name,
|
|
1852
1865
|
description: config.description
|
|
1853
|
-
});
|
|
1866
|
+
});_class11.prototype.__init13.call(this);_class11.prototype.__init14.call(this);;
|
|
1854
1867
|
this.nodes = /* @__PURE__ */ new Map();
|
|
1855
1868
|
for (const node of config.nodes) {
|
|
1856
1869
|
if (this.nodes.has(node.name)) {
|
|
@@ -1917,15 +1930,13 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
|
|
|
1917
1930
|
for (const targetName of currentNode.targets) {
|
|
1918
1931
|
const targetNode = this.nodes.get(targetName);
|
|
1919
1932
|
if (!targetNode) {
|
|
1920
|
-
console.error(`
|
|
1933
|
+
console.error(`Target node "${targetName}" not found`);
|
|
1921
1934
|
continue;
|
|
1922
1935
|
}
|
|
1923
1936
|
if (targetNode.condition) {
|
|
1924
1937
|
const shouldExecute = await targetNode.condition(result, context);
|
|
1925
1938
|
if (!shouldExecute) {
|
|
1926
|
-
|
|
1927
|
-
`[LangGraphAgent] Skipping node "${targetName}" due to condition`
|
|
1928
|
-
);
|
|
1939
|
+
this.logger.debug(`Skipping node "${targetName}" due to condition`);
|
|
1929
1940
|
continue;
|
|
1930
1941
|
}
|
|
1931
1942
|
}
|
|
@@ -1950,9 +1961,7 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
|
|
|
1950
1961
|
};
|
|
1951
1962
|
const shouldExecute = await node.condition(mockResponse, mockContext);
|
|
1952
1963
|
if (!shouldExecute) {
|
|
1953
|
-
|
|
1954
|
-
`[LangGraphAgent] Skipping node "${targetName}" due to condition`
|
|
1955
|
-
);
|
|
1964
|
+
this.logger.debug(`Skipping node "${targetName}" due to condition`);
|
|
1956
1965
|
}
|
|
1957
1966
|
return { shouldExecute };
|
|
1958
1967
|
}
|
|
@@ -1965,8 +1974,8 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
|
|
|
1965
1974
|
messages: options.messages,
|
|
1966
1975
|
config: options.config
|
|
1967
1976
|
});
|
|
1968
|
-
|
|
1969
|
-
`
|
|
1977
|
+
this.logger.debug(
|
|
1978
|
+
`Starting graph execution from root node "${this.rootNode}"`
|
|
1970
1979
|
);
|
|
1971
1980
|
if (this.nodes.size === 0) {
|
|
1972
1981
|
return {
|
|
@@ -1987,9 +1996,7 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
|
|
|
1987
1996
|
while (nodesToExecute.length > 0 && stepCount < this.maxSteps) {
|
|
1988
1997
|
stepCount++;
|
|
1989
1998
|
const { node, messages } = nodesToExecute.shift();
|
|
1990
|
-
|
|
1991
|
-
`[LangGraphAgent] Step ${stepCount}: Executing node "${node.name}"`
|
|
1992
|
-
);
|
|
1999
|
+
this.logger.debug(`Step ${stepCount}: Executing node "${node.name}"`);
|
|
1993
2000
|
executedNodes.push(node.name);
|
|
1994
2001
|
try {
|
|
1995
2002
|
const result = await node.agent.run({
|
|
@@ -2037,7 +2044,7 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
|
|
|
2037
2044
|
});
|
|
2038
2045
|
}
|
|
2039
2046
|
} catch (error) {
|
|
2040
|
-
console.error(`
|
|
2047
|
+
console.error(`Error in node "${node.name}":`, error);
|
|
2041
2048
|
return {
|
|
2042
2049
|
content: `Error in node "${node.name}": ${error instanceof Error ? error.message : String(error)}`,
|
|
2043
2050
|
role: "assistant"
|
|
@@ -2061,8 +2068,8 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
|
|
|
2061
2068
|
messages: options.messages,
|
|
2062
2069
|
config: options.config
|
|
2063
2070
|
});
|
|
2064
|
-
|
|
2065
|
-
`
|
|
2071
|
+
this.logger.debug(
|
|
2072
|
+
`Starting graph execution from root node "${this.rootNode}" (streaming)`
|
|
2066
2073
|
);
|
|
2067
2074
|
if (this.nodes.size === 0) {
|
|
2068
2075
|
yield {
|
|
@@ -2090,8 +2097,8 @@ var LangGraphAgent = (_class6 = class extends BaseAgent {
|
|
|
2090
2097
|
while (nodesToExecute.length > 0 && stepCount < this.maxSteps) {
|
|
2091
2098
|
stepCount++;
|
|
2092
2099
|
const { node, messages } = nodesToExecute.shift();
|
|
2093
|
-
|
|
2094
|
-
`
|
|
2100
|
+
this.logger.debug(
|
|
2101
|
+
`Step ${stepCount}: Executing node "${node.name}" (streaming)`
|
|
2095
2102
|
);
|
|
2096
2103
|
executedNodes.push(node.name);
|
|
2097
2104
|
try {
|
|
@@ -2146,7 +2153,7 @@ Node output: ${this.extractTextContent(result.content)}` : ""}`,
|
|
|
2146
2153
|
});
|
|
2147
2154
|
}
|
|
2148
2155
|
} catch (error) {
|
|
2149
|
-
console.error(`
|
|
2156
|
+
console.error(`Error in node "${node.name}":`, error);
|
|
2150
2157
|
yield {
|
|
2151
2158
|
content: `Error in node "${node.name}": ${error instanceof Error ? error.message : String(error)}`,
|
|
2152
2159
|
role: "assistant"
|
|
@@ -2164,7 +2171,7 @@ Node output: ${this.extractTextContent(result.content)}` : ""}`,
|
|
|
2164
2171
|
};
|
|
2165
2172
|
}
|
|
2166
2173
|
}
|
|
2167
|
-
},
|
|
2174
|
+
}, _class11);
|
|
2168
2175
|
|
|
2169
2176
|
// src/tools/index.ts
|
|
2170
2177
|
var tools_exports = {};
|
|
@@ -2206,9 +2213,10 @@ function createFunctionTool(func, options) {
|
|
|
2206
2213
|
init_function_utils();
|
|
2207
2214
|
|
|
2208
2215
|
// src/tools/common/google-search.ts
|
|
2209
|
-
|
|
2216
|
+
init_logger();
|
|
2210
2217
|
init_base_tool();
|
|
2211
|
-
var GoogleSearch = class extends BaseTool {
|
|
2218
|
+
var GoogleSearch = (_class12 = class extends BaseTool {
|
|
2219
|
+
__init15() {this.logger = new Logger({ name: "GoogleSearch" })}
|
|
2212
2220
|
/**
|
|
2213
2221
|
* Constructor for GoogleSearch
|
|
2214
2222
|
*/
|
|
@@ -2216,7 +2224,7 @@ var GoogleSearch = class extends BaseTool {
|
|
|
2216
2224
|
super({
|
|
2217
2225
|
name: "google_search",
|
|
2218
2226
|
description: "Search the web using Google"
|
|
2219
|
-
});
|
|
2227
|
+
});_class12.prototype.__init15.call(this);;
|
|
2220
2228
|
}
|
|
2221
2229
|
/**
|
|
2222
2230
|
* Get the function declaration for the tool
|
|
@@ -2247,7 +2255,9 @@ var GoogleSearch = class extends BaseTool {
|
|
|
2247
2255
|
* This is a simplified implementation that doesn't actually search, just returns mock results
|
|
2248
2256
|
*/
|
|
2249
2257
|
async runAsync(args, _context) {
|
|
2250
|
-
|
|
2258
|
+
this.logger.debug(
|
|
2259
|
+
`[GoogleSearch] Executing Google search for: ${args.query}`
|
|
2260
|
+
);
|
|
2251
2261
|
return {
|
|
2252
2262
|
results: [
|
|
2253
2263
|
{
|
|
@@ -2263,7 +2273,7 @@ var GoogleSearch = class extends BaseTool {
|
|
|
2263
2273
|
]
|
|
2264
2274
|
};
|
|
2265
2275
|
}
|
|
2266
|
-
};
|
|
2276
|
+
}, _class12);
|
|
2267
2277
|
|
|
2268
2278
|
// src/tools/common/http-request-tool.ts
|
|
2269
2279
|
init_base_tool();
|
|
@@ -2710,9 +2720,10 @@ var UserInteractionTool = class extends BaseTool {
|
|
|
2710
2720
|
};
|
|
2711
2721
|
|
|
2712
2722
|
// src/tools/common/exit-loop-tool.ts
|
|
2713
|
-
|
|
2723
|
+
init_logger();
|
|
2714
2724
|
init_base_tool();
|
|
2715
|
-
var ExitLoopTool = class extends BaseTool {
|
|
2725
|
+
var ExitLoopTool = (_class13 = class extends BaseTool {
|
|
2726
|
+
__init16() {this.logger = new Logger({ name: "ExitLoopTool" })}
|
|
2716
2727
|
/**
|
|
2717
2728
|
* Constructor for ExitLoopTool
|
|
2718
2729
|
*/
|
|
@@ -2720,7 +2731,7 @@ var ExitLoopTool = class extends BaseTool {
|
|
|
2720
2731
|
super({
|
|
2721
2732
|
name: "exit_loop",
|
|
2722
2733
|
description: "Exits the loop. Call this function only when you are instructed to do so."
|
|
2723
|
-
});
|
|
2734
|
+
});_class13.prototype.__init16.call(this);;
|
|
2724
2735
|
}
|
|
2725
2736
|
/**
|
|
2726
2737
|
* Get the function declaration for the tool
|
|
@@ -2740,7 +2751,7 @@ var ExitLoopTool = class extends BaseTool {
|
|
|
2740
2751
|
* Execute the exit loop action
|
|
2741
2752
|
*/
|
|
2742
2753
|
async runAsync(_args, context) {
|
|
2743
|
-
|
|
2754
|
+
this.logger.debug("Executing exit loop tool");
|
|
2744
2755
|
if (context.actions) {
|
|
2745
2756
|
context.actions.escalate = true;
|
|
2746
2757
|
} else {
|
|
@@ -2752,12 +2763,13 @@ var ExitLoopTool = class extends BaseTool {
|
|
|
2752
2763
|
message: "Loop exited successfully"
|
|
2753
2764
|
};
|
|
2754
2765
|
}
|
|
2755
|
-
};
|
|
2766
|
+
}, _class13);
|
|
2756
2767
|
|
|
2757
2768
|
// src/tools/common/get-user-choice-tool.ts
|
|
2758
|
-
|
|
2769
|
+
init_logger();
|
|
2759
2770
|
init_base_tool();
|
|
2760
|
-
var GetUserChoiceTool = class extends BaseTool {
|
|
2771
|
+
var GetUserChoiceTool = (_class14 = class extends BaseTool {
|
|
2772
|
+
__init17() {this.logger = new Logger({ name: "GetUserChoiceTool" })}
|
|
2761
2773
|
/**
|
|
2762
2774
|
* Constructor for GetUserChoiceTool
|
|
2763
2775
|
*/
|
|
@@ -2766,7 +2778,7 @@ var GetUserChoiceTool = class extends BaseTool {
|
|
|
2766
2778
|
name: "get_user_choice",
|
|
2767
2779
|
description: "This tool provides the options to the user and asks them to choose one. Use this tool when you need the user to make a selection between multiple options. Do not list options in your response - use this tool instead.",
|
|
2768
2780
|
isLongRunning: true
|
|
2769
|
-
});
|
|
2781
|
+
});_class14.prototype.__init17.call(this);;
|
|
2770
2782
|
}
|
|
2771
2783
|
/**
|
|
2772
2784
|
* Get the function declaration for the tool
|
|
@@ -2800,13 +2812,11 @@ var GetUserChoiceTool = class extends BaseTool {
|
|
|
2800
2812
|
* and the actual choice will be provided asynchronously
|
|
2801
2813
|
*/
|
|
2802
2814
|
async runAsync(args, context) {
|
|
2803
|
-
|
|
2804
|
-
`
|
|
2805
|
-
", "
|
|
2806
|
-
)}`
|
|
2815
|
+
this.logger.debug(
|
|
2816
|
+
`Executing get_user_choice with options: ${args.options.join(", ")}`
|
|
2807
2817
|
);
|
|
2808
2818
|
if (args.question) {
|
|
2809
|
-
|
|
2819
|
+
this.logger.debug(`Question: ${args.question}`);
|
|
2810
2820
|
}
|
|
2811
2821
|
if (context.actions) {
|
|
2812
2822
|
context.actions.skip_summarization = true;
|
|
@@ -2817,12 +2827,13 @@ var GetUserChoiceTool = class extends BaseTool {
|
|
|
2817
2827
|
}
|
|
2818
2828
|
return null;
|
|
2819
2829
|
}
|
|
2820
|
-
};
|
|
2830
|
+
}, _class14);
|
|
2821
2831
|
|
|
2822
2832
|
// src/tools/common/transfer-to-agent-tool.ts
|
|
2823
|
-
|
|
2833
|
+
init_logger();
|
|
2824
2834
|
init_base_tool();
|
|
2825
|
-
var TransferToAgentTool = class extends BaseTool {
|
|
2835
|
+
var TransferToAgentTool = (_class15 = class extends BaseTool {
|
|
2836
|
+
__init18() {this.logger = new Logger({ name: "TransferToAgentTool" })}
|
|
2826
2837
|
/**
|
|
2827
2838
|
* Constructor for TransferToAgentTool
|
|
2828
2839
|
*/
|
|
@@ -2830,7 +2841,7 @@ var TransferToAgentTool = class extends BaseTool {
|
|
|
2830
2841
|
super({
|
|
2831
2842
|
name: "transfer_to_agent",
|
|
2832
2843
|
description: "Transfer the question to another agent."
|
|
2833
|
-
});
|
|
2844
|
+
});_class15.prototype.__init18.call(this);;
|
|
2834
2845
|
}
|
|
2835
2846
|
/**
|
|
2836
2847
|
* Get the function declaration for the tool
|
|
@@ -2855,9 +2866,7 @@ var TransferToAgentTool = class extends BaseTool {
|
|
|
2855
2866
|
* Execute the transfer to agent action
|
|
2856
2867
|
*/
|
|
2857
2868
|
async runAsync(args, context) {
|
|
2858
|
-
|
|
2859
|
-
`[TransferToAgentTool] Executing transfer to agent: ${args.agent_name}`
|
|
2860
|
-
);
|
|
2869
|
+
this.logger.debug(`Executing transfer to agent: ${args.agent_name}`);
|
|
2861
2870
|
if (context.actions) {
|
|
2862
2871
|
context.actions.transfer_to_agent = args.agent_name;
|
|
2863
2872
|
} else {
|
|
@@ -2869,12 +2878,13 @@ var TransferToAgentTool = class extends BaseTool {
|
|
|
2869
2878
|
message: `Transferred to agent: ${args.agent_name}`
|
|
2870
2879
|
};
|
|
2871
2880
|
}
|
|
2872
|
-
};
|
|
2881
|
+
}, _class15);
|
|
2873
2882
|
|
|
2874
2883
|
// src/tools/common/load-memory-tool.ts
|
|
2875
|
-
|
|
2884
|
+
init_logger();
|
|
2876
2885
|
init_base_tool();
|
|
2877
|
-
var LoadMemoryTool = class extends BaseTool {
|
|
2886
|
+
var LoadMemoryTool = (_class16 = class extends BaseTool {
|
|
2887
|
+
__init19() {this.logger = new Logger({ name: "LoadMemoryTool" })}
|
|
2878
2888
|
/**
|
|
2879
2889
|
* Constructor for LoadMemoryTool
|
|
2880
2890
|
*/
|
|
@@ -2882,7 +2892,7 @@ var LoadMemoryTool = class extends BaseTool {
|
|
|
2882
2892
|
super({
|
|
2883
2893
|
name: "load_memory",
|
|
2884
2894
|
description: "Loads the memory for the current user based on a query."
|
|
2885
|
-
});
|
|
2895
|
+
});_class16.prototype.__init19.call(this);;
|
|
2886
2896
|
}
|
|
2887
2897
|
/**
|
|
2888
2898
|
* Get the function declaration for the tool
|
|
@@ -2907,9 +2917,7 @@ var LoadMemoryTool = class extends BaseTool {
|
|
|
2907
2917
|
* Execute the memory loading action
|
|
2908
2918
|
*/
|
|
2909
2919
|
async runAsync(args, context) {
|
|
2910
|
-
|
|
2911
|
-
`[LoadMemoryTool] Executing load_memory with query: ${args.query}`
|
|
2912
|
-
);
|
|
2920
|
+
this.logger.debug(`Executing load_memory with query: ${args.query}`);
|
|
2913
2921
|
if (!context.memoryService) {
|
|
2914
2922
|
return {
|
|
2915
2923
|
error: "Memory service is not available",
|
|
@@ -2930,12 +2938,14 @@ var LoadMemoryTool = class extends BaseTool {
|
|
|
2930
2938
|
};
|
|
2931
2939
|
}
|
|
2932
2940
|
}
|
|
2933
|
-
};
|
|
2941
|
+
}, _class16);
|
|
2934
2942
|
|
|
2935
2943
|
// src/tools/mcp/client.ts
|
|
2944
|
+
init_logger();
|
|
2936
2945
|
var _indexjs = require('@modelcontextprotocol/sdk/client/index.js');
|
|
2937
2946
|
var _ssejs = require('@modelcontextprotocol/sdk/client/sse.js');
|
|
2938
2947
|
var _stdiojs = require('@modelcontextprotocol/sdk/client/stdio.js');
|
|
2948
|
+
var _typesjs = require('@modelcontextprotocol/sdk/types.js');
|
|
2939
2949
|
|
|
2940
2950
|
// src/tools/mcp/types.ts
|
|
2941
2951
|
var McpErrorType = /* @__PURE__ */ ((McpErrorType2) => {
|
|
@@ -2944,6 +2954,8 @@ var McpErrorType = /* @__PURE__ */ ((McpErrorType2) => {
|
|
|
2944
2954
|
McpErrorType2["RESOURCE_CLOSED_ERROR"] = "resource_closed_error";
|
|
2945
2955
|
McpErrorType2["TIMEOUT_ERROR"] = "timeout_error";
|
|
2946
2956
|
McpErrorType2["INVALID_SCHEMA_ERROR"] = "invalid_schema_error";
|
|
2957
|
+
McpErrorType2["SAMPLING_ERROR"] = "SAMPLING_ERROR";
|
|
2958
|
+
McpErrorType2["INVALID_REQUEST_ERROR"] = "INVALID_REQUEST_ERROR";
|
|
2947
2959
|
return McpErrorType2;
|
|
2948
2960
|
})(McpErrorType || {});
|
|
2949
2961
|
var McpError = class extends Error {
|
|
@@ -2986,13 +2998,16 @@ function withRetry(fn, instance, reinitMethod, maxRetries = 1) {
|
|
|
2986
2998
|
}
|
|
2987
2999
|
|
|
2988
3000
|
// src/tools/mcp/client.ts
|
|
2989
|
-
var McpClientService = (
|
|
2990
|
-
|
|
2991
|
-
|
|
2992
|
-
|
|
2993
|
-
|
|
2994
|
-
|
|
3001
|
+
var McpClientService = (_class17 = class {
|
|
3002
|
+
|
|
3003
|
+
__init20() {this.client = null}
|
|
3004
|
+
__init21() {this.transport = null}
|
|
3005
|
+
__init22() {this.isClosing = false}
|
|
3006
|
+
__init23() {this.samplingHandler = null}
|
|
3007
|
+
__init24() {this.logger = new Logger({ name: "McpClientService" })}
|
|
3008
|
+
constructor(config) {;_class17.prototype.__init20.call(this);_class17.prototype.__init21.call(this);_class17.prototype.__init22.call(this);_class17.prototype.__init23.call(this);_class17.prototype.__init24.call(this);
|
|
2995
3009
|
this.config = config;
|
|
3010
|
+
this.samplingHandler = config.samplingHandler || null;
|
|
2996
3011
|
}
|
|
2997
3012
|
/**
|
|
2998
3013
|
* Initializes and returns an MCP client based on configuration.
|
|
@@ -3041,6 +3056,7 @@ var McpClientService = (_class7 = class {
|
|
|
3041
3056
|
} else {
|
|
3042
3057
|
await connectPromise;
|
|
3043
3058
|
}
|
|
3059
|
+
await this.setupSamplingHandler(client);
|
|
3044
3060
|
if (this.config.debug) {
|
|
3045
3061
|
console.log("\u2705 MCP client connected successfully");
|
|
3046
3062
|
}
|
|
@@ -3192,10 +3208,77 @@ var McpClientService = (_class7 = class {
|
|
|
3192
3208
|
isConnected() {
|
|
3193
3209
|
return !!this.client && !this.isClosing;
|
|
3194
3210
|
}
|
|
3195
|
-
|
|
3211
|
+
async setupSamplingHandler(client) {
|
|
3212
|
+
if (!this.samplingHandler) {
|
|
3213
|
+
if (this.config.debug) {
|
|
3214
|
+
console.log(
|
|
3215
|
+
"\u26A0\uFE0F No sampling handler provided - sampling requests will be rejected"
|
|
3216
|
+
);
|
|
3217
|
+
}
|
|
3218
|
+
return;
|
|
3219
|
+
}
|
|
3220
|
+
client.setRequestHandler(_typesjs.CreateMessageRequestSchema, async (request) => {
|
|
3221
|
+
try {
|
|
3222
|
+
this.logger.debug("Received sampling request:", request);
|
|
3223
|
+
const samplingRequest = request.params;
|
|
3224
|
+
if (!samplingRequest.messages || !Array.isArray(samplingRequest.messages)) {
|
|
3225
|
+
throw new McpError(
|
|
3226
|
+
"Invalid sampling request: messages array is required",
|
|
3227
|
+
"INVALID_REQUEST_ERROR" /* INVALID_REQUEST_ERROR */
|
|
3228
|
+
);
|
|
3229
|
+
}
|
|
3230
|
+
if (!samplingRequest.maxTokens || samplingRequest.maxTokens <= 0) {
|
|
3231
|
+
throw new McpError(
|
|
3232
|
+
"Invalid sampling request: maxTokens must be a positive number",
|
|
3233
|
+
"INVALID_REQUEST_ERROR" /* INVALID_REQUEST_ERROR */
|
|
3234
|
+
);
|
|
3235
|
+
}
|
|
3236
|
+
const response = await this.samplingHandler({
|
|
3237
|
+
method: request.method,
|
|
3238
|
+
params: request.params
|
|
3239
|
+
});
|
|
3240
|
+
if (this.config.debug) {
|
|
3241
|
+
console.log("\u2705 Sampling request completed successfully");
|
|
3242
|
+
}
|
|
3243
|
+
return response;
|
|
3244
|
+
} catch (error) {
|
|
3245
|
+
console.error("\u274C Error handling sampling request:", error);
|
|
3246
|
+
if (error instanceof McpError) {
|
|
3247
|
+
throw error;
|
|
3248
|
+
}
|
|
3249
|
+
throw new McpError(
|
|
3250
|
+
`Sampling request failed: ${error instanceof Error ? error.message : String(error)}`,
|
|
3251
|
+
"SAMPLING_ERROR" /* SAMPLING_ERROR */,
|
|
3252
|
+
error instanceof Error ? error : void 0
|
|
3253
|
+
);
|
|
3254
|
+
}
|
|
3255
|
+
});
|
|
3256
|
+
if (this.config.debug) {
|
|
3257
|
+
console.log("\u{1F3AF} Sampling handler registered successfully");
|
|
3258
|
+
}
|
|
3259
|
+
}
|
|
3260
|
+
setSamplingHandler(handler) {
|
|
3261
|
+
this.samplingHandler = handler;
|
|
3262
|
+
if (this.client) {
|
|
3263
|
+
this.setupSamplingHandler(this.client).catch((error) => {
|
|
3264
|
+
console.error("Failed to update sampling handler:", error);
|
|
3265
|
+
});
|
|
3266
|
+
}
|
|
3267
|
+
}
|
|
3268
|
+
removeSamplingHandler() {
|
|
3269
|
+
this.samplingHandler = null;
|
|
3270
|
+
if (this.client) {
|
|
3271
|
+
try {
|
|
3272
|
+
_optionalChain([this, 'access', _42 => _42.client, 'access', _43 => _43.removeRequestHandler, 'optionalCall', _44 => _44("sampling/createMessage")]);
|
|
3273
|
+
} catch (error) {
|
|
3274
|
+
console.error("Failed to remove sampling handler:", error);
|
|
3275
|
+
}
|
|
3276
|
+
}
|
|
3277
|
+
}
|
|
3278
|
+
}, _class17);
|
|
3196
3279
|
|
|
3197
3280
|
// src/tools/mcp/create-tool.ts
|
|
3198
|
-
|
|
3281
|
+
init_logger();
|
|
3199
3282
|
init_base_tool();
|
|
3200
3283
|
|
|
3201
3284
|
// src/tools/mcp/schema-conversion.ts
|
|
@@ -3394,10 +3477,11 @@ async function createTool(mcpTool, client) {
|
|
|
3394
3477
|
throw error;
|
|
3395
3478
|
}
|
|
3396
3479
|
}
|
|
3397
|
-
var McpToolAdapter = (
|
|
3480
|
+
var McpToolAdapter = (_class18 = class extends BaseTool {
|
|
3398
3481
|
|
|
3399
3482
|
|
|
3400
|
-
|
|
3483
|
+
__init25() {this.clientService = null}
|
|
3484
|
+
__init26() {this.logger = new Logger({ name: "McpToolAdapter" })}
|
|
3401
3485
|
constructor(mcpTool, client) {
|
|
3402
3486
|
const metadata = mcpTool.metadata || {};
|
|
3403
3487
|
super({
|
|
@@ -3406,7 +3490,7 @@ var McpToolAdapter = (_class8 = class extends BaseTool {
|
|
|
3406
3490
|
isLongRunning: _nullishCoalesce(metadata.isLongRunning, () => ( false)),
|
|
3407
3491
|
shouldRetryOnFailure: _nullishCoalesce(metadata.shouldRetryOnFailure, () => ( false)),
|
|
3408
3492
|
maxRetryAttempts: _nullishCoalesce(metadata.maxRetryAttempts, () => ( 3))
|
|
3409
|
-
});
|
|
3493
|
+
});_class18.prototype.__init25.call(this);_class18.prototype.__init26.call(this);;
|
|
3410
3494
|
this.mcpTool = mcpTool;
|
|
3411
3495
|
this.client = client;
|
|
3412
3496
|
if (client.reinitialize && typeof client.reinitialize === "function") {
|
|
@@ -3430,10 +3514,7 @@ var McpToolAdapter = (_class8 = class extends BaseTool {
|
|
|
3430
3514
|
}
|
|
3431
3515
|
}
|
|
3432
3516
|
async runAsync(args, _context) {
|
|
3433
|
-
|
|
3434
|
-
`[McpToolAdapter] Executing MCP tool ${this.name} with args:`,
|
|
3435
|
-
args
|
|
3436
|
-
);
|
|
3517
|
+
this.logger.debug(`Executing MCP tool ${this.name} with args:`, args);
|
|
3437
3518
|
try {
|
|
3438
3519
|
if (typeof this.mcpTool.execute === "function") {
|
|
3439
3520
|
return await this.mcpTool.execute(args);
|
|
@@ -3482,16 +3563,16 @@ var McpToolAdapter = (_class8 = class extends BaseTool {
|
|
|
3482
3563
|
throw error;
|
|
3483
3564
|
}
|
|
3484
3565
|
}
|
|
3485
|
-
},
|
|
3566
|
+
}, _class18);
|
|
3486
3567
|
|
|
3487
3568
|
// src/tools/mcp/index.ts
|
|
3488
|
-
var McpToolset = (
|
|
3569
|
+
var McpToolset = (_class19 = class {
|
|
3489
3570
|
|
|
3490
|
-
|
|
3491
|
-
|
|
3492
|
-
|
|
3493
|
-
|
|
3494
|
-
constructor(config, toolFilter = null) {;
|
|
3571
|
+
__init27() {this.clientService = null}
|
|
3572
|
+
__init28() {this.toolFilter = null}
|
|
3573
|
+
__init29() {this.tools = []}
|
|
3574
|
+
__init30() {this.isClosing = false}
|
|
3575
|
+
constructor(config, toolFilter = null) {;_class19.prototype.__init27.call(this);_class19.prototype.__init28.call(this);_class19.prototype.__init29.call(this);_class19.prototype.__init30.call(this);
|
|
3495
3576
|
this.config = config;
|
|
3496
3577
|
this.toolFilter = toolFilter;
|
|
3497
3578
|
this.clientService = new McpClientService(config);
|
|
@@ -3540,7 +3621,7 @@ var McpToolset = (_class9 = class {
|
|
|
3540
3621
|
"resource_closed_error" /* RESOURCE_CLOSED_ERROR */
|
|
3541
3622
|
);
|
|
3542
3623
|
}
|
|
3543
|
-
if (this.tools.length > 0 && !_optionalChain([this, 'access',
|
|
3624
|
+
if (this.tools.length > 0 && !_optionalChain([this, 'access', _45 => _45.config, 'access', _46 => _46.cacheConfig, 'optionalAccess', _47 => _47.enabled]) === false) {
|
|
3544
3625
|
return this.tools;
|
|
3545
3626
|
}
|
|
3546
3627
|
if (!this.clientService) {
|
|
@@ -3566,7 +3647,7 @@ var McpToolset = (_class9 = class {
|
|
|
3566
3647
|
}
|
|
3567
3648
|
}
|
|
3568
3649
|
}
|
|
3569
|
-
if (_optionalChain([this, 'access',
|
|
3650
|
+
if (_optionalChain([this, 'access', _48 => _48.config, 'access', _49 => _49.cacheConfig, 'optionalAccess', _50 => _50.enabled]) !== false) {
|
|
3570
3651
|
this.tools = tools;
|
|
3571
3652
|
}
|
|
3572
3653
|
return tools;
|
|
@@ -3626,7 +3707,7 @@ var McpToolset = (_class9 = class {
|
|
|
3626
3707
|
async dispose() {
|
|
3627
3708
|
await this.close();
|
|
3628
3709
|
}
|
|
3629
|
-
},
|
|
3710
|
+
}, _class19);
|
|
3630
3711
|
async function getMcpTools(config, toolFilter) {
|
|
3631
3712
|
const toolset = new McpToolset(config, toolFilter);
|
|
3632
3713
|
try {
|
|
@@ -3733,11 +3814,11 @@ var BaseLLM = class {
|
|
|
3733
3814
|
};
|
|
3734
3815
|
|
|
3735
3816
|
// src/models/base-llm-connection.ts
|
|
3736
|
-
var BaseLLMConnection = (
|
|
3817
|
+
var BaseLLMConnection = (_class20 = class {constructor() { _class20.prototype.__init31.call(this); }
|
|
3737
3818
|
/**
|
|
3738
3819
|
* Whether the connection is active
|
|
3739
3820
|
*/
|
|
3740
|
-
|
|
3821
|
+
__init31() {this._isActive = true}
|
|
3741
3822
|
/**
|
|
3742
3823
|
* Gets whether the connection is active
|
|
3743
3824
|
*/
|
|
@@ -3750,15 +3831,15 @@ var BaseLLMConnection = (_class10 = class {constructor() { _class10.prototype.__
|
|
|
3750
3831
|
close() {
|
|
3751
3832
|
this._isActive = false;
|
|
3752
3833
|
}
|
|
3753
|
-
},
|
|
3834
|
+
}, _class20);
|
|
3754
3835
|
|
|
3755
3836
|
// src/models/anthropic-llm.ts
|
|
3756
|
-
|
|
3837
|
+
init_logger();
|
|
3757
3838
|
var _axios = require('axios'); var _axios2 = _interopRequireDefault(_axios);
|
|
3758
3839
|
|
|
3759
3840
|
// src/models/anthropic-llm-connection.ts
|
|
3760
|
-
|
|
3761
|
-
var AnthropicLLMConnection = class extends BaseLLMConnection {
|
|
3841
|
+
init_logger();
|
|
3842
|
+
var AnthropicLLMConnection = (_class21 = class extends BaseLLMConnection {
|
|
3762
3843
|
/**
|
|
3763
3844
|
* Axios instance for API calls
|
|
3764
3845
|
*/
|
|
@@ -3785,11 +3866,12 @@ var AnthropicLLMConnection = class extends BaseLLMConnection {
|
|
|
3785
3866
|
|
|
3786
3867
|
|
|
3787
3868
|
|
|
3869
|
+
__init32() {this.logger = new Logger({ name: "AnthropicLlmConnection" })}
|
|
3788
3870
|
/**
|
|
3789
3871
|
* Constructor
|
|
3790
3872
|
*/
|
|
3791
3873
|
constructor(client, model, initialRequest, defaultParams) {
|
|
3792
|
-
super();
|
|
3874
|
+
super();_class21.prototype.__init32.call(this);;
|
|
3793
3875
|
this.client = client;
|
|
3794
3876
|
this.model = model;
|
|
3795
3877
|
this.defaultParams = defaultParams;
|
|
@@ -3869,7 +3951,7 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
* Convert Anthropic tool calls to ADK tool calls
*/
convertToolCalls(toolUses) {
-if (!_optionalChain([toolUses, 'optionalAccess',
+if (!_optionalChain([toolUses, 'optionalAccess', _51 => _51.length])) {
return void 0;
}
return toolUses.map((toolUse) => ({
@@ -3884,15 +3966,13 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
* Extract tool uses from response content
*/
extractToolUses(content) {
-if (!_optionalChain([content, 'optionalAccess',
+if (!_optionalChain([content, 'optionalAccess', _52 => _52.length])) return [];
const toolUses = [];
for (const block of content) {
-
-`[AnthropicLLMConnection] Processing content block of type: ${block.type}`
-);
+this.logger.debug(`Processing content block of type: ${block.type}`);
if (block.type === "tool_use") {
-
-"
+this.logger.debug(
+"Found tool_use block:",
JSON.stringify(block, null, 2)
);
toolUses.push({
@@ -3902,12 +3982,10 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
});
}
}
-
-`[AnthropicLLMConnection] Found ${toolUses.length} tool uses in content`
-);
+this.logger.debug(`Found ${toolUses.length} tool uses in content`);
if (toolUses.length > 0) {
-
-"
+this.logger.debug(
+"Extracted tool uses:",
JSON.stringify(toolUses, null, 2)
);
}
@@ -4003,37 +4081,34 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
}
const toolUses = this.extractToolUses(apiResponse.content);
const toolCalls = this.convertToolCalls(toolUses);
-
-
+this.logger.debug(
+`- Extracted ${toolUses.length} tool uses in content and converted ${_optionalChain([toolCalls, 'optionalAccess', _53 => _53.length]) || 0} tool calls`
);
const llmResponse = new LLMResponse({
role: "assistant",
content,
-tool_calls: _optionalChain([toolCalls, 'optionalAccess',
+tool_calls: _optionalChain([toolCalls, 'optionalAccess', _54 => _54.length]) ? toolCalls : void 0,
raw_response: apiResponse
});
const logObject = {
role: llmResponse.role,
-content: _optionalChain([llmResponse, 'access',
+content: _optionalChain([llmResponse, 'access', _55 => _55.content, 'optionalAccess', _56 => _56.substring, 'call', _57 => _57(0, 50)]) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
};
-
-"
+this.logger.debug(
+"Final LLMResponse object:",
JSON.stringify(logObject, null, 2)
);
return llmResponse;
} catch (error) {
-
-"[AnthropicLLMConnection] Error sending message to Anthropic:",
-error
-);
+this.logger.debug("Error sending message to Anthropic:", error);
throw error;
}
}
-};
+}, _class21);

// src/models/anthropic-llm.ts
-var AnthropicLLM = class extends BaseLLM {
+var AnthropicLLM = (_class22 = class extends BaseLLM {
/**
* Anthropic API key
*/
@@ -4046,22 +4121,23 @@ var AnthropicLLM = class extends BaseLLM {
* Default parameters for requests
*/

+__init33() {this.logger = new Logger({ name: "AnthropicLLM" })}
/**
* Constructor for AnthropicLLM
*/
constructor(model, config) {
-super(model);
-this.apiKey = _optionalChain([config, 'optionalAccess',
-this.baseURL = _optionalChain([config, 'optionalAccess',
+super(model);_class22.prototype.__init33.call(this);;
+this.apiKey = _optionalChain([config, 'optionalAccess', _58 => _58.apiKey]) || process.env.ANTHROPIC_API_KEY || "";
+this.baseURL = _optionalChain([config, 'optionalAccess', _59 => _59.baseURL]) || "https://api.anthropic.com/v1";
if (!this.apiKey) {
throw new Error(
"Anthropic API key is required. Provide it in config or set ANTHROPIC_API_KEY environment variable."
);
}
this.defaultParams = {
-temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess',
-top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess',
-max_tokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess',
+temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _60 => _60.defaultParams, 'optionalAccess', _61 => _61.temperature]), () => ( 0.7)),
+top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _62 => _62.defaultParams, 'optionalAccess', _63 => _63.top_p]), () => ( 1)),
+max_tokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _64 => _64.defaultParams, 'optionalAccess', _65 => _65.max_tokens]), () => ( 1024))
};
}
/**
@@ -4147,7 +4223,7 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
* Convert ADK function declarations to Anthropic tool format
*/
convertFunctionsToTools(functions) {
-if (!_optionalChain([functions, 'optionalAccess',
+if (!_optionalChain([functions, 'optionalAccess', _66 => _66.length])) {
return [];
}
return functions.map((func) => ({
@@ -4160,7 +4236,7 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
* Convert Anthropic tool calls to ADK tool calls
*/
convertToolUses(toolUses) {
-if (!_optionalChain([toolUses, 'optionalAccess',
+if (!_optionalChain([toolUses, 'optionalAccess', _67 => _67.length])) {
return [];
}
return toolUses.map((toolUse) => ({
@@ -4175,15 +4251,13 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
* Extract tool uses from response content
*/
extractToolUses(content) {
-if (!_optionalChain([content, 'optionalAccess',
+if (!_optionalChain([content, 'optionalAccess', _68 => _68.length])) return [];
const toolUses = [];
for (const block of content) {
-
-`[AnthropicLLM] Processing content block of type: ${block.type}`
-);
+this.logger.debug(`Processing content block of type: ${block.type}`);
if (block.type === "tool_use") {
-
-`
+this.logger.debug(
+`Found tool_use block: ${JSON.stringify(block, null, 2)}`
);
toolUses.push({
id: block.id || "unknown-id",
@@ -4192,8 +4266,8 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
});
}
}
-
-`
+this.logger.debug(
+`Found ${toolUses.length} tool uses in content`,
toolUses
);
return toolUses;
@@ -4217,12 +4291,12 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
},
responseType: stream ? "stream" : "json"
});
-
-`
+this.logger.debug(
+`API Response done with ${response.status}:`,
response.data
);
-
-"
+this.logger.debug(
+"API Response content:",
response.data.content.map((block) => ({ type: block.type }))
);
return response.data;
@@ -4248,9 +4322,9 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
temperature: _nullishCoalesce(llmRequest.config.temperature, () => ( this.defaultParams.temperature)),
max_tokens: _nullishCoalesce(llmRequest.config.max_tokens, () => ( this.defaultParams.max_tokens)),
top_p: _nullishCoalesce(llmRequest.config.top_p, () => ( this.defaultParams.top_p)),
-tools: _optionalChain([tools, 'optionalAccess',
+tools: _optionalChain([tools, 'optionalAccess', _69 => _69.length]) ? tools : void 0
};
-
+this.logger.debug("API Request:", {
model: params.model,
messageCount: params.messages.length,
systemMessage: params.system ? "present" : "none",
@@ -4260,7 +4334,7 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
throw new Error("Streaming is not supported in this implementation");
}
const response = await this.callAnthropicAPI(params);
-
+this.logger.debug("Full Response Content:", response.content);
let content = "";
for (const block of response.content) {
if (block.type === "text") {
@@ -4269,8 +4343,8 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
}
const toolUses = this.extractToolUses(response.content);
const toolCalls = this.convertToolUses(toolUses);
-
-
+this.logger.debug("Extracted Tool Uses:", toolUses);
+this.logger.debug("Converted Tool Calls:", toolCalls);
const llmResponse = new LLMResponse({
role: "assistant",
content,
@@ -4279,16 +4353,16 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
});
const logObject = {
role: llmResponse.role,
-content: _optionalChain([llmResponse, 'access',
+content: _optionalChain([llmResponse, 'access', _70 => _70.content, 'optionalAccess', _71 => _71.substring, 'call', _72 => _72(0, 50)]) + (llmResponse.content && llmResponse.content.length > 50 ? "..." : ""),
tool_calls: llmResponse.tool_calls ? `[${llmResponse.tool_calls.length} calls]` : "undefined"
};
-
-"
+this.logger.debug(
+"Final LLMResponse object:",
JSON.stringify(logObject, null, 2)
);
yield llmResponse;
} catch (error) {
-
+this.logger.debug("Error:", error);
throw error;
}
}
@@ -4311,7 +4385,7 @@ ${typeof message.content === "string" ? message.content : JSON.stringify(message
this.defaultParams
);
}
-};
+}, _class22);

// src/models/google-llm.ts

@@ -4332,9 +4406,9 @@ var GoogleLLM = class extends BaseLLM {
constructor(model, config) {
super(model);
const apiKey = process.env.GOOGLE_API_KEY;
-const projectId = _optionalChain([config, 'optionalAccess',
-const location = _optionalChain([config, 'optionalAccess',
-const useVertexAI = _optionalChain([process, 'access',
+const projectId = _optionalChain([config, 'optionalAccess', _73 => _73.projectId]) || process.env.GOOGLE_CLOUD_PROJECT;
+const location = _optionalChain([config, 'optionalAccess', _74 => _74.location]) || process.env.GOOGLE_CLOUD_LOCATION;
+const useVertexAI = _optionalChain([process, 'access', _75 => _75.env, 'access', _76 => _76.USE_VERTEX_AI, 'optionalAccess', _77 => _77.toLowerCase, 'call', _78 => _78()]) === "true";
if (!useVertexAI && !apiKey) {
throw new Error(
"Google API Key is required. Provide via config or GOOGLE_API_KEY env var."
@@ -4359,9 +4433,9 @@ var GoogleLLM = class extends BaseLLM {
}
this.ai = new (0, _genai.GoogleGenAI)(options);
this.defaultParams = {
-temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess',
-topP: _nullishCoalesce(_optionalChain([config, 'optionalAccess',
-maxOutputTokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess',
+temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _79 => _79.defaultParams, 'optionalAccess', _80 => _80.temperature]), () => ( 0.7)),
+topP: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _81 => _81.defaultParams, 'optionalAccess', _82 => _82.top_p]), () => ( 1)),
+maxOutputTokens: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _83 => _83.defaultParams, 'optionalAccess', _84 => _84.maxOutputTokens]), () => ( 1024))
};
}
/**
@@ -4499,7 +4573,7 @@ var GoogleLLM = class extends BaseLLM {
);
parts.push({ text: "" });
}
-if (googleRole === "function" && (parts.length !== 1 || !_optionalChain([parts, 'access',
+if (googleRole === "function" && (parts.length !== 1 || !_optionalChain([parts, 'access', _85 => _85[0], 'optionalAccess', _86 => _86.functionResponse]))) {
console.error(
`[GoogleLLM] convertMessage - Invalid parts for 'function' role. Expected 1 functionResponse part. Got:`,
JSON.stringify(parts),
@@ -4607,13 +4681,13 @@ var GoogleLLM = class extends BaseLLM {
role: "assistant",
content: null
});
-if (typeof _optionalChain([response, 'optionalAccess',
+if (typeof _optionalChain([response, 'optionalAccess', _87 => _87.candidates, 'optionalAccess', _88 => _88[0], 'optionalAccess', _89 => _89.content, 'optionalAccess', _90 => _90.parts, 'optionalAccess', _91 => _91[0], 'optionalAccess', _92 => _92.text]) === "string") {
result.content = response.candidates[0].content.parts[0].text;
}
-if (_optionalChain([response, 'optionalAccess',
+if (_optionalChain([response, 'optionalAccess', _93 => _93.candidates, 'optionalAccess', _94 => _94[0], 'optionalAccess', _95 => _95.content, 'optionalAccess', _96 => _96.parts, 'optionalAccess', _97 => _97[0], 'optionalAccess', _98 => _98.text])) {
result.content = response.candidates[0].content.parts[0].text;
}
-if (_optionalChain([response, 'optionalAccess',
+if (_optionalChain([response, 'optionalAccess', _99 => _99.candidates, 'optionalAccess', _100 => _100[0], 'optionalAccess', _101 => _101.content, 'optionalAccess', _102 => _102.parts, 'optionalAccess', _103 => _103[0], 'optionalAccess', _104 => _104.functionCall])) {
const functionCall = response.candidates[0].content.parts[0].functionCall;
result.function_call = {
name: functionCall.name,
@@ -4660,10 +4734,10 @@ var GoogleLLM = class extends BaseLLM {
if (stream) {
const streamingResult = await this.ai.models.generateContentStream(requestOptions);
for await (const chunk of streamingResult) {
-if (!_optionalChain([chunk, 'access',
+if (!_optionalChain([chunk, 'access', _105 => _105.candidates, 'optionalAccess', _106 => _106[0], 'optionalAccess', _107 => _107.content, 'optionalAccess', _108 => _108.parts, 'optionalAccess', _109 => _109[0], 'optionalAccess', _110 => _110.text])) {
continue;
}
-const partialText = _optionalChain([chunk, 'access',
+const partialText = _optionalChain([chunk, 'access', _111 => _111.candidates, 'access', _112 => _112[0], 'optionalAccess', _113 => _113.content, 'optionalAccess', _114 => _114.parts, 'access', _115 => _115[0], 'optionalAccess', _116 => _116.text]) || "";
const partialResponse = new LLMResponse({
content: partialText,
role: "assistant",
@@ -4683,11 +4757,11 @@ var GoogleLLM = class extends BaseLLM {
};

// src/models/openai-llm.ts
-
+init_logger();
var _openai = require('openai'); var _openai2 = _interopRequireDefault(_openai);

// src/models/openai-llm-connection.ts
-var OpenAILLMConnection = (
+var OpenAILLMConnection = (_class23 = class extends BaseLLMConnection {
/**
* OpenAI client
*/
@@ -4719,12 +4793,12 @@ var OpenAILLMConnection = (_class11 = class extends BaseLLMConnection {
/**
* Ongoing chat history
*/
-
+__init34() {this.messages = []}
/**
* Constructor for OpenAILLMConnection
*/
constructor(client, model, initialRequest, defaultParams) {
-super();
+super();_class23.prototype.__init34.call(this);;
this.client = client;
this.model = model;
this.initialRequest = initialRequest;
@@ -4804,10 +4878,10 @@ var OpenAILLMConnection = (_class11 = class extends BaseLLMConnection {
for await (const chunk of stream) {
if (chunk.choices.length === 0) continue;
const delta = chunk.choices[0].delta;
-if (_optionalChain([delta, 'optionalAccess',
+if (_optionalChain([delta, 'optionalAccess', _117 => _117.content])) {
responseContent += delta.content;
}
-if (_optionalChain([delta, 'optionalAccess',
+if (_optionalChain([delta, 'optionalAccess', _118 => _118.function_call])) {
if (!functionCall) {
functionCall = {
name: delta.function_call.name || "",
@@ -4818,7 +4892,7 @@ var OpenAILLMConnection = (_class11 = class extends BaseLLMConnection {
functionCall.arguments += delta.function_call.arguments || "";
}
}
-if (_optionalChain([delta, 'optionalAccess',
+if (_optionalChain([delta, 'optionalAccess', _119 => _119.tool_calls])) {
for (const toolDelta of delta.tool_calls) {
const id = toolDelta.id || "";
let tool = toolCalls.find((t) => t.id === id);
@@ -4826,20 +4900,20 @@ var OpenAILLMConnection = (_class11 = class extends BaseLLMConnection {
tool = {
id,
function: {
-name: _optionalChain([toolDelta, 'access',
-arguments: _optionalChain([toolDelta, 'access',
+name: _optionalChain([toolDelta, 'access', _120 => _120.function, 'optionalAccess', _121 => _121.name]) || "",
+arguments: _optionalChain([toolDelta, 'access', _122 => _122.function, 'optionalAccess', _123 => _123.arguments]) || ""
}
};
toolCalls.push(tool);
} else {
-tool.function.name += _optionalChain([toolDelta, 'access',
-tool.function.arguments += _optionalChain([toolDelta, 'access',
+tool.function.name += _optionalChain([toolDelta, 'access', _124 => _124.function, 'optionalAccess', _125 => _125.name]) || "";
+tool.function.arguments += _optionalChain([toolDelta, 'access', _126 => _126.function, 'optionalAccess', _127 => _127.arguments]) || "";
}
}
}
if (this.responseCallback) {
const response = new LLMResponse({
-content: _optionalChain([delta, 'optionalAccess',
+content: _optionalChain([delta, 'optionalAccess', _128 => _128.content]) || null,
role: "assistant",
function_call: functionCall,
tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
@@ -4931,10 +5005,10 @@ var OpenAILLMConnection = (_class11 = class extends BaseLLMConnection {
onEnd(callback) {
this.endCallback = callback;
}
-},
+}, _class23);

// src/models/openai-llm.ts
-var OpenAILLM = class extends BaseLLM {
+var OpenAILLM = (_class24 = class extends BaseLLM {
/**
* OpenAI client instance
*/
@@ -4943,22 +5017,23 @@ var OpenAILLM = class extends BaseLLM {
* Default parameters for requests
*/

+__init35() {this.logger = new Logger({ name: "OpenAILLM" })}
/**
* Constructor for OpenAILLM
*/
constructor(model, config) {
-super(model);
+super(model);_class24.prototype.__init35.call(this);;
this.client = new (0, _openai2.default)({
-apiKey: _optionalChain([config, 'optionalAccess',
-baseURL: _optionalChain([config, 'optionalAccess',
-organization: _optionalChain([config, 'optionalAccess',
+apiKey: _optionalChain([config, 'optionalAccess', _129 => _129.apiKey]) || process.env.OPENAI_API_KEY,
+baseURL: _optionalChain([config, 'optionalAccess', _130 => _130.baseURL]),
+organization: _optionalChain([config, 'optionalAccess', _131 => _131.organization])
});
this.defaultParams = {
-temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess',
-top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess',
-max_tokens: _optionalChain([config, 'optionalAccess',
-frequency_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess',
-presence_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess',
+temperature: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _132 => _132.defaultParams, 'optionalAccess', _133 => _133.temperature]), () => ( 0.7)),
+top_p: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _134 => _134.defaultParams, 'optionalAccess', _135 => _135.top_p]), () => ( 1)),
+max_tokens: _optionalChain([config, 'optionalAccess', _136 => _136.defaultParams, 'optionalAccess', _137 => _137.max_tokens]),
+frequency_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _138 => _138.defaultParams, 'optionalAccess', _139 => _139.frequency_penalty]), () => ( 0)),
+presence_penalty: _nullishCoalesce(_optionalChain([config, 'optionalAccess', _140 => _140.defaultParams, 'optionalAccess', _141 => _141.presence_penalty]), () => ( 0))
};
}
/**
@@ -5068,16 +5143,16 @@ var OpenAILLM = class extends BaseLLM {
*/
convertResponse(response) {
const result = new LLMResponse({
-content: _optionalChain([response, 'access',
-role: _optionalChain([response, 'access',
+content: _optionalChain([response, 'access', _142 => _142.message, 'optionalAccess', _143 => _143.content]) || null,
+role: _optionalChain([response, 'access', _144 => _144.message, 'optionalAccess', _145 => _145.role]) || "assistant"
});
-if (_optionalChain([response, 'access',
+if (_optionalChain([response, 'access', _146 => _146.message, 'optionalAccess', _147 => _147.function_call])) {
result.function_call = {
name: response.message.function_call.name,
arguments: response.message.function_call.arguments
};
}
-if (_optionalChain([response, 'access',
+if (_optionalChain([response, 'access', _148 => _148.message, 'optionalAccess', _149 => _149.tool_calls])) {
result.tool_calls = response.message.tool_calls.map((tool) => ({
id: tool.id,
function: {
@@ -5092,27 +5167,27 @@ var OpenAILLM = class extends BaseLLM {
* Convert OpenAI streaming chunk to LLMResponse
*/
convertChunk(chunk) {
-
-`
+this.logger.debug(
+`Converting chunk - delta: ${JSON.stringify(chunk.delta || {})}`
);
-const content = _optionalChain([chunk, 'access',
+const content = _optionalChain([chunk, 'access', _150 => _150.delta, 'optionalAccess', _151 => _151.content]);
const result = new LLMResponse({
content: content !== void 0 ? content : null,
-role: _optionalChain([chunk, 'access',
+role: _optionalChain([chunk, 'access', _152 => _152.delta, 'optionalAccess', _153 => _153.role]) || "assistant",
is_partial: true
});
-if (_optionalChain([chunk, 'access',
+if (_optionalChain([chunk, 'access', _154 => _154.delta, 'optionalAccess', _155 => _155.function_call])) {
result.function_call = {
name: chunk.delta.function_call.name || "",
arguments: chunk.delta.function_call.arguments || ""
};
}
-if (_optionalChain([chunk, 'access',
+if (_optionalChain([chunk, 'access', _156 => _156.delta, 'optionalAccess', _157 => _157.tool_calls])) {
result.tool_calls = chunk.delta.tool_calls.map((tool) => ({
id: tool.id || "",
function: {
-name: _optionalChain([tool, 'access',
-arguments: _optionalChain([tool, 'access',
+name: _optionalChain([tool, 'access', _158 => _158.function, 'optionalAccess', _159 => _159.name]) || "",
+arguments: _optionalChain([tool, 'access', _160 => _160.function, 'optionalAccess', _161 => _161.arguments]) || ""
}
}));
}
@@ -5135,24 +5210,24 @@ var OpenAILLM = class extends BaseLLM {
presence_penalty: _nullishCoalesce(llmRequest.config.presence_penalty, () => ( this.defaultParams.presence_penalty)),
stream: shouldStream
};
-
-`
+this.logger.debug(
+`Request parameters - model: ${params.model}, messages: ${params.messages.length}, functions: ${params.tools ? params.tools.length : 0}, streaming: ${shouldStream}`
);
if (tools && tools.length > 0) {
params.tools = tools;
}
try {
if (shouldStream) {
-
+this.logger.debug("Starting streaming request");
const streamResponse = await this.client.chat.completions.create(params);
let partialFunctionCall;
const partialToolCalls = /* @__PURE__ */ new Map();
let accumulatedContent = "";
const asyncIterable = streamResponse;
-
+this.logger.debug("Stream response received, processing chunks");
for await (const chunk of asyncIterable) {
if (!chunk.choices || chunk.choices.length === 0) {
-
+this.logger.debug("Empty chunk received, skipping");
continue;
}
const choice = chunk.choices[0];
@@ -5160,8 +5235,8 @@ var OpenAILLM = class extends BaseLLM {
if (responseChunk.content !== null) {
accumulatedContent += responseChunk.content;
}
-
-`
+this.logger.debug(
+`Chunk received - delta: "${_optionalChain([choice, 'access', _162 => _162.delta, 'optionalAccess', _163 => _163.content]) || ""}"`,
`responseChunk content: "${responseChunk.content || ""}"`,
`is_partial: ${responseChunk.is_partial}`,
`accumulated: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
@@ -5190,12 +5265,12 @@ var OpenAILLM = class extends BaseLLM {
}
responseChunk.tool_calls = Array.from(partialToolCalls.values());
}
-
+this.logger.debug("Yielding chunk to caller");
yield responseChunk;
}
if (accumulatedContent.length > 0) {
-
-`
+this.logger.debug(
+`Yielding final accumulated content: "${accumulatedContent.substring(0, 30)}${accumulatedContent.length > 30 ? "..." : ""}"`
);
yield new LLMResponse({
content: accumulatedContent,
@@ -5203,14 +5278,14 @@ var OpenAILLM = class extends BaseLLM {
is_partial: false
});
}
-
+this.logger.debug("Finished processing all stream chunks");
} else {
-
+this.logger.debug("Making non-streaming request");
const response = await this.client.chat.completions.create(params);
if (!response.choices || response.choices.length === 0) {
throw new Error("No response from OpenAI");
}
-
+this.logger.debug("Non-streaming response received");
yield this.convertResponse(response.choices[0]);
}
} catch (error) {
@@ -5229,7 +5304,7 @@ var OpenAILLM = class extends BaseLLM {
this.defaultParams
);
}
-};
+}, _class24);

// src/models/registry.ts
function registerProviders() {
@@ -5427,7 +5502,7 @@ var OAuth2Credential = class extends AuthCredential {
"Cannot refresh token: no refresh token or refresh function"
);
}
-const result = await _optionalChain([this, 'access',
+const result = await _optionalChain([this, 'access', _164 => _164.refreshFunction, 'optionalCall', _165 => _165(this.refreshToken)]);
if (!result) {
throw new Error("Failed to refresh token");
}
@@ -5481,7 +5556,7 @@ var AuthHandler = class {
* Gets the authentication token
*/
getToken() {
-return _optionalChain([this, 'access',
+return _optionalChain([this, 'access', _166 => _166.credential, 'optionalAccess', _167 => _167.getToken, 'call', _168 => _168()]);
}
/**
* Gets headers for HTTP requests
@@ -5496,7 +5571,7 @@ var AuthHandler = class {
* Refreshes the token if necessary
*/
async refreshToken() {
-if (_optionalChain([this, 'access',
+if (_optionalChain([this, 'access', _169 => _169.credential, 'optionalAccess', _170 => _170.canRefresh, 'call', _171 => _171()])) {
await this.credential.refresh();
}
}
@@ -5722,7 +5797,7 @@ var InMemoryMemoryService = class {
};
const normalizedQuery = query.toLowerCase().trim();
const queryTerms = normalizedQuery.split(/\s+/);
-const sessionsToSearch = _optionalChain([options, 'optionalAccess',
+const sessionsToSearch = _optionalChain([options, 'optionalAccess', _172 => _172.sessionId]) ? this.sessions.has(options.sessionId) ? [this.sessions.get(options.sessionId)] : [] : Array.from(this.sessions.values());
for (const session of sessionsToSearch) {
const matchedEvents = [];
const scores = [];
@@ -5748,7 +5823,7 @@ var InMemoryMemoryService = class {
}
}
const score = queryTerms.length > 0 ? termMatches / queryTerms.length : 0;
-if (_optionalChain([options, 'optionalAccess',
+if (_optionalChain([options, 'optionalAccess', _173 => _173.threshold]) !== void 0 && score < options.threshold) {
continue;
}
if (score > 0) {
@@ -5768,7 +5843,7 @@ var InMemoryMemoryService = class {
response.memories.sort(
(a, b) => (_nullishCoalesce(b.relevanceScore, () => ( 0))) - (_nullishCoalesce(a.relevanceScore, () => ( 0)))
);
-if (_optionalChain([options, 'optionalAccess',
+if (_optionalChain([options, 'optionalAccess', _174 => _174.limit]) !== void 0 && options.limit > 0) {
response.memories = response.memories.slice(0, options.limit);
}
return response;
@@ -5797,10 +5872,10 @@ var InMemoryMemoryService = class {
};

// src/memory/persistent-memory-service.ts
-
+init_logger();
var _fs = require('fs'); var fs3 = _interopRequireWildcard(_fs);

-var PersistentMemoryService = class {
+var PersistentMemoryService = (_class25 = class {
/**
* In-memory service used for search operations
*/
@@ -5813,10 +5888,11 @@ var PersistentMemoryService = class {
* File prefix for memory files
*/

+__init36() {this.logger = new Logger({ name: "PersistentMemoryService" })}
/**
* Constructor for PersistentMemoryService
*/
-constructor(config) {
+constructor(config) {;_class25.prototype.__init36.call(this);
this.inMemoryService = new InMemoryMemoryService();
this.storageDir = config.storageDir;
this.filePrefix = config.filePrefix || "memory";
@@ -5901,7 +5977,7 @@ var PersistentMemoryService = class {
}
}
}
-
+this.logger.debug(
`Loaded ${this.inMemoryService.getAllSessions().length} sessions from persistent storage`
);
} catch (error) {
@@ -5958,7 +6034,7 @@ var PersistentMemoryService = class {
);
}
}
-};
+}, _class25);

// src/sessions/index.ts
var sessions_exports = {};
@@ -6033,17 +6109,17 @@ var InMemorySessionService = class {
let sessions = Array.from(this.sessions.values()).filter(
(session) => session.userId === userId
);
-if (_optionalChain([options, 'optionalAccess',
+if (_optionalChain([options, 'optionalAccess', _175 => _175.createdAfter])) {
sessions = sessions.filter(
(session) => session.createdAt >= options.createdAfter
);
}
-if (_optionalChain([options, 'optionalAccess',
+if (_optionalChain([options, 'optionalAccess', _176 => _176.updatedAfter])) {
sessions = sessions.filter(
(session) => session.updatedAt >= options.updatedAfter
);
}
-if (_optionalChain([options, 'optionalAccess',
+if (_optionalChain([options, 'optionalAccess', _177 => _177.metadataFilter])) {
sessions = sessions.filter((session) => {
for (const [key, value] of Object.entries(options.metadataFilter)) {
if (session.metadata[key] !== value) {
@@ -6054,7 +6130,7 @@ var InMemorySessionService = class {
});
}
sessions.sort((a, b) => b.updatedAt.getTime() - a.updatedAt.getTime());
-if (_optionalChain([options, 'optionalAccess',
+if (_optionalChain([options, 'optionalAccess', _178 => _178.limit]) !== void 0 && options.limit > 0) {
sessions = sessions.slice(0, options.limit);
}
return sessions;
@@ -6089,7 +6165,7 @@ var InMemorySessionService = class {
if (event.is_partial) {
return event;
}
-if (_optionalChain([event, 'access',
+if (_optionalChain([event, 'access', _179 => _179.actions, 'optionalAccess', _180 => _180.stateDelta])) {
for (const [key, value] of Object.entries(event.actions.stateDelta)) {
if (key.startsWith("_temp_")) {
continue;
@@ -6195,7 +6271,7 @@ var PostgresSessionService = class {
}
async listSessions(userId, options) {
let query = this.db.select().from(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.userId, userId));
-if (_optionalChain([options, 'optionalAccess',
+if (_optionalChain([options, 'optionalAccess', _181 => _181.limit]) !== void 0 && options.limit > 0) {
query = query.limit(options.limit);
}
const results = await query;
@@ -6222,12 +6298,12 @@ var PostgresSessionService = class {
if (event.is_partial) {
return event;
}
-if (_optionalChain([event, 'access',
+if (_optionalChain([event, 'access', _182 => _182.actions, 'optionalAccess', _183 => _183.stateDelta])) {
for (const [key, value] of Object.entries(event.actions.stateDelta)) {
if (key.startsWith("_temp_")) {
continue;
}
-_optionalChain([session, 'access',
+_optionalChain([session, 'access', _184 => _184.state, 'optionalAccess', _185 => _185.set, 'call', _186 => _186(key, value)]);
}
}
if (!session.events) {
@@ -6258,11 +6334,11 @@ var sessionsSchema2 = _pgcore.pgTable.call(void 0, "sessions", {
updatedAt: _pgcore.timestamp.call(void 0, "updated_at", { withTimezone: true }).defaultNow().notNull(),
state: _pgcore.jsonb.call(void 0, "state").default("{}").$type()
});
-var PgLiteSessionService = (
+var PgLiteSessionService = (_class26 = class {


-
-constructor(config) {;
+__init37() {this.initialized = false}
+constructor(config) {;_class26.prototype.__init37.call(this);
this.db = _pglite.drizzle.call(void 0, config.pglite, {
schema: { sessions: sessionsSchema2 }
});
@@ -6371,7 +6447,7 @@ var PgLiteSessionService = (_class12 = class {
async listSessions(userId, options) {
await this.ensureInitialized();
let query = this.db.select().from(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.userId, userId));
-if (_optionalChain([options, 'optionalAccess',
+if (_optionalChain([options, 'optionalAccess', _187 => _187.limit]) !== void 0 && options.limit > 0) {
query = query.limit(options.limit);
}
const results = await query;
@@ -6394,12 +6470,12 @@ var PgLiteSessionService = (_class12 = class {
if (event.is_partial) {
return event;
}
-if (_optionalChain([event, 'access',
+if (_optionalChain([event, 'access', _188 => _188.actions, 'optionalAccess', _189 => _189.stateDelta])) {
for (const [key, value] of Object.entries(event.actions.stateDelta)) {
if (key.startsWith("_temp_")) {
continue;
}
-_optionalChain([session, 'access',
+_optionalChain([session, 'access', _190 => _190.state, 'optionalAccess', _191 => _191.set, 'call', _192 => _192(key, value)]);
}
}
if (!session.events) {
@@ -6410,7 +6486,7 @@ var PgLiteSessionService = (_class12 = class {
await this.updateSession(session);
return event;
}
-},
+}, _class26);

// src/sessions/sqlite-session-service.ts

@@ -6430,12 +6506,12 @@ var sessionsSchema3 = _sqlitecore.sqliteTable.call(void 0, "sessions", {
updatedAt: _sqlitecore.integer.call(void 0, "updated_at", { mode: "timestamp" }).notNull(),
state: _sqlitecore.text.call(void 0, "state", { mode: "json" }).default("{}").$type()
});
-var SqliteSessionService = (
+var SqliteSessionService = (_class27 = class {


-
+__init38() {this.initialized = false}

-constructor(config) {;
+constructor(config) {;_class27.prototype.__init38.call(this);
this.sqliteInstance = config.sqlite;
const dbPath = this.sqliteInstance.name;
if (dbPath && dbPath !== ":memory:") {
@@ -6556,7 +6632,7 @@ var SqliteSessionService = (_class13 = class {
async listSessions(userId, options) {
await this.ensureInitialized();
let query = this.db.select().from(this.sessionsTable).where(_drizzleorm.eq.call(void 0, this.sessionsTable.userId, userId));
-if (_optionalChain([options, 'optionalAccess',
+if (_optionalChain([options, 'optionalAccess', _193 => _193.limit]) !== void 0 && options.limit > 0) {
query = query.limit(options.limit);
}
const results = await query;
@@ -6579,12 +6655,12 @@ var SqliteSessionService = (_class13 = class {
if (event.is_partial) {
return event;
}
-if (_optionalChain([event, 'access',
+if (_optionalChain([event, 'access', _194 => _194.actions, 'optionalAccess', _195 => _195.stateDelta])) {
for (const [key, value] of Object.entries(event.actions.stateDelta)) {
if (key.startsWith("_temp_")) {
continue;
}
-_optionalChain([session, 'access',
+_optionalChain([session, 'access', _196 => _196.state, 'optionalAccess', _197 => _197.set, 'call', _198 => _198(key, value)]);
}
}
if (!session.events) {
@@ -6595,7 +6671,7 @@ var SqliteSessionService = (_class13 = class {
await this.updateSession(session);
return event;
}
-},
+}, _class27);

// src/sessions/session-util.ts
function generateSessionId() {
@@ -6631,7 +6707,7 @@ function cloneSession(session) {
var _uuid = require('uuid');

// src/events/event-actions.ts
-var EventActions = (
+var EventActions = (_class28 = class {
/**
* If true, it won't call model to summarize function response.
* Only used for function_response event.
@@ -6640,12 +6716,12 @@ var EventActions = (_class14 = class {
/**
* Indicates that the event is updating the state with the given delta.
*/
-
+__init39() {this.stateDelta = {}}
/**
* Indicates that the event is updating an artifact. key is the filename,
* value is the version.
*/
-
+__init40() {this.artifactDelta = {}}
/**
* If set, the event transfers to the specified agent.
*/
@@ -6657,21 +6733,21 @@ var EventActions = (_class14 = class {
/**
* Constructor for EventActions
*/
-constructor(options = {}) {;
+constructor(options = {}) {;_class28.prototype.__init39.call(this);_class28.prototype.__init40.call(this);
this.skipSummarization = options.skipSummarization;
this.stateDelta = options.stateDelta || {};
this.artifactDelta = options.artifactDelta || {};
this.transferToAgent = options.transferToAgent;
this.escalate = options.escalate;
}
-},
+}, _class28);

// src/events/event.ts
-var Event = (
+var Event = (_class29 = class _Event extends LLMResponse {
/**
* The invocation ID of the event.
*/
-
+__init41() {this.invocationId = ""}
/**
* 'user' or the name of the agent, indicating who appended the event to the session.
*/
@@ -6679,7 +6755,7 @@ var Event = (_class15 = class _Event extends LLMResponse {
/**
* The actions taken by the agent.
*/
-
+__init42() {this.actions = new EventActions()}
/**
* Set of ids of the long running function calls.
* Agent client will know from this field about which function call is long running.
@@ -6697,7 +6773,7 @@ var Event = (_class15 = class _Event extends LLMResponse {
/**
* The unique identifier of the event.
*/
-
+__init43() {this.id = ""}
/**
* The timestamp of the event.
*/
@@ -6727,7 +6803,7 @@ var Event = (_class15 = class _Event extends LLMResponse {
role,
is_partial: partial,
raw_response
-});
+});_class29.prototype.__init41.call(this);_class29.prototype.__init42.call(this);_class29.prototype.__init43.call(this);;
this.invocationId = invocationId;
this.author = author;
this.actions = actions;
@@ -6761,7 +6837,7 @@ var Event = (_class15 = class _Event extends LLMResponse {
static newId() {
return _uuid.v4.call(void 0, ).substring(0, 8);
}
-},
+}, _class29);

// src/runners.ts
var Runner = class {