@iqai/adk 0.1.4 → 0.1.6
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/CHANGELOG.md +28 -0
- package/dist/index.d.mts +316 -64
- package/dist/index.d.ts +316 -64
- package/dist/index.js +1653 -1016
- package/dist/index.mjs +1438 -801
- package/package.json +5 -1
package/dist/index.js (CHANGED)

@@ -1,4 +1,4 @@
-"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15; var _class16; var _class17; var _class18; var _class19; var _class20; var _class21; var _class22; var _class23; var _class24; var _class25; var _class26; var _class27; var _class28; var _class29;var __defProp = Object.defineProperty;
+"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15; var _class16; var _class17; var _class18; var _class19; var _class20; var _class21; var _class22; var _class23; var _class24; var _class25; var _class26; var _class27; var _class28; var _class29; var _class30; var _class31; var _class32;var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
@@ -26,8 +26,9 @@ var __copyProps = (to, from, except, desc) => {
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

 // src/helpers/logger.ts
+var _chalk = require('chalk'); var _chalk2 = _interopRequireDefault(_chalk);
 function isDebugEnabled() {
-return process.env.NODE_ENV === "development" || process.env.
+return process.env.NODE_ENV === "development" || process.env.ADK_DEBUG === "true";
 }
 var Logger;
 var init_logger = __esm({
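The hunk above adds a `chalk` dependency for colorized output and widens the debug gate: internal loggers now emit debug output when either `NODE_ENV=development` or the new `ADK_DEBUG` environment variable is set. A minimal sketch of turning it on from consuming code; the variable name comes from the compiled `isDebugEnabled()` above, the rest is plain Node.js:

```ts
// Sketch: enable the SDK's internal debug logging before constructing agents.
// Equivalent to running the process with NODE_ENV=development.
process.env.ADK_DEBUG = "true";
```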
@@ -38,34 +39,99 @@ var init_logger = __esm({
 constructor({ name }) {;_class.prototype.__init2.call(this);
 this.name = name;
 }
+colorize(message) {
+return _chalk2.default.blue(message);
+}
 debug(message, ...args) {
-const time = (/* @__PURE__ */ new Date()).toISOString();
 if (this.isDebugEnabled) {
-
+const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
+console.log(
+this.colorize(`[${time}] \u{1F41B} [${this.name}] ${message}`),
+...args
+);
 }
 }
 info(message, ...args) {
-const time = (/* @__PURE__ */ new Date()).
-console.info(
+const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
+console.info(
+this.colorize(`[${time}] \u2139\uFE0F [${this.name}] ${message}`),
+...args
+);
 }
 warn(message, ...args) {
-const time = (/* @__PURE__ */ new Date()).
-console.warn(
+const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
+console.warn(
+this.colorize(`[${time}] \u{1F6A7} [${this.name}] ${message}`),
+...args
+);
 }
 error(message, ...args) {
-const time = (/* @__PURE__ */ new Date()).
-console.error(
+const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
+console.error(
+this.colorize(`[${time}] \u274C [${this.name}] ${message}`),
+...args
+);
+}
+/**
+* Logs structured data in a visually appealing table format.
+* Uses vertical layout for better readability and respects debug settings.
+*/
+debugStructured(title, data) {
+if (!this.isDebugEnabled) return;
+const terminalWidth = process.stdout.columns || 60;
+const width = Math.min(terminalWidth, 100);
+const contentWidth = width - 4;
+const topBorder = `\u250C${"\u2500".repeat(width - 2)}\u2510`;
+const bottomBorder = `\u2514${"\u2500".repeat(width - 2)}\u2518`;
+const middleBorder = `\u251C${"\u2500".repeat(width - 2)}\u2524`;
+console.log(this.colorize(topBorder));
+console.log(this.colorize(`\u2502 ${title.padEnd(contentWidth)} \u2502`));
+console.log(this.colorize(middleBorder));
+Object.entries(data).forEach(([key, value]) => {
+const formattedKey = key.padEnd(20);
+const formattedValue = String(value);
+const availableValueSpace = contentWidth - 20 - 2;
+const truncatedValue = formattedValue.length > availableValueSpace ? `${formattedValue.substring(0, availableValueSpace - 3)}...` : formattedValue;
+const content = `${formattedKey}: ${truncatedValue}`;
+const paddedContent = content.padEnd(contentWidth);
+console.log(this.colorize(`\u2502 ${paddedContent} \u2502`));
+});
+console.log(this.colorize(bottomBorder));
+}
+/**
+* Logs array data in a compact, readable format.
+*/
+debugArray(title, items) {
+if (!this.isDebugEnabled) return;
+const terminalWidth = process.stdout.columns || 78;
+const width = Math.min(terminalWidth, 120);
+const contentWidth = width - 4;
+const topBorder = `\u250C${"\u2500".repeat(width - 2)}\u2510`;
+const bottomBorder = `\u2514${"\u2500".repeat(width - 2)}\u2518`;
+const middleBorder = `\u251C${"\u2500".repeat(width - 2)}\u2524`;
+console.log(this.colorize(topBorder));
+console.log(this.colorize(`\u2502 ${title.padEnd(contentWidth)} \u2502`));
+console.log(this.colorize(middleBorder));
+items.forEach((item, index) => {
+const itemStr = Object.entries(item).map(([k, v]) => `${k}: ${v}`).join(" \u2022 ");
+const indexPart = `[${index + 1}] `;
+const availableSpace = contentWidth - indexPart.length;
+const truncatedItem = itemStr.length > availableSpace ? `${itemStr.substring(0, availableSpace - 3)}...` : itemStr;
+const content = `${indexPart}${truncatedItem}`;
+const paddedContent = content.padEnd(contentWidth);
+console.log(this.colorize(`\u2502 ${paddedContent} \u2502`));
+});
+console.log(this.colorize(bottomBorder));
 }
 }, _class);
 }
 });

 // src/tools/base/base-tool.ts
-var
+var BaseTool;
 var init_base_tool = __esm({
 "src/tools/base/base-tool.ts"() {
 init_logger();
-logger6 = new Logger({ name: "BaseTool" });
 BaseTool = exports.BaseTool = (_class2 = class {
 /**
 * Name of the tool
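Summary of the Logger hunk above: timestamps switch from `toISOString()` to `toLocaleTimeString()`, every level is colorized blue via `chalk`, and two helpers are added. `debugStructured(title, data)` renders a key/value record inside a box sized to the terminal, and `debugArray(title, items)` renders an array of records, one row per item. Both are no-ops unless debug logging is enabled. A usage sketch, assuming the internal `Logger` from `src/helpers/logger.ts` is reachable in your build (it is not necessarily part of the public API):

```ts
// Hypothetical sketch; Logger is an internal helper whose constructor takes { name }.
const logger = new Logger({ name: "MyAgent" });

// Renders a bordered key/value table when ADK_DEBUG=true or NODE_ENV=development.
logger.debugStructured("LLM Request", {
  model: "gemini-1.5-flash",
  streaming: false,
  contentCount: 3,
});

// Renders one "[n] key: value \u2022 key: value" row per item.
logger.debugArray("Registered tools", [
  { name: "search", longRunning: false },
  { name: "calculator", longRunning: false },
]);
```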
@@ -96,10 +162,11 @@ var init_base_tool = __esm({
 * Maximum delay for retry in ms
 */
 __init4() {this.maxRetryDelay = 1e4}
+__init5() {this.logger = new Logger({ name: "BaseTool" })}
 /**
 * Constructor for BaseTool
 */
-constructor(config) {;_class2.prototype.__init3.call(this);_class2.prototype.__init4.call(this);
+constructor(config) {;_class2.prototype.__init3.call(this);_class2.prototype.__init4.call(this);_class2.prototype.__init5.call(this);
 this.name = config.name;
 this.description = config.description;
 this.isLongRunning = config.isLongRunning || false;
@@ -226,7 +293,7 @@ var init_base_tool = __esm({
 while (attempts <= (this.shouldRetryOnFailure ? this.maxRetryAttempts : 0)) {
 try {
 if (attempts > 0) {
-
+this.logger.debug(
 `Retrying tool ${this.name} (attempt ${attempts} of ${this.maxRetryAttempts})...`
 );
 const delay = Math.min(
@@ -406,8 +473,8 @@ var init_function_tool = __esm({
 init_function_utils();
 FunctionTool = exports.FunctionTool = (_class3 = class extends BaseTool {

-
-
+__init6() {this.mandatoryArgs = []}
+__init7() {this.parameterTypes = {}}
 /**
 * Creates a new FunctionTool wrapping the provided function.
 *
@@ -423,7 +490,7 @@ var init_function_tool = __esm({
 isLongRunning: _optionalChain([options, 'optionalAccess', _8 => _8.isLongRunning]) || false,
 shouldRetryOnFailure: _optionalChain([options, 'optionalAccess', _9 => _9.shouldRetryOnFailure]) || false,
 maxRetryAttempts: _optionalChain([options, 'optionalAccess', _10 => _10.maxRetryAttempts]) || 3
-});_class3.prototype.
+});_class3.prototype.__init6.call(this);_class3.prototype.__init7.call(this);;
 this.func = func;
 this.mandatoryArgs = this.getMandatoryArgs(func);
 this.parameterTypes = _optionalChain([options, 'optionalAccess', _11 => _11.parameterTypes]) || {};
@@ -613,6 +680,7 @@ __export(agents_exports, {
 // src/models/index.ts
 var models_exports = {};
 __export(models_exports, {
+AiSdkLlm: () => AiSdkLlm,
 AnthropicLlm: () => AnthropicLlm,
 ApiKeyCredential: () => ApiKeyCredential,
 ApiKeyScheme: () => ApiKeyScheme,
@@ -640,8 +708,6 @@ __export(models_exports, {
 });

 // src/models/llm-request.ts
-init_logger();
-var logger = new Logger({ name: "LlmRequest" });
 var LlmRequest = class {
 /**
 * The model name.
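The export hunk above is the first visible piece of the new Vercel AI SDK integration: `models_exports` now exposes `AiSdkLlm`, whose implementation (`src/models/ai-sdk.ts`) appears further down in this diff and wraps a pre-configured `LanguageModel` instance. A usage sketch; the `@ai-sdk/openai` provider and the top-level re-export path are assumptions, only the constructor shape is taken from this diff:

```ts
import { openai } from "@ai-sdk/openai"; // any Vercel AI SDK provider works
import { AiSdkLlm } from "@iqai/adk";    // assumed re-export of the models_exports entry above

// AiSdkLlm takes a pre-configured LanguageModel; its model name falls back to modelInstance.modelId.
const llm = new AiSdkLlm(openai("gpt-4o-mini"));
```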
@@ -805,6 +871,10 @@ var LlmResponse = class _LlmResponse {
 * Reason why the model finished generating.
 */

+/**
+* Error object if the response is an error.
+*/
+
 /**
 * Creates a new LlmResponse.
 */
@@ -848,6 +918,29 @@ var LlmResponse = class _LlmResponse {
 usageMetadata
 });
 }
+/**
+* Creates an LlmResponse from an error.
+*
+* @param error The error object or message.
+* @param options Additional options for the error response.
+* @param options.errorCode A specific error code for the response.
+* @param options.model The model that was being used when the error occurred.
+* @returns The LlmResponse.
+*/
+static fromError(error, options = {}) {
+const errorMessage = error instanceof Error ? error.message : String(error);
+const errorCode = options.errorCode || "UNKNOWN_ERROR";
+return new _LlmResponse({
+errorCode,
+errorMessage: `LLM call failed for model ${options.model || "unknown"}: ${errorMessage}`,
+content: {
+role: "model",
+parts: [{ text: `Error: ${errorMessage}` }]
+},
+finishReason: "STOP",
+error: error instanceof Error ? error : new Error(errorMessage)
+});
+}
 };

 // src/models/base-llm.ts
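The second hunk above adds `LlmResponse.fromError`, a static factory that normalizes any thrown value into an error-shaped response: it fills `errorCode` (defaulting to "UNKNOWN_ERROR"), an `errorMessage` naming the model, a model-role text part, `finishReason: "STOP"`, and the original `Error`. The new `AiSdkLlm` later in this diff calls it with `errorCode: "AI_SDK_ERROR"`. A sketch of what a caller gets back; the import path of `LlmResponse` is assumed:

```ts
// Sketch based on the factory added above.
try {
  throw new Error("rate limit exceeded");
} catch (err) {
  const response = LlmResponse.fromError(err, {
    errorCode: "AI_SDK_ERROR", // optional; defaults to "UNKNOWN_ERROR"
    model: "gpt-4o-mini",      // optional; included in errorMessage
  });
  // response.errorMessage === "LLM call failed for model gpt-4o-mini: rate limit exceeded"
  // response.content.parts[0].text === "Error: rate limit exceeded"
}
```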
@@ -869,11 +962,11 @@ var _sdknode = require('@opentelemetry/sdk-node');

 var _semanticconventions = require('@opentelemetry/semantic-conventions');
 var TelemetryService = (_class4 = class {
-
-
+__init8() {this.sdk = null}
+__init9() {this.isInitialized = false}

-
-constructor() {;_class4.prototype.
+__init10() {this.config = null}
+constructor() {;_class4.prototype.__init8.call(this);_class4.prototype.__init9.call(this);_class4.prototype.__init10.call(this);
 this.tracer = _api.trace.getTracer("iqai-adk", "0.1.0");
 }
 /**
@@ -1127,16 +1220,16 @@ var traceLlmCall = (invocationContext, eventId, llmRequest, llmResponse) => tele
 );

 // src/models/base-llm.ts
-var
-var BaseLlm = class {
+var BaseLlm = (_class5 = class {
 /**
 * The name of the LLM, e.g. gemini-1.5-flash or gemini-1.5-flash-001.
 */

+__init11() {this.logger = new Logger({ name: "BaseLlm" })}
 /**
 * Constructor for BaseLlm
 */
-constructor(model) {
+constructor(model) {;_class5.prototype.__init11.call(this);
 this.model = model;
 }
 /**
@@ -1183,12 +1276,6 @@ var BaseLlm = class {
 }),
 "adk.streaming": stream || false
 });
-logger2.debug("ADK LLM Request:", {
-model: this.model,
-contentCount: _optionalChain([llmRequest, 'access', _45 => _45.contents, 'optionalAccess', _46 => _46.length]) || 0,
-streaming: stream || false,
-config: llmRequest.config
-});
 let responseCount = 0;
 let totalTokens = 0;
 for await (const response of this.generateContentAsyncImpl(
@@ -1196,14 +1283,6 @@ var BaseLlm = class {
 stream
 )) {
 responseCount++;
-logger2.debug(`ADK LLM Response ${responseCount}:`, {
-model: this.model,
-parts: _optionalChain([response, 'access', _47 => _47.parts, 'optionalAccess', _48 => _48.map, 'call', _49 => _49((part) => ({
-text: typeof part.text === "string" ? part.text.substring(0, 200) + (part.text.length > 200 ? "..." : "") : "[non_text_content]"
-}))]),
-finishReason: response.finish_reason,
-usage: response.usage
-});
 if (response.usage) {
 totalTokens += response.usage.total_tokens || 0;
 span.setAttributes({
@@ -1224,7 +1303,7 @@ var BaseLlm = class {
 } catch (error) {
 span.recordException(error);
 span.setStatus({ code: 2, message: error.message });
-
+this.logger.error("\u274C ADK LLM Error:", {
 model: this.model,
 error: error.message
 });
@@ -1273,33 +1352,29 @@ var BaseLlm = class {
|
|
|
1273
1352
|
connect(llmRequest) {
|
|
1274
1353
|
throw new Error(`Live connection is not supported for ${this.model}.`);
|
|
1275
1354
|
}
|
|
1276
|
-
};
|
|
1355
|
+
}, _class5);
|
|
1277
1356
|
|
|
1278
1357
|
// src/models/base-llm-connection.ts
|
|
1279
1358
|
var BaseLLMConnection = class {
|
|
1280
1359
|
};
|
|
1281
1360
|
|
|
1282
1361
|
// src/models/google-llm.ts
|
|
1283
|
-
init_logger();
|
|
1284
1362
|
|
|
1285
1363
|
|
|
1286
1364
|
|
|
1287
1365
|
var _genai = require('@google/genai');
|
|
1288
|
-
var _dedent = require('dedent'); var _dedent2 = _interopRequireDefault(_dedent);
|
|
1289
|
-
var NEW_LINE = "\n";
|
|
1290
1366
|
var AGENT_ENGINE_TELEMETRY_TAG = "remote_reasoning_engine";
|
|
1291
1367
|
var AGENT_ENGINE_TELEMETRY_ENV_VARIABLE_NAME = "GOOGLE_CLOUD_AGENT_ENGINE_ID";
|
|
1292
|
-
var GoogleLlm =
|
|
1368
|
+
var GoogleLlm = class extends BaseLlm {
|
|
1293
1369
|
|
|
1294
1370
|
|
|
1295
1371
|
|
|
1296
1372
|
|
|
1297
|
-
__init10() {this.logger = new Logger({ name: "GoogleLlm" })}
|
|
1298
1373
|
/**
|
|
1299
1374
|
* Constructor for Gemini
|
|
1300
1375
|
*/
|
|
1301
1376
|
constructor(model = "gemini-1.5-flash") {
|
|
1302
|
-
super(model);
|
|
1377
|
+
super(model);
|
|
1303
1378
|
}
|
|
1304
1379
|
/**
|
|
1305
1380
|
* Provides the list of supported models.
|
|
@@ -1318,10 +1393,6 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
|
|
|
1318
1393
|
*/
|
|
1319
1394
|
async *generateContentAsyncImpl(llmRequest, stream = false) {
|
|
1320
1395
|
this.preprocessRequest(llmRequest);
|
|
1321
|
-
this.logger.debug(
|
|
1322
|
-
`Sending out request, model: ${llmRequest.model || this.model}, backend: ${this.apiBackend}, stream: ${stream}`
|
|
1323
|
-
);
|
|
1324
|
-
this.logger.debug(this.buildRequestLog(llmRequest));
|
|
1325
1396
|
const model = llmRequest.model || this.model;
|
|
1326
1397
|
const contents = this.convertContents(llmRequest.contents || []);
|
|
1327
1398
|
const config = this.convertConfig(llmRequest.config);
|
|
@@ -1337,10 +1408,9 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
|
|
|
1337
1408
|
let usageMetadata = null;
|
|
1338
1409
|
for await (const resp of responses) {
|
|
1339
1410
|
response = resp;
|
|
1340
|
-
this.logger.debug(this.buildResponseLog(resp));
|
|
1341
1411
|
const llmResponse = LlmResponse.create(resp);
|
|
1342
1412
|
usageMetadata = llmResponse.usageMetadata;
|
|
1343
|
-
if (_optionalChain([llmResponse, 'access',
|
|
1413
|
+
if (_optionalChain([llmResponse, 'access', _45 => _45.content, 'optionalAccess', _46 => _46.parts, 'optionalAccess', _47 => _47[0], 'optionalAccess', _48 => _48.text])) {
|
|
1344
1414
|
const part0 = llmResponse.content.parts[0];
|
|
1345
1415
|
if (part0.thought) {
|
|
1346
1416
|
thoughtText += part0.text;
|
|
@@ -1368,7 +1438,7 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
|
|
|
1368
1438
|
}
|
|
1369
1439
|
yield llmResponse;
|
|
1370
1440
|
}
|
|
1371
|
-
if ((text || thoughtText) && response && response.candidates && _optionalChain([response, 'access',
|
|
1441
|
+
if ((text || thoughtText) && response && response.candidates && _optionalChain([response, 'access', _49 => _49.candidates, 'access', _50 => _50[0], 'optionalAccess', _51 => _51.finishReason]) === _genai.FinishReason.STOP) {
|
|
1372
1442
|
const parts = [];
|
|
1373
1443
|
if (thoughtText) {
|
|
1374
1444
|
parts.push({ text: thoughtText, thought: true });
|
|
@@ -1390,8 +1460,11 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
|
|
|
1390
1460
|
contents,
|
|
1391
1461
|
config
|
|
1392
1462
|
});
|
|
1393
|
-
|
|
1394
|
-
|
|
1463
|
+
const llmResponse = LlmResponse.create(response);
|
|
1464
|
+
this.logger.debug(
|
|
1465
|
+
`Google response: ${_optionalChain([llmResponse, 'access', _52 => _52.usageMetadata, 'optionalAccess', _53 => _53.candidatesTokenCount]) || 0} tokens`
|
|
1466
|
+
);
|
|
1467
|
+
yield llmResponse;
|
|
1395
1468
|
}
|
|
1396
1469
|
}
|
|
1397
1470
|
/**
|
|
@@ -1404,8 +1477,8 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
|
|
|
1404
1477
|
* Check if response has inline data
|
|
1405
1478
|
*/
|
|
1406
1479
|
hasInlineData(response) {
|
|
1407
|
-
const parts = _optionalChain([response, 'access',
|
|
1408
|
-
return _optionalChain([parts, 'optionalAccess',
|
|
1480
|
+
const parts = _optionalChain([response, 'access', _54 => _54.candidates, 'optionalAccess', _55 => _55[0], 'optionalAccess', _56 => _56.content, 'optionalAccess', _57 => _57.parts]);
|
|
1481
|
+
return _optionalChain([parts, 'optionalAccess', _58 => _58.some, 'call', _59 => _59((part) => _optionalChain([part, 'optionalAccess', _60 => _60.inlineData]))]) || false;
|
|
1409
1482
|
}
|
|
1410
1483
|
/**
|
|
1411
1484
|
* Convert LlmRequest contents to GoogleGenAI format
|
|
@@ -1452,7 +1525,7 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
|
|
|
1452
1525
|
* Sets display_name to null for the Gemini API (non-Vertex) backend.
|
|
1453
1526
|
*/
|
|
1454
1527
|
removeDisplayNameIfPresent(dataObj) {
|
|
1455
|
-
if (_optionalChain([dataObj, 'optionalAccess',
|
|
1528
|
+
if (_optionalChain([dataObj, 'optionalAccess', _61 => _61.displayName])) {
|
|
1456
1529
|
dataObj.displayName = null;
|
|
1457
1530
|
}
|
|
1458
1531
|
}
|
|
@@ -1461,65 +1534,11 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
|
|
|
1461
1534
|
*/
|
|
1462
1535
|
buildFunctionDeclarationLog(funcDecl) {
|
|
1463
1536
|
let paramStr = "{}";
|
|
1464
|
-
if (_optionalChain([funcDecl, 'access',
|
|
1537
|
+
if (_optionalChain([funcDecl, 'access', _62 => _62.parameters, 'optionalAccess', _63 => _63.properties])) {
|
|
1465
1538
|
paramStr = JSON.stringify(funcDecl.parameters.properties);
|
|
1466
1539
|
}
|
|
1467
1540
|
return `${funcDecl.name}: ${paramStr}`;
|
|
1468
1541
|
}
|
|
1469
|
-
/**
|
|
1470
|
-
* Builds request log string.
|
|
1471
|
-
*/
|
|
1472
|
-
buildRequestLog(req) {
|
|
1473
|
-
const functionDecls = _optionalChain([req, 'access', _67 => _67.config, 'optionalAccess', _68 => _68.tools, 'optionalAccess', _69 => _69[0], 'optionalAccess', _70 => _70.functionDeclarations]) || [];
|
|
1474
|
-
const functionLogs = functionDecls.length > 0 ? functionDecls.map(
|
|
1475
|
-
(funcDecl) => this.buildFunctionDeclarationLog(funcDecl)
|
|
1476
|
-
) : [];
|
|
1477
|
-
const contentsLogs = _optionalChain([req, 'access', _71 => _71.contents, 'optionalAccess', _72 => _72.map, 'call', _73 => _73(
|
|
1478
|
-
(content) => JSON.stringify(content, (key, value) => {
|
|
1479
|
-
if (key === "data" && typeof value === "string" && value.length > 100) {
|
|
1480
|
-
return "[EXCLUDED]";
|
|
1481
|
-
}
|
|
1482
|
-
return value;
|
|
1483
|
-
})
|
|
1484
|
-
)]) || [];
|
|
1485
|
-
return _dedent2.default`
|
|
1486
|
-
LLM Request:
|
|
1487
|
-
-----------------------------------------------------------
|
|
1488
|
-
System Instruction:
|
|
1489
|
-
${_optionalChain([req, 'access', _74 => _74.config, 'optionalAccess', _75 => _75.systemInstruction]) || ""}
|
|
1490
|
-
-----------------------------------------------------------
|
|
1491
|
-
Contents:
|
|
1492
|
-
${contentsLogs.join(NEW_LINE)}
|
|
1493
|
-
-----------------------------------------------------------
|
|
1494
|
-
Functions:
|
|
1495
|
-
${functionLogs.join(NEW_LINE)}
|
|
1496
|
-
-----------------------------------------------------------`;
|
|
1497
|
-
}
|
|
1498
|
-
/**
|
|
1499
|
-
* Builds response log string.
|
|
1500
|
-
*/
|
|
1501
|
-
buildResponseLog(resp) {
|
|
1502
|
-
const functionCallsText = [];
|
|
1503
|
-
if (resp.functionCalls) {
|
|
1504
|
-
for (const funcCall of resp.functionCalls) {
|
|
1505
|
-
functionCallsText.push(
|
|
1506
|
-
`name: ${funcCall.name}, args: ${JSON.stringify(funcCall.args)}`
|
|
1507
|
-
);
|
|
1508
|
-
}
|
|
1509
|
-
}
|
|
1510
|
-
return _dedent2.default`
|
|
1511
|
-
LLM Response:
|
|
1512
|
-
-----------------------------------------------------------
|
|
1513
|
-
Text:
|
|
1514
|
-
${resp.text || ""}
|
|
1515
|
-
-----------------------------------------------------------
|
|
1516
|
-
Function calls:
|
|
1517
|
-
${functionCallsText.join(NEW_LINE)}
|
|
1518
|
-
-----------------------------------------------------------
|
|
1519
|
-
Raw response:
|
|
1520
|
-
${JSON.stringify(resp, null, 2)}
|
|
1521
|
-
-----------------------------------------------------------`;
|
|
1522
|
-
}
|
|
1523
1542
|
/**
|
|
1524
1543
|
* Provides the api client.
|
|
1525
1544
|
*/
|
|
@@ -1608,20 +1627,20 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
|
|
|
1608
1627
|
}
|
|
1609
1628
|
return this._liveApiClient;
|
|
1610
1629
|
}
|
|
1611
|
-
}
|
|
1630
|
+
};
|
|
1612
1631
|
|
|
1613
1632
|
// src/models/anthropic-llm.ts
|
|
1614
1633
|
init_logger();
|
|
1615
1634
|
var _sdk = require('@anthropic-ai/sdk'); var _sdk2 = _interopRequireDefault(_sdk);
|
|
1616
|
-
var logger3 = new Logger({ name: "AnthropicLlm" });
|
|
1617
1635
|
var MAX_TOKENS = 1024;
|
|
1618
|
-
var AnthropicLlm = class extends BaseLlm {
|
|
1636
|
+
var AnthropicLlm = (_class6 = class extends BaseLlm {
|
|
1619
1637
|
|
|
1638
|
+
__init12() {this.logger = new Logger({ name: "AnthropicLlm" })}
|
|
1620
1639
|
/**
|
|
1621
1640
|
* Constructor for Anthropic LLM
|
|
1622
1641
|
*/
|
|
1623
1642
|
constructor(model = "claude-3-5-sonnet-20241022") {
|
|
1624
|
-
super(model);
|
|
1643
|
+
super(model);_class6.prototype.__init12.call(this);;
|
|
1625
1644
|
}
|
|
1626
1645
|
/**
|
|
1627
1646
|
* Provides the list of supported models
|
|
@@ -1633,15 +1652,12 @@ var AnthropicLlm = class extends BaseLlm {
|
|
|
1633
1652
|
* Main content generation method - handles both streaming and non-streaming
|
|
1634
1653
|
*/
|
|
1635
1654
|
async *generateContentAsyncImpl(llmRequest, stream = false) {
|
|
1636
|
-
logger3.debug(
|
|
1637
|
-
`Sending Anthropic request, model: ${llmRequest.model || this.model}, stream: ${stream}`
|
|
1638
|
-
);
|
|
1639
1655
|
const model = llmRequest.model || this.model;
|
|
1640
1656
|
const messages = (llmRequest.contents || []).map(
|
|
1641
1657
|
(content) => this.contentToAnthropicMessage(content)
|
|
1642
1658
|
);
|
|
1643
1659
|
let tools;
|
|
1644
|
-
if (_optionalChain([llmRequest, 'access',
|
|
1660
|
+
if (_optionalChain([llmRequest, 'access', _64 => _64.config, 'optionalAccess', _65 => _65.tools, 'optionalAccess', _66 => _66[0], 'optionalAccess', _67 => _67.functionDeclarations])) {
|
|
1645
1661
|
tools = llmRequest.config.tools[0].functionDeclarations.map(
|
|
1646
1662
|
(decl) => this.functionDeclarationToAnthropicTool(decl)
|
|
1647
1663
|
);
|
|
@@ -1663,9 +1679,9 @@ var AnthropicLlm = class extends BaseLlm {
|
|
|
1663
1679
|
messages: anthropicMessages,
|
|
1664
1680
|
tools,
|
|
1665
1681
|
tool_choice: tools ? { type: "auto" } : void 0,
|
|
1666
|
-
max_tokens: _optionalChain([llmRequest, 'access',
|
|
1667
|
-
temperature: _optionalChain([llmRequest, 'access',
|
|
1668
|
-
top_p: _optionalChain([llmRequest, 'access',
|
|
1682
|
+
max_tokens: _optionalChain([llmRequest, 'access', _68 => _68.config, 'optionalAccess', _69 => _69.maxOutputTokens]) || MAX_TOKENS,
|
|
1683
|
+
temperature: _optionalChain([llmRequest, 'access', _70 => _70.config, 'optionalAccess', _71 => _71.temperature]),
|
|
1684
|
+
top_p: _optionalChain([llmRequest, 'access', _72 => _72.config, 'optionalAccess', _73 => _73.topP])
|
|
1669
1685
|
});
|
|
1670
1686
|
yield this.anthropicMessageToLlmResponse(message);
|
|
1671
1687
|
}
|
|
@@ -1679,7 +1695,9 @@ var AnthropicLlm = class extends BaseLlm {
|
|
|
1679
1695
|
* Convert Anthropic Message to ADK LlmResponse
|
|
1680
1696
|
*/
|
|
1681
1697
|
anthropicMessageToLlmResponse(message) {
|
|
1682
|
-
|
|
1698
|
+
this.logger.debug(
|
|
1699
|
+
`Anthropic response: ${message.usage.output_tokens} tokens, ${message.stop_reason}`
|
|
1700
|
+
);
|
|
1683
1701
|
return new LlmResponse({
|
|
1684
1702
|
content: {
|
|
1685
1703
|
role: "model",
|
|
@@ -1724,7 +1742,7 @@ var AnthropicLlm = class extends BaseLlm {
|
|
|
1724
1742
|
}
|
|
1725
1743
|
if (part.function_response) {
|
|
1726
1744
|
let content = "";
|
|
1727
|
-
if (_optionalChain([part, 'access',
|
|
1745
|
+
if (_optionalChain([part, 'access', _74 => _74.function_response, 'access', _75 => _75.response, 'optionalAccess', _76 => _76.result])) {
|
|
1728
1746
|
content = String(part.function_response.response.result);
|
|
1729
1747
|
}
|
|
1730
1748
|
return {
|
|
@@ -1759,7 +1777,7 @@ var AnthropicLlm = class extends BaseLlm {
|
|
|
1759
1777
|
*/
|
|
1760
1778
|
functionDeclarationToAnthropicTool(functionDeclaration) {
|
|
1761
1779
|
const properties = {};
|
|
1762
|
-
if (_optionalChain([functionDeclaration, 'access',
|
|
1780
|
+
if (_optionalChain([functionDeclaration, 'access', _77 => _77.parameters, 'optionalAccess', _78 => _78.properties])) {
|
|
1763
1781
|
for (const [key, value] of Object.entries(
|
|
1764
1782
|
functionDeclaration.parameters.properties
|
|
1765
1783
|
)) {
|
|
@@ -1833,14 +1851,10 @@ var AnthropicLlm = class extends BaseLlm {
|
|
|
1833
1851
|
}
|
|
1834
1852
|
return this._client;
|
|
1835
1853
|
}
|
|
1836
|
-
};
|
|
1854
|
+
}, _class6);
|
|
1837
1855
|
|
|
1838
1856
|
// src/models/openai-llm.ts
|
|
1839
|
-
init_logger();
|
|
1840
|
-
|
|
1841
1857
|
var _openai = require('openai'); var _openai2 = _interopRequireDefault(_openai);
|
|
1842
|
-
var logger4 = new Logger({ name: "OpenAiLlm" });
|
|
1843
|
-
var NEW_LINE2 = "\n";
|
|
1844
1858
|
var OpenAiLlm = class extends BaseLlm {
|
|
1845
1859
|
|
|
1846
1860
|
/**
|
|
@@ -1860,16 +1874,12 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
1860
1874
|
*/
|
|
1861
1875
|
async *generateContentAsyncImpl(llmRequest, stream = false) {
|
|
1862
1876
|
this.preprocessRequest(llmRequest);
|
|
1863
|
-
logger4.debug(
|
|
1864
|
-
`Sending OpenAI request, model: ${llmRequest.model || this.model}, stream: ${stream}`
|
|
1865
|
-
);
|
|
1866
|
-
logger4.debug(this.buildRequestLog(llmRequest));
|
|
1867
1877
|
const model = llmRequest.model || this.model;
|
|
1868
1878
|
const messages = (llmRequest.contents || []).map(
|
|
1869
1879
|
(content) => this.contentToOpenAiMessage(content)
|
|
1870
1880
|
);
|
|
1871
1881
|
let tools;
|
|
1872
|
-
if (_optionalChain([llmRequest, 'access',
|
|
1882
|
+
if (_optionalChain([llmRequest, 'access', _79 => _79.config, 'optionalAccess', _80 => _80.tools, 'optionalAccess', _81 => _81[0], 'optionalAccess', _82 => _82.functionDeclarations])) {
|
|
1873
1883
|
tools = llmRequest.config.tools[0].functionDeclarations.map(
|
|
1874
1884
|
(funcDecl) => this.functionDeclarationToOpenAiTool(funcDecl)
|
|
1875
1885
|
);
|
|
@@ -1887,9 +1897,9 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
1887
1897
|
messages: openAiMessages,
|
|
1888
1898
|
tools,
|
|
1889
1899
|
tool_choice: tools ? "auto" : void 0,
|
|
1890
|
-
max_tokens: _optionalChain([llmRequest, 'access',
|
|
1891
|
-
temperature: _optionalChain([llmRequest, 'access',
|
|
1892
|
-
top_p: _optionalChain([llmRequest, 'access',
|
|
1900
|
+
max_tokens: _optionalChain([llmRequest, 'access', _83 => _83.config, 'optionalAccess', _84 => _84.maxOutputTokens]),
|
|
1901
|
+
temperature: _optionalChain([llmRequest, 'access', _85 => _85.config, 'optionalAccess', _86 => _86.temperature]),
|
|
1902
|
+
top_p: _optionalChain([llmRequest, 'access', _87 => _87.config, 'optionalAccess', _88 => _88.topP]),
|
|
1893
1903
|
stream
|
|
1894
1904
|
};
|
|
1895
1905
|
if (stream) {
|
|
@@ -1905,13 +1915,11 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
1905
1915
|
const choice = chunk.choices[0];
|
|
1906
1916
|
if (!choice) continue;
|
|
1907
1917
|
const delta = choice.delta;
|
|
1908
|
-
logger4.debug("Delta content:", delta.content);
|
|
1909
1918
|
const llmResponse = this.createChunkResponse(delta, chunk.usage);
|
|
1910
1919
|
if (chunk.usage) {
|
|
1911
1920
|
usageMetadata = chunk.usage;
|
|
1912
1921
|
}
|
|
1913
|
-
|
|
1914
|
-
if (_optionalChain([llmResponse, 'access', _101 => _101.content, 'optionalAccess', _102 => _102.parts, 'optionalAccess', _103 => _103[0], 'optionalAccess', _104 => _104.text])) {
|
|
1922
|
+
if (_optionalChain([llmResponse, 'access', _89 => _89.content, 'optionalAccess', _90 => _90.parts, 'optionalAccess', _91 => _91[0], 'optionalAccess', _92 => _92.text])) {
|
|
1915
1923
|
const part0 = llmResponse.content.parts[0];
|
|
1916
1924
|
if (part0.thought) {
|
|
1917
1925
|
thoughtText += part0.text;
|
|
@@ -1952,10 +1960,10 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
1952
1960
|
function: { name: "", arguments: "" }
|
|
1953
1961
|
};
|
|
1954
1962
|
}
|
|
1955
|
-
if (_optionalChain([toolCall, 'access',
|
|
1963
|
+
if (_optionalChain([toolCall, 'access', _93 => _93.function, 'optionalAccess', _94 => _94.name])) {
|
|
1956
1964
|
accumulatedToolCalls[index].function.name += toolCall.function.name;
|
|
1957
1965
|
}
|
|
1958
|
-
if (_optionalChain([toolCall, 'access',
|
|
1966
|
+
if (_optionalChain([toolCall, 'access', _95 => _95.function, 'optionalAccess', _96 => _96.arguments])) {
|
|
1959
1967
|
accumulatedToolCalls[index].function.arguments += toolCall.function.arguments;
|
|
1960
1968
|
}
|
|
1961
1969
|
}
|
|
@@ -1970,7 +1978,7 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
1970
1978
|
}
|
|
1971
1979
|
if (accumulatedToolCalls.length > 0) {
|
|
1972
1980
|
for (const toolCall of accumulatedToolCalls) {
|
|
1973
|
-
if (_optionalChain([toolCall, 'access',
|
|
1981
|
+
if (_optionalChain([toolCall, 'access', _97 => _97.function, 'optionalAccess', _98 => _98.name])) {
|
|
1974
1982
|
parts.push({
|
|
1975
1983
|
functionCall: {
|
|
1976
1984
|
id: toolCall.id,
|
|
@@ -1993,7 +2001,6 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
1993
2001
|
} : void 0,
|
|
1994
2002
|
finishReason: this.toAdkFinishReason(choice.finish_reason)
|
|
1995
2003
|
});
|
|
1996
|
-
logger4.debug(this.buildResponseLog(finalResponse));
|
|
1997
2004
|
yield finalResponse;
|
|
1998
2005
|
} else {
|
|
1999
2006
|
yield llmResponse;
|
|
@@ -2030,7 +2037,9 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
2030
2037
|
choice,
|
|
2031
2038
|
response.usage
|
|
2032
2039
|
);
|
|
2033
|
-
|
|
2040
|
+
this.logger.debug(
|
|
2041
|
+
`OpenAI response: ${_optionalChain([response, 'access', _99 => _99.usage, 'optionalAccess', _100 => _100.completion_tokens]) || 0} tokens`
|
|
2042
|
+
);
|
|
2034
2043
|
yield llmResponse;
|
|
2035
2044
|
}
|
|
2036
2045
|
}
|
|
@@ -2056,7 +2065,7 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
2056
2065
|
}
|
|
2057
2066
|
if (delta.tool_calls) {
|
|
2058
2067
|
for (const toolCall of delta.tool_calls) {
|
|
2059
|
-
if (toolCall.type === "function" && _optionalChain([toolCall, 'access',
|
|
2068
|
+
if (toolCall.type === "function" && _optionalChain([toolCall, 'access', _101 => _101.function, 'optionalAccess', _102 => _102.name])) {
|
|
2060
2069
|
parts.push({
|
|
2061
2070
|
functionCall: {
|
|
2062
2071
|
id: toolCall.id || "",
|
|
@@ -2084,10 +2093,6 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
2084
2093
|
*/
|
|
2085
2094
|
openAiMessageToLlmResponse(choice, usage) {
|
|
2086
2095
|
const message = choice.message;
|
|
2087
|
-
logger4.debug(
|
|
2088
|
-
"OpenAI response:",
|
|
2089
|
-
JSON.stringify({ message, usage }, null, 2)
|
|
2090
|
-
);
|
|
2091
2096
|
const parts = [];
|
|
2092
2097
|
if (message.content) {
|
|
2093
2098
|
parts.push({ text: message.content });
|
|
@@ -2126,10 +2131,10 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
2126
2131
|
if (role === "system") {
|
|
2127
2132
|
return {
|
|
2128
2133
|
role: "system",
|
|
2129
|
-
content: _optionalChain([content, 'access',
|
|
2134
|
+
content: _optionalChain([content, 'access', _103 => _103.parts, 'optionalAccess', _104 => _104[0], 'optionalAccess', _105 => _105.text]) || ""
|
|
2130
2135
|
};
|
|
2131
2136
|
}
|
|
2132
|
-
if (_optionalChain([content, 'access',
|
|
2137
|
+
if (_optionalChain([content, 'access', _106 => _106.parts, 'optionalAccess', _107 => _107.some, 'call', _108 => _108((part) => part.functionCall)])) {
|
|
2133
2138
|
const functionCallPart = content.parts.find(
|
|
2134
2139
|
(part) => part.functionCall
|
|
2135
2140
|
);
|
|
@@ -2149,7 +2154,7 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
2149
2154
|
]
|
|
2150
2155
|
};
|
|
2151
2156
|
}
|
|
2152
|
-
if (_optionalChain([content, 'access',
|
|
2157
|
+
if (_optionalChain([content, 'access', _109 => _109.parts, 'optionalAccess', _110 => _110.some, 'call', _111 => _111((part) => part.functionResponse)])) {
|
|
2153
2158
|
const functionResponsePart = content.parts.find(
|
|
2154
2159
|
(part) => part.functionResponse
|
|
2155
2160
|
);
|
|
@@ -2161,7 +2166,7 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
2161
2166
|
)
|
|
2162
2167
|
};
|
|
2163
2168
|
}
|
|
2164
|
-
if (_optionalChain([content, 'access',
|
|
2169
|
+
if (_optionalChain([content, 'access', _112 => _112.parts, 'optionalAccess', _113 => _113.length]) === 1 && content.parts[0].text) {
|
|
2165
2170
|
return {
|
|
2166
2171
|
role,
|
|
2167
2172
|
content: content.parts[0].text
|
|
@@ -2184,7 +2189,7 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
2184
2189
|
text: part.text
|
|
2185
2190
|
};
|
|
2186
2191
|
}
|
|
2187
|
-
if (_optionalChain([part, 'access',
|
|
2192
|
+
if (_optionalChain([part, 'access', _114 => _114.inline_data, 'optionalAccess', _115 => _115.mime_type]) && _optionalChain([part, 'access', _116 => _116.inline_data, 'optionalAccess', _117 => _117.data])) {
|
|
2188
2193
|
return {
|
|
2189
2194
|
type: "image_url",
|
|
2190
2195
|
image_url: {
|
|
@@ -2273,69 +2278,8 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
2273
2278
|
* Check if response has inline data (similar to Google LLM)
|
|
2274
2279
|
*/
|
|
2275
2280
|
hasInlineData(response) {
|
|
2276
|
-
const parts = _optionalChain([response, 'access',
|
|
2277
|
-
return _optionalChain([parts, 'optionalAccess',
|
|
2278
|
-
}
|
|
2279
|
-
/**
|
|
2280
|
-
* Build request log string for debugging (similar to Google LLM)
|
|
2281
|
-
*/
|
|
2282
|
-
buildRequestLog(req) {
|
|
2283
|
-
const functionDecls = _optionalChain([req, 'access', _132 => _132.config, 'optionalAccess', _133 => _133.tools, 'optionalAccess', _134 => _134[0], 'optionalAccess', _135 => _135.functionDeclarations]) || [];
|
|
2284
|
-
const functionLogs = functionDecls.length > 0 ? functionDecls.map(
|
|
2285
|
-
(funcDecl) => `${funcDecl.name}: ${JSON.stringify(_optionalChain([funcDecl, 'access', _136 => _136.parameters, 'optionalAccess', _137 => _137.properties]) || {})}`
|
|
2286
|
-
) : [];
|
|
2287
|
-
const contentsLogs = _optionalChain([req, 'access', _138 => _138.contents, 'optionalAccess', _139 => _139.map, 'call', _140 => _140(
|
|
2288
|
-
(content) => JSON.stringify(content, (key, value) => {
|
|
2289
|
-
if (key === "data" && typeof value === "string" && value.length > 100) {
|
|
2290
|
-
return "[EXCLUDED]";
|
|
2291
|
-
}
|
|
2292
|
-
return value;
|
|
2293
|
-
})
|
|
2294
|
-
)]) || [];
|
|
2295
|
-
return _dedent2.default`
|
|
2296
|
-
LLM Request:
|
|
2297
|
-
-----------------------------------------------------------
|
|
2298
|
-
System Instruction:
|
|
2299
|
-
${req.getSystemInstructionText() || ""}
|
|
2300
|
-
-----------------------------------------------------------
|
|
2301
|
-
Contents:
|
|
2302
|
-
${contentsLogs.join(NEW_LINE2)}
|
|
2303
|
-
-----------------------------------------------------------
|
|
2304
|
-
Functions:
|
|
2305
|
-
${functionLogs.join(NEW_LINE2)}
|
|
2306
|
-
-----------------------------------------------------------`;
|
|
2307
|
-
}
|
|
2308
|
-
/**
|
|
2309
|
-
* Build response log string for debugging (similar to Google LLM)
|
|
2310
|
-
*/
|
|
2311
|
-
buildResponseLog(response) {
|
|
2312
|
-
const functionCallsText = [];
|
|
2313
|
-
if (_optionalChain([response, 'access', _141 => _141.content, 'optionalAccess', _142 => _142.parts])) {
|
|
2314
|
-
for (const part of response.content.parts) {
|
|
2315
|
-
if (part.functionCall) {
|
|
2316
|
-
const funcCall = part.functionCall;
|
|
2317
|
-
functionCallsText.push(
|
|
2318
|
-
`name: ${funcCall.name}, args: ${JSON.stringify(funcCall.args)}`
|
|
2319
|
-
);
|
|
2320
|
-
}
|
|
2321
|
-
}
|
|
2322
|
-
}
|
|
2323
|
-
const text = _optionalChain([response, 'access', _143 => _143.content, 'optionalAccess', _144 => _144.parts, 'optionalAccess', _145 => _145.filter, 'call', _146 => _146((part) => part.text), 'optionalAccess', _147 => _147.map, 'call', _148 => _148((part) => part.text), 'optionalAccess', _149 => _149.join, 'call', _150 => _150("")]) || "";
|
|
2324
|
-
return _dedent2.default`
|
|
2325
|
-
LLM Response:
|
|
2326
|
-
-----------------------------------------------------------
|
|
2327
|
-
Text:
|
|
2328
|
-
${text}
|
|
2329
|
-
-----------------------------------------------------------
|
|
2330
|
-
Function calls:
|
|
2331
|
-
${functionCallsText.join(NEW_LINE2)}
|
|
2332
|
-
-----------------------------------------------------------
|
|
2333
|
-
Usage:
|
|
2334
|
-
${JSON.stringify(response.usageMetadata, null, 2)}
|
|
2335
|
-
-----------------------------------------------------------
|
|
2336
|
-
Finish Reason:
|
|
2337
|
-
${response.finishReason}
|
|
2338
|
-
-----------------------------------------------------------`;
|
|
2281
|
+
const parts = _optionalChain([response, 'access', _118 => _118.content, 'optionalAccess', _119 => _119.parts]);
|
|
2282
|
+
return _optionalChain([parts, 'optionalAccess', _120 => _120.some, 'call', _121 => _121((part) => part.inlineData)]) || false;
|
|
2339
2283
|
}
|
|
2340
2284
|
/**
|
|
2341
2285
|
* Gets the OpenAI client
|
|
@@ -2356,14 +2300,289 @@ var OpenAiLlm = class extends BaseLlm {
|
|
|
2356
2300
|
}
|
|
2357
2301
|
};
|
|
2358
2302
|
|
|
2303
|
+
// src/models/ai-sdk.ts
|
|
2304
|
+
init_logger();
|
|
2305
|
+
|
|
2306
|
+
|
|
2307
|
+
|
|
2308
|
+
|
|
2309
|
+
var _ai = require('ai');
|
|
2310
|
+
var AiSdkLlm = (_class7 = class extends BaseLlm {
|
|
2311
|
+
|
|
2312
|
+
__init13() {this.logger = new Logger({ name: "AiSdkLlm" })}
|
|
2313
|
+
/**
|
|
2314
|
+
* Constructor accepts a pre-configured LanguageModel instance
|
|
2315
|
+
* @param model - Pre-configured LanguageModel from provider(modelName)
|
|
2316
|
+
*/
|
|
2317
|
+
constructor(modelInstance) {
|
|
2318
|
+
super(modelInstance.modelId || "ai-sdk-model");_class7.prototype.__init13.call(this);;
|
|
2319
|
+
this.modelInstance = modelInstance;
|
|
2320
|
+
}
|
|
2321
|
+
/**
|
|
2322
|
+
* Returns empty array - following Python ADK pattern
|
|
2323
|
+
*/
|
|
2324
|
+
static supportedModels() {
|
|
2325
|
+
return [];
|
|
2326
|
+
}
|
|
2327
|
+
async *generateContentAsyncImpl(request, stream = false) {
|
|
2328
|
+
try {
|
|
2329
|
+
const messages = this.convertToAiSdkMessages(request);
|
|
2330
|
+
const systemMessage = request.getSystemInstructionText();
|
|
2331
|
+
const tools = this.convertToAiSdkTools(request);
|
|
2332
|
+
const requestParams = {
|
|
2333
|
+
model: this.modelInstance,
|
|
2334
|
+
messages,
|
|
2335
|
+
system: systemMessage,
|
|
2336
|
+
tools: Object.keys(tools).length > 0 ? tools : void 0,
|
|
2337
|
+
maxTokens: _optionalChain([request, 'access', _122 => _122.config, 'optionalAccess', _123 => _123.maxOutputTokens]),
|
|
2338
|
+
temperature: _optionalChain([request, 'access', _124 => _124.config, 'optionalAccess', _125 => _125.temperature]),
|
|
2339
|
+
topP: _optionalChain([request, 'access', _126 => _126.config, 'optionalAccess', _127 => _127.topP])
|
|
2340
|
+
};
|
|
2341
|
+
if (stream) {
|
|
2342
|
+
const result = _ai.streamText.call(void 0, requestParams);
|
|
2343
|
+
let accumulatedText = "";
|
|
2344
|
+
for await (const delta of result.textStream) {
|
|
2345
|
+
accumulatedText += delta;
|
|
2346
|
+
yield new LlmResponse({
|
|
2347
|
+
content: {
|
|
2348
|
+
role: "model",
|
|
2349
|
+
parts: [{ text: accumulatedText }]
|
|
2350
|
+
},
|
|
2351
|
+
partial: true
|
|
2352
|
+
});
|
|
2353
|
+
}
|
|
2354
|
+
const toolCalls = await result.toolCalls;
|
|
2355
|
+
const parts = [];
|
|
2356
|
+
if (accumulatedText) {
|
|
2357
|
+
parts.push({ text: accumulatedText });
|
|
2358
|
+
}
|
|
2359
|
+
if (toolCalls && toolCalls.length > 0) {
|
|
2360
|
+
for (const toolCall of toolCalls) {
|
|
2361
|
+
parts.push({
|
|
2362
|
+
functionCall: {
|
|
2363
|
+
id: toolCall.toolCallId,
|
|
2364
|
+
name: toolCall.toolName,
|
|
2365
|
+
args: toolCall.args
|
|
2366
|
+
}
|
|
2367
|
+
});
|
|
2368
|
+
}
|
|
2369
|
+
}
|
|
2370
|
+
const finalUsage = await result.usage;
|
|
2371
|
+
const finishReason = await result.finishReason;
|
|
2372
|
+
yield new LlmResponse({
|
|
2373
|
+
content: {
|
|
2374
|
+
role: "model",
|
|
2375
|
+
parts: parts.length > 0 ? parts : [{ text: "" }]
|
|
2376
|
+
},
|
|
2377
|
+
usageMetadata: finalUsage ? {
|
|
2378
|
+
promptTokenCount: finalUsage.promptTokens,
|
|
2379
|
+
candidatesTokenCount: finalUsage.completionTokens,
|
|
2380
|
+
totalTokenCount: finalUsage.totalTokens
|
|
2381
|
+
} : void 0,
|
|
2382
|
+
finishReason: this.mapFinishReason(finishReason),
|
|
2383
|
+
turnComplete: true
|
|
2384
|
+
});
|
|
2385
|
+
} else {
|
|
2386
|
+
const result = await _ai.generateText.call(void 0, requestParams);
|
|
2387
|
+
const parts = [];
|
|
2388
|
+
if (result.text) {
|
|
2389
|
+
parts.push({ text: result.text });
|
|
2390
|
+
}
|
|
2391
|
+
if (result.toolCalls && result.toolCalls.length > 0) {
|
|
2392
|
+
for (const toolCall of result.toolCalls) {
|
|
2393
|
+
parts.push({
|
|
2394
|
+
functionCall: {
|
|
2395
|
+
id: toolCall.toolCallId,
|
|
2396
|
+
name: toolCall.toolName,
|
|
2397
|
+
args: toolCall.args
|
|
2398
|
+
}
|
|
2399
|
+
});
|
|
2400
|
+
}
|
|
2401
|
+
}
|
|
2402
|
+
yield new LlmResponse({
|
|
2403
|
+
content: {
|
|
2404
|
+
role: "model",
|
|
2405
|
+
parts: parts.length > 0 ? parts : [{ text: "" }]
|
|
2406
|
+
},
|
|
2407
|
+
usageMetadata: result.usage ? {
|
|
2408
|
+
promptTokenCount: result.usage.promptTokens,
|
|
2409
|
+
candidatesTokenCount: result.usage.completionTokens,
|
|
2410
|
+
totalTokenCount: result.usage.totalTokens
|
|
2411
|
+
} : void 0,
|
|
2412
|
+
finishReason: this.mapFinishReason(result.finishReason),
|
|
2413
|
+
turnComplete: true
|
|
2414
|
+
});
|
|
2415
|
+
}
|
|
2416
|
+
} catch (error) {
|
|
2417
|
+
this.logger.error(`AI SDK Error: ${String(error)}`, { error, request });
|
|
2418
|
+
yield LlmResponse.fromError(error, {
|
|
2419
|
+
errorCode: "AI_SDK_ERROR",
|
|
2420
|
+
model: this.model
|
|
2421
|
+
});
|
|
2422
|
+
}
|
|
2423
|
+
}
|
|
2424
|
+
/**
|
|
2425
|
+
* Convert ADK LlmRequest to AI SDK CoreMessage format
|
|
2426
|
+
*/
|
|
2427
|
+
convertToAiSdkMessages(llmRequest) {
|
|
2428
|
+
const messages = [];
|
|
2429
|
+
for (const content of llmRequest.contents || []) {
|
|
2430
|
+
const message = this.contentToAiSdkMessage(content);
|
|
2431
|
+
if (message) {
|
|
2432
|
+
messages.push(message);
|
|
2433
|
+
}
|
|
2434
|
+
}
|
|
2435
|
+
return messages;
|
|
2436
|
+
}
|
|
2437
|
+
/**
|
|
2438
|
+
* Convert ADK tools to AI SDK tools format
|
|
2439
|
+
*/
|
|
2440
|
+
convertToAiSdkTools(llmRequest) {
|
|
2441
|
+
const tools = {};
|
|
2442
|
+
if (_optionalChain([llmRequest, 'access', _128 => _128.config, 'optionalAccess', _129 => _129.tools])) {
|
|
2443
|
+
for (const toolConfig of llmRequest.config.tools) {
|
|
2444
|
+
if ("functionDeclarations" in toolConfig) {
|
|
2445
|
+
for (const funcDecl of toolConfig.functionDeclarations) {
|
|
2446
|
+
tools[funcDecl.name] = {
|
|
2447
|
+
description: funcDecl.description,
|
|
2448
|
+
parameters: _ai.jsonSchema.call(void 0, funcDecl.parameters || {})
|
|
2449
|
+
};
|
|
2450
|
+
}
|
|
2451
|
+
}
|
|
2452
|
+
}
|
|
2453
|
+
}
|
|
2454
|
+
return tools;
|
|
2455
|
+
}
|
|
2456
|
+
/**
|
|
2457
|
+
* Convert ADK Content to AI SDK CoreMessage
|
|
2458
|
+
*/
|
|
2459
|
+
contentToAiSdkMessage(content) {
|
|
2460
|
+
const role = this.mapRole(content.role);
|
|
2461
|
+
if (!content.parts || content.parts.length === 0) {
|
|
2462
|
+
return null;
|
|
2463
|
+
}
|
|
2464
|
+
if (content.parts.length === 1 && content.parts[0].text) {
|
|
2465
|
+
const textContent = content.parts[0].text;
|
|
2466
|
+
if (role === "system") {
|
|
2467
|
+
return { role: "system", content: textContent };
|
|
2468
|
+
}
|
|
2469
|
+
if (role === "assistant") {
|
|
2470
|
+
return { role: "assistant", content: textContent };
|
|
2471
|
+
}
|
|
2472
|
+
return { role: "user", content: textContent };
|
|
2473
|
+
}
|
|
2474
|
+
if (_optionalChain([content, 'access', _130 => _130.parts, 'optionalAccess', _131 => _131.some, 'call', _132 => _132((part) => part.functionCall)])) {
|
|
2475
|
+
const textParts = content.parts.filter((part) => part.text);
|
|
2476
|
+
const functionCalls = content.parts.filter((part) => part.functionCall);
|
|
2477
|
+
const contentParts2 = [];
|
|
2478
|
+
for (const textPart of textParts) {
|
|
2479
|
+
if (textPart.text) {
|
|
2480
|
+
contentParts2.push({
|
|
2481
|
+
type: "text",
|
|
2482
|
+
text: textPart.text
|
|
2483
|
+
});
|
|
2484
|
+
}
|
|
2485
|
+
}
|
|
2486
|
+
for (const funcPart of functionCalls) {
|
|
2487
|
+
if (funcPart.functionCall) {
|
|
2488
|
+
contentParts2.push({
|
|
2489
|
+
type: "tool-call",
|
|
2490
|
+
toolCallId: funcPart.functionCall.id,
|
|
2491
|
+
toolName: funcPart.functionCall.name,
|
|
2492
|
+
args: funcPart.functionCall.args
|
|
2493
|
+
});
|
|
2494
|
+
}
|
|
2495
|
+
}
|
|
2496
|
+
return {
|
|
2497
|
+
role: "assistant",
|
|
2498
|
+
content: contentParts2
|
|
2499
|
+
};
|
|
2500
|
+
}
|
|
2501
|
+
if (_optionalChain([content, 'access', _133 => _133.parts, 'optionalAccess', _134 => _134.some, 'call', _135 => _135((part) => part.functionResponse)])) {
|
|
2502
|
+
const functionResponses = content.parts.filter(
|
|
2503
|
+
(part) => part.functionResponse
|
|
2504
|
+
);
|
|
2505
|
+
const contentParts2 = functionResponses.map((part) => ({
|
|
2506
|
+
type: "tool-result",
|
|
2507
|
+
toolCallId: part.functionResponse.id,
|
|
2508
|
+
toolName: part.functionResponse.name || "unknown",
|
|
2509
|
+
result: part.functionResponse.response
|
|
2510
|
+
}));
|
|
2511
|
+
return {
|
|
2512
|
+
role: "tool",
|
|
2513
|
+
content: contentParts2
|
|
2514
|
+
};
|
|
2515
|
+
}
|
|
2516
|
+
const contentParts = [];
|
|
2517
|
+
for (const part of content.parts) {
|
|
2518
|
+
if (part.text) {
|
|
2519
|
+
contentParts.push({
|
|
2520
|
+
type: "text",
|
|
2521
|
+
text: part.text
|
|
2522
|
+
});
|
|
2523
|
+
}
|
|
2524
|
+
}
|
|
2525
|
+
if (contentParts.length === 0) {
|
|
2526
|
+
return null;
|
|
2527
|
+
}
|
|
2528
|
+
if (contentParts.length === 1) {
|
|
2529
|
+
const textContent = contentParts[0].text;
|
|
2530
|
+
if (role === "system") {
|
|
2531
|
+
return { role: "system", content: textContent };
|
|
2532
|
+
}
|
|
2533
|
+
if (role === "assistant") {
|
|
2534
|
+
return { role: "assistant", content: textContent };
|
|
2535
|
+
}
|
|
2536
|
+
return { role: "user", content: textContent };
|
|
2537
|
+
}
|
|
2538
|
+
if (role === "system") {
|
|
2539
|
+
const combinedText = contentParts.map((p) => p.text).join("");
|
|
2540
|
+
return { role: "system", content: combinedText };
|
|
2541
|
+
}
|
|
2542
|
+
if (role === "assistant") {
|
|
2543
|
+
return { role: "assistant", content: contentParts };
|
|
2544
|
+
}
|
|
2545
|
+
return { role: "user", content: contentParts };
|
|
2546
|
+
}
|
|
2547
|
+
/**
|
|
2548
|
+
* Map ADK role to AI SDK role
|
|
2549
|
+
*/
|
|
2550
|
+
mapRole(role) {
|
|
2551
|
+
switch (role) {
|
|
2552
|
+
case "model":
|
|
2553
|
+
case "assistant":
|
|
2554
|
+
return "assistant";
|
|
2555
|
+
case "system":
|
|
2556
|
+
return "system";
|
|
2557
|
+
default:
|
|
2558
|
+
return "user";
|
|
2559
|
+
}
|
|
2560
|
+
}
|
|
2561
|
+
/**
|
|
2562
|
+
* Map AI SDK finish reason to ADK finish reason
|
|
2563
|
+
*/
|
|
2564
|
+
mapFinishReason(finishReason) {
|
|
2565
|
+
switch (finishReason) {
|
|
2566
|
+
case "stop":
|
|
2567
|
+
case "end_of_message":
|
|
2568
|
+
return "STOP";
|
|
2569
|
+
case "length":
|
|
2570
|
+
case "max_tokens":
|
|
2571
|
+
return "MAX_TOKENS";
|
|
2572
|
+
default:
|
|
2573
|
+
return "FINISH_REASON_UNSPECIFIED";
|
|
2574
|
+
}
|
|
2575
|
+
}
|
|
2576
|
+
}, _class7);
|
|
2577
|
+
|
|
2359
2578
|
// src/models/llm-registry.ts
|
|
2360
2579
|
init_logger();
|
|
2361
|
-
var
|
|
2362
|
-
var LLMRegistry = (_class6 = class _LLMRegistry {
|
|
2580
|
+
var LLMRegistry = (_class8 = class _LLMRegistry {
|
|
2363
2581
|
/**
|
|
2364
2582
|
* Map of model name regex to LLM class
|
|
2365
2583
|
*/
|
|
2366
2584
|
static __initStatic() {this.llmRegistry = /* @__PURE__ */ new Map()}
|
|
2585
|
+
static __initStatic2() {this.logger = new Logger({ name: "LLMRegistry" })}
|
|
2367
2586
|
/**
|
|
2368
2587
|
* Creates a new LLM instance
|
|
2369
2588
|
*
|
|
@@ -2415,12 +2634,12 @@ var LLMRegistry = (_class6 = class _LLMRegistry {
|
|
|
2415
2634
|
* Logs all registered models for debugging
|
|
2416
2635
|
*/
|
|
2417
2636
|
static logRegisteredModels() {
|
|
2418
|
-
|
|
2637
|
+
_LLMRegistry.logger.debug(
|
|
2419
2638
|
"Registered LLM models:",
|
|
2420
2639
|
[..._LLMRegistry.llmRegistry.entries()].map(([regex]) => regex.toString())
|
|
2421
2640
|
);
|
|
2422
2641
|
}
|
|
2423
|
-
},
|
|
2642
|
+
}, _class8.__initStatic(), _class8.__initStatic2(), _class8);
|
|
2424
2643
|
|
|
2425
2644
|
// src/models/registry.ts
|
|
2426
2645
|
function registerProviders() {
|
|
@@ -2618,7 +2837,7 @@ var OAuth2Credential = class extends AuthCredential {
|
|
|
2618
2837
|
"Cannot refresh token: no refresh token or refresh function"
|
|
2619
2838
|
);
|
|
2620
2839
|
}
|
|
2621
|
-
const result = await _optionalChain([this, 'access',
|
|
2840
|
+
const result = await _optionalChain([this, 'access', _136 => _136.refreshFunction, 'optionalCall', _137 => _137(this.refreshToken)]);
|
|
2622
2841
|
if (!result) {
|
|
2623
2842
|
throw new Error("Failed to refresh token");
|
|
2624
2843
|
}
|
|
@@ -2672,7 +2891,7 @@ var AuthHandler = class {
|
|
|
2672
2891
|
* Gets the authentication token
|
|
2673
2892
|
*/
|
|
2674
2893
|
getToken() {
|
|
2675
|
-
return _optionalChain([this, 'access',
|
|
2894
|
+
return _optionalChain([this, 'access', _138 => _138.credential, 'optionalAccess', _139 => _139.getToken, 'call', _140 => _140()]);
|
|
2676
2895
|
}
|
|
2677
2896
|
/**
|
|
2678
2897
|
* Gets headers for HTTP requests
|
|
@@ -2687,7 +2906,7 @@ var AuthHandler = class {
|
|
|
2687
2906
|
* Refreshes the token if necessary
|
|
2688
2907
|
*/
|
|
2689
2908
|
async refreshToken() {
|
|
2690
|
-
if (_optionalChain([this, 'access',
|
|
2909
|
+
if (_optionalChain([this, 'access', _141 => _141.credential, 'optionalAccess', _142 => _142.canRefresh, 'call', _143 => _143()])) {
|
|
2691
2910
|
await this.credential.refresh();
|
|
2692
2911
|
}
|
|
2693
2912
|
}
|
|
@@ -2794,10 +3013,10 @@ var OpenIdConnectScheme = class extends AuthScheme {
|
|
|
2794
3013
|
};
|
|
2795
3014
|
|
|
2796
3015
|
// src/sessions/state.ts
|
|
2797
|
-
var State = (
|
|
2798
|
-
static
|
|
2799
|
-
static
|
|
2800
|
-
static
|
|
3016
|
+
var State = (_class9 = class _State {
|
|
3017
|
+
static __initStatic3() {this.APP_PREFIX = "app:"}
|
|
3018
|
+
static __initStatic4() {this.USER_PREFIX = "user:"}
|
|
3019
|
+
static __initStatic5() {this.TEMP_PREFIX = "temp:"}
|
|
2801
3020
|
|
|
2802
3021
|
|
|
2803
3022
|
/**
|
|
@@ -2891,13 +3110,13 @@ var State = (_class7 = class _State {
|
|
|
2891
3110
|
const state = new _State(value, delta);
|
|
2892
3111
|
return _State.createProxy(state);
|
|
2893
3112
|
}
|
|
2894
|
-
},
|
|
3113
|
+
}, _class9.__initStatic3(), _class9.__initStatic4(), _class9.__initStatic5(), _class9);
|
|
2895
3114
|
|
|
2896
3115
|
// src/events/event.ts
|
|
2897
3116
|
var _uuid = require('uuid');
|
|
2898
3117
|
|
|
2899
3118
|
// src/events/event-actions.ts
|
|
2900
|
-
var EventActions = (
|
|
3119
|
+
var EventActions = (_class10 = class {
|
|
2901
3120
|
/**
|
|
2902
3121
|
* If true, it won't call model to summarize function response.
|
|
2903
3122
|
* Only used for function_response event.
|
|
@@ -2906,12 +3125,12 @@ var EventActions = (_class8 = class {
|
|
|
2906
3125
|
/**
|
|
2907
3126
|
* Indicates that the event is updating the state with the given delta.
|
|
2908
3127
|
*/
|
|
2909
|
-
|
|
3128
|
+
__init14() {this.stateDelta = {}}
|
|
2910
3129
|
/**
|
|
2911
3130
|
* Indicates that the event is updating an artifact. key is the filename,
|
|
2912
3131
|
* value is the version.
|
|
2913
3132
|
*/
|
|
2914
|
-
|
|
3133
|
+
__init15() {this.artifactDelta = {}}
|
|
2915
3134
|
/**
|
|
2916
3135
|
* If set, the event transfers to the specified agent.
|
|
2917
3136
|
*/
|
|
@@ -2927,7 +3146,7 @@ var EventActions = (_class8 = class {
|
|
|
2927
3146
|
/**
|
|
2928
3147
|
* Constructor for EventActions
|
|
2929
3148
|
*/
|
|
2930
|
-
constructor(options = {}) {;
|
|
3149
|
+
constructor(options = {}) {;_class10.prototype.__init14.call(this);_class10.prototype.__init15.call(this);
|
|
2931
3150
|
this.skipSummarization = options.skipSummarization;
|
|
2932
3151
|
this.stateDelta = options.stateDelta || {};
|
|
2933
3152
|
this.artifactDelta = options.artifactDelta || {};
|
|
@@ -2935,16 +3154,16 @@ var EventActions = (_class8 = class {
|
|
|
2935
3154
|
this.escalate = options.escalate;
|
|
2936
3155
|
this.requestedAuthConfigs = options.requestedAuthConfigs;
|
|
2937
3156
|
}
|
|
2938
|
-
},
|
|
3157
|
+
}, _class10);
|
|
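
Every field on EventActions maps one-to-one onto a constructor option, so construction is just an options bag. A minimal sketch (values are illustrative):

// import { EventActions } from "@iqai/adk";  // import path assumed
const actions = new EventActions({
  skipSummarization: true,                         // don't have the model summarize the function response
  stateDelta: { "user:last_tool": "load_memory" },
  artifactDelta: { "report.md": 2 },               // filename -> version
  escalate: true,
});
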
2939
3158
|
|
|
2940
3159
|
// src/events/event.ts
|
|
2941
|
-
var Event = (
|
|
3160
|
+
var Event = (_class11 = class _Event extends LlmResponse {
|
|
2942
3161
|
/** The invocation ID of the event. */
|
|
2943
|
-
|
|
3162
|
+
__init16() {this.invocationId = ""}
|
|
2944
3163
|
/** 'user' or the name of the agent, indicating who appended the event to the session. */
|
|
2945
3164
|
|
|
2946
3165
|
/** The actions taken by the agent. */
|
|
2947
|
-
|
|
3166
|
+
__init17() {this.actions = new EventActions()}
|
|
2948
3167
|
/**
|
|
2949
3168
|
* Set of ids of the long running function calls.
|
|
2950
3169
|
* Agent client will know from this field about which function call is long running.
|
|
@@ -2959,9 +3178,9 @@ var Event = (_class9 = class _Event extends LlmResponse {
|
|
|
2959
3178
|
*/
|
|
2960
3179
|
|
|
2961
3180
|
/** The unique identifier of the event. */
|
|
2962
|
-
|
|
3181
|
+
__init18() {this.id = ""}
|
|
2963
3182
|
/** The timestamp of the event (seconds since epoch). */
|
|
2964
|
-
|
|
3183
|
+
__init19() {this.timestamp = Math.floor(Date.now() / 1e3)}
|
|
2965
3184
|
/**
|
|
2966
3185
|
* Constructor for Event.
|
|
2967
3186
|
*/
|
|
@@ -2969,7 +3188,7 @@ var Event = (_class9 = class _Event extends LlmResponse {
|
|
|
2969
3188
|
super({
|
|
2970
3189
|
content: opts.content,
|
|
2971
3190
|
partial: opts.partial
|
|
2972
|
-
});
|
|
3191
|
+
});_class11.prototype.__init16.call(this);_class11.prototype.__init17.call(this);_class11.prototype.__init18.call(this);_class11.prototype.__init19.call(this);;
|
|
2973
3192
|
this.invocationId = _nullishCoalesce(opts.invocationId, () => ( ""));
|
|
2974
3193
|
this.author = opts.author;
|
|
2975
3194
|
this.actions = _nullishCoalesce(opts.actions, () => ( new EventActions()));
|
|
@@ -3030,7 +3249,7 @@ var Event = (_class9 = class _Event extends LlmResponse {
|
|
|
3030
3249
|
static newId() {
|
|
3031
3250
|
return _uuid.v4.call(void 0, ).replace(/-/g, "").substring(0, 8);
|
|
3032
3251
|
}
|
|
3033
|
-
},
|
|
3252
|
+
}, _class11);
|
|
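
Event extends LlmResponse and fills in defaults for anything not passed to its constructor. A minimal sketch (values illustrative; content takes whatever shape LlmResponse accepts):

// import { Event, EventActions } from "@iqai/adk";  // import path assumed
const event = new Event({
  invocationId: "inv-123",
  author: "research_agent",  // 'user' or the name of the agent that appended the event
  actions: new EventActions({ transferToAgent: "summarizer_agent" }),
});
// event.timestamp defaults to Math.floor(Date.now() / 1000).
// Event.newId() yields an 8-character id derived from a UUID if you need to set event.id yourself.
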
3034
3253
|
|
|
3035
3254
|
// src/agents/readonly-context.ts
|
|
3036
3255
|
var ReadonlyContext = class {
|
|
@@ -3136,7 +3355,7 @@ var CallbackContext = class extends ReadonlyContext {
|
|
|
3136
3355
|
};
|
|
3137
3356
|
|
|
3138
3357
|
// src/agents/base-agent.ts
|
|
3139
|
-
var BaseAgent = (
|
|
3358
|
+
var BaseAgent = (_class12 = class {
|
|
3140
3359
|
/**
|
|
3141
3360
|
* The agent's name.
|
|
3142
3361
|
* Agent name must be a valid identifier and unique within the agent tree.
|
|
@@ -3148,7 +3367,7 @@ var BaseAgent = (_class10 = class {
|
|
|
3148
3367
|
* The model uses this to determine whether to delegate control to the agent.
|
|
3149
3368
|
* One-line description is enough and preferred.
|
|
3150
3369
|
*/
|
|
3151
|
-
|
|
3370
|
+
__init20() {this.description = ""}
|
|
3152
3371
|
/**
|
|
3153
3372
|
* The parent agent of this agent.
|
|
3154
3373
|
* Note that an agent can ONLY be added as sub-agent once.
|
|
@@ -3160,7 +3379,7 @@ var BaseAgent = (_class10 = class {
|
|
|
3160
3379
|
/**
|
|
3161
3380
|
* The sub-agents of this agent.
|
|
3162
3381
|
*/
|
|
3163
|
-
|
|
3382
|
+
__init21() {this.subAgents = []}
|
|
3164
3383
|
/**
|
|
3165
3384
|
* Callback or list of callbacks to be invoked before the agent run.
|
|
3166
3385
|
* When a list of callbacks is provided, the callbacks will be called in the
|
|
@@ -3192,7 +3411,7 @@ var BaseAgent = (_class10 = class {
|
|
|
3192
3411
|
/**
|
|
3193
3412
|
* Constructor for BaseAgent
|
|
3194
3413
|
*/
|
|
3195
|
-
constructor(config) {;
|
|
3414
|
+
constructor(config) {;_class12.prototype.__init20.call(this);_class12.prototype.__init21.call(this);
|
|
3196
3415
|
this.name = config.name;
|
|
3197
3416
|
this.description = config.description || "";
|
|
3198
3417
|
this.subAgents = config.subAgents || [];
|
|
@@ -3449,7 +3668,7 @@ var BaseAgent = (_class10 = class {
|
|
|
3449
3668
|
subAgent.parentAgent = this;
|
|
3450
3669
|
}
|
|
3451
3670
|
}
|
|
3452
|
-
},
|
|
3671
|
+
}, _class12);
|
|
3453
3672
|
|
|
3454
3673
|
// src/agents/llm-agent.ts
|
|
3455
3674
|
init_logger();
|
|
@@ -3479,6 +3698,7 @@ __export(tools_exports, {
|
|
|
3479
3698
|
McpAbi: () => McpAbi,
|
|
3480
3699
|
McpAtp: () => McpAtp,
|
|
3481
3700
|
McpBamm: () => McpBamm,
|
|
3701
|
+
McpCoinGecko: () => McpCoinGecko,
|
|
3482
3702
|
McpError: () => McpError,
|
|
3483
3703
|
McpErrorType: () => McpErrorType,
|
|
3484
3704
|
McpFilesystem: () => McpFilesystem,
|
|
@@ -3573,8 +3793,8 @@ init_function_utils();
|
|
|
3573
3793
|
// src/tools/common/google-search.ts
|
|
3574
3794
|
init_logger();
|
|
3575
3795
|
init_base_tool();
|
|
3576
|
-
var GoogleSearch = (
|
|
3577
|
-
|
|
3796
|
+
var GoogleSearch = (_class13 = class extends BaseTool {
|
|
3797
|
+
__init22() {this.logger = new Logger({ name: "GoogleSearch" })}
|
|
3578
3798
|
/**
|
|
3579
3799
|
* Constructor for GoogleSearch
|
|
3580
3800
|
*/
|
|
@@ -3582,7 +3802,7 @@ var GoogleSearch = (_class11 = class extends BaseTool {
|
|
|
3582
3802
|
super({
|
|
3583
3803
|
name: "google_search",
|
|
3584
3804
|
description: "Search the web using Google"
|
|
3585
|
-
});
|
|
3805
|
+
});_class13.prototype.__init22.call(this);;
|
|
3586
3806
|
}
|
|
3587
3807
|
/**
|
|
3588
3808
|
* Get the function declaration for the tool
|
|
@@ -3631,7 +3851,7 @@ var GoogleSearch = (_class11 = class extends BaseTool {
|
|
|
3631
3851
|
]
|
|
3632
3852
|
};
|
|
3633
3853
|
}
|
|
3634
|
-
},
|
|
3854
|
+
}, _class13);
|
|
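
The built-in tools initialize a named Logger as a class field (the __init22 helper above). A custom tool following the same pattern would look roughly like this sketch; the execution method is assumed to be runAsync(args, context), which is not visible in this hunk:

class WeatherTool extends BaseTool {
  private logger = new Logger({ name: "WeatherTool" });

  constructor() {
    super({
      name: "get_weather",
      description: "Look up the current weather for a city",
    });
  }

  async runAsync(args: { city: string }, context: ToolContext) {
    this.logger.debug(`Fetching weather for ${args.city}`);
    return { city: args.city, forecast: "unknown" };  // placeholder result
  }
}
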
3635
3855
|
|
|
3636
3856
|
// src/tools/common/http-request-tool.ts
|
|
3637
3857
|
init_base_tool();
|
|
@@ -3763,7 +3983,7 @@ var FileOperationsTool = class extends BaseTool {
|
|
|
3763
3983
|
name: "file_operations",
|
|
3764
3984
|
description: "Perform file system operations like reading, writing, and managing files"
|
|
3765
3985
|
});
|
|
3766
|
-
this.basePath = _optionalChain([options, 'optionalAccess',
|
|
3986
|
+
this.basePath = _optionalChain([options, 'optionalAccess', _144 => _144.basePath]) || process.cwd();
|
|
3767
3987
|
}
|
|
3768
3988
|
/**
|
|
3769
3989
|
* Get the function declaration for the tool
|
|
@@ -4080,8 +4300,8 @@ var UserInteractionTool = class extends BaseTool {
|
|
|
4080
4300
|
// src/tools/common/exit-loop-tool.ts
|
|
4081
4301
|
init_logger();
|
|
4082
4302
|
init_base_tool();
|
|
4083
|
-
var ExitLoopTool = (
|
|
4084
|
-
|
|
4303
|
+
var ExitLoopTool = (_class14 = class extends BaseTool {
|
|
4304
|
+
__init23() {this.logger = new Logger({ name: "ExitLoopTool" })}
|
|
4085
4305
|
/**
|
|
4086
4306
|
* Constructor for ExitLoopTool
|
|
4087
4307
|
*/
|
|
@@ -4089,7 +4309,7 @@ var ExitLoopTool = (_class12 = class extends BaseTool {
|
|
|
4089
4309
|
super({
|
|
4090
4310
|
name: "exit_loop",
|
|
4091
4311
|
description: "Exits the loop. Call this function only when you are instructed to do so."
|
|
4092
|
-
});
|
|
4312
|
+
});_class14.prototype.__init23.call(this);;
|
|
4093
4313
|
}
|
|
4094
4314
|
/**
|
|
4095
4315
|
* Execute the exit loop action
|
|
@@ -4098,13 +4318,13 @@ var ExitLoopTool = (_class12 = class extends BaseTool {
|
|
|
4098
4318
|
this.logger.debug("Executing exit loop tool");
|
|
4099
4319
|
context.actions.escalate = true;
|
|
4100
4320
|
}
|
|
4101
|
-
},
|
|
4321
|
+
}, _class14);
|
|
4102
4322
|
|
|
4103
4323
|
// src/tools/common/get-user-choice-tool.ts
|
|
4104
4324
|
init_logger();
|
|
4105
4325
|
init_base_tool();
|
|
4106
|
-
var GetUserChoiceTool = (
|
|
4107
|
-
|
|
4326
|
+
var GetUserChoiceTool = (_class15 = class extends BaseTool {
|
|
4327
|
+
__init24() {this.logger = new Logger({ name: "GetUserChoiceTool" })}
|
|
4108
4328
|
/**
|
|
4109
4329
|
* Constructor for GetUserChoiceTool
|
|
4110
4330
|
*/
|
|
@@ -4113,7 +4333,7 @@ var GetUserChoiceTool = (_class13 = class extends BaseTool {
|
|
|
4113
4333
|
name: "get_user_choice",
|
|
4114
4334
|
description: "This tool provides the options to the user and asks them to choose one. Use this tool when you need the user to make a selection between multiple options. Do not list options in your response - use this tool instead.",
|
|
4115
4335
|
isLongRunning: true
|
|
4116
|
-
});
|
|
4336
|
+
});_class15.prototype.__init24.call(this);;
|
|
4117
4337
|
}
|
|
4118
4338
|
/**
|
|
4119
4339
|
* Get the function declaration for the tool
|
|
@@ -4156,13 +4376,13 @@ var GetUserChoiceTool = (_class13 = class extends BaseTool {
|
|
|
4156
4376
|
context.actions.skipSummarization = true;
|
|
4157
4377
|
return null;
|
|
4158
4378
|
}
|
|
4159
|
-
},
|
|
4379
|
+
}, _class15);
|
|
4160
4380
|
|
|
4161
4381
|
// src/tools/common/transfer-to-agent-tool.ts
|
|
4162
4382
|
init_logger();
|
|
4163
4383
|
init_base_tool();
|
|
4164
|
-
var TransferToAgentTool = (
|
|
4165
|
-
|
|
4384
|
+
var TransferToAgentTool = (_class16 = class extends BaseTool {
|
|
4385
|
+
__init25() {this.logger = new Logger({ name: "TransferToAgentTool" })}
|
|
4166
4386
|
/**
|
|
4167
4387
|
* Constructor for TransferToAgentTool
|
|
4168
4388
|
*/
|
|
@@ -4170,7 +4390,7 @@ var TransferToAgentTool = (_class14 = class extends BaseTool {
|
|
|
4170
4390
|
super({
|
|
4171
4391
|
name: "transfer_to_agent",
|
|
4172
4392
|
description: "Transfer the question to another agent."
|
|
4173
|
-
});
|
|
4393
|
+
});_class16.prototype.__init25.call(this);;
|
|
4174
4394
|
}
|
|
4175
4395
|
/**
|
|
4176
4396
|
* Execute the transfer to agent action
|
|
@@ -4179,13 +4399,13 @@ var TransferToAgentTool = (_class14 = class extends BaseTool {
|
|
|
4179
4399
|
this.logger.debug(`Executing transfer to agent: ${args.agent_name}`);
|
|
4180
4400
|
context.actions.transferToAgent = args.agent_name;
|
|
4181
4401
|
}
|
|
4182
|
-
},
|
|
4402
|
+
}, _class16);
|
|
4183
4403
|
|
|
4184
4404
|
// src/tools/common/load-memory-tool.ts
|
|
4185
4405
|
init_logger();
|
|
4186
4406
|
init_base_tool();
|
|
4187
|
-
var LoadMemoryTool = (
|
|
4188
|
-
|
|
4407
|
+
var LoadMemoryTool = (_class17 = class extends BaseTool {
|
|
4408
|
+
__init26() {this.logger = new Logger({ name: "LoadMemoryTool" })}
|
|
4189
4409
|
/**
|
|
4190
4410
|
* Constructor for LoadMemoryTool
|
|
4191
4411
|
*/
|
|
@@ -4193,7 +4413,7 @@ var LoadMemoryTool = (_class15 = class extends BaseTool {
|
|
|
4193
4413
|
super({
|
|
4194
4414
|
name: "load_memory",
|
|
4195
4415
|
description: "Loads the memory for the current user based on a query."
|
|
4196
|
-
});
|
|
4416
|
+
});_class17.prototype.__init26.call(this);;
|
|
4197
4417
|
}
|
|
4198
4418
|
/**
|
|
4199
4419
|
* Get the function declaration for the tool
|
|
@@ -4223,7 +4443,7 @@ var LoadMemoryTool = (_class15 = class extends BaseTool {
|
|
|
4223
4443
|
const searchResult = await context.searchMemory(args.query);
|
|
4224
4444
|
return {
|
|
4225
4445
|
memories: searchResult.memories || [],
|
|
4226
|
-
count: _optionalChain([searchResult, 'access',
|
|
4446
|
+
count: _optionalChain([searchResult, 'access', _145 => _145.memories, 'optionalAccess', _146 => _146.length]) || 0
|
|
4227
4447
|
};
|
|
4228
4448
|
} catch (error) {
|
|
4229
4449
|
console.error("Error searching memory:", error);
|
|
@@ -4233,7 +4453,7 @@ var LoadMemoryTool = (_class15 = class extends BaseTool {
|
|
|
4233
4453
|
};
|
|
4234
4454
|
}
|
|
4235
4455
|
}
|
|
4236
|
-
},
|
|
4456
|
+
}, _class17);
|
|
4237
4457
|
|
|
4238
4458
|
// src/tools/common/load-artifacts-tool.ts
|
|
4239
4459
|
init_base_tool();
|
|
@@ -4384,10 +4604,10 @@ var McpError = class extends Error {
|
|
|
4384
4604
|
};
|
|
4385
4605
|
|
|
4386
4606
|
// src/tools/mcp/sampling-handler.ts
|
|
4387
|
-
var McpSamplingHandler = (
|
|
4388
|
-
|
|
4607
|
+
var McpSamplingHandler = (_class18 = class {
|
|
4608
|
+
__init27() {this.logger = new Logger({ name: "McpSamplingHandler" })}
|
|
4389
4609
|
|
|
4390
|
-
constructor(samplingHandler) {;
|
|
4610
|
+
constructor(samplingHandler) {;_class18.prototype.__init27.call(this);
|
|
4391
4611
|
this.samplingHandler = samplingHandler;
|
|
4392
4612
|
}
|
|
4393
4613
|
/**
|
|
@@ -4568,7 +4788,7 @@ var McpSamplingHandler = (_class16 = class {
|
|
|
4568
4788
|
this.samplingHandler = handler;
|
|
4569
4789
|
this.logger.debug("ADK sampling handler updated");
|
|
4570
4790
|
}
|
|
4571
|
-
},
|
|
4791
|
+
}, _class18);
|
|
4572
4792
|
function createSamplingHandler(handler) {
|
|
4573
4793
|
return handler;
|
|
4574
4794
|
}
|
|
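
createSamplingHandler is an identity function: it exists so the handler you write gets proper typing before McpClientService wraps it in McpSamplingHandler. A minimal sketch, with the request and response shapes treated as assumptions (they are not visible in this hunk):

const samplingHandler = createSamplingHandler(async (request) => {
  // Answer the MCP server's "sampling/createMessage" request using our own model.
  // `myLlm` is hypothetical; substitute whatever LLM client you use.
  return await myLlm.complete(String(request));
});

The handler is then passed in via config.samplingHandler when constructing an McpToolset or McpClientService, as the constructor hunks further below show.
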
@@ -4602,14 +4822,14 @@ function withRetry(fn, instance, reinitMethod, maxRetries = 1) {
|
|
|
4602
4822
|
}
|
|
4603
4823
|
|
|
4604
4824
|
// src/tools/mcp/client.ts
|
|
4605
|
-
var McpClientService = (
|
|
4606
|
-
|
|
4607
|
-
|
|
4608
|
-
|
|
4609
|
-
|
|
4610
|
-
|
|
4611
|
-
|
|
4612
|
-
constructor(config) {;
|
|
4825
|
+
var McpClientService = (_class19 = class {
|
|
4826
|
+
|
|
4827
|
+
__init28() {this.client = null}
|
|
4828
|
+
__init29() {this.transport = null}
|
|
4829
|
+
__init30() {this.isClosing = false}
|
|
4830
|
+
__init31() {this.mcpSamplingHandler = null}
|
|
4831
|
+
__init32() {this.logger = new Logger({ name: "McpClientService" })}
|
|
4832
|
+
constructor(config) {;_class19.prototype.__init28.call(this);_class19.prototype.__init29.call(this);_class19.prototype.__init30.call(this);_class19.prototype.__init31.call(this);_class19.prototype.__init32.call(this);
|
|
4613
4833
|
this.config = config;
|
|
4614
4834
|
if (config.samplingHandler) {
|
|
4615
4835
|
this.mcpSamplingHandler = new McpSamplingHandler(config.samplingHandler);
|
|
@@ -4665,15 +4885,13 @@ var McpClientService = (_class17 = class {
|
|
|
4665
4885
|
await connectPromise;
|
|
4666
4886
|
}
|
|
4667
4887
|
await this.setupSamplingHandler(client);
|
|
4668
|
-
|
|
4669
|
-
console.log("\u2705 MCP client connected successfully");
|
|
4670
|
-
}
|
|
4888
|
+
this.logger.debug("\u2705 MCP client connected successfully");
|
|
4671
4889
|
this.client = client;
|
|
4672
4890
|
return client;
|
|
4673
4891
|
} catch (error) {
|
|
4674
4892
|
await this.cleanupResources();
|
|
4675
4893
|
if (!(error instanceof McpError)) {
|
|
4676
|
-
|
|
4894
|
+
this.logger.error("Failed to initialize MCP client:", error);
|
|
4677
4895
|
throw new McpError(
|
|
4678
4896
|
`Failed to initialize MCP client: ${error instanceof Error ? error.message : String(error)}`,
|
|
4679
4897
|
"connection_error" /* CONNECTION_ERROR */,
|
|
@@ -4689,12 +4907,10 @@ var McpClientService = (_class17 = class {
|
|
|
4689
4907
|
async createTransport() {
|
|
4690
4908
|
try {
|
|
4691
4909
|
if (this.config.transport.mode === "sse") {
|
|
4692
|
-
|
|
4693
|
-
|
|
4694
|
-
|
|
4695
|
-
|
|
4696
|
-
);
|
|
4697
|
-
}
|
|
4910
|
+
this.logger.debug(
|
|
4911
|
+
"\u{1F680} Initializing MCP client in SSE mode",
|
|
4912
|
+
this.config.transport.serverUrl
|
|
4913
|
+
);
|
|
4698
4914
|
const headers = {
|
|
4699
4915
|
...this.config.transport.headers || {},
|
|
4700
4916
|
...this.config.headers || {}
|
|
@@ -4709,12 +4925,10 @@ var McpClientService = (_class17 = class {
|
|
|
4709
4925
|
}
|
|
4710
4926
|
);
|
|
4711
4927
|
}
|
|
4712
|
-
|
|
4713
|
-
|
|
4714
|
-
|
|
4715
|
-
|
|
4716
|
-
);
|
|
4717
|
-
}
|
|
4928
|
+
this.logger.debug(
|
|
4929
|
+
"\u{1F680} Initializing MCP client in STDIO mode",
|
|
4930
|
+
this.config.transport.command
|
|
4931
|
+
);
|
|
4718
4932
|
return new (0, _stdiojs.StdioClientTransport)({
|
|
4719
4933
|
command: this.config.transport.command,
|
|
4720
4934
|
args: this.config.transport.args,
|
|
@@ -4733,9 +4947,7 @@ var McpClientService = (_class17 = class {
|
|
|
4733
4947
|
* Used by the retry mechanism.
|
|
4734
4948
|
*/
|
|
4735
4949
|
async reinitialize() {
|
|
4736
|
-
|
|
4737
|
-
console.log("\u{1F504} Reinitializing MCP client after closed connection");
|
|
4738
|
-
}
|
|
4950
|
+
this.logger.debug("\u{1F504} Reinitializing MCP client after closed connection");
|
|
4739
4951
|
await this.cleanupResources();
|
|
4740
4952
|
this.client = null;
|
|
4741
4953
|
this.transport = null;
|
|
@@ -4759,11 +4971,9 @@ var McpClientService = (_class17 = class {
|
|
|
4759
4971
|
if (this.transport && typeof this.transport.close === "function") {
|
|
4760
4972
|
await this.transport.close();
|
|
4761
4973
|
}
|
|
4762
|
-
|
|
4763
|
-
console.log("\u{1F9F9} Cleaned up MCP client resources");
|
|
4764
|
-
}
|
|
4974
|
+
this.logger.debug("\u{1F9F9} Cleaned up MCP client resources");
|
|
4765
4975
|
} catch (error) {
|
|
4766
|
-
|
|
4976
|
+
this.logger.error("Error cleaning up MCP resources:", error);
|
|
4767
4977
|
} finally {
|
|
4768
4978
|
this.client = null;
|
|
4769
4979
|
this.transport = null;
|
|
@@ -4785,7 +4995,7 @@ var McpClientService = (_class17 = class {
|
|
|
4785
4995
|
},
|
|
4786
4996
|
this,
|
|
4787
4997
|
async (instance) => await instance.reinitialize(),
|
|
4788
|
-
_optionalChain([this, 'access',
|
|
4998
|
+
_optionalChain([this, 'access', _147 => _147.config, 'access', _148 => _148.retryOptions, 'optionalAccess', _149 => _149.maxRetries]) || 2
|
|
4789
4999
|
);
|
|
4790
5000
|
return await wrappedCall();
|
|
4791
5001
|
} catch (error) {
|
|
@@ -4805,9 +5015,7 @@ var McpClientService = (_class17 = class {
|
|
|
4805
5015
|
* Similar to Python's close() method.
|
|
4806
5016
|
*/
|
|
4807
5017
|
async close() {
|
|
4808
|
-
|
|
4809
|
-
console.log("\u{1F51A} Closing MCP client service");
|
|
4810
|
-
}
|
|
5018
|
+
this.logger.debug("\u{1F51A} Closing MCP client service");
|
|
4811
5019
|
await this.cleanupResources();
|
|
4812
5020
|
}
|
|
4813
5021
|
/**
|
|
@@ -4818,11 +5026,9 @@ var McpClientService = (_class17 = class {
|
|
|
4818
5026
|
}
|
|
4819
5027
|
async setupSamplingHandler(client) {
|
|
4820
5028
|
if (!this.mcpSamplingHandler) {
|
|
4821
|
-
|
|
4822
|
-
|
|
4823
|
-
|
|
4824
|
-
);
|
|
4825
|
-
}
|
|
5029
|
+
this.logger.debug(
|
|
5030
|
+
"\u26A0\uFE0F No sampling handler provided - sampling requests will be rejected"
|
|
5031
|
+
);
|
|
4826
5032
|
return;
|
|
4827
5033
|
}
|
|
4828
5034
|
try {
|
|
@@ -4832,12 +5038,10 @@ var McpClientService = (_class17 = class {
|
|
|
4832
5038
|
try {
|
|
4833
5039
|
this.logger.debug("Received sampling request:", request);
|
|
4834
5040
|
const response = await this.mcpSamplingHandler.handleSamplingRequest(request);
|
|
4835
|
-
|
|
4836
|
-
console.log("\u2705 Sampling request completed successfully");
|
|
4837
|
-
}
|
|
5041
|
+
this.logger.debug("\u2705 Sampling request completed successfully");
|
|
4838
5042
|
return response;
|
|
4839
5043
|
} catch (error) {
|
|
4840
|
-
|
|
5044
|
+
this.logger.error("\u274C Error handling sampling request:", error);
|
|
4841
5045
|
if (error instanceof McpError) {
|
|
4842
5046
|
throw error;
|
|
4843
5047
|
}
|
|
@@ -4849,16 +5053,12 @@ var McpClientService = (_class17 = class {
|
|
|
4849
5053
|
}
|
|
4850
5054
|
}
|
|
4851
5055
|
);
|
|
4852
|
-
|
|
4853
|
-
console.log("\u{1F3AF} Sampling handler registered successfully");
|
|
4854
|
-
}
|
|
5056
|
+
this.logger.debug("\u{1F3AF} Sampling handler registered successfully");
|
|
4855
5057
|
} catch (error) {
|
|
4856
|
-
|
|
4857
|
-
|
|
4858
|
-
|
|
4859
|
-
|
|
4860
|
-
);
|
|
4861
|
-
}
|
|
5058
|
+
this.logger.error("Failed to setup sampling handler:", error);
|
|
5059
|
+
this.logger.debug(
|
|
5060
|
+
"\u26A0\uFE0F Sampling handler registration failed, continuing without sampling support"
|
|
5061
|
+
);
|
|
4862
5062
|
}
|
|
4863
5063
|
}
|
|
4864
5064
|
/**
|
|
@@ -4868,7 +5068,7 @@ var McpClientService = (_class17 = class {
|
|
|
4868
5068
|
this.mcpSamplingHandler = new McpSamplingHandler(handler);
|
|
4869
5069
|
if (this.client) {
|
|
4870
5070
|
this.setupSamplingHandler(this.client).catch((error) => {
|
|
4871
|
-
|
|
5071
|
+
this.logger.error("Failed to update ADK sampling handler:", error);
|
|
4872
5072
|
});
|
|
4873
5073
|
}
|
|
4874
5074
|
}
|
|
@@ -4879,13 +5079,13 @@ var McpClientService = (_class17 = class {
|
|
|
4879
5079
|
this.mcpSamplingHandler = null;
|
|
4880
5080
|
if (this.client) {
|
|
4881
5081
|
try {
|
|
4882
|
-
_optionalChain([this, 'access',
|
|
5082
|
+
_optionalChain([this, 'access', _150 => _150.client, 'access', _151 => _151.removeRequestHandler, 'optionalCall', _152 => _152("sampling/createMessage")]);
|
|
4883
5083
|
} catch (error) {
|
|
4884
|
-
|
|
5084
|
+
this.logger.error("Failed to remove sampling handler:", error);
|
|
4885
5085
|
}
|
|
4886
5086
|
}
|
|
4887
5087
|
}
|
|
4888
|
-
},
|
|
5088
|
+
}, _class19);
|
|
4889
5089
|
|
|
4890
5090
|
// src/tools/mcp/create-tool.ts
|
|
4891
5091
|
init_logger();
|
|
@@ -5087,11 +5287,11 @@ async function createTool(mcpTool, client) {
|
|
|
5087
5287
|
throw error;
|
|
5088
5288
|
}
|
|
5089
5289
|
}
|
|
5090
|
-
var McpToolAdapter = (
|
|
5290
|
+
var McpToolAdapter = (_class20 = class extends BaseTool {
|
|
5091
5291
|
|
|
5092
5292
|
|
|
5093
|
-
|
|
5094
|
-
|
|
5293
|
+
__init33() {this.clientService = null}
|
|
5294
|
+
__init34() {this.logger = new Logger({ name: "McpToolAdapter" })}
|
|
5095
5295
|
constructor(mcpTool, client) {
|
|
5096
5296
|
const metadata = mcpTool.metadata || {};
|
|
5097
5297
|
super({
|
|
@@ -5100,7 +5300,7 @@ var McpToolAdapter = (_class18 = class extends BaseTool {
|
|
|
5100
5300
|
isLongRunning: _nullishCoalesce(metadata.isLongRunning, () => ( false)),
|
|
5101
5301
|
shouldRetryOnFailure: _nullishCoalesce(metadata.shouldRetryOnFailure, () => ( false)),
|
|
5102
5302
|
maxRetryAttempts: _nullishCoalesce(metadata.maxRetryAttempts, () => ( 3))
|
|
5103
|
-
});
|
|
5303
|
+
});_class20.prototype.__init33.call(this);_class20.prototype.__init34.call(this);;
|
|
5104
5304
|
this.mcpTool = mcpTool;
|
|
5105
5305
|
this.client = client;
|
|
5106
5306
|
if (client.reinitialize && typeof client.reinitialize === "function") {
|
|
@@ -5173,7 +5373,7 @@ var McpToolAdapter = (_class18 = class extends BaseTool {
|
|
|
5173
5373
|
throw error;
|
|
5174
5374
|
}
|
|
5175
5375
|
}
|
|
5176
|
-
},
|
|
5376
|
+
}, _class20);
|
|
5177
5377
|
|
|
5178
5378
|
// src/tools/mcp/servers.ts
|
|
5179
5379
|
function createMcpConfig(name, packageName, config = {}) {
|
|
@@ -5249,7 +5449,7 @@ function McpNearAgent(config = {}) {
|
|
|
5249
5449
|
}
|
|
5250
5450
|
function McpNearIntentSwaps(config = {}) {
|
|
5251
5451
|
const mcpConfig = createMcpConfig(
|
|
5252
|
-
"
|
|
5452
|
+
"Near Intents Swaps MCP Client",
|
|
5253
5453
|
"@iqai/mcp-near-intent-swaps",
|
|
5254
5454
|
config
|
|
5255
5455
|
);
|
|
@@ -5271,6 +5471,14 @@ function McpTelegram(config = {}) {
|
|
|
5271
5471
|
);
|
|
5272
5472
|
return new McpToolset(mcpConfig);
|
|
5273
5473
|
}
|
|
5474
|
+
function McpCoinGecko(config = {}) {
|
|
5475
|
+
const mcpConfig = createMcpConfig(
|
|
5476
|
+
"CoinGecko MCP Client",
|
|
5477
|
+
"@coingecko/coingecko-mcp",
|
|
5478
|
+
config
|
|
5479
|
+
);
|
|
5480
|
+
return new McpToolset(mcpConfig);
|
|
5481
|
+
}
|
|
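
McpCoinGecko, new in this release, follows the same factory pattern as the other Mcp* helpers: it builds a config for the @coingecko/coingecko-mcp package and hands it to McpToolset. A minimal usage sketch (the tool-listing accessor is assumed to be getTools; only its caching logic appears in this diff):

// import { McpCoinGecko } from "@iqai/adk";  // exported via tools_exports above
const coinGecko = McpCoinGecko();
const tools = await coinGecko.getTools();     // accessor name assumed, see note above
// ...register `tools` with an agent, then release the underlying MCP client:
await coinGecko.close();
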
5274
5482
|
function McpFilesystem(config = {}) {
|
|
5275
5483
|
const mcpConfig = createMcpConfig(
|
|
5276
5484
|
"Filesystem MCP Client",
|
|
@@ -5294,13 +5502,13 @@ function McpGeneric(packageName, config = {}, name) {
|
|
|
5294
5502
|
}
|
|
5295
5503
|
|
|
5296
5504
|
// src/tools/mcp/index.ts
|
|
5297
|
-
var McpToolset = (
|
|
5505
|
+
var McpToolset = (_class21 = class {
|
|
5298
5506
|
|
|
5299
|
-
|
|
5300
|
-
|
|
5301
|
-
|
|
5302
|
-
|
|
5303
|
-
constructor(config, toolFilter = null) {;
|
|
5507
|
+
__init35() {this.clientService = null}
|
|
5508
|
+
__init36() {this.toolFilter = null}
|
|
5509
|
+
__init37() {this.tools = []}
|
|
5510
|
+
__init38() {this.isClosing = false}
|
|
5511
|
+
constructor(config, toolFilter = null) {;_class21.prototype.__init35.call(this);_class21.prototype.__init36.call(this);_class21.prototype.__init37.call(this);_class21.prototype.__init38.call(this);
|
|
5304
5512
|
this.config = config;
|
|
5305
5513
|
this.toolFilter = toolFilter;
|
|
5306
5514
|
this.clientService = new McpClientService(config);
|
|
@@ -5375,7 +5583,7 @@ var McpToolset = (_class19 = class {
|
|
|
5375
5583
|
"resource_closed_error" /* RESOURCE_CLOSED_ERROR */
|
|
5376
5584
|
);
|
|
5377
5585
|
}
|
|
5378
|
-
if (this.tools.length > 0 && !_optionalChain([this, 'access',
|
|
5586
|
+
if (this.tools.length > 0 && !_optionalChain([this, 'access', _153 => _153.config, 'access', _154 => _154.cacheConfig, 'optionalAccess', _155 => _155.enabled]) === false) {
|
|
5379
5587
|
return this.tools;
|
|
5380
5588
|
}
|
|
5381
5589
|
if (!this.clientService) {
|
|
@@ -5401,7 +5609,7 @@ var McpToolset = (_class19 = class {
|
|
|
5401
5609
|
}
|
|
5402
5610
|
}
|
|
5403
5611
|
}
|
|
5404
|
-
if (_optionalChain([this, 'access',
|
|
5612
|
+
if (_optionalChain([this, 'access', _156 => _156.config, 'access', _157 => _157.cacheConfig, 'optionalAccess', _158 => _158.enabled]) !== false) {
|
|
5405
5613
|
this.tools = tools;
|
|
5406
5614
|
}
|
|
5407
5615
|
return tools;
|
|
@@ -5461,7 +5669,7 @@ var McpToolset = (_class19 = class {
|
|
|
5461
5669
|
async dispose() {
|
|
5462
5670
|
await this.close();
|
|
5463
5671
|
}
|
|
5464
|
-
},
|
|
5672
|
+
}, _class21);
|
|
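
Two configuration knobs surface in the McpToolset logic above: cacheConfig.enabled gates the tool-list cache, and retryOptions.maxRetries feeds the withRetry wrapper (defaulting to 2 when unset). A sketch of a toolset that disables caching and allows an extra retry; transport values are illustrative:

const toolset = new McpToolset({
  transport: {
    mode: "sse",
    serverUrl: "https://example.com/mcp",          // illustrative endpoint
    headers: { Authorization: "Bearer <token>" },
  },
  cacheConfig: { enabled: false },  // always re-fetch the tool list
  retryOptions: { maxRetries: 3 },  // default is 2 (see the _optionalChain fallback above)
});
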
5465
5673
|
async function getMcpTools(config, toolFilter) {
|
|
5466
5674
|
const toolset = new McpToolset(config, toolFilter);
|
|
5467
5675
|
try {
|
|
@@ -5489,12 +5697,12 @@ function populateClientFunctionCallId(modelResponseEvent) {
|
|
|
5489
5697
|
}
|
|
5490
5698
|
}
|
|
5491
5699
|
function removeClientFunctionCallId(content) {
|
|
5492
|
-
if (_optionalChain([content, 'optionalAccess',
|
|
5700
|
+
if (_optionalChain([content, 'optionalAccess', _159 => _159.parts])) {
|
|
5493
5701
|
for (const part of content.parts) {
|
|
5494
|
-
if (_optionalChain([part, 'access',
|
|
5702
|
+
if (_optionalChain([part, 'access', _160 => _160.functionCall, 'optionalAccess', _161 => _161.id, 'optionalAccess', _162 => _162.startsWith, 'call', _163 => _163(AF_FUNCTION_CALL_ID_PREFIX)])) {
|
|
5495
5703
|
part.functionCall.id = void 0;
|
|
5496
5704
|
}
|
|
5497
|
-
if (_optionalChain([part, 'access',
|
|
5705
|
+
if (_optionalChain([part, 'access', _164 => _164.functionResponse, 'optionalAccess', _165 => _165.id, 'optionalAccess', _166 => _166.startsWith, 'call', _167 => _167(AF_FUNCTION_CALL_ID_PREFIX)])) {
|
|
5498
5706
|
part.functionResponse.id = void 0;
|
|
5499
5707
|
}
|
|
5500
5708
|
}
|
|
@@ -5639,7 +5847,7 @@ function mergeParallelFunctionResponseEvents(functionResponseEvents) {
|
|
|
5639
5847
|
}
|
|
5640
5848
|
const mergedParts = [];
|
|
5641
5849
|
for (const event of functionResponseEvents) {
|
|
5642
|
-
if (_optionalChain([event, 'access',
|
|
5850
|
+
if (_optionalChain([event, 'access', _168 => _168.content, 'optionalAccess', _169 => _169.parts])) {
|
|
5643
5851
|
for (const part of event.content.parts) {
|
|
5644
5852
|
mergedParts.push(part);
|
|
5645
5853
|
}
|
|
@@ -5672,94 +5880,52 @@ function isLlmAgent(agent) {
|
|
|
5672
5880
|
|
|
5673
5881
|
// src/flows/llm-flows/base-llm-flow.ts
|
|
5674
5882
|
var _ADK_AGENT_NAME_LABEL_KEY = "adk_agent_name";
|
|
5675
|
-
var BaseLlmFlow = (
|
|
5676
|
-
|
|
5677
|
-
|
|
5678
|
-
|
|
5883
|
+
var BaseLlmFlow = (_class22 = class {constructor() { _class22.prototype.__init39.call(this);_class22.prototype.__init40.call(this);_class22.prototype.__init41.call(this); }
|
|
5884
|
+
__init39() {this.requestProcessors = []}
|
|
5885
|
+
__init40() {this.responseProcessors = []}
|
|
5886
|
+
__init41() {this.logger = new Logger({ name: "BaseLlmFlow" })}
|
|
5679
5887
|
async *runAsync(invocationContext) {
|
|
5680
|
-
this.logger.
|
|
5681
|
-
invocationId: invocationContext.invocationId,
|
|
5682
|
-
agentName: invocationContext.agent.name,
|
|
5683
|
-
branch: invocationContext.branch
|
|
5684
|
-
});
|
|
5888
|
+
this.logger.info(`Agent '${invocationContext.agent.name}' started.`);
|
|
5685
5889
|
let stepCount = 0;
|
|
5686
5890
|
while (true) {
|
|
5687
5891
|
stepCount++;
|
|
5688
|
-
this.logger.debug(`\u{1F4CB} Running step ${stepCount}`, {
|
|
5689
|
-
invocationId: invocationContext.invocationId
|
|
5690
|
-
});
|
|
5691
5892
|
let lastEvent = null;
|
|
5692
|
-
let eventCount = 0;
|
|
5693
5893
|
for await (const event of this._runOneStepAsync(invocationContext)) {
|
|
5694
|
-
eventCount++;
|
|
5695
5894
|
lastEvent = event;
|
|
5696
|
-
this.logger.debug(
|
|
5697
|
-
`\u{1F4E4} Yielding event ${eventCount} from step ${stepCount}`,
|
|
5698
|
-
{
|
|
5699
|
-
eventId: event.id,
|
|
5700
|
-
eventType: event.constructor.name,
|
|
5701
|
-
hasContent: !!event.content,
|
|
5702
|
-
isFinalResponse: event.isFinalResponse(),
|
|
5703
|
-
partial: event.partial
|
|
5704
|
-
}
|
|
5705
|
-
);
|
|
5706
5895
|
yield event;
|
|
5707
5896
|
}
|
|
5708
5897
|
if (!lastEvent || lastEvent.isFinalResponse()) {
|
|
5709
|
-
this.logger.
|
|
5710
|
-
|
|
5711
|
-
|
|
5712
|
-
});
|
|
5898
|
+
this.logger.info(
|
|
5899
|
+
`Agent '${invocationContext.agent.name}' finished after ${stepCount} steps.`
|
|
5900
|
+
);
|
|
5713
5901
|
break;
|
|
5714
5902
|
}
|
|
5715
5903
|
if (lastEvent.partial) {
|
|
5716
|
-
this.logger.error(
|
|
5717
|
-
|
|
5718
|
-
|
|
5719
|
-
});
|
|
5904
|
+
this.logger.error(
|
|
5905
|
+
"Partial event encountered. LLM max output limit may be reached."
|
|
5906
|
+
);
|
|
5720
5907
|
throw new Error(
|
|
5721
5908
|
"Last event shouldn't be partial. LLM max output limit may be reached."
|
|
5722
5909
|
);
|
|
5723
5910
|
}
|
|
5724
5911
|
}
|
|
5725
|
-
this.logger.debug("\u{1F3C1} runAsync flow finished", {
|
|
5726
|
-
totalSteps: stepCount,
|
|
5727
|
-
invocationId: invocationContext.invocationId
|
|
5728
|
-
});
|
|
5729
5912
|
}
|
|
5730
5913
|
async *runLive(invocationContext) {
|
|
5731
|
-
this.logger.debug("\u{1F534} Starting runLive flow", {
|
|
5732
|
-
invocationId: invocationContext.invocationId,
|
|
5733
|
-
agentName: invocationContext.agent.name
|
|
5734
|
-
});
|
|
5735
5914
|
this.logger.warn("\u26A0\uFE0F runLive not fully implemented, delegating to runAsync");
|
|
5736
5915
|
yield* this.runAsync(invocationContext);
|
|
5737
5916
|
}
|
|
5738
5917
|
async *_runOneStepAsync(invocationContext) {
|
|
5739
|
-
this.logger.debug("\u{1F504} Starting one step execution", {
|
|
5740
|
-
invocationId: invocationContext.invocationId
|
|
5741
|
-
});
|
|
5742
5918
|
const llmRequest = new LlmRequest();
|
|
5743
|
-
this.logger.debug("\u{1F4DD} Created new LlmRequest", {
|
|
5744
|
-
requestId: llmRequest.id || "unknown"
|
|
5745
|
-
});
|
|
5746
|
-
this.logger.debug("\u{1F527} Starting preprocessing phase");
|
|
5747
5919
|
let preprocessEventCount = 0;
|
|
5748
5920
|
for await (const event of this._preprocessAsync(
|
|
5749
5921
|
invocationContext,
|
|
5750
5922
|
llmRequest
|
|
5751
5923
|
)) {
|
|
5752
5924
|
preprocessEventCount++;
|
|
5753
|
-
this.logger.debug(`\u{1F4E4} Preprocessing event ${preprocessEventCount}`, {
|
|
5754
|
-
eventId: event.id
|
|
5755
|
-
});
|
|
5756
5925
|
yield event;
|
|
5757
5926
|
}
|
|
5758
|
-
this.logger.debug("\u2705 Preprocessing completed", {
|
|
5759
|
-
eventCount: preprocessEventCount
|
|
5760
|
-
});
|
|
5761
5927
|
if (invocationContext.endInvocation) {
|
|
5762
|
-
this.logger.
|
|
5928
|
+
this.logger.info("Invocation ended during preprocessing.");
|
|
5763
5929
|
return;
|
|
5764
5930
|
}
|
|
5765
5931
|
const modelResponseEvent = new Event({
|
|
@@ -5768,9 +5934,6 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
5768
5934
|
author: invocationContext.agent.name,
|
|
5769
5935
|
branch: invocationContext.branch
|
|
5770
5936
|
});
|
|
5771
|
-
this.logger.debug("\u{1F916} Starting LLM call phase", {
|
|
5772
|
-
modelResponseEventId: modelResponseEvent.id
|
|
5773
|
-
});
|
|
5774
5937
|
let llmResponseCount = 0;
|
|
5775
5938
|
for await (const llmResponse of this._callLlmAsync(
|
|
5776
5939
|
invocationContext,
|
|
@@ -5778,12 +5941,6 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
5778
5941
|
modelResponseEvent
|
|
5779
5942
|
)) {
|
|
5780
5943
|
llmResponseCount++;
|
|
5781
|
-
this.logger.debug(`\u{1F504} Processing LLM response ${llmResponseCount}`, {
|
|
5782
|
-
hasContent: !!llmResponse.content,
|
|
5783
|
-
hasError: !!llmResponse.errorCode,
|
|
5784
|
-
interrupted: !!llmResponse.interrupted,
|
|
5785
|
-
partial: !!llmResponse.partial
|
|
5786
|
-
});
|
|
5787
5944
|
for await (const event of this._postprocessAsync(
|
|
5788
5945
|
invocationContext,
|
|
5789
5946
|
llmRequest,
|
|
@@ -5791,89 +5948,47 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
5791
5948
|
modelResponseEvent
|
|
5792
5949
|
)) {
|
|
5793
5950
|
modelResponseEvent.id = Event.newId();
|
|
5794
|
-
this.logger.debug("\u{1F4E4} Yielding postprocessed event", {
|
|
5795
|
-
eventId: event.id,
|
|
5796
|
-
hasFunctionCalls: !!event.getFunctionCalls()
|
|
5797
|
-
});
|
|
5798
5951
|
yield event;
|
|
5799
5952
|
}
|
|
5800
5953
|
}
|
|
5801
|
-
this.logger.debug("\u2705 One step execution completed", {
|
|
5802
|
-
llmResponseCount
|
|
5803
|
-
});
|
|
5804
5954
|
}
|
|
5805
5955
|
async *_preprocessAsync(invocationContext, llmRequest) {
|
|
5806
|
-
this.logger.debug("\u{1F527} Starting preprocessing", {
|
|
5807
|
-
processorCount: this.requestProcessors.length
|
|
5808
|
-
});
|
|
5809
5956
|
const agent = invocationContext.agent;
|
|
5810
5957
|
if (!("canonicalTools" in agent) || typeof agent.canonicalTools !== "function") {
|
|
5811
|
-
this.logger.debug("\u2139\uFE0F Agent has no canonical tools");
|
|
5812
5958
|
return;
|
|
5813
5959
|
}
|
|
5814
|
-
for (
|
|
5815
|
-
const processor = this.requestProcessors[i];
|
|
5816
|
-
this.logger.debug(`\u{1F504} Running request processor ${i + 1}`, {
|
|
5817
|
-
processorName: _optionalChain([processor, 'access', _185 => _185.constructor, 'optionalAccess', _186 => _186.name]) || "unknown"
|
|
5818
|
-
});
|
|
5819
|
-
let processorEventCount = 0;
|
|
5960
|
+
for (const processor of this.requestProcessors) {
|
|
5820
5961
|
for await (const event of processor.runAsync(
|
|
5821
5962
|
invocationContext,
|
|
5822
5963
|
llmRequest
|
|
5823
5964
|
)) {
|
|
5824
|
-
processorEventCount++;
|
|
5825
|
-
this.logger.debug(
|
|
5826
|
-
`\u{1F4E4} Request processor ${i + 1} event ${processorEventCount}`,
|
|
5827
|
-
{
|
|
5828
|
-
eventId: event.id
|
|
5829
|
-
}
|
|
5830
|
-
);
|
|
5831
5965
|
yield event;
|
|
5832
5966
|
}
|
|
5833
|
-
this.logger.debug(`\u2705 Request processor ${i + 1} completed`, {
|
|
5834
|
-
eventCount: processorEventCount
|
|
5835
|
-
});
|
|
5836
5967
|
}
|
|
5837
5968
|
const tools = await agent.canonicalTools(
|
|
5838
5969
|
new ReadonlyContext(invocationContext)
|
|
5839
5970
|
);
|
|
5840
|
-
|
|
5841
|
-
toolCount: tools.length
|
|
5842
|
-
});
|
|
5843
|
-
for (let i = 0; i < tools.length; i++) {
|
|
5844
|
-
const tool = tools[i];
|
|
5845
|
-
this.logger.debug(`\u{1F504} Processing tool ${i + 1}`, {
|
|
5846
|
-
toolName: _optionalChain([tool, 'access', _187 => _187.constructor, 'optionalAccess', _188 => _188.name]) || "unknown"
|
|
5847
|
-
});
|
|
5971
|
+
for (const tool of tools) {
|
|
5848
5972
|
const toolContext = new ToolContext(invocationContext);
|
|
5849
5973
|
await tool.processLlmRequest(toolContext, llmRequest);
|
|
5850
|
-
this.logger.debug(`\u2705 Tool ${i + 1} processed`);
|
|
5851
5974
|
}
|
|
5852
|
-
|
|
5853
|
-
|
|
5854
|
-
|
|
5975
|
+
if (tools.length > 0) {
|
|
5976
|
+
const toolsData = tools.map((tool) => ({
|
|
5977
|
+
Name: tool.name,
|
|
5978
|
+
Description: _optionalChain([tool, 'access', _170 => _170.description, 'optionalAccess', _171 => _171.substring, 'call', _172 => _172(0, 50)]) + (_optionalChain([tool, 'access', _173 => _173.description, 'optionalAccess', _174 => _174.length]) > 50 ? "..." : ""),
|
|
5979
|
+
"Long Running": tool.isLongRunning ? "Yes" : "No"
|
|
5980
|
+
}));
|
|
5981
|
+
this.logger.debugArray("\u{1F6E0}\uFE0F Available Tools", toolsData);
|
|
5982
|
+
}
|
|
5855
5983
|
}
|
|
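
debugStructured and debugArray are the logger's structured output helpers that replace the verbose per-event debug calls removed above: one renders a key/value card, the other a row-per-item table. A minimal sketch of calling them directly, with argument shapes inferred from the call sites in this diff:

const logger = new Logger({ name: "MyFlow" });

logger.debugStructured("LLM Request", {
  Model: "example-model",  // illustrative value
  "Content Items": 3,
  Streaming: "Yes",
});

logger.debugArray("Available Tools", [
  { Name: "google_search", "Long Running": "No" },
  { Name: "get_user_choice", "Long Running": "Yes" },
]);
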
5856
5984
|
async *_postprocessAsync(invocationContext, llmRequest, llmResponse, modelResponseEvent) {
|
|
5857
|
-
this.logger.debug("\u{1F504} Starting postprocessing", {
|
|
5858
|
-
hasContent: !!llmResponse.content,
|
|
5859
|
-
hasError: !!llmResponse.errorCode,
|
|
5860
|
-
interrupted: !!llmResponse.interrupted
|
|
5861
|
-
});
|
|
5862
|
-
let processorEventCount = 0;
|
|
5863
5985
|
for await (const event of this._postprocessRunProcessorsAsync(
|
|
5864
5986
|
invocationContext,
|
|
5865
5987
|
llmResponse
|
|
5866
5988
|
)) {
|
|
5867
|
-
processorEventCount++;
|
|
5868
|
-
this.logger.debug(`\u{1F4E4} Response processor event ${processorEventCount}`, {
|
|
5869
|
-
eventId: event.id
|
|
5870
|
-
});
|
|
5871
5989
|
yield event;
|
|
5872
5990
|
}
|
|
5873
5991
|
if (!llmResponse.content && !llmResponse.errorCode && !llmResponse.interrupted) {
|
|
5874
|
-
this.logger.debug(
|
|
5875
|
-
"\u2139\uFE0F Skipping event creation - no content, error, or interruption"
|
|
5876
|
-
);
|
|
5877
5992
|
return;
|
|
5878
5993
|
}
|
|
5879
5994
|
const finalizedEvent = this._finalizeModelResponseEvent(
|
|
@@ -5881,54 +5996,32 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
5881
5996
|
llmResponse,
|
|
5882
5997
|
modelResponseEvent
|
|
5883
5998
|
);
|
|
5884
|
-
this.logger.debug("\u{1F4DD} Finalized model response event", {
|
|
5885
|
-
eventId: finalizedEvent.id,
|
|
5886
|
-
hasContent: !!finalizedEvent.content,
|
|
5887
|
-
hasFunctionCalls: !!finalizedEvent.getFunctionCalls(),
|
|
5888
|
-
longRunningToolIds: finalizedEvent.longRunningToolIds.entries.length || 0
|
|
5889
|
-
});
|
|
5890
5999
|
yield finalizedEvent;
|
|
5891
6000
|
const functionCalls = finalizedEvent.getFunctionCalls();
|
|
5892
|
-
if (functionCalls) {
|
|
5893
|
-
|
|
5894
|
-
|
|
5895
|
-
|
|
5896
|
-
|
|
6001
|
+
if (functionCalls && functionCalls.length > 0) {
|
|
6002
|
+
const functionCallsData = functionCalls.map((fc) => ({
|
|
6003
|
+
Name: fc.name,
|
|
6004
|
+
Arguments: JSON.stringify(fc.args).substring(0, 100) + (JSON.stringify(fc.args).length > 100 ? "..." : ""),
|
|
6005
|
+
ID: fc.id || "auto"
|
|
6006
|
+
}));
|
|
6007
|
+
this.logger.debugArray("\u{1F527} Function Calls", functionCallsData);
|
|
5897
6008
|
for await (const event of this._postprocessHandleFunctionCallsAsync(
|
|
5898
6009
|
invocationContext,
|
|
5899
6010
|
finalizedEvent,
|
|
5900
6011
|
llmRequest
|
|
5901
6012
|
)) {
|
|
5902
|
-
functionEventCount++;
|
|
5903
|
-
this.logger.debug(`\u{1F4E4} Function call event ${functionEventCount}`, {
|
|
5904
|
-
eventId: event.id
|
|
5905
|
-
});
|
|
5906
6013
|
yield event;
|
|
5907
6014
|
}
|
|
5908
|
-
this.logger.debug("\u2705 Function calls processed", {
|
|
5909
|
-
eventCount: functionEventCount
|
|
5910
|
-
});
|
|
5911
6015
|
}
|
|
5912
|
-
this.logger.debug("\u2705 Postprocessing completed");
|
|
5913
6016
|
}
|
|
5914
6017
|
async *_postprocessLive(invocationContext, llmRequest, llmResponse, modelResponseEvent) {
|
|
5915
|
-
this.logger.debug("\u{1F534} Starting live postprocessing", {
|
|
5916
|
-
hasContent: !!llmResponse.content,
|
|
5917
|
-
turnComplete: !!llmResponse.turnComplete
|
|
5918
|
-
});
|
|
5919
6018
|
for await (const event of this._postprocessRunProcessorsAsync(
|
|
5920
6019
|
invocationContext,
|
|
5921
6020
|
llmResponse
|
|
5922
6021
|
)) {
|
|
5923
|
-
this.logger.debug("\u{1F4E4} Live response processor event", {
|
|
5924
|
-
eventId: event.id
|
|
5925
|
-
});
|
|
5926
6022
|
yield event;
|
|
5927
6023
|
}
|
|
5928
6024
|
if (!llmResponse.content && !llmResponse.errorCode && !llmResponse.interrupted && !llmResponse.turnComplete) {
|
|
5929
|
-
this.logger.debug(
|
|
5930
|
-
"\u2139\uFE0F Skipping live event - no content or completion signal"
|
|
5931
|
-
);
|
|
5932
6025
|
return;
|
|
5933
6026
|
}
|
|
5934
6027
|
const finalizedEvent = this._finalizeModelResponseEvent(
|
|
@@ -5936,165 +6029,83 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
5936
6029
|
llmResponse,
|
|
5937
6030
|
modelResponseEvent
|
|
5938
6031
|
);
|
|
5939
|
-
this.logger.debug("\u{1F4DD} Finalized live model response event", {
|
|
5940
|
-
eventId: finalizedEvent.id,
|
|
5941
|
-
hasFunctionCalls: !!finalizedEvent.getFunctionCalls()
|
|
5942
|
-
});
|
|
5943
6032
|
yield finalizedEvent;
|
|
5944
6033
|
if (finalizedEvent.getFunctionCalls()) {
|
|
5945
|
-
this.logger.debug("\u{1F527} Processing live function calls");
|
|
5946
6034
|
const functionResponseEvent = await handleFunctionCallsAsync(
|
|
5947
6035
|
invocationContext,
|
|
5948
6036
|
finalizedEvent,
|
|
5949
6037
|
llmRequest.toolsDict || {}
|
|
5950
6038
|
);
|
|
5951
6039
|
if (functionResponseEvent) {
|
|
5952
|
-
this.logger.debug("\u{1F4E4} Live function response event", {
|
|
5953
|
-
eventId: functionResponseEvent.id,
|
|
5954
|
-
hasTransfer: !!_optionalChain([functionResponseEvent, 'access', _189 => _189.actions, 'optionalAccess', _190 => _190.transferToAgent])
|
|
5955
|
-
});
|
|
5956
6040
|
yield functionResponseEvent;
|
|
5957
|
-
const transferToAgent = _optionalChain([functionResponseEvent, 'access',
|
|
6041
|
+
const transferToAgent = _optionalChain([functionResponseEvent, 'access', _175 => _175.actions, 'optionalAccess', _176 => _176.transferToAgent]);
|
|
5958
6042
|
if (transferToAgent) {
|
|
5959
|
-
this.logger.
|
|
5960
|
-
targetAgent: transferToAgent
|
|
5961
|
-
});
|
|
6043
|
+
this.logger.info(`\u{1F504} Live transfer to agent '${transferToAgent}'`);
|
|
5962
6044
|
const agentToRun = this._getAgentToRun(
|
|
5963
6045
|
invocationContext,
|
|
5964
6046
|
transferToAgent
|
|
5965
6047
|
);
|
|
5966
|
-
|
|
5967
|
-
for await (const event of _optionalChain([agentToRun, 'access', _193 => _193.runLive, 'optionalCall', _194 => _194(invocationContext)]) || agentToRun.runAsync(invocationContext)) {
|
|
5968
|
-
transferEventCount++;
|
|
5969
|
-
this.logger.debug(`\u{1F4E4} Transfer agent event ${transferEventCount}`, {
|
|
5970
|
-
eventId: event.id
|
|
5971
|
-
});
|
|
6048
|
+
for await (const event of _optionalChain([agentToRun, 'access', _177 => _177.runLive, 'optionalCall', _178 => _178(invocationContext)]) || agentToRun.runAsync(invocationContext)) {
|
|
5972
6049
|
yield event;
|
|
5973
6050
|
}
|
|
5974
|
-
this.logger.debug("\u2705 Agent transfer completed", {
|
|
5975
|
-
eventCount: transferEventCount
|
|
5976
|
-
});
|
|
5977
6051
|
}
|
|
5978
6052
|
}
|
|
5979
6053
|
}
|
|
5980
|
-
this.logger.debug("\u2705 Live postprocessing completed");
|
|
5981
6054
|
}
|
|
5982
6055
|
async *_postprocessRunProcessorsAsync(invocationContext, llmResponse) {
|
|
5983
|
-
|
|
5984
|
-
processorCount: this.responseProcessors.length
|
|
5985
|
-
});
|
|
5986
|
-
for (let i = 0; i < this.responseProcessors.length; i++) {
|
|
5987
|
-
const processor = this.responseProcessors[i];
|
|
5988
|
-
this.logger.debug(`\u{1F504} Running response processor ${i + 1}`, {
|
|
5989
|
-
processorName: _optionalChain([processor, 'access', _195 => _195.constructor, 'optionalAccess', _196 => _196.name]) || "unknown"
|
|
5990
|
-
});
|
|
5991
|
-
let processorEventCount = 0;
|
|
6056
|
+
for (const processor of this.responseProcessors) {
|
|
5992
6057
|
for await (const event of processor.runAsync(
|
|
5993
6058
|
invocationContext,
|
|
5994
6059
|
llmResponse
|
|
5995
6060
|
)) {
|
|
5996
|
-
processorEventCount++;
|
|
5997
|
-
this.logger.debug(
|
|
5998
|
-
`\u{1F4E4} Response processor ${i + 1} event ${processorEventCount}`,
|
|
5999
|
-
{
|
|
6000
|
-
eventId: event.id
|
|
6001
|
-
}
|
|
6002
|
-
);
|
|
6003
6061
|
yield event;
|
|
6004
6062
|
}
|
|
6005
|
-
this.logger;
|
|
6006
|
-
this.logger.debug(`\u2705 Response processor ${i + 1} completed`, {
|
|
6007
|
-
eventCount: processorEventCount
|
|
6008
|
-
});
|
|
6009
6063
|
}
|
|
6010
|
-
this.logger.debug("\u2705 All response processors completed");
|
|
6011
6064
|
}
|
|
6012
6065
|
async *_postprocessHandleFunctionCallsAsync(invocationContext, functionCallEvent, llmRequest) {
|
|
6013
|
-
this.logger.debug("\u{1F527} Handling function calls", {
|
|
6014
|
-
eventId: functionCallEvent.id,
|
|
6015
|
-
toolsDictSize: Object.keys(llmRequest.toolsDict || {}).length
|
|
6016
|
-
});
|
|
6017
6066
|
const functionResponseEvent = await handleFunctionCallsAsync(
|
|
6018
6067
|
invocationContext,
|
|
6019
6068
|
functionCallEvent,
|
|
6020
6069
|
llmRequest.toolsDict || {}
|
|
6021
6070
|
);
|
|
6022
6071
|
if (functionResponseEvent) {
|
|
6023
|
-
this.logger.debug("\u{1F4CB} Function calls executed", {
|
|
6024
|
-
responseEventId: functionResponseEvent.id,
|
|
6025
|
-
hasActions: !!functionResponseEvent.actions
|
|
6026
|
-
});
|
|
6027
6072
|
const authEvent = generateAuthEvent(
|
|
6028
6073
|
invocationContext,
|
|
6029
6074
|
functionResponseEvent
|
|
6030
6075
|
);
|
|
6031
6076
|
if (authEvent) {
|
|
6032
|
-
this.logger.debug("\u{1F510} Generated auth event", {
|
|
6033
|
-
authEventId: authEvent.id
|
|
6034
|
-
});
|
|
6035
6077
|
yield authEvent;
|
|
6036
6078
|
}
|
|
6037
6079
|
yield functionResponseEvent;
|
|
6038
|
-
const transferToAgent = _optionalChain([functionResponseEvent, 'access',
|
|
6080
|
+
const transferToAgent = _optionalChain([functionResponseEvent, 'access', _179 => _179.actions, 'optionalAccess', _180 => _180.transferToAgent]);
|
|
6039
6081
|
if (transferToAgent) {
|
|
6040
|
-
this.logger.
|
|
6041
|
-
targetAgent: transferToAgent
|
|
6042
|
-
});
|
|
6082
|
+
this.logger.info(`\u{1F504} Transferring to agent '${transferToAgent}'`);
|
|
6043
6083
|
const agentToRun = this._getAgentToRun(
|
|
6044
6084
|
invocationContext,
|
|
6045
6085
|
transferToAgent
|
|
6046
6086
|
);
|
|
6047
|
-
let transferEventCount = 0;
|
|
6048
6087
|
for await (const event of agentToRun.runAsync(invocationContext)) {
|
|
6049
|
-
transferEventCount++;
|
|
6050
|
-
this.logger.debug(`\u{1F4E4} Transfer agent event ${transferEventCount}`, {
|
|
6051
|
-
eventId: event.id
|
|
6052
|
-
});
|
|
6053
6088
|
yield event;
|
|
6054
6089
|
}
|
|
6055
|
-
this.logger.debug("\u2705 Agent transfer completed", {
|
|
6056
|
-
eventCount: transferEventCount
|
|
6057
|
-
});
|
|
6058
6090
|
}
|
|
6059
|
-
} else {
|
|
6060
|
-
this.logger.debug("\u2139\uFE0F No function response event generated");
|
|
6061
6091
|
}
|
|
6062
6092
|
}
|
|
6063
6093
|
_getAgentToRun(invocationContext, agentName) {
|
|
6064
|
-
this.logger.debug("\u{1F50D} Finding agent to run", {
|
|
6065
|
-
targetAgent: agentName,
|
|
6066
|
-
currentAgent: invocationContext.agent.name
|
|
6067
|
-
});
|
|
6068
6094
|
const rootAgent = invocationContext.agent.rootAgent;
|
|
6069
6095
|
const agentToRun = rootAgent.findAgent(agentName);
|
|
6070
6096
|
if (!agentToRun) {
|
|
6071
|
-
this.logger.error(
|
|
6072
|
-
targetAgent: agentName,
|
|
6073
|
-
rootAgent: rootAgent.name
|
|
6074
|
-
});
|
|
6097
|
+
this.logger.error(`Agent '${agentName}' not found in the agent tree.`);
|
|
6075
6098
|
throw new Error(`Agent ${agentName} not found in the agent tree.`);
|
|
6076
6099
|
}
|
|
6077
|
-
this.logger.debug("\u2705 Agent found", {
|
|
6078
|
-
targetAgent: agentName,
|
|
6079
|
-
agentType: agentToRun.constructor.name
|
|
6080
|
-
});
|
|
6081
6100
|
return agentToRun;
|
|
6082
6101
|
}
|
|
6083
6102
|
async *_callLlmAsync(invocationContext, llmRequest, modelResponseEvent) {
|
|
6084
|
-
this.logger.debug("\u{1F916} Starting LLM call", {
|
|
6085
|
-
model: llmRequest.model || "default",
|
|
6086
|
-
eventId: modelResponseEvent.id
|
|
6087
|
-
});
|
|
6088
|
-
this.logger.debug("\u{1F504} Processing before model callbacks");
|
|
6089
6103
|
const beforeModelCallbackContent = await this._handleBeforeModelCallback(
|
|
6090
6104
|
invocationContext,
|
|
6091
6105
|
llmRequest,
|
|
6092
6106
|
modelResponseEvent
|
|
6093
6107
|
);
|
|
6094
6108
|
if (beforeModelCallbackContent) {
|
|
6095
|
-
this.logger.debug("\u{1F4CB} Before model callback returned content", {
|
|
6096
|
-
hasContent: !!beforeModelCallbackContent.content
|
|
6097
|
-
});
|
|
6098
6109
|
yield beforeModelCallbackContent;
|
|
6099
6110
|
return;
|
|
6100
6111
|
}
|
|
@@ -6102,27 +6113,38 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
6102
6113
|
llmRequest.config.labels = llmRequest.config.labels || {};
|
|
6103
6114
|
if (!(_ADK_AGENT_NAME_LABEL_KEY in llmRequest.config.labels)) {
|
|
6104
6115
|
llmRequest.config.labels[_ADK_AGENT_NAME_LABEL_KEY] = invocationContext.agent.name;
|
|
6105
|
-
this.logger.debug("\u{1F3F7}\uFE0F Added agent name label", {
|
|
6106
|
-
agentName: invocationContext.agent.name
|
|
6107
|
-
});
|
|
6108
6116
|
}
|
|
6109
6117
|
const llm = this.__getLlm(invocationContext);
|
|
6110
|
-
this.logger.debug("\u{1F527} Retrieved LLM instance", {
|
|
6111
|
-
llmModel: llm.model,
|
|
6112
|
-
llmType: llm.constructor.name
|
|
6113
|
-
});
|
|
6114
6118
|
const runConfig = invocationContext.runConfig;
|
|
6115
6119
|
if (runConfig.supportCfc) {
|
|
6116
6120
|
this.logger.warn(
|
|
6117
|
-
"
|
|
6121
|
+
"CFC (supportCfc) not fully implemented, using standard flow."
|
|
6118
6122
|
);
|
|
6119
6123
|
}
|
|
6120
6124
|
invocationContext.incrementLlmCallCount();
|
|
6121
|
-
this.logger.debug("\u{1F4C8} Incremented LLM call count");
|
|
6122
6125
|
const isStreaming = invocationContext.runConfig.streamingMode === "sse" /* SSE */;
|
|
6123
|
-
|
|
6124
|
-
|
|
6125
|
-
|
|
6126
|
+
const tools = _optionalChain([llmRequest, 'access', _181 => _181.config, 'optionalAccess', _182 => _182.tools]) || [];
|
|
6127
|
+
const toolNames = tools.map((tool) => {
|
|
6128
|
+
if (tool.functionDeclarations && Array.isArray(tool.functionDeclarations)) {
|
|
6129
|
+
return tool.functionDeclarations.map((fn) => fn.name).join(", ");
|
|
6130
|
+
}
|
|
6131
|
+
if (tool.name) return tool.name;
|
|
6132
|
+
if (_optionalChain([tool, 'access', _183 => _183.function, 'optionalAccess', _184 => _184.name])) return tool.function.name;
|
|
6133
|
+
if (_optionalChain([tool, 'access', _185 => _185.function, 'optionalAccess', _186 => _186.function, 'optionalAccess', _187 => _187.name])) return tool.function.function.name;
|
|
6134
|
+
return "unknown";
|
|
6135
|
+
}).join(", ");
|
|
6136
|
+
const systemInstruction = llmRequest.getSystemInstructionText() || "";
|
|
6137
|
+
const truncatedSystemInstruction = systemInstruction.length > 100 ? `${systemInstruction.substring(0, 100)}...` : systemInstruction;
|
|
6138
|
+
const contentPreview = _optionalChain([llmRequest, 'access', _188 => _188.contents, 'optionalAccess', _189 => _189.length]) > 0 ? this._formatContentPreview(llmRequest.contents[0]) : "none";
|
|
6139
|
+
this.logger.debugStructured("\u{1F4E4} LLM Request", {
|
|
6140
|
+
Model: llm.model,
|
|
6141
|
+
Agent: invocationContext.agent.name,
|
|
6142
|
+
"Content Items": _optionalChain([llmRequest, 'access', _190 => _190.contents, 'optionalAccess', _191 => _191.length]) || 0,
|
|
6143
|
+
"Content Preview": contentPreview,
|
|
6144
|
+
"System Instruction": truncatedSystemInstruction || "none",
|
|
6145
|
+
"Available Tools": toolNames || "none",
|
|
6146
|
+
"Tool Count": _optionalChain([llmRequest, 'access', _192 => _192.config, 'optionalAccess', _193 => _193.tools, 'optionalAccess', _194 => _194.length]) || 0,
|
|
6147
|
+
Streaming: isStreaming ? "Yes" : "No"
|
|
6126
6148
|
});
|
|
6127
6149
|
let responseCount = 0;
|
|
6128
6150
|
for await (const llmResponse of llm.generateContentAsync(
|
|
@@ -6130,59 +6152,46 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
6130
6152
|
isStreaming
|
|
6131
6153
|
)) {
|
|
6132
6154
|
responseCount++;
|
|
6133
|
-
this.logger.debug(`\u{1F4E5} Received LLM response ${responseCount}`, {
|
|
6134
|
-
hasContent: !!llmResponse.content,
|
|
6135
|
-
hasError: !!llmResponse.errorCode,
|
|
6136
|
-
interrupted: !!llmResponse.interrupted,
|
|
6137
|
-
partial: !!llmResponse.partial,
|
|
6138
|
-
finishReason: llmResponse.finishReason,
|
|
6139
|
-
usage: llmResponse.usageMetadata ? {
|
|
6140
|
-
promptTokens: llmResponse.usageMetadata.promptTokenCount,
|
|
6141
|
-
completionTokens: llmResponse.usageMetadata.candidatesTokenCount,
|
|
6142
|
-
totalTokens: llmResponse.usageMetadata.totalTokenCount
|
|
6143
|
-
} : null
|
|
6144
|
-
});
|
|
6145
6155
|
traceLlmCall(
|
|
6146
6156
|
invocationContext,
|
|
6147
6157
|
modelResponseEvent.id,
|
|
6148
6158
|
llmRequest,
|
|
6149
6159
|
llmResponse
|
|
6150
6160
|
);
|
|
6151
|
-
|
|
6161
|
+
const tokenCount = _optionalChain([llmResponse, 'access', _195 => _195.usageMetadata, 'optionalAccess', _196 => _196.totalTokenCount]) || "unknown";
|
|
6162
|
+
const functionCallCount = _optionalChain([llmResponse, 'access', _197 => _197.content, 'optionalAccess', _198 => _198.parts, 'optionalAccess', _199 => _199.filter, 'call', _200 => _200((part) => part.functionCall), 'access', _201 => _201.length]) || 0;
|
|
6163
|
+
const responsePreview = this._formatResponsePreview(llmResponse);
|
|
6164
|
+
this.logger.debugStructured("\u{1F4E5} LLM Response", {
|
|
6165
|
+
Model: llm.model,
|
|
6166
|
+
"Token Count": tokenCount,
|
|
6167
|
+
"Function Calls": functionCallCount,
|
|
6168
|
+
"Response Preview": responsePreview,
|
|
6169
|
+
"Finish Reason": llmResponse.finishReason || "unknown",
|
|
6170
|
+
"Response #": responseCount,
|
|
6171
|
+
Partial: llmResponse.partial ? "Yes" : "No",
|
|
6172
|
+
Error: llmResponse.errorCode || "none"
|
|
6173
|
+
});
|
|
6152
6174
|
const alteredLlmResponse = await this._handleAfterModelCallback(
|
|
6153
6175
|
invocationContext,
|
|
6154
6176
|
llmResponse,
|
|
6155
6177
|
modelResponseEvent
|
|
6156
6178
|
);
|
|
6157
|
-
if (alteredLlmResponse) {
|
|
6158
|
-
this.logger.debug("\u{1F4CB} After model callback altered response");
|
|
6159
|
-
}
|
|
6160
6179
|
yield alteredLlmResponse || llmResponse;
|
|
6161
6180
|
}
|
|
6162
|
-
this.logger.debug("\u2705 LLM call completed", {
|
|
6163
|
-
totalResponses: responseCount
|
|
6164
|
-
});
|
|
6165
6181
|
}
|
|
6166
6182
|
async _handleBeforeModelCallback(invocationContext, llmRequest, modelResponseEvent) {
|
|
6167
6183
|
const agent = invocationContext.agent;
|
|
6168
6184
|
if (!("canonicalBeforeModelCallbacks" in agent)) {
|
|
6169
|
-
this.logger.debug("\u2139\uFE0F Agent has no before model callbacks");
|
|
6170
6185
|
return;
|
|
6171
6186
|
}
|
|
6172
6187
|
const beforeCallbacks = agent.canonicalBeforeModelCallbacks;
|
|
6173
6188
|
if (!beforeCallbacks) {
|
|
6174
|
-
this.logger.debug("\u2139\uFE0F Before model callbacks is null/undefined");
|
|
6175
6189
|
return;
|
|
6176
6190
|
}
|
|
6177
|
-
this.logger.debug("\u{1F504} Processing before model callbacks", {
|
|
6178
|
-
callbackCount: beforeCallbacks.length
|
|
6179
|
-
});
|
|
6180
6191
|
const callbackContext = new CallbackContext(invocationContext, {
|
|
6181
6192
|
eventActions: modelResponseEvent.actions
|
|
6182
6193
|
});
|
|
6183
|
-
for (
|
|
6184
|
-
const callback = beforeCallbacks[i];
|
|
6185
|
-
this.logger.debug(`\u{1F504} Running before model callback ${i + 1}`);
|
|
6194
|
+
for (const callback of beforeCallbacks) {
|
|
6186
6195
|
let beforeModelCallbackContent = callback({
|
|
6187
6196
|
callbackContext,
|
|
6188
6197
|
llmRequest
|
|
@@ -6191,35 +6200,23 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
6191
6200
|
beforeModelCallbackContent = await beforeModelCallbackContent;
|
|
6192
6201
|
}
|
|
6193
6202
|
if (beforeModelCallbackContent) {
|
|
6194
|
-
this.logger.debug(`\u2705 Before model callback ${i + 1} returned content`);
|
|
6195
6203
|
return beforeModelCallbackContent;
|
|
6196
6204
|
}
|
|
6197
|
-
this.logger.debug(
|
|
6198
|
-
`\u2705 Before model callback ${i + 1} completed (no content)`
|
|
6199
|
-
);
|
|
6200
6205
|
}
|
|
6201
|
-
this.logger.debug("\u2705 All before model callbacks completed");
|
|
6202
6206
|
}
|
|
6203
6207
|
async _handleAfterModelCallback(invocationContext, llmResponse, modelResponseEvent) {
|
|
6204
6208
|
const agent = invocationContext.agent;
|
|
6205
6209
|
if (!("canonicalAfterModelCallbacks" in agent)) {
|
|
6206
|
-
this.logger.debug("\u2139\uFE0F Agent has no after model callbacks");
|
|
6207
6210
|
return;
|
|
6208
6211
|
}
|
|
6209
6212
|
const afterCallbacks = agent.canonicalAfterModelCallbacks;
|
|
6210
6213
|
if (!afterCallbacks) {
|
|
6211
|
-
this.logger.debug("\u2139\uFE0F After model callbacks is null/undefined");
|
|
6212
6214
|
return;
|
|
6213
6215
|
}
|
|
6214
|
-
this.logger.debug("\u{1F504} Processing after model callbacks", {
|
|
6215
|
-
callbackCount: afterCallbacks.length
|
|
6216
|
-
});
|
|
6217
6216
|
const callbackContext = new CallbackContext(invocationContext, {
|
|
6218
6217
|
eventActions: modelResponseEvent.actions
|
|
6219
6218
|
});
|
|
6220
|
-
for (
|
|
6221
|
-
const callback = afterCallbacks[i];
|
|
6222
|
-
this.logger.debug(`\u{1F504} Running after model callback ${i + 1}`);
|
|
6219
|
+
for (const callback of afterCallbacks) {
|
|
6223
6220
|
let afterModelCallbackContent = callback({
|
|
6224
6221
|
callbackContext,
|
|
6225
6222
|
llmResponse
|
|
@@ -6228,21 +6225,11 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
6228
6225
|
afterModelCallbackContent = await afterModelCallbackContent;
|
|
6229
6226
|
}
|
|
6230
6227
|
if (afterModelCallbackContent) {
|
|
6231
|
-
this.logger.debug(`\u2705 After model callback ${i + 1} returned content`);
|
|
6232
6228
|
return afterModelCallbackContent;
|
|
6233
6229
|
}
|
|
6234
|
-
this.logger.debug(
|
|
6235
|
-
`\u2705 After model callback ${i + 1} completed (no content)`
|
|
6236
|
-
);
|
|
6237
6230
|
}
|
|
6238
|
-
this.logger.debug("\u2705 All after model callbacks completed");
|
|
6239
6231
|
}
|
|
6240
6232
|
_finalizeModelResponseEvent(llmRequest, llmResponse, modelResponseEvent) {
|
|
6241
|
-
this.logger.debug("\u{1F4DD} Finalizing model response event", {
|
|
6242
|
-
requestModel: llmRequest.model,
|
|
6243
|
-
responseHasContent: !!llmResponse.content,
|
|
6244
|
-
eventId: modelResponseEvent.id
|
|
6245
|
-
});
|
|
6246
6233
|
const eventData = { ...modelResponseEvent };
|
|
6247
6234
|
const responseData = { ...llmResponse };
|
|
6248
6235
|
Object.keys(responseData).forEach((key) => {
|
|
@@ -6254,38 +6241,48 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
6254
6241
|
if (event.content) {
|
|
6255
6242
|
const functionCalls = event.getFunctionCalls();
|
|
6256
6243
|
if (functionCalls) {
|
|
6257
|
-
this.logger.debug("\u{1F527} Processing function calls in event", {
|
|
6258
|
-
functionCallCount: functionCalls.length
|
|
6259
|
-
});
|
|
6260
6244
|
populateClientFunctionCallId(event);
|
|
6261
6245
|
event.longRunningToolIds = getLongRunningFunctionCalls(
|
|
6262
6246
|
functionCalls,
|
|
6263
6247
|
llmRequest.toolsDict || {}
|
|
6264
6248
|
);
|
|
6265
|
-
this.logger.debug("\u2705 Function calls processed", {
|
|
6266
|
-
longRunningToolCount: event.longRunningToolIds.entries.length || 0
|
|
6267
|
-
});
|
|
6268
6249
|
}
|
|
6269
6250
|
}
|
|
6270
|
-
this.logger.debug("\u2705 Model response event finalized", {
|
|
6271
|
-
finalEventId: event.id,
|
|
6272
|
-
hasContent: !!event.content,
|
|
6273
|
-
hasFunctionCalls: !!event.getFunctionCalls()
|
|
6274
|
-
});
|
|
6275
6251
|
return event;
|
|
6276
6252
|
}
|
|
6253
|
+
/**
|
|
6254
|
+
* Formats a content preview for debug logging.
|
|
6255
|
+
* Truncates text longer than 80 characters and falls back to JSON for non-text content.
|
|
6256
|
+
*/
|
|
6257
|
+
_formatContentPreview(content) {
|
|
6258
|
+
if (!content) return "none";
|
|
6259
|
+
if (content.parts && Array.isArray(content.parts)) {
|
|
6260
|
+
const textParts = content.parts.filter((part) => part.text).map((part) => part.text).join(" ");
|
|
6261
|
+
return textParts.length > 80 ? `${textParts.substring(0, 80)}...` : textParts || "no text content";
|
|
6262
|
+
}
|
|
6263
|
+
if (typeof content === "string") {
|
|
6264
|
+
return content.length > 80 ? `${content.substring(0, 80)}...` : content;
|
|
6265
|
+
}
|
|
6266
|
+
const stringified = JSON.stringify(content);
|
|
6267
|
+
return stringified.length > 80 ? `${stringified.substring(0, 80)}...` : stringified;
|
|
6268
|
+
}
|
|
6269
|
+
/**
|
|
6270
|
+
* Formats response content preview for debug logging
|
|
6271
|
+
*/
|
|
6272
|
+
_formatResponsePreview(llmResponse) {
|
|
6273
|
+
if (!llmResponse.content) return "none";
|
|
6274
|
+
if (llmResponse.content.parts && Array.isArray(llmResponse.content.parts)) {
|
|
6275
|
+
const textParts = llmResponse.content.parts.filter((part) => part.text).map((part) => part.text).join(" ");
|
|
6276
|
+
return textParts.length > 80 ? `${textParts.substring(0, 80)}...` : textParts || "no text content";
|
|
6277
|
+
}
|
|
6278
|
+
const stringified = JSON.stringify(llmResponse.content);
|
|
6279
|
+
return stringified.length > 80 ? `${stringified.substring(0, 80)}...` : stringified;
|
|
6280
|
+
}
|
|
6277
6281
|
__getLlm(invocationContext) {
|
|
6278
6282
|
const llm = invocationContext.agent.canonicalModel;
|
|
6279
|
-
this.logger.debug("\u{1F527} Retrieved canonical model", {
|
|
6280
|
-
model: _optionalChain([llm, 'optionalAccess', _199 => _199.model]) || "unknown",
|
|
6281
|
-
llmType: _optionalChain([llm, 'optionalAccess', _200 => _200.constructor, 'optionalAccess', _201 => _201.name]) || "unknown"
|
|
6282
|
-
});
|
|
6283
6283
|
return llm;
|
|
6284
6284
|
}
|
|
6285
|
-
},
|
|
6286
|
-
|
|
6287
|
-
// src/flows/llm-flows/single-flow.ts
|
|
6288
|
-
init_logger();
|
|
6285
|
+
}, _class22);
|
|
6289
6286
|
|
|
6290
6287
|
// src/flows/llm-flows/base-llm-processor.ts
|
|
6291
6288
|
var BaseLlmRequestProcessor = class {
|
|
@@ -6293,52 +6290,6 @@ var BaseLlmRequestProcessor = class {
|
|
|
6293
6290
|
var BaseLlmResponseProcessor = class {
|
|
6294
6291
|
};
|
|
6295
6292
|
|
|
6296
|
-
// src/flows/llm-flows/basic.ts
|
|
6297
|
-
var BasicLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
6298
|
-
async *runAsync(invocationContext, llmRequest) {
|
|
6299
|
-
const agent = invocationContext.agent;
|
|
6300
|
-
if (!this.isLlmAgent(agent)) {
|
|
6301
|
-
return;
|
|
6302
|
-
}
|
|
6303
|
-
llmRequest.model = typeof agent.canonicalModel === "string" ? agent.canonicalModel : agent.canonicalModel.model;
|
|
6304
|
-
if (agent.generateContentConfig) {
|
|
6305
|
-
llmRequest.config = JSON.parse(
|
|
6306
|
-
JSON.stringify(agent.generateContentConfig)
|
|
6307
|
-
);
|
|
6308
|
-
} else {
|
|
6309
|
-
llmRequest.config = {};
|
|
6310
|
-
}
|
|
6311
|
-
if (agent.outputSchema) {
|
|
6312
|
-
llmRequest.setOutputSchema(agent.outputSchema);
|
|
6313
|
-
}
|
|
6314
|
-
const runConfig = invocationContext.runConfig;
|
|
6315
|
-
if (!llmRequest.liveConnectConfig) {
|
|
6316
|
-
llmRequest.liveConnectConfig = {};
|
|
6317
|
-
}
|
|
6318
|
-
if (runConfig.responseModalities) {
|
|
6319
|
-
llmRequest.liveConnectConfig.responseModalities = runConfig.responseModalities;
|
|
6320
|
-
}
|
|
6321
|
-
llmRequest.liveConnectConfig.speechConfig = runConfig.speechConfig;
|
|
6322
|
-
llmRequest.liveConnectConfig.outputAudioTranscription = runConfig.outputAudioTranscription;
|
|
6323
|
-
llmRequest.liveConnectConfig.inputAudioTranscription = runConfig.inputAudioTranscription;
|
|
6324
|
-
llmRequest.liveConnectConfig.realtimeInputConfig = runConfig.realtimeInputConfig;
|
|
6325
|
-
llmRequest.liveConnectConfig.enableAffectiveDialog = runConfig.enableAffectiveDialog;
|
|
6326
|
-
llmRequest.liveConnectConfig.proactivity = runConfig.proactivity;
|
|
6327
|
-
const tools = await agent.canonicalTools();
|
|
6328
|
-
llmRequest.appendTools(tools);
|
|
6329
|
-
for await (const _ of []) {
|
|
6330
|
-
yield _;
|
|
6331
|
-
}
|
|
6332
|
-
}
|
|
6333
|
-
/**
|
|
6334
|
-
* Type guard to check if agent is an LlmAgent
|
|
6335
|
-
*/
|
|
6336
|
-
isLlmAgent(agent) {
|
|
6337
|
-
return agent && typeof agent === "object" && "canonicalModel" in agent;
|
|
6338
|
-
}
|
|
6339
|
-
};
|
|
6340
|
-
var requestProcessor = new BasicLlmRequestProcessor();
|
|
6341
|
-
|
|
6342
6293
|
// src/auth/auth-tool.ts
|
|
6343
6294
|
var EnhancedAuthConfig = class {
|
|
6344
6295
|
/**
|
|
@@ -6546,152 +6497,738 @@ var AuthLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
|
6546
6497
|
}
|
|
6547
6498
|
}
|
|
6548
6499
|
};
|
|
6549
|
-
var
|
|
6500
|
+
var requestProcessor = new AuthLlmRequestProcessor();
|
|
6550
6501
|
|
|
6551
|
-
// src/flows/llm-flows/
|
|
6552
|
-
var
|
|
6502
|
+
// src/flows/llm-flows/basic.ts
|
|
6503
|
+
var BasicLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
6553
6504
|
async *runAsync(invocationContext, llmRequest) {
|
|
6554
6505
|
const agent = invocationContext.agent;
|
|
6555
|
-
|
|
6556
|
-
|
|
6557
|
-
];
|
|
6558
|
-
if (agent.description) {
|
|
6559
|
-
instructions.push(` The description about you is "${agent.description}"`);
|
|
6506
|
+
if (!this.isLlmAgent(agent)) {
|
|
6507
|
+
return;
|
|
6560
6508
|
}
|
|
6561
|
-
llmRequest.
|
|
6509
|
+
llmRequest.model = typeof agent.canonicalModel === "string" ? agent.canonicalModel : agent.canonicalModel.model;
|
|
6510
|
+
if (agent.generateContentConfig) {
|
|
6511
|
+
llmRequest.config = JSON.parse(
|
|
6512
|
+
JSON.stringify(agent.generateContentConfig)
|
|
6513
|
+
);
|
|
6514
|
+
} else {
|
|
6515
|
+
llmRequest.config = {};
|
|
6516
|
+
}
|
|
6517
|
+
if (agent.outputSchema) {
|
|
6518
|
+
llmRequest.setOutputSchema(agent.outputSchema);
|
|
6519
|
+
}
|
|
6520
|
+
const runConfig = invocationContext.runConfig;
|
|
6521
|
+
if (!llmRequest.liveConnectConfig) {
|
|
6522
|
+
llmRequest.liveConnectConfig = {};
|
|
6523
|
+
}
|
|
6524
|
+
if (runConfig.responseModalities) {
|
|
6525
|
+
llmRequest.liveConnectConfig.responseModalities = runConfig.responseModalities;
|
|
6526
|
+
}
|
|
6527
|
+
llmRequest.liveConnectConfig.speechConfig = runConfig.speechConfig;
|
|
6528
|
+
llmRequest.liveConnectConfig.outputAudioTranscription = runConfig.outputAudioTranscription;
|
|
6529
|
+
llmRequest.liveConnectConfig.inputAudioTranscription = runConfig.inputAudioTranscription;
|
|
6530
|
+
llmRequest.liveConnectConfig.realtimeInputConfig = runConfig.realtimeInputConfig;
|
|
6531
|
+
llmRequest.liveConnectConfig.enableAffectiveDialog = runConfig.enableAffectiveDialog;
|
|
6532
|
+
llmRequest.liveConnectConfig.proactivity = runConfig.proactivity;
|
|
6533
|
+
const tools = await agent.canonicalTools();
|
|
6534
|
+
llmRequest.appendTools(tools);
|
|
6562
6535
|
for await (const _ of []) {
|
|
6563
6536
|
yield _;
|
|
6564
6537
|
}
|
|
6565
6538
|
}
|
|
6539
|
+
/**
|
|
6540
|
+
* Type guard to check if agent is an LlmAgent
|
|
6541
|
+
*/
|
|
6542
|
+
isLlmAgent(agent) {
|
|
6543
|
+
return agent && typeof agent === "object" && "canonicalModel" in agent;
|
|
6544
|
+
}
|
|
6545
|
+
};
|
|
6546
|
+
var requestProcessor2 = new BasicLlmRequestProcessor();
|
|
6547
|
+
|
|
6548
|
+
// src/code-executors/base-code-executor.ts
|
|
6549
|
+
var BaseCodeExecutor = class {
|
|
6550
|
+
|
|
6551
|
+
constructor(config = {}) {
|
|
6552
|
+
this.config = {
|
|
6553
|
+
optimizeDataFile: _nullishCoalesce(config.optimizeDataFile, () => ( false)),
|
|
6554
|
+
stateful: _nullishCoalesce(config.stateful, () => ( false)),
|
|
6555
|
+
errorRetryAttempts: _nullishCoalesce(config.errorRetryAttempts, () => ( 2)),
|
|
6556
|
+
codeBlockDelimiters: _nullishCoalesce(config.codeBlockDelimiters, () => ( [
|
|
6557
|
+
["`tool_code\n", "\n`"],
|
|
6558
|
+
["`python\n", "\n`"]
|
|
6559
|
+
])),
|
|
6560
|
+
executionResultDelimiters: _nullishCoalesce(config.executionResultDelimiters, () => ( [
|
|
6561
|
+
"`tool_output\n",
|
|
6562
|
+
"\n`"
|
|
6563
|
+
]))
|
|
6564
|
+
};
|
|
6565
|
+
}
|
|
6566
|
+
// Getters for configuration
|
|
6567
|
+
get optimizeDataFile() {
|
|
6568
|
+
return this.config.optimizeDataFile;
|
|
6569
|
+
}
|
|
6570
|
+
get stateful() {
|
|
6571
|
+
return this.config.stateful;
|
|
6572
|
+
}
|
|
6573
|
+
get errorRetryAttempts() {
|
|
6574
|
+
return this.config.errorRetryAttempts;
|
|
6575
|
+
}
|
|
6576
|
+
get codeBlockDelimiters() {
|
|
6577
|
+
return this.config.codeBlockDelimiters;
|
|
6578
|
+
}
|
|
6579
|
+
get executionResultDelimiters() {
|
|
6580
|
+
return this.config.executionResultDelimiters;
|
|
6581
|
+
}
|
|
6566
6582
|
};
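The new BaseCodeExecutor above is configuration only: it stores optimizeDataFile, stateful, errorRetryAttempts and the two delimiter pairs, and leaves actual execution to subclasses. A minimal subclass sketch, assuming the class (and the stdout/stderr/outputFiles result shape used later in this diff) is exported from the package root; names here are illustrative, not verified against the published typings:

import { BaseCodeExecutor } from "@iqai/adk"; // assumed export

class EchoCodeExecutor extends BaseCodeExecutor {
  async executeCode(_invocationContext: unknown, input: { code: string }) {
    // A real executor would sandbox and run input.code; this sketch just echoes it.
    return { stdout: `would run:\n${input.code}`, stderr: "", outputFiles: [] };
  }
}

// Constructor defaults shown above: errorRetryAttempts 2, stateful false, optimizeDataFile false.
const executor = new EchoCodeExecutor({ errorRetryAttempts: 1 });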
|
|
6567
|
-
var requestProcessor3 = new IdentityLlmRequestProcessor();
|
|
6568
6583
|
|
|
6569
|
-
// src/
|
|
6570
|
-
|
|
6571
|
-
|
|
6572
|
-
|
|
6573
|
-
const result = [];
|
|
6574
|
-
let lastEnd = 0;
|
|
6575
|
-
const matches = Array.from(string.matchAll(pattern));
|
|
6576
|
-
for (const match of matches) {
|
|
6577
|
-
result.push(string.slice(lastEnd, match.index));
|
|
6578
|
-
const replacement = await replaceAsyncFn(match);
|
|
6579
|
-
result.push(replacement);
|
|
6580
|
-
lastEnd = (match.index || 0) + match[0].length;
|
|
6581
|
-
}
|
|
6582
|
-
result.push(string.slice(lastEnd));
|
|
6583
|
-
return result.join("");
|
|
6584
|
+
// src/code-executors/built-in-code-executor.ts
|
|
6585
|
+
var BuiltInCodeExecutor = class extends BaseCodeExecutor {
|
|
6586
|
+
constructor(config = {}) {
|
|
6587
|
+
super(config);
|
|
6584
6588
|
}
|
|
6585
|
-
async
|
|
6586
|
-
|
|
6587
|
-
|
|
6588
|
-
|
|
6589
|
-
|
|
6590
|
-
|
|
6589
|
+
async executeCode(invocationContext, codeExecutionInput) {
|
|
6590
|
+
throw new Error(
|
|
6591
|
+
"BuiltInCodeExecutor.executeCode should not be called directly"
|
|
6592
|
+
);
|
|
6593
|
+
}
|
|
6594
|
+
/**
|
|
6595
|
+
* Pre-process the LLM request for Gemini 2.0+ models to use the code execution tool
|
|
6596
|
+
*/
|
|
6597
|
+
processLlmRequest(llmRequest) {
|
|
6598
|
+
if (!_optionalChain([llmRequest, 'access', _207 => _207.model, 'optionalAccess', _208 => _208.startsWith, 'call', _209 => _209("gemini-2")])) {
|
|
6599
|
+
throw new Error(
|
|
6600
|
+
`Gemini code execution tool is not supported for model ${llmRequest.model}`
|
|
6601
|
+
);
|
|
6591
6602
|
}
|
|
6592
|
-
if (
|
|
6593
|
-
|
|
6594
|
-
if (!invocationContext.artifactService) {
|
|
6595
|
-
throw new Error("Artifact service is not initialized.");
|
|
6596
|
-
}
|
|
6597
|
-
try {
|
|
6598
|
-
const artifact = await invocationContext.artifactService.loadArtifact({
|
|
6599
|
-
appName: invocationContext.session.appName,
|
|
6600
|
-
userId: invocationContext.session.userId,
|
|
6601
|
-
sessionId: invocationContext.session.id,
|
|
6602
|
-
filename: varName
|
|
6603
|
-
});
|
|
6604
|
-
if (!artifact) {
|
|
6605
|
-
throw new Error(`Artifact ${varName} not found.`);
|
|
6606
|
-
}
|
|
6607
|
-
return String(artifact);
|
|
6608
|
-
} catch (error) {
|
|
6609
|
-
if (optional) {
|
|
6610
|
-
return "";
|
|
6611
|
-
}
|
|
6612
|
-
throw error;
|
|
6613
|
-
}
|
|
6614
|
-
} else {
|
|
6615
|
-
if (!isValidStateName(varName)) {
|
|
6616
|
-
return match[0];
|
|
6617
|
-
}
|
|
6618
|
-
const sessionState = invocationContext.session.state;
|
|
6619
|
-
if (varName in sessionState) {
|
|
6620
|
-
return String(sessionState[varName]);
|
|
6621
|
-
}
|
|
6622
|
-
if (optional) {
|
|
6623
|
-
return "";
|
|
6624
|
-
}
|
|
6625
|
-
throw new Error(`Context variable not found: \`${varName}\`.`);
|
|
6603
|
+
if (!llmRequest.config) {
|
|
6604
|
+
llmRequest.config = {};
|
|
6626
6605
|
}
|
|
6606
|
+
if (!llmRequest.config.tools) {
|
|
6607
|
+
llmRequest.config.tools = [];
|
|
6608
|
+
}
|
|
6609
|
+
const codeExecutionTool = {
|
|
6610
|
+
codeExecution: {}
|
|
6611
|
+
};
|
|
6612
|
+
llmRequest.config.tools.push(codeExecutionTool);
|
|
6627
6613
|
}
|
|
6628
|
-
|
|
6629
|
-
|
|
6630
|
-
|
|
6631
|
-
|
|
6632
|
-
|
|
6633
|
-
|
|
6614
|
+
};
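BuiltInCodeExecutor above never runs code locally: executeCode throws, and processLlmRequest only guards the model name and appends a codeExecution tool to the request config. A sketch of that effect on a simplified request object (not the package's exact LlmRequest type):

// Only the fields touched by processLlmRequest are modelled here.
const llmRequest = { model: "gemini-2.0-flash", config: {} as { tools?: object[] } };

if (!llmRequest.model?.startsWith("gemini-2")) {
  throw new Error(`Gemini code execution tool is not supported for model ${llmRequest.model}`);
}
llmRequest.config.tools = llmRequest.config.tools ?? [];
llmRequest.config.tools.push({ codeExecution: {} }); // execution is delegated to the model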
|
|
6615
|
+
|
|
6616
|
+
// src/code-executors/code-execution-utils.ts
|
|
6617
|
+
|
|
6618
|
+
var CodeExecutionUtils = class _CodeExecutionUtils {
|
|
6619
|
+
/**
|
|
6620
|
+
* Gets the file content as a base64-encoded string
|
|
6621
|
+
*/
|
|
6622
|
+
static getEncodedFileContent(data) {
|
|
6623
|
+
let decodedData;
|
|
6624
|
+
if (data instanceof ArrayBuffer) {
|
|
6625
|
+
decodedData = new TextDecoder().decode(data);
|
|
6626
|
+
}
|
|
6627
|
+
if (_CodeExecutionUtils.isBase64Encoded(decodedData)) {
|
|
6628
|
+
return decodedData;
|
|
6629
|
+
}
|
|
6630
|
+
return btoa(decodedData);
|
|
6634
6631
|
}
|
|
6635
|
-
|
|
6636
|
-
|
|
6637
|
-
|
|
6638
|
-
|
|
6639
|
-
return
|
|
6632
|
+
static isBase64Encoded(str) {
|
|
6633
|
+
try {
|
|
6634
|
+
return btoa(atob(str)) === str;
|
|
6635
|
+
} catch (e3) {
|
|
6636
|
+
return false;
|
|
6640
6637
|
}
|
|
6641
6638
|
}
|
|
6642
|
-
|
|
6643
|
-
|
|
6644
|
-
|
|
6645
|
-
|
|
6646
|
-
|
|
6647
|
-
|
|
6639
|
+
/**
|
|
6640
|
+
* Extracts the first code block from the content and truncates everything after it
|
|
6641
|
+
*/
|
|
6642
|
+
static extractCodeAndTruncateContent(content, codeBlockDelimiters) {
|
|
6643
|
+
if (!_optionalChain([content, 'optionalAccess', _210 => _210.parts, 'optionalAccess', _211 => _211.length])) {
|
|
6644
|
+
return null;
|
|
6645
|
+
}
|
|
6646
|
+
for (let idx = 0; idx < content.parts.length; idx++) {
|
|
6647
|
+
const part = content.parts[idx];
|
|
6648
|
+
if (part.executableCode && (idx === content.parts.length - 1 || !content.parts[idx + 1].codeExecutionResult)) {
|
|
6649
|
+
content.parts = content.parts.slice(0, idx + 1);
|
|
6650
|
+
return part.executableCode.code;
|
|
6651
|
+
}
|
|
6652
|
+
}
|
|
6653
|
+
const textParts = content.parts.filter((p) => p.text);
|
|
6654
|
+
if (!textParts.length) {
|
|
6655
|
+
return null;
|
|
6656
|
+
}
|
|
6657
|
+
const responseText = textParts.map((p) => p.text).join("\n");
|
|
6658
|
+
const leadingDelimiterPattern = codeBlockDelimiters.map(([start]) => _CodeExecutionUtils.escapeRegex(start)).join("|");
|
|
6659
|
+
const trailingDelimiterPattern = codeBlockDelimiters.map(([, end]) => _CodeExecutionUtils.escapeRegex(end)).join("|");
|
|
6660
|
+
const pattern = new RegExp(
|
|
6661
|
+
`(.*?)(${leadingDelimiterPattern})(.*?)(${trailingDelimiterPattern})(.*?)$`,
|
|
6662
|
+
"s"
|
|
6663
|
+
);
|
|
6664
|
+
const match = responseText.match(pattern);
|
|
6665
|
+
if (!match) {
|
|
6666
|
+
return null;
|
|
6667
|
+
}
|
|
6668
|
+
const [, prefix, , code, , suffix] = match;
|
|
6669
|
+
if (!code) {
|
|
6670
|
+
return null;
|
|
6671
|
+
}
|
|
6672
|
+
content.parts = [];
|
|
6673
|
+
if (prefix) {
|
|
6674
|
+
content.parts.push({ text: prefix });
|
|
6675
|
+
}
|
|
6676
|
+
content.parts.push(_CodeExecutionUtils.buildExecutableCodePart(code));
|
|
6677
|
+
return code;
|
|
6678
|
+
}
|
|
6679
|
+
static escapeRegex(str) {
|
|
6680
|
+
return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
|
6681
|
+
}
|
|
6682
|
+
/**
|
|
6683
|
+
* Builds an executable code part with code string
|
|
6684
|
+
*/
|
|
6685
|
+
static buildExecutableCodePart(code) {
|
|
6686
|
+
return {
|
|
6687
|
+
executableCode: {
|
|
6688
|
+
code,
|
|
6689
|
+
language: _genai.Language.PYTHON
|
|
6690
|
+
}
|
|
6691
|
+
};
|
|
6692
|
+
}
|
|
6693
|
+
/**
|
|
6694
|
+
* Builds the code execution result part from the code execution result
|
|
6695
|
+
*/
|
|
6696
|
+
static buildCodeExecutionResultPart(codeExecutionResult) {
|
|
6697
|
+
if (codeExecutionResult.stderr) {
|
|
6698
|
+
return {
|
|
6699
|
+
codeExecutionResult: {
|
|
6700
|
+
outcome: _genai.Outcome.OUTCOME_FAILED,
|
|
6701
|
+
output: codeExecutionResult.stderr
|
|
6702
|
+
}
|
|
6703
|
+
};
|
|
6704
|
+
}
|
|
6705
|
+
const finalResult = [];
|
|
6706
|
+
if (codeExecutionResult.stdout || !codeExecutionResult.outputFiles.length) {
|
|
6707
|
+
finalResult.push(
|
|
6708
|
+
`Code execution result:
|
|
6709
|
+
${codeExecutionResult.stdout}
|
|
6710
|
+
`
|
|
6711
|
+
);
|
|
6712
|
+
}
|
|
6713
|
+
if (codeExecutionResult.outputFiles.length) {
|
|
6714
|
+
const fileNames = codeExecutionResult.outputFiles.map((f) => `\`${f.name}\``).join(",");
|
|
6715
|
+
finalResult.push(`Saved artifacts:
|
|
6716
|
+
${fileNames}`);
|
|
6717
|
+
}
|
|
6718
|
+
return {
|
|
6719
|
+
codeExecutionResult: {
|
|
6720
|
+
outcome: _genai.Outcome.OUTCOME_OK,
|
|
6721
|
+
output: finalResult.join("\n\n")
|
|
6722
|
+
}
|
|
6723
|
+
};
|
|
6724
|
+
}
|
|
6725
|
+
/**
|
|
6726
|
+
* Converts the code execution parts to text parts in a Content
|
|
6727
|
+
*/
|
|
6728
|
+
static convertCodeExecutionParts(content, codeBlockDelimiter, executionResultDelimiters) {
|
|
6729
|
+
if (!_optionalChain([content, 'access', _212 => _212.parts, 'optionalAccess', _213 => _213.length])) {
|
|
6730
|
+
return;
|
|
6731
|
+
}
|
|
6732
|
+
const lastPart = content.parts[content.parts.length - 1];
|
|
6733
|
+
if (lastPart.executableCode) {
|
|
6734
|
+
content.parts[content.parts.length - 1] = {
|
|
6735
|
+
text: `${codeBlockDelimiter[0]}${lastPart.executableCode.code}${codeBlockDelimiter[1]}`
|
|
6736
|
+
};
|
|
6737
|
+
} else if (content.parts.length === 1 && lastPart.codeExecutionResult) {
|
|
6738
|
+
content.parts[content.parts.length - 1] = {
|
|
6739
|
+
text: `${executionResultDelimiters[0]}${lastPart.codeExecutionResult.output}${executionResultDelimiters[1]}`
|
|
6740
|
+
};
|
|
6741
|
+
content.role = "user";
|
|
6742
|
+
}
|
|
6743
|
+
}
|
|
6744
|
+
};
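CodeExecutionUtils.extractCodeAndTruncateContent above pulls the first fenced code block out of a model response and rewrites content.parts to the leading text plus an executableCode part, dropping whatever followed the block. A usage sketch with explicit delimiters (the triple-backtick tool_code fences are the conventional ADK delimiters and are assumed here to match the BaseCodeExecutor defaults; the import path is likewise an assumption):

import { CodeExecutionUtils } from "@iqai/adk"; // assumed export

const content = {
  role: "model",
  parts: [{ text: "Let me compute that.\n```tool_code\nprint(1 + 1)\n```\nDone." }],
};

const code = CodeExecutionUtils.extractCodeAndTruncateContent(content, [
  ["```tool_code\n", "\n```"],
  ["```python\n", "\n```"],
]);
// code === "print(1 + 1)"; content.parts now holds the prefix text plus an
// executableCode part, and the trailing "Done." has been discarded.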
|
|
6648
6745
|
|
|
6649
|
-
// src/
|
|
6650
|
-
var
|
|
6746
|
+
// src/code-executors/code-executor-context.ts
|
|
6747
|
+
var CONTEXT_KEY = "_code_execution_context";
|
|
6748
|
+
var SESSION_ID_KEY = "execution_session_id";
|
|
6749
|
+
var PROCESSED_FILE_NAMES_KEY = "processed_input_files";
|
|
6750
|
+
var INPUT_FILE_KEY = "_code_executor_input_files";
|
|
6751
|
+
var ERROR_COUNT_KEY = "_code_executor_error_counts";
|
|
6752
|
+
var CODE_EXECUTION_RESULTS_KEY = "_code_execution_results";
|
|
6753
|
+
var CodeExecutorContext = class {
|
|
6754
|
+
|
|
6755
|
+
|
|
6756
|
+
constructor(sessionState) {
|
|
6757
|
+
this.sessionState = sessionState;
|
|
6758
|
+
this.context = this.getCodeExecutorContext(sessionState);
|
|
6759
|
+
}
|
|
6760
|
+
/**
|
|
6761
|
+
* Gets the state delta to update in the persistent session state.
|
|
6762
|
+
*/
|
|
6763
|
+
getStateDelta() {
|
|
6764
|
+
const contextToUpdate = JSON.parse(JSON.stringify(this.context));
|
|
6765
|
+
return { [CONTEXT_KEY]: contextToUpdate };
|
|
6766
|
+
}
|
|
6767
|
+
/**
|
|
6768
|
+
* Gets the session ID for the code executor.
|
|
6769
|
+
*/
|
|
6770
|
+
getExecutionId() {
|
|
6771
|
+
if (!(SESSION_ID_KEY in this.context)) {
|
|
6772
|
+
return null;
|
|
6773
|
+
}
|
|
6774
|
+
return this.context[SESSION_ID_KEY];
|
|
6775
|
+
}
|
|
6776
|
+
/**
|
|
6777
|
+
* Sets the session ID for the code executor.
|
|
6778
|
+
*/
|
|
6779
|
+
setExecutionId(sessionId) {
|
|
6780
|
+
this.context[SESSION_ID_KEY] = sessionId;
|
|
6781
|
+
}
|
|
6782
|
+
/**
|
|
6783
|
+
* Gets the processed file names from the session state.
|
|
6784
|
+
*/
|
|
6785
|
+
getProcessedFileNames() {
|
|
6786
|
+
if (!(PROCESSED_FILE_NAMES_KEY in this.context)) {
|
|
6787
|
+
return [];
|
|
6788
|
+
}
|
|
6789
|
+
return this.context[PROCESSED_FILE_NAMES_KEY];
|
|
6790
|
+
}
|
|
6791
|
+
/**
|
|
6792
|
+
* Adds the processed file names to the session state.
|
|
6793
|
+
*/
|
|
6794
|
+
addProcessedFileNames(fileNames) {
|
|
6795
|
+
if (!(PROCESSED_FILE_NAMES_KEY in this.context)) {
|
|
6796
|
+
this.context[PROCESSED_FILE_NAMES_KEY] = [];
|
|
6797
|
+
}
|
|
6798
|
+
this.context[PROCESSED_FILE_NAMES_KEY].push(...fileNames);
|
|
6799
|
+
}
|
|
6800
|
+
/**
|
|
6801
|
+
* Gets the code executor input files from the session state.
|
|
6802
|
+
*/
|
|
6803
|
+
getInputFiles() {
|
|
6804
|
+
if (!(INPUT_FILE_KEY in this.sessionState)) {
|
|
6805
|
+
return [];
|
|
6806
|
+
}
|
|
6807
|
+
return this.sessionState[INPUT_FILE_KEY].map(
|
|
6808
|
+
(file) => file
|
|
6809
|
+
);
|
|
6810
|
+
}
|
|
6811
|
+
/**
|
|
6812
|
+
* Adds the input files to the code executor context.
|
|
6813
|
+
*/
|
|
6814
|
+
addInputFiles(inputFiles) {
|
|
6815
|
+
if (!(INPUT_FILE_KEY in this.sessionState)) {
|
|
6816
|
+
this.sessionState[INPUT_FILE_KEY] = [];
|
|
6817
|
+
}
|
|
6818
|
+
const fileArray = this.sessionState[INPUT_FILE_KEY];
|
|
6819
|
+
for (const inputFile of inputFiles) {
|
|
6820
|
+
fileArray.push({
|
|
6821
|
+
name: inputFile.name,
|
|
6822
|
+
content: inputFile.content,
|
|
6823
|
+
mimeType: inputFile.mimeType
|
|
6824
|
+
});
|
|
6825
|
+
}
|
|
6826
|
+
}
|
|
6827
|
+
/**
|
|
6828
|
+
* Removes the input files and processed file names from the code executor context.
|
|
6829
|
+
*/
|
|
6830
|
+
clearInputFiles() {
|
|
6831
|
+
if (INPUT_FILE_KEY in this.sessionState) {
|
|
6832
|
+
this.sessionState[INPUT_FILE_KEY] = [];
|
|
6833
|
+
}
|
|
6834
|
+
if (PROCESSED_FILE_NAMES_KEY in this.context) {
|
|
6835
|
+
this.context[PROCESSED_FILE_NAMES_KEY] = [];
|
|
6836
|
+
}
|
|
6837
|
+
}
|
|
6838
|
+
/**
|
|
6839
|
+
* Gets the error count from the session state.
|
|
6840
|
+
*/
|
|
6841
|
+
getErrorCount(invocationId) {
|
|
6842
|
+
if (!(ERROR_COUNT_KEY in this.sessionState)) {
|
|
6843
|
+
return 0;
|
|
6844
|
+
}
|
|
6845
|
+
const errorCounts = this.sessionState[ERROR_COUNT_KEY];
|
|
6846
|
+
return _nullishCoalesce(errorCounts[invocationId], () => ( 0));
|
|
6847
|
+
}
|
|
6848
|
+
/**
|
|
6849
|
+
* Increments the error count for the given invocation ID.
|
|
6850
|
+
*/
|
|
6851
|
+
incrementErrorCount(invocationId) {
|
|
6852
|
+
if (!(ERROR_COUNT_KEY in this.sessionState)) {
|
|
6853
|
+
this.sessionState[ERROR_COUNT_KEY] = {};
|
|
6854
|
+
}
|
|
6855
|
+
const errorCounts = this.sessionState[ERROR_COUNT_KEY];
|
|
6856
|
+
errorCounts[invocationId] = this.getErrorCount(invocationId) + 1;
|
|
6857
|
+
}
|
|
6858
|
+
/**
|
|
6859
|
+
* Resets the error count for the given invocation ID.
|
|
6860
|
+
*/
|
|
6861
|
+
resetErrorCount(invocationId) {
|
|
6862
|
+
if (!(ERROR_COUNT_KEY in this.sessionState)) {
|
|
6863
|
+
return;
|
|
6864
|
+
}
|
|
6865
|
+
const errorCounts = this.sessionState[ERROR_COUNT_KEY];
|
|
6866
|
+
if (invocationId in errorCounts) {
|
|
6867
|
+
delete errorCounts[invocationId];
|
|
6868
|
+
}
|
|
6869
|
+
}
|
|
6870
|
+
/**
|
|
6871
|
+
* Updates the code execution result.
|
|
6872
|
+
*/
|
|
6873
|
+
updateCodeExecutionResult(invocationId, code, resultStdout, resultStderr) {
|
|
6874
|
+
if (!(CODE_EXECUTION_RESULTS_KEY in this.sessionState)) {
|
|
6875
|
+
this.sessionState[CODE_EXECUTION_RESULTS_KEY] = {};
|
|
6876
|
+
}
|
|
6877
|
+
const results = this.sessionState[CODE_EXECUTION_RESULTS_KEY];
|
|
6878
|
+
if (!(invocationId in results)) {
|
|
6879
|
+
results[invocationId] = [];
|
|
6880
|
+
}
|
|
6881
|
+
results[invocationId].push({
|
|
6882
|
+
code,
|
|
6883
|
+
resultStdout,
|
|
6884
|
+
resultStderr,
|
|
6885
|
+
timestamp: Math.floor(Date.now() / 1e3)
|
|
6886
|
+
});
|
|
6887
|
+
}
|
|
6888
|
+
/**
|
|
6889
|
+
* Gets the code executor context from the session state.
|
|
6890
|
+
*/
|
|
6891
|
+
getCodeExecutorContext(sessionState) {
|
|
6892
|
+
if (!(CONTEXT_KEY in sessionState)) {
|
|
6893
|
+
sessionState[CONTEXT_KEY] = {};
|
|
6894
|
+
}
|
|
6895
|
+
return sessionState[CONTEXT_KEY];
|
|
6896
|
+
}
|
|
6897
|
+
};
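CodeExecutorContext above is bookkeeping over plain session state, keyed by the constants at the top of the file. A short sketch of the pieces code-execution.ts relies on (a plain object stands in for the real session state type; the export path is assumed):

import { CodeExecutorContext } from "@iqai/adk"; // assumed export

const sessionState: Record<string, any> = {};
const ctx = new CodeExecutorContext(sessionState);

ctx.incrementErrorCount("inv-1");
ctx.getErrorCount("inv-1");                 // 1, compared against errorRetryAttempts
ctx.updateCodeExecutionResult("inv-1", "print(1)", "1\n", "");
ctx.resetErrorCount("inv-1");

// getStateDelta() returns { _code_execution_context: {...} }, which the
// post-processor attaches to the result event's stateDelta.
const delta = ctx.getStateDelta();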
|
|
6898
|
+
|
|
6899
|
+
// src/flows/llm-flows/code-execution.ts
|
|
6900
|
+
var DATA_FILE_UTIL_MAP = {
|
|
6901
|
+
"text/csv": {
|
|
6902
|
+
extension: ".csv",
|
|
6903
|
+
loaderCodeTemplate: "pd.read_csv('{filename}')"
|
|
6904
|
+
}
|
|
6905
|
+
};
|
|
6906
|
+
var DATA_FILE_HELPER_LIB = `
|
|
6907
|
+
import pandas as pd
|
|
6908
|
+
|
|
6909
|
+
def explore_df(df: pd.DataFrame) -> None:
|
|
6910
|
+
"""Prints some information about a pandas DataFrame."""
|
|
6911
|
+
|
|
6912
|
+
with pd.option_context(
|
|
6913
|
+
'display.max_columns', None, 'display.expand_frame_repr', False
|
|
6914
|
+
):
|
|
6915
|
+
# Print the column names to never encounter KeyError when selecting one.
|
|
6916
|
+
df_dtypes = df.dtypes
|
|
6917
|
+
|
|
6918
|
+
# Obtain information about data types and missing values.
|
|
6919
|
+
df_nulls = (len(df) - df.isnull().sum()).apply(
|
|
6920
|
+
lambda x: f'{x} / {df.shape[0]} non-null'
|
|
6921
|
+
)
|
|
6922
|
+
|
|
6923
|
+
# Explore unique total values in columns using \`.unique()\`.
|
|
6924
|
+
df_unique_count = df.apply(lambda x: len(x.unique()))
|
|
6925
|
+
|
|
6926
|
+
# Explore unique values in columns using \`.unique()\`.
|
|
6927
|
+
df_unique = df.apply(lambda x: crop(str(list(x.unique()))))
|
|
6928
|
+
|
|
6929
|
+
df_info = pd.concat(
|
|
6930
|
+
(
|
|
6931
|
+
df_dtypes.rename('Dtype'),
|
|
6932
|
+
df_nulls.rename('Non-Null Count'),
|
|
6933
|
+
df_unique_count.rename('Unique Values Count'),
|
|
6934
|
+
df_unique.rename('Unique Values'),
|
|
6935
|
+
),
|
|
6936
|
+
axis=1,
|
|
6937
|
+
)
|
|
6938
|
+
df_info.index.name = 'Columns'
|
|
6939
|
+
print(f"""Total rows: {df.shape[0]}
|
|
6940
|
+
Total columns: {df.shape[1]}
|
|
6941
|
+
|
|
6942
|
+
{df_info}""")
|
|
6943
|
+
|
|
6944
|
+
def crop(text: str, max_length: int = 100) -> str:
|
|
6945
|
+
"""Crop text to maximum length with ellipsis."""
|
|
6946
|
+
return text if len(text) <= max_length else text[:max_length] + "..."
|
|
6947
|
+
`;
|
|
6948
|
+
function hasCodeExecutor(agent) {
|
|
6949
|
+
return agent && typeof agent === "object" && "codeExecutor" in agent;
|
|
6950
|
+
}
|
|
6951
|
+
var CodeExecutionRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
6651
6952
|
async *runAsync(invocationContext, llmRequest) {
|
|
6652
6953
|
const agent = invocationContext.agent;
|
|
6653
|
-
if (!
|
|
6954
|
+
if (!hasCodeExecutor(agent)) {
|
|
6654
6955
|
return;
|
|
6655
6956
|
}
|
|
6656
|
-
|
|
6657
|
-
|
|
6658
|
-
|
|
6659
|
-
|
|
6957
|
+
if (!(agent instanceof LlmAgent) || !agent.codeExecutor) {
|
|
6958
|
+
return;
|
|
6959
|
+
}
|
|
6960
|
+
yield* runPreProcessor(invocationContext, llmRequest);
|
|
6961
|
+
if (!(agent.codeExecutor instanceof BaseCodeExecutor)) {
|
|
6962
|
+
return;
|
|
6963
|
+
}
|
|
6964
|
+
for (const content of llmRequest.contents || []) {
|
|
6965
|
+
CodeExecutionUtils.convertCodeExecutionParts(
|
|
6966
|
+
content,
|
|
6967
|
+
agent.codeExecutor.codeBlockDelimiters[0] || ["", ""],
|
|
6968
|
+
agent.codeExecutor.executionResultDelimiters
|
|
6660
6969
|
);
|
|
6661
|
-
|
|
6662
|
-
|
|
6663
|
-
|
|
6664
|
-
|
|
6665
|
-
|
|
6666
|
-
|
|
6970
|
+
}
|
|
6971
|
+
}
|
|
6972
|
+
};
|
|
6973
|
+
var CodeExecutionResponseProcessor = class extends BaseLlmResponseProcessor {
|
|
6974
|
+
async *runAsync(invocationContext, llmResponse) {
|
|
6975
|
+
if (llmResponse.partial) {
|
|
6976
|
+
return;
|
|
6977
|
+
}
|
|
6978
|
+
yield* runPostProcessor(invocationContext, llmResponse);
|
|
6979
|
+
}
|
|
6980
|
+
};
|
|
6981
|
+
async function* runPreProcessor(invocationContext, llmRequest) {
|
|
6982
|
+
const agent = invocationContext.agent;
|
|
6983
|
+
if (!hasCodeExecutor(agent)) {
|
|
6984
|
+
return;
|
|
6985
|
+
}
|
|
6986
|
+
const codeExecutor = agent.codeExecutor;
|
|
6987
|
+
if (!codeExecutor || !(codeExecutor instanceof BaseCodeExecutor)) {
|
|
6988
|
+
return;
|
|
6989
|
+
}
|
|
6990
|
+
if (codeExecutor instanceof BuiltInCodeExecutor) {
|
|
6991
|
+
codeExecutor.processLlmRequest(llmRequest);
|
|
6992
|
+
return;
|
|
6993
|
+
}
|
|
6994
|
+
if (!codeExecutor.optimizeDataFile) {
|
|
6995
|
+
return;
|
|
6996
|
+
}
|
|
6997
|
+
const codeExecutorContext = new CodeExecutorContext(
|
|
6998
|
+
invocationContext.session.state
|
|
6999
|
+
// Type assertion for State compatibility
|
|
7000
|
+
);
|
|
7001
|
+
if (codeExecutorContext.getErrorCount(invocationContext.invocationId) >= codeExecutor.errorRetryAttempts) {
|
|
7002
|
+
return;
|
|
7003
|
+
}
|
|
7004
|
+
const allInputFiles = extractAndReplaceInlineFiles(
|
|
7005
|
+
codeExecutorContext,
|
|
7006
|
+
llmRequest
|
|
7007
|
+
);
|
|
7008
|
+
const processedFileNames = new Set(
|
|
7009
|
+
codeExecutorContext.getProcessedFileNames()
|
|
7010
|
+
);
|
|
7011
|
+
const filesToProcess = allInputFiles.filter(
|
|
7012
|
+
(f) => !processedFileNames.has(f.name)
|
|
7013
|
+
);
|
|
7014
|
+
for (const file of filesToProcess) {
|
|
7015
|
+
const codeStr = getDataFilePreprocessingCode(file);
|
|
7016
|
+
if (!codeStr) {
|
|
7017
|
+
continue;
|
|
7018
|
+
}
|
|
7019
|
+
const codeContent = {
|
|
7020
|
+
role: "model",
|
|
7021
|
+
parts: [
|
|
7022
|
+
{ text: `Processing input file: \`${file.name}\`` },
|
|
7023
|
+
CodeExecutionUtils.buildExecutableCodePart(codeStr)
|
|
7024
|
+
]
|
|
7025
|
+
};
|
|
7026
|
+
llmRequest.contents = llmRequest.contents || [];
|
|
7027
|
+
llmRequest.contents.push(structuredClone(codeContent));
|
|
7028
|
+
yield new Event({
|
|
7029
|
+
invocationId: invocationContext.invocationId,
|
|
7030
|
+
author: agent.name,
|
|
7031
|
+
branch: invocationContext.branch,
|
|
7032
|
+
content: codeContent
|
|
7033
|
+
});
|
|
7034
|
+
const codeExecutionResult = await codeExecutor.executeCode(
|
|
7035
|
+
invocationContext,
|
|
7036
|
+
{
|
|
7037
|
+
code: codeStr,
|
|
7038
|
+
inputFiles: [file],
|
|
7039
|
+
executionId: getOrSetExecutionId(
|
|
7040
|
+
invocationContext,
|
|
7041
|
+
codeExecutorContext
|
|
7042
|
+
)
|
|
6667
7043
|
}
|
|
6668
|
-
|
|
7044
|
+
);
|
|
7045
|
+
codeExecutorContext.updateCodeExecutionResult(
|
|
7046
|
+
invocationContext.invocationId,
|
|
7047
|
+
codeStr,
|
|
7048
|
+
codeExecutionResult.stdout,
|
|
7049
|
+
codeExecutionResult.stderr
|
|
7050
|
+
);
|
|
7051
|
+
codeExecutorContext.addProcessedFileNames([file.name]);
|
|
7052
|
+
const executionResultEvent = await postProcessCodeExecutionResult(
|
|
7053
|
+
invocationContext,
|
|
7054
|
+
codeExecutorContext,
|
|
7055
|
+
codeExecutionResult
|
|
7056
|
+
);
|
|
7057
|
+
yield executionResultEvent;
|
|
7058
|
+
llmRequest.contents.push(structuredClone(executionResultEvent.content));
|
|
7059
|
+
}
|
|
7060
|
+
}
|
|
7061
|
+
async function* runPostProcessor(invocationContext, llmResponse) {
|
|
7062
|
+
const agent = invocationContext.agent;
|
|
7063
|
+
if (!hasCodeExecutor(agent)) {
|
|
7064
|
+
return;
|
|
7065
|
+
}
|
|
7066
|
+
const codeExecutor = agent.codeExecutor;
|
|
7067
|
+
if (!(codeExecutor instanceof BaseCodeExecutor)) {
|
|
7068
|
+
return;
|
|
7069
|
+
}
|
|
7070
|
+
if (!llmResponse || !llmResponse.content) {
|
|
7071
|
+
return;
|
|
7072
|
+
}
|
|
7073
|
+
if (codeExecutor instanceof BuiltInCodeExecutor) {
|
|
7074
|
+
return;
|
|
7075
|
+
}
|
|
7076
|
+
const codeExecutorContext = new CodeExecutorContext(
|
|
7077
|
+
invocationContext.session.state
|
|
7078
|
+
// Type assertion for State compatibility
|
|
7079
|
+
);
|
|
7080
|
+
if (codeExecutorContext.getErrorCount(invocationContext.invocationId) >= codeExecutor.errorRetryAttempts) {
|
|
7081
|
+
return;
|
|
7082
|
+
}
|
|
7083
|
+
const responseContent = llmResponse.content;
|
|
7084
|
+
const codeStr = CodeExecutionUtils.extractCodeAndTruncateContent(
|
|
7085
|
+
responseContent,
|
|
7086
|
+
codeExecutor.codeBlockDelimiters
|
|
7087
|
+
);
|
|
7088
|
+
if (!codeStr) {
|
|
7089
|
+
return;
|
|
7090
|
+
}
|
|
7091
|
+
yield new Event({
|
|
7092
|
+
invocationId: invocationContext.invocationId,
|
|
7093
|
+
author: agent.name,
|
|
7094
|
+
branch: invocationContext.branch,
|
|
7095
|
+
content: responseContent,
|
|
7096
|
+
actions: new EventActions()
|
|
7097
|
+
});
|
|
7098
|
+
const codeExecutionResult = await codeExecutor.executeCode(
|
|
7099
|
+
invocationContext,
|
|
7100
|
+
{
|
|
7101
|
+
code: codeStr,
|
|
7102
|
+
inputFiles: codeExecutorContext.getInputFiles(),
|
|
7103
|
+
executionId: getOrSetExecutionId(invocationContext, codeExecutorContext)
|
|
6669
7104
|
}
|
|
6670
|
-
|
|
6671
|
-
|
|
6672
|
-
|
|
6673
|
-
|
|
6674
|
-
|
|
6675
|
-
|
|
6676
|
-
|
|
6677
|
-
|
|
6678
|
-
|
|
6679
|
-
|
|
7105
|
+
);
|
|
7106
|
+
codeExecutorContext.updateCodeExecutionResult(
|
|
7107
|
+
invocationContext.invocationId,
|
|
7108
|
+
codeStr,
|
|
7109
|
+
codeExecutionResult.stdout,
|
|
7110
|
+
codeExecutionResult.stderr
|
|
7111
|
+
);
|
|
7112
|
+
yield await postProcessCodeExecutionResult(
|
|
7113
|
+
invocationContext,
|
|
7114
|
+
codeExecutorContext,
|
|
7115
|
+
codeExecutionResult
|
|
7116
|
+
);
|
|
7117
|
+
llmResponse.content = void 0;
|
|
7118
|
+
}
|
|
7119
|
+
function extractAndReplaceInlineFiles(codeExecutorContext, llmRequest) {
|
|
7120
|
+
const allInputFiles = codeExecutorContext.getInputFiles();
|
|
7121
|
+
const savedFileNames = new Set(allInputFiles.map((f) => f.name));
|
|
7122
|
+
for (let i = 0; i < (_optionalChain([llmRequest, 'access', _214 => _214.contents, 'optionalAccess', _215 => _215.length]) || 0); i++) {
|
|
7123
|
+
const content = llmRequest.contents[i];
|
|
7124
|
+
if (content.role !== "user" || !content.parts) {
|
|
7125
|
+
continue;
|
|
7126
|
+
}
|
|
7127
|
+
for (let j = 0; j < content.parts.length; j++) {
|
|
7128
|
+
const part = content.parts[j];
|
|
7129
|
+
if (!part.inlineData || !(part.inlineData.mimeType in DATA_FILE_UTIL_MAP)) {
|
|
7130
|
+
continue;
|
|
7131
|
+
}
|
|
7132
|
+
const mimeType = part.inlineData.mimeType;
|
|
7133
|
+
const fileName = `data_${i + 1}_${j + 1}${DATA_FILE_UTIL_MAP[mimeType].extension}`;
|
|
7134
|
+
llmRequest.contents[i].parts[j] = {
|
|
7135
|
+
text: `
|
|
7136
|
+
Available file: \`${fileName}\`
|
|
7137
|
+
`
|
|
7138
|
+
};
|
|
7139
|
+
const file = {
|
|
7140
|
+
name: fileName,
|
|
7141
|
+
content: CodeExecutionUtils.getEncodedFileContent(part.inlineData.data),
|
|
7142
|
+
mimeType
|
|
7143
|
+
};
|
|
7144
|
+
if (!savedFileNames.has(fileName)) {
|
|
7145
|
+
codeExecutorContext.addInputFiles([file]);
|
|
7146
|
+
allInputFiles.push(file);
|
|
6680
7147
|
}
|
|
6681
|
-
llmRequest.appendInstructions([instruction]);
|
|
6682
7148
|
}
|
|
6683
|
-
|
|
6684
|
-
|
|
7149
|
+
}
|
|
7150
|
+
return allInputFiles;
|
|
7151
|
+
}
|
|
7152
|
+
function getOrSetExecutionId(invocationContext, codeExecutorContext) {
|
|
7153
|
+
const agent = invocationContext.agent;
|
|
7154
|
+
if (!hasCodeExecutor(agent) || !_optionalChain([agent, 'access', _216 => _216.codeExecutor, 'optionalAccess', _217 => _217.stateful])) {
|
|
7155
|
+
return void 0;
|
|
7156
|
+
}
|
|
7157
|
+
let executionId = codeExecutorContext.getExecutionId();
|
|
7158
|
+
if (!executionId) {
|
|
7159
|
+
executionId = invocationContext.session.id;
|
|
7160
|
+
codeExecutorContext.setExecutionId(executionId);
|
|
7161
|
+
}
|
|
7162
|
+
return executionId;
|
|
7163
|
+
}
|
|
7164
|
+
async function postProcessCodeExecutionResult(invocationContext, codeExecutorContext, codeExecutionResult) {
|
|
7165
|
+
if (!invocationContext.artifactService) {
|
|
7166
|
+
throw new Error("Artifact service is not initialized.");
|
|
7167
|
+
}
|
|
7168
|
+
const resultContent = {
|
|
7169
|
+
role: "model",
|
|
7170
|
+
parts: [
|
|
7171
|
+
CodeExecutionUtils.buildCodeExecutionResultPart(codeExecutionResult)
|
|
7172
|
+
]
|
|
7173
|
+
};
|
|
7174
|
+
const eventActions = new EventActions({
|
|
7175
|
+
stateDelta: codeExecutorContext.getStateDelta()
|
|
7176
|
+
});
|
|
7177
|
+
if (codeExecutionResult.stderr) {
|
|
7178
|
+
codeExecutorContext.incrementErrorCount(invocationContext.invocationId);
|
|
7179
|
+
} else {
|
|
7180
|
+
codeExecutorContext.resetErrorCount(invocationContext.invocationId);
|
|
7181
|
+
}
|
|
7182
|
+
for (const outputFile of codeExecutionResult.outputFiles) {
|
|
7183
|
+
const version = await invocationContext.artifactService.saveArtifact({
|
|
7184
|
+
appName: invocationContext.appName,
|
|
7185
|
+
userId: invocationContext.userId,
|
|
7186
|
+
sessionId: invocationContext.session.id,
|
|
7187
|
+
filename: outputFile.name,
|
|
7188
|
+
artifact: {
|
|
7189
|
+
inlineData: {
|
|
7190
|
+
data: atob(outputFile.content),
|
|
7191
|
+
// Convert from base64
|
|
7192
|
+
mimeType: outputFile.mimeType
|
|
7193
|
+
}
|
|
7194
|
+
}
|
|
7195
|
+
});
|
|
7196
|
+
eventActions.artifactDelta[outputFile.name] = version;
|
|
7197
|
+
}
|
|
7198
|
+
return new Event({
|
|
7199
|
+
invocationId: invocationContext.invocationId,
|
|
7200
|
+
author: invocationContext.agent.name,
|
|
7201
|
+
branch: invocationContext.branch,
|
|
7202
|
+
content: resultContent,
|
|
7203
|
+
actions: eventActions
|
|
7204
|
+
});
|
|
7205
|
+
}
|
|
7206
|
+
function getDataFilePreprocessingCode(file) {
|
|
7207
|
+
function getNormalizedFileName(fileName) {
|
|
7208
|
+
const baseName = fileName.split(".")[0];
|
|
7209
|
+
let varName2 = baseName.replace(/[^a-zA-Z0-9_]/g, "_");
|
|
7210
|
+
if (/^\d/.test(varName2)) {
|
|
7211
|
+
varName2 = `_${varName2}`;
|
|
6685
7212
|
}
|
|
7213
|
+
return varName2;
|
|
6686
7214
|
}
|
|
6687
|
-
|
|
6688
|
-
|
|
6689
|
-
*/
|
|
6690
|
-
isLlmAgent(agent) {
|
|
6691
|
-
return agent && typeof agent === "object" && "canonicalModel" in agent;
|
|
7215
|
+
if (!(file.mimeType in DATA_FILE_UTIL_MAP)) {
|
|
7216
|
+
return void 0;
|
|
6692
7217
|
}
|
|
6693
|
-
|
|
6694
|
-
|
|
7218
|
+
const varName = getNormalizedFileName(file.name);
|
|
7219
|
+
const loaderCode = DATA_FILE_UTIL_MAP[file.mimeType].loaderCodeTemplate.replace("{filename}", file.name);
|
|
7220
|
+
return `
|
|
7221
|
+
${DATA_FILE_HELPER_LIB}
|
|
7222
|
+
|
|
7223
|
+
# Load the dataframe.
|
|
7224
|
+
${varName} = ${loaderCode}
|
|
7225
|
+
|
|
7226
|
+
# Use \`explore_df\` to guide my analysis.
|
|
7227
|
+
explore_df(${varName})
|
|
7228
|
+
`;
|
|
7229
|
+
}
|
|
7230
|
+
var requestProcessor3 = new CodeExecutionRequestProcessor();
|
|
7231
|
+
var responseProcessor = new CodeExecutionResponseProcessor();
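For data-file optimization, the pre-processor above replaces each inline CSV with an "Available file" text part and asks the executor to run a generated exploration snippet. For a hypothetical file named sales.csv, getDataFilePreprocessingCode expands the pd.read_csv loader template roughly as follows (shown as a TypeScript template literal; the pandas helper prelude from DATA_FILE_HELPER_LIB is elided):

const expectedSnippet = `
# ...DATA_FILE_HELPER_LIB prelude elided...

# Load the dataframe.
sales = pd.read_csv('sales.csv')

# Use \`explore_df\` to guide my analysis.
explore_df(sales)
`;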
|
|
6695
7232
|
|
|
6696
7233
|
// src/flows/llm-flows/contents.ts
|
|
6697
7234
|
var ContentLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
@@ -6724,7 +7261,7 @@ var ContentLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
|
6724
7261
|
return agent && typeof agent === "object" && "canonicalModel" in agent;
|
|
6725
7262
|
}
|
|
6726
7263
|
};
|
|
6727
|
-
var
|
|
7264
|
+
var requestProcessor4 = new ContentLlmRequestProcessor();
|
|
6728
7265
|
function rearrangeEventsForAsyncFunctionResponsesInHistory(events) {
|
|
6729
7266
|
const functionCallIdToResponseEventsIndex = {};
|
|
6730
7267
|
for (let i = 0; i < events.length; i++) {
|
|
@@ -6845,7 +7382,7 @@ function rearrangeEventsForLatestFunctionResponse(events) {
|
|
|
6845
7382
|
continue;
|
|
6846
7383
|
}
|
|
6847
7384
|
const functionResponses2 = event.getFunctionResponses();
|
|
6848
|
-
if (_optionalChain([functionResponses2, 'optionalAccess',
|
|
7385
|
+
if (_optionalChain([functionResponses2, 'optionalAccess', _218 => _218.some, 'call', _219 => _219((fr) => fr.id && functionResponsesIds.has(fr.id))])) {
|
|
6849
7386
|
functionResponseEvents.push(event);
|
|
6850
7387
|
}
|
|
6851
7388
|
}
|
|
@@ -6944,7 +7481,7 @@ function mergeFunctionResponseEvents(functionResponseEvents) {
|
|
|
6944
7481
|
const partIndicesInMergedEvent = {};
|
|
6945
7482
|
for (let idx = 0; idx < partsInMergedEvent.length; idx++) {
|
|
6946
7483
|
const part = partsInMergedEvent[idx];
|
|
6947
|
-
if (_optionalChain([part, 'access',
|
|
7484
|
+
if (_optionalChain([part, 'access', _220 => _220.functionResponse, 'optionalAccess', _221 => _221.id])) {
|
|
6948
7485
|
partIndicesInMergedEvent[part.functionResponse.id] = idx;
|
|
6949
7486
|
}
|
|
6950
7487
|
}
|
|
@@ -6953,7 +7490,7 @@ function mergeFunctionResponseEvents(functionResponseEvents) {
|
|
|
6953
7490
|
throw new Error("There should be at least one function_response part.");
|
|
6954
7491
|
}
|
|
6955
7492
|
for (const part of event.content.parts) {
|
|
6956
|
-
if (_optionalChain([part, 'access',
|
|
7493
|
+
if (_optionalChain([part, 'access', _222 => _222.functionResponse, 'optionalAccess', _223 => _223.id])) {
|
|
6957
7494
|
const functionCallId = part.functionResponse.id;
|
|
6958
7495
|
if (functionCallId in partIndicesInMergedEvent) {
|
|
6959
7496
|
partsInMergedEvent[partIndicesInMergedEvent[functionCallId]] = part;
|
|
@@ -6989,6 +7526,151 @@ function isAuthEvent(event) {
|
|
|
6989
7526
|
return false;
|
|
6990
7527
|
}
|
|
6991
7528
|
|
|
7529
|
+
// src/flows/llm-flows/identity.ts
|
|
7530
|
+
var IdentityLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
7531
|
+
async *runAsync(invocationContext, llmRequest) {
|
|
7532
|
+
const agent = invocationContext.agent;
|
|
7533
|
+
const instructions = [
|
|
7534
|
+
`You are an agent. Your internal name is "${agent.name}".`
|
|
7535
|
+
];
|
|
7536
|
+
if (agent.description) {
|
|
7537
|
+
instructions.push(` The description about you is "${agent.description}"`);
|
|
7538
|
+
}
|
|
7539
|
+
llmRequest.appendInstructions(instructions);
|
|
7540
|
+
for await (const _ of []) {
|
|
7541
|
+
yield _;
|
|
7542
|
+
}
|
|
7543
|
+
}
|
|
7544
|
+
};
|
|
7545
|
+
var requestProcessor5 = new IdentityLlmRequestProcessor();
|
|
7546
|
+
|
|
7547
|
+
// src/utils/instructions-utils.ts
|
|
7548
|
+
async function injectSessionState(template, readonlyContext) {
|
|
7549
|
+
const invocationContext = readonlyContext._invocationContext;
|
|
7550
|
+
async function asyncReplace(pattern, replaceAsyncFn, string) {
|
|
7551
|
+
const result = [];
|
|
7552
|
+
let lastEnd = 0;
|
|
7553
|
+
const matches = Array.from(string.matchAll(pattern));
|
|
7554
|
+
for (const match of matches) {
|
|
7555
|
+
result.push(string.slice(lastEnd, match.index));
|
|
7556
|
+
const replacement = await replaceAsyncFn(match);
|
|
7557
|
+
result.push(replacement);
|
|
7558
|
+
lastEnd = (match.index || 0) + match[0].length;
|
|
7559
|
+
}
|
|
7560
|
+
result.push(string.slice(lastEnd));
|
|
7561
|
+
return result.join("");
|
|
7562
|
+
}
|
|
7563
|
+
async function replaceMatch(match) {
|
|
7564
|
+
let varName = match[0].replace(/[{}]/g, "").trim();
|
|
7565
|
+
let optional = false;
|
|
7566
|
+
if (varName.endsWith("?")) {
|
|
7567
|
+
optional = true;
|
|
7568
|
+
varName = varName.slice(0, -1);
|
|
7569
|
+
}
|
|
7570
|
+
if (varName.startsWith("artifact.")) {
|
|
7571
|
+
varName = varName.replace("artifact.", "");
|
|
7572
|
+
if (!invocationContext.artifactService) {
|
|
7573
|
+
throw new Error("Artifact service is not initialized.");
|
|
7574
|
+
}
|
|
7575
|
+
try {
|
|
7576
|
+
const artifact = await invocationContext.artifactService.loadArtifact({
|
|
7577
|
+
appName: invocationContext.session.appName,
|
|
7578
|
+
userId: invocationContext.session.userId,
|
|
7579
|
+
sessionId: invocationContext.session.id,
|
|
7580
|
+
filename: varName
|
|
7581
|
+
});
|
|
7582
|
+
if (!artifact) {
|
|
7583
|
+
throw new Error(`Artifact ${varName} not found.`);
|
|
7584
|
+
}
|
|
7585
|
+
return String(artifact);
|
|
7586
|
+
} catch (error) {
|
|
7587
|
+
if (optional) {
|
|
7588
|
+
return "";
|
|
7589
|
+
}
|
|
7590
|
+
throw error;
|
|
7591
|
+
}
|
|
7592
|
+
} else {
|
|
7593
|
+
if (!isValidStateName(varName)) {
|
|
7594
|
+
return match[0];
|
|
7595
|
+
}
|
|
7596
|
+
const sessionState = invocationContext.session.state;
|
|
7597
|
+
if (varName in sessionState) {
|
|
7598
|
+
return String(sessionState[varName]);
|
|
7599
|
+
}
|
|
7600
|
+
if (optional) {
|
|
7601
|
+
return "";
|
|
7602
|
+
}
|
|
7603
|
+
throw new Error(`Context variable not found: \`${varName}\`.`);
|
|
7604
|
+
}
|
|
7605
|
+
}
|
|
7606
|
+
return await asyncReplace(/{[^{}]*}/g, replaceMatch, template);
|
|
7607
|
+
}
|
|
7608
|
+
function isValidStateName(varName) {
|
|
7609
|
+
const parts = varName.split(":");
|
|
7610
|
+
if (parts.length === 1) {
|
|
7611
|
+
return isValidIdentifier(varName);
|
|
7612
|
+
}
|
|
7613
|
+
if (parts.length === 2) {
|
|
7614
|
+
const validPrefixes = ["app:", "user:", "temp:"];
|
|
7615
|
+
const prefix = `${parts[0]}:`;
|
|
7616
|
+
if (validPrefixes.includes(prefix)) {
|
|
7617
|
+
return isValidIdentifier(parts[1]);
|
|
7618
|
+
}
|
|
7619
|
+
}
|
|
7620
|
+
return false;
|
|
7621
|
+
}
|
|
7622
|
+
function isValidIdentifier(name) {
|
|
7623
|
+
const identifierRegex = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/;
|
|
7624
|
+
return identifierRegex.test(name);
|
|
7625
|
+
}
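injectSessionState above resolves {placeholder} expressions in instruction templates: bare names and app:/user:/temp:-prefixed names read from session state, an artifact. prefix loads through the artifact service, and a trailing ? lets a missing value render as an empty string instead of throwing. A small sketch, assuming the function is reachable from the package root and that readonlyContext is a ReadonlyContext built from the current invocation:

// Session state: { "user:name": "Ada", topic: "tides" }
const template = "Help {user:name} with {topic}. Notes: {artifact.notes.txt?}";

// {user:name} and {topic} come from session state; {artifact.notes.txt?} is loaded
// via the artifact service, and the trailing "?" turns a missing artifact into "".
const rendered = await injectSessionState(template, readonlyContext);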
|
|
7626
|
+
|
|
7627
|
+
// src/flows/llm-flows/instructions.ts
|
|
7628
|
+
var InstructionsLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
7629
|
+
async *runAsync(invocationContext, llmRequest) {
|
|
7630
|
+
const agent = invocationContext.agent;
|
|
7631
|
+
if (!this.isLlmAgent(agent)) {
|
|
7632
|
+
return;
|
|
7633
|
+
}
|
|
7634
|
+
const rootAgent = agent.rootAgent;
|
|
7635
|
+
if (this.isLlmAgent(rootAgent) && rootAgent.globalInstruction) {
|
|
7636
|
+
const [rawInstruction, bypassStateInjection] = await rootAgent.canonicalGlobalInstruction(
|
|
7637
|
+
new ReadonlyContext(invocationContext)
|
|
7638
|
+
);
|
|
7639
|
+
let instruction = rawInstruction;
|
|
7640
|
+
if (!bypassStateInjection) {
|
|
7641
|
+
instruction = await injectSessionState(
|
|
7642
|
+
rawInstruction,
|
|
7643
|
+
new ReadonlyContext(invocationContext)
|
|
7644
|
+
);
|
|
7645
|
+
}
|
|
7646
|
+
llmRequest.appendInstructions([instruction]);
|
|
7647
|
+
}
|
|
7648
|
+
if (agent.instruction) {
|
|
7649
|
+
const [rawInstruction, bypassStateInjection] = await agent.canonicalInstruction(
|
|
7650
|
+
new ReadonlyContext(invocationContext)
|
|
7651
|
+
);
|
|
7652
|
+
let instruction = rawInstruction;
|
|
7653
|
+
if (!bypassStateInjection) {
|
|
7654
|
+
instruction = await injectSessionState(
|
|
7655
|
+
rawInstruction,
|
|
7656
|
+
new ReadonlyContext(invocationContext)
|
|
7657
|
+
);
|
|
7658
|
+
}
|
|
7659
|
+
llmRequest.appendInstructions([instruction]);
|
|
7660
|
+
}
|
|
7661
|
+
for await (const _ of []) {
|
|
7662
|
+
yield _;
|
|
7663
|
+
}
|
|
7664
|
+
}
|
|
7665
|
+
/**
|
|
7666
|
+
* Type guard to check if agent is an LlmAgent
|
|
7667
|
+
*/
|
|
7668
|
+
isLlmAgent(agent) {
|
|
7669
|
+
return agent && typeof agent === "object" && "canonicalModel" in agent;
|
|
7670
|
+
}
|
|
7671
|
+
};
|
|
7672
|
+
var requestProcessor6 = new InstructionsLlmRequestProcessor();
|
|
7673
|
+
|
|
6992
7674
|
// src/planners/base-planner.ts
|
|
6993
7675
|
var BasePlanner = class {
|
|
6994
7676
|
};
|
|
@@ -7058,7 +7740,7 @@ var PlanReActPlanner = class extends BasePlanner {
|
|
|
7058
7740
|
let firstFcPartIndex = -1;
|
|
7059
7741
|
for (let i = 0; i < responseParts.length; i++) {
|
|
7060
7742
|
if (responseParts[i].functionCall) {
|
|
7061
|
-
if (!_optionalChain([responseParts, 'access',
|
|
7743
|
+
if (!_optionalChain([responseParts, 'access', _224 => _224[i], 'access', _225 => _225.functionCall, 'optionalAccess', _226 => _226.name])) {
|
|
7062
7744
|
continue;
|
|
7063
7745
|
}
|
|
7064
7746
|
preservedParts.push(responseParts[i]);
|
|
@@ -7097,7 +7779,7 @@ var PlanReActPlanner = class extends BasePlanner {
|
|
|
7097
7779
|
* Handles non-function-call parts of the response
|
|
7098
7780
|
*/
|
|
7099
7781
|
_handleNonFunctionCallParts(responsePart, preservedParts) {
|
|
7100
|
-
if (_optionalChain([responsePart, 'access',
|
|
7782
|
+
if (_optionalChain([responsePart, 'access', _227 => _227.text, 'optionalAccess', _228 => _228.includes, 'call', _229 => _229(FINAL_ANSWER_TAG)])) {
|
|
7101
7783
|
const [reasoningText, finalAnswerText] = this._splitByLastPattern(
|
|
7102
7784
|
responsePart.text,
|
|
7103
7785
|
FINAL_ANSWER_TAG
|
|
@@ -7264,66 +7946,10 @@ function removeThoughtFromRequest(llmRequest) {
|
|
|
7264
7946
|
}
|
|
7265
7947
|
}
|
|
7266
7948
|
}
|
|
7267
|
-
var
|
|
7268
|
-
var
|
|
7269
|
-
|
|
7270
|
-
// src/flows/llm-flows/code-execution.ts
|
|
7271
|
-
var CodeExecutionRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
7272
|
-
async *runAsync(invocationContext, llmRequest) {
|
|
7273
|
-
const agent = invocationContext.agent;
|
|
7274
|
-
if (!("codeExecutor" in agent) || !agent.codeExecutor) {
|
|
7275
|
-
return;
|
|
7276
|
-
}
|
|
7277
|
-
console.log(
|
|
7278
|
-
"Code execution request processing - TODO: Implement when code-executors module is ready"
|
|
7279
|
-
);
|
|
7280
|
-
for await (const _ of []) {
|
|
7281
|
-
yield _;
|
|
7282
|
-
}
|
|
7283
|
-
}
|
|
7284
|
-
/**
|
|
7285
|
-
* Placeholder for pre-processor logic
|
|
7286
|
-
* TODO: Implement when code-executors are ready
|
|
7287
|
-
*/
|
|
7288
|
-
async *runPreProcessor(invocationContext, llmRequest) {
|
|
7289
|
-
console.log("Code execution pre-processor - placeholder");
|
|
7290
|
-
for await (const _ of []) {
|
|
7291
|
-
yield _;
|
|
7292
|
-
}
|
|
7293
|
-
}
|
|
7294
|
-
};
|
|
7295
|
-
var CodeExecutionResponseProcessor = class extends BaseLlmResponseProcessor {
|
|
7296
|
-
async *runAsync(invocationContext, llmResponse) {
|
|
7297
|
-
if (llmResponse.partial) {
|
|
7298
|
-
return;
|
|
7299
|
-
}
|
|
7300
|
-
const agent = invocationContext.agent;
|
|
7301
|
-
if (!("codeExecutor" in agent) || !agent.codeExecutor) {
|
|
7302
|
-
return;
|
|
7303
|
-
}
|
|
7304
|
-
console.log(
|
|
7305
|
-
"Code execution response processing - TODO: Implement when code-executors module is ready"
|
|
7306
|
-
);
|
|
7307
|
-
for await (const _ of []) {
|
|
7308
|
-
yield _;
|
|
7309
|
-
}
|
|
7310
|
-
}
|
|
7311
|
-
/**
|
|
7312
|
-
* Placeholder for post-processor logic
|
|
7313
|
-
* TODO: Implement when code-executors are ready
|
|
7314
|
-
*/
|
|
7315
|
-
async *runPostProcessor(invocationContext, llmResponse) {
|
|
7316
|
-
console.log("Code execution post-processor - placeholder");
|
|
7317
|
-
for await (const _ of []) {
|
|
7318
|
-
yield _;
|
|
7319
|
-
}
|
|
7320
|
-
}
|
|
7321
|
-
};
|
|
7322
|
-
var requestProcessor7 = new CodeExecutionRequestProcessor();
|
|
7323
|
-
-var responseProcessor2 = new CodeExecutionResponseProcessor();
+var requestProcessor7 = new NlPlanningRequestProcessor();
+var responseProcessor2 = new NlPlanningResponseProcessor();
 
 // src/flows/llm-flows/single-flow.ts
-var logger7 = new Logger({ name: "SingleFlow" });
 var SingleFlow = class extends BaseLlmFlow {
   /**
    * Constructor for SingleFlow
@@ -7331,35 +7957,32 @@ var SingleFlow = class extends BaseLlmFlow {
   constructor() {
     super();
     this.requestProcessors.push(
-      requestProcessor,
       requestProcessor2,
+      requestProcessor,
       // Phase 3: Auth preprocessor
-
-      requestProcessor3,
+      requestProcessor6,
       requestProcessor5,
+      requestProcessor4,
       // Some implementations of NL Planning mark planning contents as thoughts
       // in the post processor. Since these need to be unmarked, NL Planning
       // should be after contents.
-
+      requestProcessor7,
       // Phase 5: NL Planning
       // Code execution should be after the contents as it mutates the contents
       // to optimize data files.
-
+      requestProcessor3
       // Phase 5: Code Execution (placeholder)
     );
     this.responseProcessors.push(
-
+      responseProcessor2,
       // Phase 5: NL Planning
-
+      responseProcessor
       // Phase 5: Code Execution (placeholder)
     );
-
+    this.logger.debug("SingleFlow initialized with processors");
   }
 };
 
-// src/flows/llm-flows/auto-flow.ts
-init_logger();
-
 // src/flows/llm-flows/agent-transfer.ts
 var AgentTransferLlmRequestProcessor = class extends BaseLlmRequestProcessor {
   /**
@@ -7449,7 +8072,6 @@ function getTransferTargets(agent) {
 var requestProcessor8 = new AgentTransferLlmRequestProcessor();
 
 // src/flows/llm-flows/auto-flow.ts
-var logger8 = new Logger({ name: "AutoFlow" });
 var AutoFlow = class extends SingleFlow {
   /**
    * Constructor for AutoFlow
@@ -7457,13 +8079,13 @@ var AutoFlow = class extends SingleFlow {
   constructor() {
     super();
     this.requestProcessors.push(requestProcessor8);
-
+    this.logger.debug("AutoFlow initialized with agent transfer capability");
   }
 };
 
 // src/agents/llm-agent.ts
 init_function_tool();
-var LlmAgent = (
+var LlmAgent = (_class23 = class _LlmAgent extends BaseAgent {
   /**
    * The model to use for the agent
    * When not set, the agent will inherit the model from its ancestor
@@ -7482,6 +8104,10 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
    * Tools available to this agent
    */
 
+  /**
+   * Code executor for this agent
+   */
+
   /**
    * Disallows LLM-controlled transferring to the parent agent
    */
@@ -7536,7 +8162,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
   */
 
   // Schema type - depends on specific implementation
-
+  __init42() {this.logger = new Logger({ name: "LlmAgent" })}
   /**
   * Constructor for LlmAgent
   */
@@ -7544,11 +8170,12 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
     super({
      name: config.name,
      description: config.description
-    });
+    });_class23.prototype.__init42.call(this);;
     this.model = config.model || "";
     this.instruction = config.instruction || "";
     this.globalInstruction = config.globalInstruction || "";
     this.tools = config.tools || [];
+    this.codeExecutor = config.codeExecutor;
     this.disallowTransferToParent = config.disallowTransferToParent || false;
     this.disallowTransferToPeers = config.disallowTransferToPeers || false;
     this.includeContents = config.includeContents || "default";
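The 0.1.6 build threads a per-agent code executor through the constructor (`this.codeExecutor = config.codeExecutor`). A minimal sketch of passing that option, assuming the exported BuiltInCodeExecutor takes no constructor arguments; the agent name, instruction and model id below are placeholders:

    import { BuiltInCodeExecutor, LlmAgent } from "@iqai/adk";

    // Illustrative only: all literal values here are placeholders.
    const coder = new LlmAgent({
      name: "coder",
      description: "Writes and runs small snippets",
      instruction: "Verify numeric answers by executing code.",
      model: "gemini-2.0-flash",
      codeExecutor: new BuiltInCodeExecutor()
    });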
@@ -7568,11 +8195,14 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
   * This method is only for use by Agent Development Kit
   */
   get canonicalModel() {
-    if (typeof this.model
+    if (typeof this.model === "string") {
+      if (this.model) {
+        return LLMRegistry.newLLM(this.model);
+      }
+    } else if (this.model instanceof BaseLlm) {
       return this.model;
-    }
-
-    return LLMRegistry.newLLM(this.model);
+    } else if (this.model) {
+      return new AiSdkLlm(this.model);
     }
     let ancestorAgent = this.parentAgent;
     while (ancestorAgent !== null) {
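canonicalModel now distinguishes three cases: a non-empty string goes through LLMRegistry.newLLM, a BaseLlm instance is returned as-is, and any other truthy value is wrapped in AiSdkLlm; only an unset model falls back to the ancestor chain. A rough sketch of the three forms, where the AI SDK provider import, the constructor arguments and the model ids are assumptions for illustration:

    import { openai } from "@ai-sdk/openai"; // assumed external provider package
    import { LlmAgent, OpenAiLlm } from "@iqai/adk";

    // 1. String id: resolved through LLMRegistry.newLLM("gpt-4o-mini").
    const a1 = new LlmAgent({ name: "a1", model: "gpt-4o-mini" });

    // 2. BaseLlm instance: used directly (constructor argument shown is an assumption).
    const a2 = new LlmAgent({ name: "a2", model: new OpenAiLlm("gpt-4o-mini") });

    // 3. Anything else, e.g. a Vercel AI SDK model, is wrapped in AiSdkLlm internally.
    const a3 = new LlmAgent({ name: "a3", model: openai("gpt-4o-mini") });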
@@ -7626,7 +8256,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
   * This matches the Python implementation's _llm_flow property
   */
   get llmFlow() {
-    if (this.disallowTransferToParent && this.disallowTransferToPeers && !_optionalChain([this, 'access',
+    if (this.disallowTransferToParent && this.disallowTransferToPeers && !_optionalChain([this, 'access', _230 => _230.subAgents, 'optionalAccess', _231 => _231.length])) {
       return new SingleFlow();
     }
     return new AutoFlow();
@@ -7636,7 +8266,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
   * This matches the Python implementation's __maybe_save_output_to_state
   */
   maybeSaveOutputToState(event) {
-    if (this.outputKey && event.isFinalResponse() && _optionalChain([event, 'access',
+    if (this.outputKey && event.isFinalResponse() && _optionalChain([event, 'access', _232 => _232.content, 'optionalAccess', _233 => _233.parts])) {
       const result = event.content.parts.map((part) => part.text || "").join("");
       if (result) {
         if (!event.actions.stateDelta) {
@@ -7676,7 +8306,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
       yield errorEvent;
     }
   }
-},
+}, _class23);
 
 // src/agents/sequential-agent.ts
 var SequentialAgent = class extends BaseAgent {
@@ -7742,11 +8372,11 @@ var LlmCallsLimitExceededError = class extends Error {
     this.name = "LlmCallsLimitExceededError";
   }
 };
-var InvocationCostManager = (
+var InvocationCostManager = (_class24 = class {constructor() { _class24.prototype.__init43.call(this); }
   /**
    * A counter that keeps track of number of llm calls made.
   */
-
+  __init43() {this._numberOfLlmCalls = 0}
   /**
    * Increments _numberOfLlmCalls and enforces the limit.
   */
@@ -7758,11 +8388,11 @@ var InvocationCostManager = (_class22 = class {constructor() { _class22.prototyp
     );
   }
   }
-},
+}, _class24);
 function newInvocationContextId() {
   return `e-${crypto.randomUUID()}`;
 }
-var InvocationContext = (
+var InvocationContext = (_class25 = class _InvocationContext {
 
 
 
@@ -7797,7 +8427,7 @@ var InvocationContext = (_class23 = class _InvocationContext {
   *
   * Set to True in callbacks or tools to terminate this invocation.
   */
-
+  __init44() {this.endInvocation = false}
   /**
   * The queue to receive live requests.
   */
@@ -7818,11 +8448,11 @@ var InvocationContext = (_class23 = class _InvocationContext {
   * A container to keep track of different kinds of costs incurred as a part
   * of this invocation.
   */
-
+  __init45() {this._invocationCostManager = new InvocationCostManager()}
   /**
   * Constructor for InvocationContext
   */
-  constructor(options) {;
+  constructor(options) {;_class25.prototype.__init44.call(this);_class25.prototype.__init45.call(this);
     this.artifactService = options.artifactService;
     this.sessionService = options.sessionService;
     this.memoryService = options.memoryService;
@@ -7882,7 +8512,7 @@ var InvocationContext = (_class23 = class _InvocationContext {
       runConfig: this.runConfig
     });
   }
-},
+}, _class25);
 
 // src/agents/parallel-agent.ts
 function createBranchContextForSubAgent(agent, subAgent, invocationContext) {
@@ -7999,7 +8629,7 @@ var LoopAgent = class extends BaseAgent {
     for (const subAgent of this.subAgents) {
       for await (const event of subAgent.runAsync(ctx)) {
         yield event;
-        if (_optionalChain([event, 'access',
+        if (_optionalChain([event, 'access', _234 => _234.actions, 'optionalAccess', _235 => _235.escalate])) {
           return;
         }
       }
@@ -8017,7 +8647,7 @@ var LoopAgent = class extends BaseAgent {
 
 // src/agents/lang-graph-agent.ts
 init_logger();
-var LangGraphAgent = (
+var LangGraphAgent = (_class26 = class extends BaseAgent {
   /**
    * Graph nodes (agents and their connections)
   */
@@ -8033,8 +8663,8 @@ var LangGraphAgent = (_class24 = class extends BaseAgent {
   /**
    * Results from node executions
   */
-
-
+  __init46() {this.results = []}
+  __init47() {this.logger = new Logger({ name: "LangGraphAgent" })}
   /**
   * Constructor for LangGraphAgent
   */
@@ -8042,7 +8672,7 @@ var LangGraphAgent = (_class24 = class extends BaseAgent {
     super({
      name: config.name,
      description: config.description
-    });
+    });_class26.prototype.__init46.call(this);_class26.prototype.__init47.call(this);;
     this.nodes = /* @__PURE__ */ new Map();
     for (const node of config.nodes) {
       if (this.nodes.has(node.name)) {
@@ -8238,7 +8868,7 @@ var LangGraphAgent = (_class24 = class extends BaseAgent {
     }
     this.maxSteps = maxSteps;
   }
-},
+}, _class26);
 
 // src/runners.ts
 
@@ -8307,17 +8937,17 @@ var RunConfig = class {
   */
 
   constructor(config) {
-    this.speechConfig = _optionalChain([config, 'optionalAccess',
-    this.responseModalities = _optionalChain([config, 'optionalAccess',
-    this.saveInputBlobsAsArtifacts = _optionalChain([config, 'optionalAccess',
-    this.supportCFC = _optionalChain([config, 'optionalAccess',
-    this.streamingMode = _optionalChain([config, 'optionalAccess',
-    this.outputAudioTranscription = _optionalChain([config, 'optionalAccess',
-    this.inputAudioTranscription = _optionalChain([config, 'optionalAccess',
-    this.realtimeInputConfig = _optionalChain([config, 'optionalAccess',
-    this.enableAffectiveDialog = _optionalChain([config, 'optionalAccess',
-    this.proactivity = _optionalChain([config, 'optionalAccess',
-    this.maxLlmCalls = _nullishCoalesce(_optionalChain([config, 'optionalAccess',
+    this.speechConfig = _optionalChain([config, 'optionalAccess', _236 => _236.speechConfig]);
+    this.responseModalities = _optionalChain([config, 'optionalAccess', _237 => _237.responseModalities]);
+    this.saveInputBlobsAsArtifacts = _optionalChain([config, 'optionalAccess', _238 => _238.saveInputBlobsAsArtifacts]) || false;
+    this.supportCFC = _optionalChain([config, 'optionalAccess', _239 => _239.supportCFC]) || false;
+    this.streamingMode = _optionalChain([config, 'optionalAccess', _240 => _240.streamingMode]) || "NONE" /* NONE */;
+    this.outputAudioTranscription = _optionalChain([config, 'optionalAccess', _241 => _241.outputAudioTranscription]);
+    this.inputAudioTranscription = _optionalChain([config, 'optionalAccess', _242 => _242.inputAudioTranscription]);
+    this.realtimeInputConfig = _optionalChain([config, 'optionalAccess', _243 => _243.realtimeInputConfig]);
+    this.enableAffectiveDialog = _optionalChain([config, 'optionalAccess', _244 => _244.enableAffectiveDialog]);
+    this.proactivity = _optionalChain([config, 'optionalAccess', _245 => _245.proactivity]);
+    this.maxLlmCalls = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _246 => _246.maxLlmCalls]), () => ( 500));
     this.validateMaxLlmCalls();
   }
   /**
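The rewritten constructor spells out the defaults: saveInputBlobsAsArtifacts and supportCFC are false, streamingMode falls back to "NONE", and maxLlmCalls defaults to 500 before validateMaxLlmCalls runs. A small sketch of overriding just the call budget, with every other field left to its default:

    import { RunConfig } from "@iqai/adk";

    const defaults = new RunConfig();                  // streamingMode "NONE", maxLlmCalls 500
    const capped = new RunConfig({ maxLlmCalls: 50 }); // other fields keep the defaults above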
@@ -8338,10 +8968,8 @@ var RunConfig = class {
 };
 
 // src/artifacts/in-memory-artifact-service.ts
-
-
-var InMemoryArtifactService = (_class25 = class {constructor() { _class25.prototype.__init45.call(this); }
-  __init45() {this.artifacts = /* @__PURE__ */ new Map()}
+var InMemoryArtifactService = (_class27 = class {constructor() { _class27.prototype.__init48.call(this); }
+  __init48() {this.artifacts = /* @__PURE__ */ new Map()}
   fileHasUserNamespace(filename) {
     return filename.startsWith("user:");
   }
@@ -8414,7 +9042,7 @@ var InMemoryArtifactService = (_class25 = class {constructor() { _class25.protot
     }
     return Array.from({ length: versions.length }, (_, i) => i);
   }
-},
+}, _class27);
 
 // src/runners.ts
 init_logger();
@@ -8441,15 +9069,15 @@ function _extractWordsLower(text) {
   const words = text.match(/[A-Za-z]+/g) || [];
   return new Set(words.map((word) => word.toLowerCase()));
 }
-var InMemoryMemoryService = (
+var InMemoryMemoryService = (_class28 = class {
   /**
    * Keys are app_name/user_id, session_id. Values are session event lists.
   */
-
+  __init49() {this._sessionEvents = /* @__PURE__ */ new Map()}
   /**
   * Constructor for InMemoryMemoryService
   */
-  constructor() {;
+  constructor() {;_class28.prototype.__init49.call(this);
     this._sessionEvents = /* @__PURE__ */ new Map();
   }
   /**
@@ -8463,7 +9091,7 @@ var InMemoryMemoryService = (_class26 = class {
     }
     const userSessions = this._sessionEvents.get(userKey);
     const filteredEvents = session.events.filter(
-      (event) => _optionalChain([event, 'access',
+      (event) => _optionalChain([event, 'access', _247 => _247.content, 'optionalAccess', _248 => _248.parts])
     );
     userSessions.set(session.id, filteredEvents);
   }
@@ -8533,7 +9161,7 @@ var InMemoryMemoryService = (_class26 = class {
   clear() {
     this._sessionEvents.clear();
   }
-},
+}, _class28);
 
 // src/sessions/in-memory-session-service.ts
 var _crypto = require('crypto');
@@ -8575,19 +9203,19 @@ var BaseSessionService = class {
 };
 
 // src/sessions/in-memory-session-service.ts
-var InMemorySessionService = (
+var InMemorySessionService = (_class29 = class extends BaseSessionService {constructor(...args2) { super(...args2); _class29.prototype.__init50.call(this);_class29.prototype.__init51.call(this);_class29.prototype.__init52.call(this); }
   /**
    * A map from app name to a map from user ID to a map from session ID to session.
   */
-
+  __init50() {this.sessions = /* @__PURE__ */ new Map()}
   /**
   * A map from app name to a map from user ID to a map from key to the value.
   */
-
+  __init51() {this.userState = /* @__PURE__ */ new Map()}
   /**
  * A map from app name to a map from key to the value.
  */
-
+  __init52() {this.appState = /* @__PURE__ */ new Map()}
   /**
  * Creates a new session.
  */
@@ -8602,7 +9230,7 @@ var InMemorySessionService = (_class27 = class extends BaseSessionService {const
     return this.createSessionImpl(appName, userId, state, sessionId);
   }
   createSessionImpl(appName, userId, state, sessionId) {
-    const finalSessionId = _optionalChain([sessionId, 'optionalAccess',
+    const finalSessionId = _optionalChain([sessionId, 'optionalAccess', _249 => _249.trim, 'call', _250 => _250()]) || _crypto.randomUUID.call(void 0, );
     const session = {
       appName,
       userId,
@@ -8759,7 +9387,7 @@ var InMemorySessionService = (_class27 = class extends BaseSessionService {const
       warning(`sessionId ${sessionId} not in sessions[appName][userId]`);
       return event;
     }
-    if (_optionalChain([event, 'access',
+    if (_optionalChain([event, 'access', _251 => _251.actions, 'optionalAccess', _252 => _252.stateDelta])) {
       for (const key in event.actions.stateDelta) {
         const value = event.actions.stateDelta[key];
         if (key.startsWith(State.APP_PREFIX)) {
@@ -8784,24 +9412,23 @@ var InMemorySessionService = (_class27 = class extends BaseSessionService {const
     storageSession.lastUpdateTime = event.timestamp;
     return event;
   }
-},
+}, _class29);
 
 // src/runners.ts
-var logger10 = new Logger({ name: "Runner" });
 function _findFunctionCallEventIfLastEventIsFunctionResponse(session) {
   const events = session.events;
   if (!events || events.length === 0) {
     return null;
   }
   const lastEvent = events[events.length - 1];
-  if (_optionalChain([lastEvent, 'access',
-    const functionCallId = _optionalChain([lastEvent, 'access',
+  if (_optionalChain([lastEvent, 'access', _253 => _253.content, 'optionalAccess', _254 => _254.parts, 'optionalAccess', _255 => _255.some, 'call', _256 => _256((part) => part.functionResponse)])) {
+    const functionCallId = _optionalChain([lastEvent, 'access', _257 => _257.content, 'access', _258 => _258.parts, 'access', _259 => _259.find, 'call', _260 => _260(
       (part) => part.functionResponse
-    ), 'optionalAccess',
+    ), 'optionalAccess', _261 => _261.functionResponse, 'optionalAccess', _262 => _262.id]);
     if (!functionCallId) return null;
     for (let i = events.length - 2; i >= 0; i--) {
       const event = events[i];
-      const functionCalls = _optionalChain([event, 'access',
+      const functionCalls = _optionalChain([event, 'access', _263 => _263.getFunctionCalls, 'optionalCall', _264 => _264()]) || [];
       for (const functionCall of functionCalls) {
         if (functionCall.id === functionCallId) {
           return event;
@@ -8811,7 +9438,7 @@ function _findFunctionCallEventIfLastEventIsFunctionResponse(session) {
   }
   return null;
 }
-var Runner = class {
+var Runner = (_class30 = class {
   /**
    * The app name of the runner.
   */
@@ -8832,6 +9459,7 @@ var Runner = class {
   * The memory service for the runner.
   */
 
+  __init53() {this.logger = new Logger({ name: "Runner" })}
   /**
   * Initializes the Runner.
   */
@@ -8841,7 +9469,7 @@ var Runner = class {
     artifactService,
     sessionService,
     memoryService
-  }) {
+  }) {;_class30.prototype.__init53.call(this);
     this.appName = appName;
     this.agent = agent;
     this.artifactService = artifactService;
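Runner gains an instance logger via __init53, but its constructor options are unchanged: appName, agent, and the artifact, session and memory services. A sketch of wiring one up against the in-memory services shipped by the package (agent name, app name and model id are placeholders):

    import {
      InMemoryArtifactService,
      InMemoryMemoryService,
      InMemorySessionService,
      LlmAgent,
      Runner
    } from "@iqai/adk";

    const agent = new LlmAgent({ name: "assistant", model: "gemini-2.0-flash" });

    const runner = new Runner({
      appName: "demo-app",
      agent,
      sessionService: new InMemorySessionService(),
      artifactService: new InMemoryArtifactService(),
      memoryService: new InMemoryMemoryService()
    });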
@@ -8934,7 +9562,7 @@ var Runner = class {
       yield event;
     }
   } catch (error) {
-
+    this.logger.debug("Error running agent:", error);
     span.recordException(error);
     span.setStatus({
       code: _api.SpanStatusCode.ERROR,
@@ -8988,17 +9616,17 @@ var Runner = class {
   */
   _findAgentToRun(session, rootAgent) {
     const event = _findFunctionCallEventIfLastEventIsFunctionResponse(session);
-    if (_optionalChain([event, 'optionalAccess',
+    if (_optionalChain([event, 'optionalAccess', _265 => _265.author])) {
       return rootAgent.findAgent(event.author);
     }
-    const nonUserEvents = _optionalChain([session, 'access',
+    const nonUserEvents = _optionalChain([session, 'access', _266 => _266.events, 'optionalAccess', _267 => _267.filter, 'call', _268 => _268((e) => e.author !== "user"), 'access', _269 => _269.reverse, 'call', _270 => _270()]) || [];
     for (const event2 of nonUserEvents) {
       if (event2.author === rootAgent.name) {
         return rootAgent;
       }
-      const agent = _optionalChain([rootAgent, 'access',
+      const agent = _optionalChain([rootAgent, 'access', _271 => _271.findSubAgent, 'optionalCall', _272 => _272(event2.author)]);
       if (!agent) {
-
+        this.logger.debug(
           `Event from an unknown agent: ${event2.author}, event id: ${event2.id}`
         );
         continue;
@@ -9045,7 +9673,7 @@ var Runner = class {
       runConfig
     });
   }
-};
+}, _class30);
 var InMemoryRunner = class extends Runner {
   /**
    * Deprecated. Please don't use. The in-memory session service for the runner.
@@ -9068,14 +9696,14 @@ var InMemoryRunner = class extends Runner {
 };
 
 // src/agents/agent-builder.ts
-var AgentBuilder = (
+var AgentBuilder = (_class31 = class _AgentBuilder {
 
 
-
+  __init54() {this.agentType = "llm"}
   /**
   * Private constructor - use static create() method
   */
-  constructor(name) {;
+  constructor(name) {;_class31.prototype.__init54.call(this);
     this.config = { name };
   }
   /**
@@ -9262,7 +9890,7 @@ var AgentBuilder = (_class28 = class _AgentBuilder {
       parts: [{ text: message }]
     }
   })) {
-    if (_optionalChain([event, 'access',
+    if (_optionalChain([event, 'access', _273 => _273.content, 'optionalAccess', _274 => _274.parts])) {
       const content = event.content.parts.map((part) => part.text || "").join("");
       if (content) {
         response += content;
@@ -9277,6 +9905,20 @@ var AgentBuilder = (_class28 = class _AgentBuilder {
   */
   createAgent() {
     switch (this.agentType) {
+      case "llm": {
+        if (!this.config.model) {
+          throw new Error("Model is required for LLM agent");
+        }
+        const model = this.config.model;
+        return new LlmAgent({
+          name: this.config.name,
+          model,
+          description: this.config.description,
+          instruction: this.config.instruction,
+          tools: this.config.tools,
+          planner: this.config.planner
+        });
+      }
       case "sequential":
         if (!this.config.subAgents) {
           throw new Error("Sub-agents required for sequential agent");
@@ -9315,18 +9957,9 @@ var AgentBuilder = (_class28 = class _AgentBuilder {
           nodes: this.config.nodes,
           rootNode: this.config.rootNode
         });
-      default:
-        return new LlmAgent({
-          name: this.config.name,
-          model: this.config.model,
-          description: this.config.description,
-          instruction: this.config.instruction,
-          tools: this.config.tools,
-          planner: this.config.planner
-        });
     }
   }
-},
+}, _class31);
 
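createAgent() now has an explicit "llm" branch that throws when no model has been configured, instead of falling through to a default LlmAgent built around an empty model string. Only the static create() entry point and the error message come from the code above; the fluent method names in this sketch are assumptions used to illustrate the stricter behaviour:

    import { AgentBuilder } from "@iqai/adk";

    // AgentBuilder.create("helper").build();  // would now fail: "Model is required for LLM agent"

    const built = AgentBuilder.create("helper")
      .withModel("gemini-2.0-flash")   // assumed method name, placeholder model id
      .withInstruction("Answer concisely.") // assumed method name
      .build();                        // assumed to call createAgent() under the hood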
 // src/memory/index.ts
 var memory_exports = {};
@@ -9390,7 +10023,7 @@ var VertexAiSessionService = class extends BaseSessionService {
       path: `operations/${operationId}`,
       request_dict: {}
     });
-    if (_optionalChain([lroResponse, 'optionalAccess',
+    if (_optionalChain([lroResponse, 'optionalAccess', _275 => _275.done])) {
       break;
     }
     await new Promise((resolve) => setTimeout(resolve, 1e3));
@@ -9660,11 +10293,11 @@ var VertexAiSessionService = class extends BaseSessionService {
 
 // src/sessions/database-session-service.ts
 var _kysely = require('kysely');
-var DatabaseSessionService = (
+var DatabaseSessionService = (_class32 = class extends BaseSessionService {
 
-
+  __init55() {this.initialized = false}
   constructor(config) {
-    super();
+    super();_class32.prototype.__init55.call(this);;
     this.db = config.db;
     if (!config.skipTableCreation) {
       this.initializeDatabase().catch((error) => {
@@ -9739,7 +10372,7 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
     if (!jsonString) return defaultValue;
     try {
       return JSON.parse(jsonString);
-    } catch (
+    } catch (e4) {
       return defaultValue;
     }
   }
@@ -9761,12 +10394,12 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
   }
   async createSession(appName, userId, state, sessionId) {
     await this.ensureInitialized();
-    const id = _optionalChain([sessionId, 'optionalAccess',
+    const id = _optionalChain([sessionId, 'optionalAccess', _276 => _276.trim, 'call', _277 => _277()]) || this.generateSessionId();
     return await this.db.transaction().execute(async (trx) => {
       const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", appName).executeTakeFirst();
       const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", appName).where("user_id", "=", userId).executeTakeFirst();
-      let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess',
-      let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess',
+      let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _278 => _278.state]), {});
+      let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _279 => _279.state]), {});
       if (!appState) {
         await trx.insertInto("app_states").values({
           app_name: appName,
@@ -9825,21 +10458,21 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
         return void 0;
       }
       let eventQuery = trx.selectFrom("events").selectAll().where("session_id", "=", sessionId).orderBy("timestamp", "desc");
-      if (_optionalChain([config, 'optionalAccess',
+      if (_optionalChain([config, 'optionalAccess', _280 => _280.afterTimestamp])) {
        eventQuery = eventQuery.where(
          "timestamp",
          ">=",
          new Date(config.afterTimestamp * 1e3)
        );
      }
-      if (_optionalChain([config, 'optionalAccess',
+      if (_optionalChain([config, 'optionalAccess', _281 => _281.numRecentEvents])) {
        eventQuery = eventQuery.limit(config.numRecentEvents);
      }
      const storageEvents = await eventQuery.execute();
      const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", appName).executeTakeFirst();
      const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", appName).where("user_id", "=", userId).executeTakeFirst();
-      const currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess',
-      const currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess',
+      const currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _282 => _282.state]), {});
+      const currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _283 => _283.state]), {});
      const sessionState = this.parseJsonSafely(storageSession.state, {});
      const mergedState = this.mergeState(
        currentAppState,
@@ -9897,13 +10530,13 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
      }
      const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", session.appName).executeTakeFirst();
      const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", session.appName).where("user_id", "=", session.userId).executeTakeFirst();
-      let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess',
-      let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess',
+      let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _284 => _284.state]), {});
+      let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _285 => _285.state]), {});
      let sessionState = this.parseJsonSafely(storageSession.state, {});
      let appStateDelta = {};
      let userStateDelta = {};
      let sessionStateDelta = {};
-      if (_optionalChain([event, 'access',
+      if (_optionalChain([event, 'access', _286 => _286.actions, 'optionalAccess', _287 => _287.stateDelta])) {
        const deltas = this.extractStateDelta(event.actions.stateDelta);
        appStateDelta = deltas.appStateDelta;
        userStateDelta = deltas.userStateDelta;
@@ -10049,7 +10682,7 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
   * Overrides the base class method to work with plain object state.
   */
   updateSessionState(session, event) {
-    if (!_optionalChain([event, 'access',
+    if (!_optionalChain([event, 'access', _288 => _288.actions, 'optionalAccess', _289 => _289.stateDelta])) {
       return;
     }
     for (const [key, value] of Object.entries(event.actions.stateDelta)) {
@@ -10058,10 +10691,10 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
       }
     }
   }
-},
+}, _class32);
 
 // src/sessions/database-factories.ts
-
+var _dedent = require('dedent'); var _dedent2 = _interopRequireDefault(_dedent);
 
 function createDependencyError(packageName, dbType) {
   return new Error(
@@ -10138,11 +10771,9 @@ function createDatabaseSessionService(databaseUrl, options) {
 }
 
 // src/artifacts/gcs-artifact-service.ts
-init_logger();
 
 
 var _storage = require('@google-cloud/storage');
-var logger11 = new Logger({ name: "GcsArtifactService" });
 var GcsArtifactService = class {
 
 
@@ -10221,7 +10852,7 @@ var GcsArtifactService = class {
       };
       return part;
     } catch (error) {
-      if (_optionalChain([error, 'optionalAccess',
+      if (_optionalChain([error, 'optionalAccess', _290 => _290.code]) === 404) {
         return null;
       }
       throw error;
@@ -10297,20 +10928,20 @@ __export(flows_exports, {
   REQUEST_EUC_FUNCTION_CALL_NAME: () => REQUEST_EUC_FUNCTION_CALL_NAME,
   SingleFlow: () => SingleFlow,
   agentTransferRequestProcessor: () => requestProcessor8,
-  basicRequestProcessor: () =>
-  codeExecutionRequestProcessor: () =>
-  codeExecutionResponseProcessor: () =>
-  contentRequestProcessor: () =>
+  basicRequestProcessor: () => requestProcessor2,
+  codeExecutionRequestProcessor: () => requestProcessor3,
+  codeExecutionResponseProcessor: () => responseProcessor,
+  contentRequestProcessor: () => requestProcessor4,
   generateAuthEvent: () => generateAuthEvent,
   generateClientFunctionCallId: () => generateClientFunctionCallId,
   getLongRunningFunctionCalls: () => getLongRunningFunctionCalls,
   handleFunctionCallsAsync: () => handleFunctionCallsAsync,
   handleFunctionCallsLive: () => handleFunctionCallsLive,
-  identityRequestProcessor: () =>
-  instructionsRequestProcessor: () =>
+  identityRequestProcessor: () => requestProcessor5,
+  instructionsRequestProcessor: () => requestProcessor6,
   mergeParallelFunctionResponseEvents: () => mergeParallelFunctionResponseEvents,
-  nlPlanningRequestProcessor: () =>
-  nlPlanningResponseProcessor: () =>
+  nlPlanningRequestProcessor: () => requestProcessor7,
+  nlPlanningResponseProcessor: () => responseProcessor2,
   populateClientFunctionCallId: () => populateClientFunctionCallId,
   removeClientFunctionCallId: () => removeClientFunctionCallId
 });
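The database-factories chunk now requires dedent and the GCS artifact service drops its module-level logger, but the factory surface is unchanged: createDatabaseSessionService(databaseUrl, options) plus the dialect-specific helpers re-exported at the bottom of the bundle. A brief sketch, where the connection string and file name are placeholders and the sqlite helper's argument shape is an assumption:

    import { createDatabaseSessionService, createSqliteSessionService } from "@iqai/adk";

    // URL-based factory; the matching driver (e.g. pg) must be installed separately.
    const pgSessions = createDatabaseSessionService("postgresql://user:pass@localhost:5432/adk");

    // Dialect helper for local development (argument shown is an assumption).
    const devSessions = createSqliteSessionService("sessions.db");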
@@ -10456,4 +11087,10 @@ var VERSION = "0.1.0";
 
 
 
-
+
+
+
+
+
+
exports.AF_FUNCTION_CALL_ID_PREFIX = AF_FUNCTION_CALL_ID_PREFIX; exports.Agent = LlmAgent; exports.AgentBuilder = AgentBuilder; exports.Agents = agents_exports; exports.AiSdkLlm = AiSdkLlm; exports.AnthropicLlm = AnthropicLlm; exports.ApiKeyCredential = ApiKeyCredential; exports.ApiKeyScheme = ApiKeyScheme; exports.AuthConfig = AuthConfig; exports.AuthCredential = AuthCredential; exports.AuthCredentialType = AuthCredentialType; exports.AuthHandler = AuthHandler; exports.AuthScheme = AuthScheme; exports.AuthSchemeType = AuthSchemeType; exports.AuthTool = AuthTool; exports.AutoFlow = AutoFlow; exports.BaseAgent = BaseAgent; exports.BaseCodeExecutor = BaseCodeExecutor; exports.BaseLLMConnection = BaseLLMConnection; exports.BaseLlm = BaseLlm; exports.BaseLlmFlow = BaseLlmFlow; exports.BaseLlmRequestProcessor = BaseLlmRequestProcessor; exports.BaseLlmResponseProcessor = BaseLlmResponseProcessor; exports.BasePlanner = BasePlanner; exports.BaseSessionService = BaseSessionService; exports.BaseTool = BaseTool; exports.BasicAuthCredential = BasicAuthCredential; exports.BearerTokenCredential = BearerTokenCredential; exports.BuiltInCodeExecutor = BuiltInCodeExecutor; exports.BuiltInPlanner = BuiltInPlanner; exports.CallbackContext = CallbackContext; exports.CodeExecutionUtils = CodeExecutionUtils; exports.CodeExecutorContext = CodeExecutorContext; exports.DatabaseSessionService = DatabaseSessionService; exports.EnhancedAuthConfig = EnhancedAuthConfig; exports.Event = Event; exports.EventActions = EventActions; exports.Events = events_exports; exports.ExitLoopTool = ExitLoopTool; exports.FileOperationsTool = FileOperationsTool; exports.Flows = flows_exports; exports.FunctionTool = FunctionTool; exports.GcsArtifactService = GcsArtifactService; exports.GetUserChoiceTool = GetUserChoiceTool; exports.GoogleLlm = GoogleLlm; exports.GoogleSearch = GoogleSearch; exports.HttpRequestTool = HttpRequestTool; exports.HttpScheme = HttpScheme; exports.InMemoryArtifactService = InMemoryArtifactService; exports.InMemoryMemoryService = InMemoryMemoryService; exports.InMemoryRunner = InMemoryRunner; exports.InMemorySessionService = InMemorySessionService; exports.InvocationContext = InvocationContext; exports.LLMRegistry = LLMRegistry; exports.LangGraphAgent = LangGraphAgent; exports.LlmAgent = LlmAgent; exports.LlmCallsLimitExceededError = LlmCallsLimitExceededError; exports.LlmRequest = LlmRequest; exports.LlmResponse = LlmResponse; exports.LoadArtifactsTool = LoadArtifactsTool; exports.LoadMemoryTool = LoadMemoryTool; exports.LoopAgent = LoopAgent; exports.McpAbi = McpAbi; exports.McpAtp = McpAtp; exports.McpBamm = McpBamm; exports.McpCoinGecko = McpCoinGecko; exports.McpError = McpError; exports.McpErrorType = McpErrorType; exports.McpFilesystem = McpFilesystem; exports.McpFraxlend = McpFraxlend; exports.McpGeneric = McpGeneric; exports.McpIqWiki = McpIqWiki; exports.McpMemory = McpMemory; exports.McpNearAgent = McpNearAgent; exports.McpNearIntentSwaps = McpNearIntentSwaps; exports.McpOdos = McpOdos; exports.McpSamplingHandler = McpSamplingHandler; exports.McpTelegram = McpTelegram; exports.McpToolset = McpToolset; exports.Memory = memory_exports; exports.Models = models_exports; exports.OAuth2Credential = OAuth2Credential; exports.OAuth2Scheme = OAuth2Scheme; exports.OpenAiLlm = OpenAiLlm; exports.OpenIdConnectScheme = OpenIdConnectScheme; exports.ParallelAgent = ParallelAgent; exports.PlanReActPlanner = PlanReActPlanner; exports.REQUEST_EUC_FUNCTION_CALL_NAME = REQUEST_EUC_FUNCTION_CALL_NAME; 
exports.ReadonlyContext = ReadonlyContext; exports.RunConfig = RunConfig; exports.Runner = Runner; exports.SequentialAgent = SequentialAgent; exports.Sessions = sessions_exports; exports.SingleFlow = SingleFlow; exports.State = State; exports.StreamingMode = StreamingMode; exports.TelemetryService = TelemetryService; exports.ToolContext = ToolContext; exports.Tools = tools_exports; exports.TransferToAgentTool = TransferToAgentTool; exports.UserInteractionTool = UserInteractionTool; exports.VERSION = VERSION; exports.VertexAiSessionService = VertexAiSessionService; exports.adkToMcpToolType = adkToMcpToolType; exports.agentTransferRequestProcessor = requestProcessor8; exports.basicRequestProcessor = requestProcessor2; exports.buildFunctionDeclaration = buildFunctionDeclaration; exports.codeExecutionRequestProcessor = requestProcessor3; exports.codeExecutionResponseProcessor = responseProcessor; exports.contentRequestProcessor = requestProcessor4; exports.createAuthToolArguments = createAuthToolArguments; exports.createDatabaseSessionService = createDatabaseSessionService; exports.createFunctionTool = createFunctionTool; exports.createMysqlSessionService = createMysqlSessionService; exports.createPostgresSessionService = createPostgresSessionService; exports.createSamplingHandler = createSamplingHandler; exports.createSqliteSessionService = createSqliteSessionService; exports.generateAuthEvent = generateAuthEvent; exports.generateClientFunctionCallId = generateClientFunctionCallId; exports.getLongRunningFunctionCalls = getLongRunningFunctionCalls; exports.getMcpTools = getMcpTools; exports.handleFunctionCallsAsync = handleFunctionCallsAsync; exports.handleFunctionCallsLive = handleFunctionCallsLive; exports.identityRequestProcessor = requestProcessor5; exports.initializeTelemetry = initializeTelemetry; exports.injectSessionState = injectSessionState; exports.instructionsRequestProcessor = requestProcessor6; exports.isEnhancedAuthConfig = isEnhancedAuthConfig; exports.jsonSchemaToDeclaration = jsonSchemaToDeclaration; exports.mcpSchemaToParameters = mcpSchemaToParameters; exports.mergeParallelFunctionResponseEvents = mergeParallelFunctionResponseEvents; exports.newInvocationContextId = newInvocationContextId; exports.nlPlanningRequestProcessor = requestProcessor7; exports.nlPlanningResponseProcessor = responseProcessor2; exports.normalizeJsonSchema = normalizeJsonSchema; exports.populateClientFunctionCallId = populateClientFunctionCallId; exports.registerProviders = registerProviders; exports.removeClientFunctionCallId = removeClientFunctionCallId; exports.requestProcessor = requestProcessor; exports.shutdownTelemetry = shutdownTelemetry; exports.telemetryService = telemetryService; exports.traceLlmCall = traceLlmCall; exports.traceToolCall = traceToolCall; exports.tracer = tracer;