@iqai/adk 0.1.3 → 0.1.5
- package/CHANGELOG.md +28 -0
- package/dist/index.d.mts +343 -64
- package/dist/index.d.ts +343 -64
- package/dist/index.js +1652 -1018
- package/dist/index.mjs +1437 -803
- package/package.json +5 -1
package/dist/index.js
CHANGED

@@ -1,4 +1,4 @@
-"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15; var _class16; var _class17; var _class18; var _class19; var _class20; var _class21; var _class22; var _class23; var _class24; var _class25; var _class26; var _class27; var _class28; var _class29;var __defProp = Object.defineProperty;
+"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15; var _class16; var _class17; var _class18; var _class19; var _class20; var _class21; var _class22; var _class23; var _class24; var _class25; var _class26; var _class27; var _class28; var _class29; var _class30; var _class31; var _class32;var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
@@ -26,8 +26,9 @@ var __copyProps = (to, from, except, desc) => {
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

 // src/helpers/logger.ts
+var _chalk = require('chalk'); var _chalk2 = _interopRequireDefault(_chalk);
 function isDebugEnabled() {
-return process.env.NODE_ENV === "development" || process.env.
+return process.env.NODE_ENV === "development" || process.env.ADK_DEBUG === "true";
 }
 var Logger;
 var init_logger = __esm({
@@ -38,34 +39,99 @@ var init_logger = __esm({
 constructor({ name }) {;_class.prototype.__init2.call(this);
 this.name = name;
 }
+colorize(message) {
+return _chalk2.default.blue(message);
+}
 debug(message, ...args) {
-const time = (/* @__PURE__ */ new Date()).toISOString();
 if (this.isDebugEnabled) {
-
+const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
+console.log(
+this.colorize(`[${time}] \u{1F41B} [${this.name}] ${message}`),
+...args
+);
 }
 }
 info(message, ...args) {
-const time = (/* @__PURE__ */ new Date()).
-console.info(
+const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
+console.info(
+this.colorize(`[${time}] \u2139\uFE0F [${this.name}] ${message}`),
+...args
+);
 }
 warn(message, ...args) {
-const time = (/* @__PURE__ */ new Date()).
-console.warn(
+const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
+console.warn(
+this.colorize(`[${time}] \u{1F6A7} [${this.name}] ${message}`),
+...args
+);
 }
 error(message, ...args) {
-const time = (/* @__PURE__ */ new Date()).
-console.error(
+const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
+console.error(
+this.colorize(`[${time}] \u274C [${this.name}] ${message}`),
+...args
+);
+}
+/**
+* Logs structured data in a visually appealing table format.
+* Uses vertical layout for better readability and respects debug settings.
+*/
+debugStructured(title, data) {
+if (!this.isDebugEnabled) return;
+const terminalWidth = process.stdout.columns || 60;
+const width = Math.min(terminalWidth, 100);
+const contentWidth = width - 4;
+const topBorder = `\u250C${"\u2500".repeat(width - 2)}\u2510`;
+const bottomBorder = `\u2514${"\u2500".repeat(width - 2)}\u2518`;
+const middleBorder = `\u251C${"\u2500".repeat(width - 2)}\u2524`;
+console.log(this.colorize(topBorder));
+console.log(this.colorize(`\u2502 ${title.padEnd(contentWidth)} \u2502`));
+console.log(this.colorize(middleBorder));
+Object.entries(data).forEach(([key, value]) => {
+const formattedKey = key.padEnd(20);
+const formattedValue = String(value);
+const availableValueSpace = contentWidth - 20 - 2;
+const truncatedValue = formattedValue.length > availableValueSpace ? `${formattedValue.substring(0, availableValueSpace - 3)}...` : formattedValue;
+const content = `${formattedKey}: ${truncatedValue}`;
+const paddedContent = content.padEnd(contentWidth);
+console.log(this.colorize(`\u2502 ${paddedContent} \u2502`));
+});
+console.log(this.colorize(bottomBorder));
+}
+/**
+* Logs array data in a compact, readable format.
+*/
+debugArray(title, items) {
+if (!this.isDebugEnabled) return;
+const terminalWidth = process.stdout.columns || 78;
+const width = Math.min(terminalWidth, 120);
+const contentWidth = width - 4;
+const topBorder = `\u250C${"\u2500".repeat(width - 2)}\u2510`;
+const bottomBorder = `\u2514${"\u2500".repeat(width - 2)}\u2518`;
+const middleBorder = `\u251C${"\u2500".repeat(width - 2)}\u2524`;
+console.log(this.colorize(topBorder));
+console.log(this.colorize(`\u2502 ${title.padEnd(contentWidth)} \u2502`));
+console.log(this.colorize(middleBorder));
+items.forEach((item, index) => {
+const itemStr = Object.entries(item).map(([k, v]) => `${k}: ${v}`).join(" \u2022 ");
+const indexPart = `[${index + 1}] `;
+const availableSpace = contentWidth - indexPart.length;
+const truncatedItem = itemStr.length > availableSpace ? `${itemStr.substring(0, availableSpace - 3)}...` : itemStr;
+const content = `${indexPart}${truncatedItem}`;
+const paddedContent = content.padEnd(contentWidth);
+console.log(this.colorize(`\u2502 ${paddedContent} \u2502`));
+});
+console.log(this.colorize(bottomBorder));
 }
 }, _class);
 }
 });

 // src/tools/base/base-tool.ts
-var
+var BaseTool;
 var init_base_tool = __esm({
 "src/tools/base/base-tool.ts"() {
 init_logger();
-logger6 = new Logger({ name: "BaseTool" });
 BaseTool = exports.BaseTool = (_class2 = class {
 /**
 * Name of the tool
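The hunk above replaces the plain console logger with a chalk-colorized one gated by `NODE_ENV === "development"` or the new `ADK_DEBUG=true` flag, and adds `debugStructured`/`debugArray` box-drawing helpers. A minimal TypeScript sketch of that surface; the import path and the assumption that `Logger` is exported from the package root are not shown in this diff:

```ts
// Sketch only: the Logger export/import path is an assumption, not part of this diff.
import { Logger } from "@iqai/adk";

process.env.ADK_DEBUG = "true"; // debug output now also appears outside NODE_ENV=development

const logger = new Logger({ name: "MyAgent" });
logger.info("starting run");                      // [local time] ℹ️ [MyAgent] starting run (chalk blue)
logger.debug("raw request", { stream: false });   // 🐛 prefix, only when debug is enabled
logger.debugStructured("LLM call", { model: "gemini-1.5-flash", streaming: false });
logger.debugArray("tools", [{ name: "search", isLongRunning: false }]);
```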
@@ -96,10 +162,11 @@ var init_base_tool = __esm({
 * Maximum delay for retry in ms
 */
 __init4() {this.maxRetryDelay = 1e4}
+__init5() {this.logger = new Logger({ name: "BaseTool" })}
 /**
 * Constructor for BaseTool
 */
-constructor(config) {;_class2.prototype.__init3.call(this);_class2.prototype.__init4.call(this);
+constructor(config) {;_class2.prototype.__init3.call(this);_class2.prototype.__init4.call(this);_class2.prototype.__init5.call(this);
 this.name = config.name;
 this.description = config.description;
 this.isLongRunning = config.isLongRunning || false;
@@ -226,7 +293,7 @@ var init_base_tool = __esm({
 while (attempts <= (this.shouldRetryOnFailure ? this.maxRetryAttempts : 0)) {
 try {
 if (attempts > 0) {
-
+this.logger.debug(
 `Retrying tool ${this.name} (attempt ${attempts} of ${this.maxRetryAttempts})...`
 );
 const delay = Math.min(
@@ -406,8 +473,8 @@ var init_function_tool = __esm({
 init_function_utils();
 FunctionTool = exports.FunctionTool = (_class3 = class extends BaseTool {

-
-
+__init6() {this.mandatoryArgs = []}
+__init7() {this.parameterTypes = {}}
 /**
 * Creates a new FunctionTool wrapping the provided function.
 *
@@ -423,7 +490,7 @@ var init_function_tool = __esm({
 isLongRunning: _optionalChain([options, 'optionalAccess', _8 => _8.isLongRunning]) || false,
 shouldRetryOnFailure: _optionalChain([options, 'optionalAccess', _9 => _9.shouldRetryOnFailure]) || false,
 maxRetryAttempts: _optionalChain([options, 'optionalAccess', _10 => _10.maxRetryAttempts]) || 3
-});_class3.prototype.
+});_class3.prototype.__init6.call(this);_class3.prototype.__init7.call(this);;
 this.func = func;
 this.mandatoryArgs = this.getMandatoryArgs(func);
 this.parameterTypes = _optionalChain([options, 'optionalAccess', _11 => _11.parameterTypes]) || {};
@@ -613,6 +680,7 @@ __export(agents_exports, {
 // src/models/index.ts
 var models_exports = {};
 __export(models_exports, {
+AiSdkLlm: () => AiSdkLlm,
 AnthropicLlm: () => AnthropicLlm,
 ApiKeyCredential: () => ApiKeyCredential,
 ApiKeyScheme: () => ApiKeyScheme,
@@ -640,8 +708,6 @@ __export(models_exports, {
 });

 // src/models/llm-request.ts
-init_logger();
-var logger = new Logger({ name: "LlmRequest" });
 var LlmRequest = class {
 /**
 * The model name.
@@ -805,6 +871,10 @@ var LlmResponse = class _LlmResponse {
 * Reason why the model finished generating.
 */

+/**
+* Error object if the response is an error.
+*/
+
 /**
 * Creates a new LlmResponse.
 */
@@ -848,6 +918,29 @@ var LlmResponse = class _LlmResponse {
 usageMetadata
 });
 }
+/**
+* Creates an LlmResponse from an error.
+*
+* @param error The error object or message.
+* @param options Additional options for the error response.
+* @param options.errorCode A specific error code for the response.
+* @param options.model The model that was being used when the error occurred.
+* @returns The LlmResponse.
+*/
+static fromError(error, options = {}) {
+const errorMessage = error instanceof Error ? error.message : String(error);
+const errorCode = options.errorCode || "UNKNOWN_ERROR";
+return new _LlmResponse({
+errorCode,
+errorMessage: `LLM call failed for model ${options.model || "unknown"}: ${errorMessage}`,
+content: {
+role: "model",
+parts: [{ text: `Error: ${errorMessage}` }]
+},
+finishReason: "STOP",
+error: error instanceof Error ? error : new Error(errorMessage)
+});
+}
 };

 // src/models/base-llm.ts
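The new `LlmResponse.fromError` helper wraps any thrown value into an error-shaped response. A small sketch of the call, read directly off the hunk above (the `@iqai/adk` import path is an assumption):

```ts
import { LlmResponse } from "@iqai/adk"; // assumed export path

function toErrorResponse(err: unknown) {
  const response = LlmResponse.fromError(err, {
    errorCode: "PROVIDER_ERROR", // omitted -> "UNKNOWN_ERROR"
    model: "gemini-1.5-flash",
  });
  // errorMessage: "LLM call failed for model gemini-1.5-flash: <message>"
  // content:      { role: "model", parts: [{ text: "Error: <message>" }] }
  // finishReason: "STOP"; response.error holds the original Error instance
  return response;
}
```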
@@ -869,11 +962,11 @@ var _sdknode = require('@opentelemetry/sdk-node');

 var _semanticconventions = require('@opentelemetry/semantic-conventions');
 var TelemetryService = (_class4 = class {
-
-
+__init8() {this.sdk = null}
+__init9() {this.isInitialized = false}

-
-constructor() {;_class4.prototype.
+__init10() {this.config = null}
+constructor() {;_class4.prototype.__init8.call(this);_class4.prototype.__init9.call(this);_class4.prototype.__init10.call(this);
 this.tracer = _api.trace.getTracer("iqai-adk", "0.1.0");
 }
 /**
@@ -1127,16 +1220,16 @@ var traceLlmCall = (invocationContext, eventId, llmRequest, llmResponse) => tele
 );

 // src/models/base-llm.ts
-var
-var BaseLlm = class {
+var BaseLlm = (_class5 = class {
 /**
 * The name of the LLM, e.g. gemini-1.5-flash or gemini-1.5-flash-001.
 */

+__init11() {this.logger = new Logger({ name: "BaseLlm" })}
 /**
 * Constructor for BaseLlm
 */
-constructor(model) {
+constructor(model) {;_class5.prototype.__init11.call(this);
 this.model = model;
 }
 /**
@@ -1183,12 +1276,6 @@ var BaseLlm = class {
 }),
 "adk.streaming": stream || false
 });
-logger2.debug("ADK LLM Request:", {
-model: this.model,
-contentCount: _optionalChain([llmRequest, 'access', _45 => _45.contents, 'optionalAccess', _46 => _46.length]) || 0,
-streaming: stream || false,
-config: llmRequest.config
-});
 let responseCount = 0;
 let totalTokens = 0;
 for await (const response of this.generateContentAsyncImpl(
@@ -1196,14 +1283,6 @@ var BaseLlm = class {
 stream
 )) {
 responseCount++;
-logger2.debug(`ADK LLM Response ${responseCount}:`, {
-model: this.model,
-parts: _optionalChain([response, 'access', _47 => _47.parts, 'optionalAccess', _48 => _48.map, 'call', _49 => _49((part) => ({
-text: typeof part.text === "string" ? part.text.substring(0, 200) + (part.text.length > 200 ? "..." : "") : "[non_text_content]"
-}))]),
-finishReason: response.finish_reason,
-usage: response.usage
-});
 if (response.usage) {
 totalTokens += response.usage.total_tokens || 0;
 span.setAttributes({
@@ -1224,7 +1303,7 @@ var BaseLlm = class {
 } catch (error) {
 span.recordException(error);
 span.setStatus({ code: 2, message: error.message });
-
+this.logger.error("\u274C ADK LLM Error:", {
 model: this.model,
 error: error.message
 });
@@ -1273,33 +1352,29 @@ var BaseLlm = class {
 connect(llmRequest) {
 throw new Error(`Live connection is not supported for ${this.model}.`);
 }
-};
+}, _class5);

 // src/models/base-llm-connection.ts
 var BaseLLMConnection = class {
 };

 // src/models/google-llm.ts
-init_logger();



 var _genai = require('@google/genai');
-var _dedent = require('dedent'); var _dedent2 = _interopRequireDefault(_dedent);
-var NEW_LINE = "\n";
 var AGENT_ENGINE_TELEMETRY_TAG = "remote_reasoning_engine";
 var AGENT_ENGINE_TELEMETRY_ENV_VARIABLE_NAME = "GOOGLE_CLOUD_AGENT_ENGINE_ID";
-var GoogleLlm =
+var GoogleLlm = class extends BaseLlm {




-__init10() {this.logger = new Logger({ name: "GoogleLlm" })}
 /**
 * Constructor for Gemini
 */
 constructor(model = "gemini-1.5-flash") {
-super(model);
+super(model);
 }
 /**
 * Provides the list of supported models.
@@ -1318,10 +1393,6 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
 */
 async *generateContentAsyncImpl(llmRequest, stream = false) {
 this.preprocessRequest(llmRequest);
-this.logger.debug(
-`Sending out request, model: ${llmRequest.model || this.model}, backend: ${this.apiBackend}, stream: ${stream}`
-);
-this.logger.debug(this.buildRequestLog(llmRequest));
 const model = llmRequest.model || this.model;
 const contents = this.convertContents(llmRequest.contents || []);
 const config = this.convertConfig(llmRequest.config);
@@ -1337,10 +1408,9 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
 let usageMetadata = null;
 for await (const resp of responses) {
 response = resp;
-this.logger.debug(this.buildResponseLog(resp));
 const llmResponse = LlmResponse.create(resp);
 usageMetadata = llmResponse.usageMetadata;
-if (_optionalChain([llmResponse, 'access',
+if (_optionalChain([llmResponse, 'access', _45 => _45.content, 'optionalAccess', _46 => _46.parts, 'optionalAccess', _47 => _47[0], 'optionalAccess', _48 => _48.text])) {
 const part0 = llmResponse.content.parts[0];
 if (part0.thought) {
 thoughtText += part0.text;
@@ -1368,7 +1438,7 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
 }
 yield llmResponse;
 }
-if ((text || thoughtText) && response && response.candidates && _optionalChain([response, 'access',
+if ((text || thoughtText) && response && response.candidates && _optionalChain([response, 'access', _49 => _49.candidates, 'access', _50 => _50[0], 'optionalAccess', _51 => _51.finishReason]) === _genai.FinishReason.STOP) {
 const parts = [];
 if (thoughtText) {
 parts.push({ text: thoughtText, thought: true });
@@ -1390,8 +1460,11 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
 contents,
 config
 });
-
-
+const llmResponse = LlmResponse.create(response);
+this.logger.debug(
+`Google response: ${_optionalChain([llmResponse, 'access', _52 => _52.usageMetadata, 'optionalAccess', _53 => _53.candidatesTokenCount]) || 0} tokens`
+);
+yield llmResponse;
 }
 }
 /**
@@ -1404,8 +1477,8 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
 * Check if response has inline data
 */
 hasInlineData(response) {
-const parts = _optionalChain([response, 'access',
-return _optionalChain([parts, 'optionalAccess',
+const parts = _optionalChain([response, 'access', _54 => _54.candidates, 'optionalAccess', _55 => _55[0], 'optionalAccess', _56 => _56.content, 'optionalAccess', _57 => _57.parts]);
+return _optionalChain([parts, 'optionalAccess', _58 => _58.some, 'call', _59 => _59((part) => _optionalChain([part, 'optionalAccess', _60 => _60.inlineData]))]) || false;
 }
 /**
 * Convert LlmRequest contents to GoogleGenAI format
@@ -1452,7 +1525,7 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
 * Sets display_name to null for the Gemini API (non-Vertex) backend.
 */
 removeDisplayNameIfPresent(dataObj) {
-if (_optionalChain([dataObj, 'optionalAccess',
+if (_optionalChain([dataObj, 'optionalAccess', _61 => _61.displayName])) {
 dataObj.displayName = null;
 }
 }
@@ -1461,65 +1534,11 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
 */
 buildFunctionDeclarationLog(funcDecl) {
 let paramStr = "{}";
-if (_optionalChain([funcDecl, 'access',
+if (_optionalChain([funcDecl, 'access', _62 => _62.parameters, 'optionalAccess', _63 => _63.properties])) {
 paramStr = JSON.stringify(funcDecl.parameters.properties);
 }
 return `${funcDecl.name}: ${paramStr}`;
 }
-/**
-* Builds request log string.
-*/
-buildRequestLog(req) {
-const functionDecls = _optionalChain([req, 'access', _67 => _67.config, 'optionalAccess', _68 => _68.tools, 'optionalAccess', _69 => _69[0], 'optionalAccess', _70 => _70.functionDeclarations]) || [];
-const functionLogs = functionDecls.length > 0 ? functionDecls.map(
-(funcDecl) => this.buildFunctionDeclarationLog(funcDecl)
-) : [];
-const contentsLogs = _optionalChain([req, 'access', _71 => _71.contents, 'optionalAccess', _72 => _72.map, 'call', _73 => _73(
-(content) => JSON.stringify(content, (key, value) => {
-if (key === "data" && typeof value === "string" && value.length > 100) {
-return "[EXCLUDED]";
-}
-return value;
-})
-)]) || [];
-return _dedent2.default`
-LLM Request:
------------------------------------------------------------
-System Instruction:
-${_optionalChain([req, 'access', _74 => _74.config, 'optionalAccess', _75 => _75.systemInstruction]) || ""}
------------------------------------------------------------
-Contents:
-${contentsLogs.join(NEW_LINE)}
------------------------------------------------------------
-Functions:
-${functionLogs.join(NEW_LINE)}
------------------------------------------------------------`;
-}
-/**
-* Builds response log string.
-*/
-buildResponseLog(resp) {
-const functionCallsText = [];
-if (resp.functionCalls) {
-for (const funcCall of resp.functionCalls) {
-functionCallsText.push(
-`name: ${funcCall.name}, args: ${JSON.stringify(funcCall.args)}`
-);
-}
-}
-return _dedent2.default`
-LLM Response:
------------------------------------------------------------
-Text:
-${resp.text || ""}
------------------------------------------------------------
-Function calls:
-${functionCallsText.join(NEW_LINE)}
------------------------------------------------------------
-Raw response:
-${JSON.stringify(resp, null, 2)}
------------------------------------------------------------`;
-}
 /**
 * Provides the api client.
 */
@@ -1608,20 +1627,20 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
 }
 return this._liveApiClient;
 }
-}
+};

 // src/models/anthropic-llm.ts
 init_logger();
 var _sdk = require('@anthropic-ai/sdk'); var _sdk2 = _interopRequireDefault(_sdk);
-var logger3 = new Logger({ name: "AnthropicLlm" });
 var MAX_TOKENS = 1024;
-var AnthropicLlm = class extends BaseLlm {
+var AnthropicLlm = (_class6 = class extends BaseLlm {

+__init12() {this.logger = new Logger({ name: "AnthropicLlm" })}
 /**
 * Constructor for Anthropic LLM
 */
 constructor(model = "claude-3-5-sonnet-20241022") {
-super(model);
+super(model);_class6.prototype.__init12.call(this);;
 }
 /**
 * Provides the list of supported models
@@ -1633,15 +1652,12 @@ var AnthropicLlm = class extends BaseLlm {
 * Main content generation method - handles both streaming and non-streaming
 */
 async *generateContentAsyncImpl(llmRequest, stream = false) {
-logger3.debug(
-`Sending Anthropic request, model: ${llmRequest.model || this.model}, stream: ${stream}`
-);
 const model = llmRequest.model || this.model;
 const messages = (llmRequest.contents || []).map(
 (content) => this.contentToAnthropicMessage(content)
 );
 let tools;
-if (_optionalChain([llmRequest, 'access',
+if (_optionalChain([llmRequest, 'access', _64 => _64.config, 'optionalAccess', _65 => _65.tools, 'optionalAccess', _66 => _66[0], 'optionalAccess', _67 => _67.functionDeclarations])) {
 tools = llmRequest.config.tools[0].functionDeclarations.map(
 (decl) => this.functionDeclarationToAnthropicTool(decl)
 );
@@ -1663,9 +1679,9 @@ var AnthropicLlm = class extends BaseLlm {
 messages: anthropicMessages,
 tools,
 tool_choice: tools ? { type: "auto" } : void 0,
-max_tokens: _optionalChain([llmRequest, 'access',
-temperature: _optionalChain([llmRequest, 'access',
-top_p: _optionalChain([llmRequest, 'access',
+max_tokens: _optionalChain([llmRequest, 'access', _68 => _68.config, 'optionalAccess', _69 => _69.maxOutputTokens]) || MAX_TOKENS,
+temperature: _optionalChain([llmRequest, 'access', _70 => _70.config, 'optionalAccess', _71 => _71.temperature]),
+top_p: _optionalChain([llmRequest, 'access', _72 => _72.config, 'optionalAccess', _73 => _73.topP])
 });
 yield this.anthropicMessageToLlmResponse(message);
 }
@@ -1679,7 +1695,9 @@ var AnthropicLlm = class extends BaseLlm {
 * Convert Anthropic Message to ADK LlmResponse
 */
 anthropicMessageToLlmResponse(message) {
-
+this.logger.debug(
+`Anthropic response: ${message.usage.output_tokens} tokens, ${message.stop_reason}`
+);
 return new LlmResponse({
 content: {
 role: "model",
@@ -1724,7 +1742,7 @@ var AnthropicLlm = class extends BaseLlm {
 }
 if (part.function_response) {
 let content = "";
-if (_optionalChain([part, 'access',
+if (_optionalChain([part, 'access', _74 => _74.function_response, 'access', _75 => _75.response, 'optionalAccess', _76 => _76.result])) {
 content = String(part.function_response.response.result);
 }
 return {
@@ -1759,7 +1777,7 @@ var AnthropicLlm = class extends BaseLlm {
 */
 functionDeclarationToAnthropicTool(functionDeclaration) {
 const properties = {};
-if (_optionalChain([functionDeclaration, 'access',
+if (_optionalChain([functionDeclaration, 'access', _77 => _77.parameters, 'optionalAccess', _78 => _78.properties])) {
 for (const [key, value] of Object.entries(
 functionDeclaration.parameters.properties
 )) {
@@ -1833,14 +1851,10 @@ var AnthropicLlm = class extends BaseLlm {
 }
 return this._client;
 }
-};
+}, _class6);

 // src/models/openai-llm.ts
-init_logger();
-
 var _openai = require('openai'); var _openai2 = _interopRequireDefault(_openai);
-var logger4 = new Logger({ name: "OpenAiLlm" });
-var NEW_LINE2 = "\n";
 var OpenAiLlm = class extends BaseLlm {

 /**
@@ -1860,16 +1874,12 @@ var OpenAiLlm = class extends BaseLlm {
 */
 async *generateContentAsyncImpl(llmRequest, stream = false) {
 this.preprocessRequest(llmRequest);
-logger4.debug(
-`Sending OpenAI request, model: ${llmRequest.model || this.model}, stream: ${stream}`
-);
-logger4.debug(this.buildRequestLog(llmRequest));
 const model = llmRequest.model || this.model;
 const messages = (llmRequest.contents || []).map(
 (content) => this.contentToOpenAiMessage(content)
 );
 let tools;
-if (_optionalChain([llmRequest, 'access',
+if (_optionalChain([llmRequest, 'access', _79 => _79.config, 'optionalAccess', _80 => _80.tools, 'optionalAccess', _81 => _81[0], 'optionalAccess', _82 => _82.functionDeclarations])) {
 tools = llmRequest.config.tools[0].functionDeclarations.map(
 (funcDecl) => this.functionDeclarationToOpenAiTool(funcDecl)
 );
@@ -1887,9 +1897,9 @@ var OpenAiLlm = class extends BaseLlm {
 messages: openAiMessages,
 tools,
 tool_choice: tools ? "auto" : void 0,
-max_tokens: _optionalChain([llmRequest, 'access',
-temperature: _optionalChain([llmRequest, 'access',
-top_p: _optionalChain([llmRequest, 'access',
+max_tokens: _optionalChain([llmRequest, 'access', _83 => _83.config, 'optionalAccess', _84 => _84.maxOutputTokens]),
+temperature: _optionalChain([llmRequest, 'access', _85 => _85.config, 'optionalAccess', _86 => _86.temperature]),
+top_p: _optionalChain([llmRequest, 'access', _87 => _87.config, 'optionalAccess', _88 => _88.topP]),
 stream
 };
 if (stream) {
@@ -1905,13 +1915,11 @@ var OpenAiLlm = class extends BaseLlm {
 const choice = chunk.choices[0];
 if (!choice) continue;
 const delta = choice.delta;
-logger4.debug("Delta content:", delta.content);
 const llmResponse = this.createChunkResponse(delta, chunk.usage);
 if (chunk.usage) {
 usageMetadata = chunk.usage;
 }
-
-if (_optionalChain([llmResponse, 'access', _101 => _101.content, 'optionalAccess', _102 => _102.parts, 'optionalAccess', _103 => _103[0], 'optionalAccess', _104 => _104.text])) {
+if (_optionalChain([llmResponse, 'access', _89 => _89.content, 'optionalAccess', _90 => _90.parts, 'optionalAccess', _91 => _91[0], 'optionalAccess', _92 => _92.text])) {
 const part0 = llmResponse.content.parts[0];
 if (part0.thought) {
 thoughtText += part0.text;
@@ -1952,10 +1960,10 @@ var OpenAiLlm = class extends BaseLlm {
 function: { name: "", arguments: "" }
 };
 }
-if (_optionalChain([toolCall, 'access',
+if (_optionalChain([toolCall, 'access', _93 => _93.function, 'optionalAccess', _94 => _94.name])) {
 accumulatedToolCalls[index].function.name += toolCall.function.name;
 }
-if (_optionalChain([toolCall, 'access',
+if (_optionalChain([toolCall, 'access', _95 => _95.function, 'optionalAccess', _96 => _96.arguments])) {
 accumulatedToolCalls[index].function.arguments += toolCall.function.arguments;
 }
 }
@@ -1970,7 +1978,7 @@ var OpenAiLlm = class extends BaseLlm {
 }
 if (accumulatedToolCalls.length > 0) {
 for (const toolCall of accumulatedToolCalls) {
-if (_optionalChain([toolCall, 'access',
+if (_optionalChain([toolCall, 'access', _97 => _97.function, 'optionalAccess', _98 => _98.name])) {
 parts.push({
 functionCall: {
 id: toolCall.id,
@@ -1993,7 +2001,6 @@ var OpenAiLlm = class extends BaseLlm {
 } : void 0,
 finishReason: this.toAdkFinishReason(choice.finish_reason)
 });
-logger4.debug(this.buildResponseLog(finalResponse));
 yield finalResponse;
 } else {
 yield llmResponse;
@@ -2030,7 +2037,9 @@ var OpenAiLlm = class extends BaseLlm {
 choice,
 response.usage
 );
-
+this.logger.debug(
+`OpenAI response: ${_optionalChain([response, 'access', _99 => _99.usage, 'optionalAccess', _100 => _100.completion_tokens]) || 0} tokens`
+);
 yield llmResponse;
 }
 }
@@ -2056,7 +2065,7 @@ var OpenAiLlm = class extends BaseLlm {
 }
 if (delta.tool_calls) {
 for (const toolCall of delta.tool_calls) {
-if (toolCall.type === "function" && _optionalChain([toolCall, 'access',
+if (toolCall.type === "function" && _optionalChain([toolCall, 'access', _101 => _101.function, 'optionalAccess', _102 => _102.name])) {
 parts.push({
 functionCall: {
 id: toolCall.id || "",
@@ -2084,10 +2093,6 @@ var OpenAiLlm = class extends BaseLlm {
 */
 openAiMessageToLlmResponse(choice, usage) {
 const message = choice.message;
-logger4.debug(
-"OpenAI response:",
-JSON.stringify({ message, usage }, null, 2)
-);
 const parts = [];
 if (message.content) {
 parts.push({ text: message.content });
@@ -2126,10 +2131,10 @@ var OpenAiLlm = class extends BaseLlm {
 if (role === "system") {
 return {
 role: "system",
-content: _optionalChain([content, 'access',
+content: _optionalChain([content, 'access', _103 => _103.parts, 'optionalAccess', _104 => _104[0], 'optionalAccess', _105 => _105.text]) || ""
 };
 }
-if (_optionalChain([content, 'access',
+if (_optionalChain([content, 'access', _106 => _106.parts, 'optionalAccess', _107 => _107.some, 'call', _108 => _108((part) => part.functionCall)])) {
 const functionCallPart = content.parts.find(
 (part) => part.functionCall
 );
@@ -2149,7 +2154,7 @@ var OpenAiLlm = class extends BaseLlm {
 ]
 };
 }
-if (_optionalChain([content, 'access',
+if (_optionalChain([content, 'access', _109 => _109.parts, 'optionalAccess', _110 => _110.some, 'call', _111 => _111((part) => part.functionResponse)])) {
 const functionResponsePart = content.parts.find(
 (part) => part.functionResponse
 );
@@ -2161,7 +2166,7 @@ var OpenAiLlm = class extends BaseLlm {
 )
 };
 }
-if (_optionalChain([content, 'access',
+if (_optionalChain([content, 'access', _112 => _112.parts, 'optionalAccess', _113 => _113.length]) === 1 && content.parts[0].text) {
 return {
 role,
 content: content.parts[0].text
@@ -2184,7 +2189,7 @@ var OpenAiLlm = class extends BaseLlm {
 text: part.text
 };
 }
-if (_optionalChain([part, 'access',
+if (_optionalChain([part, 'access', _114 => _114.inline_data, 'optionalAccess', _115 => _115.mime_type]) && _optionalChain([part, 'access', _116 => _116.inline_data, 'optionalAccess', _117 => _117.data])) {
 return {
 type: "image_url",
 image_url: {
@@ -2273,69 +2278,8 @@ var OpenAiLlm = class extends BaseLlm {
 * Check if response has inline data (similar to Google LLM)
 */
 hasInlineData(response) {
-const parts = _optionalChain([response, 'access',
-return _optionalChain([parts, 'optionalAccess',
-}
-/**
-* Build request log string for debugging (similar to Google LLM)
-*/
-buildRequestLog(req) {
-const functionDecls = _optionalChain([req, 'access', _132 => _132.config, 'optionalAccess', _133 => _133.tools, 'optionalAccess', _134 => _134[0], 'optionalAccess', _135 => _135.functionDeclarations]) || [];
-const functionLogs = functionDecls.length > 0 ? functionDecls.map(
-(funcDecl) => `${funcDecl.name}: ${JSON.stringify(_optionalChain([funcDecl, 'access', _136 => _136.parameters, 'optionalAccess', _137 => _137.properties]) || {})}`
-) : [];
-const contentsLogs = _optionalChain([req, 'access', _138 => _138.contents, 'optionalAccess', _139 => _139.map, 'call', _140 => _140(
-(content) => JSON.stringify(content, (key, value) => {
-if (key === "data" && typeof value === "string" && value.length > 100) {
-return "[EXCLUDED]";
-}
-return value;
-})
-)]) || [];
-return _dedent2.default`
-LLM Request:
------------------------------------------------------------
-System Instruction:
-${req.getSystemInstructionText() || ""}
------------------------------------------------------------
-Contents:
-${contentsLogs.join(NEW_LINE2)}
------------------------------------------------------------
-Functions:
-${functionLogs.join(NEW_LINE2)}
------------------------------------------------------------`;
-}
-/**
-* Build response log string for debugging (similar to Google LLM)
-*/
-buildResponseLog(response) {
-const functionCallsText = [];
-if (_optionalChain([response, 'access', _141 => _141.content, 'optionalAccess', _142 => _142.parts])) {
-for (const part of response.content.parts) {
-if (part.functionCall) {
-const funcCall = part.functionCall;
-functionCallsText.push(
-`name: ${funcCall.name}, args: ${JSON.stringify(funcCall.args)}`
-);
-}
-}
-}
-const text = _optionalChain([response, 'access', _143 => _143.content, 'optionalAccess', _144 => _144.parts, 'optionalAccess', _145 => _145.filter, 'call', _146 => _146((part) => part.text), 'optionalAccess', _147 => _147.map, 'call', _148 => _148((part) => part.text), 'optionalAccess', _149 => _149.join, 'call', _150 => _150("")]) || "";
-return _dedent2.default`
-LLM Response:
------------------------------------------------------------
-Text:
-${text}
------------------------------------------------------------
-Function calls:
-${functionCallsText.join(NEW_LINE2)}
------------------------------------------------------------
-Usage:
-${JSON.stringify(response.usageMetadata, null, 2)}
------------------------------------------------------------
-Finish Reason:
-${response.finishReason}
------------------------------------------------------------`;
+const parts = _optionalChain([response, 'access', _118 => _118.content, 'optionalAccess', _119 => _119.parts]);
+return _optionalChain([parts, 'optionalAccess', _120 => _120.some, 'call', _121 => _121((part) => part.inlineData)]) || false;
 }
 /**
 * Gets the OpenAI client
@@ -2356,14 +2300,289 @@ var OpenAiLlm = class extends BaseLlm {
 }
 };

+// src/models/ai-sdk.ts
+init_logger();
+
+
+
+
+var _ai = require('ai');
+var AiSdkLlm = (_class7 = class extends BaseLlm {
+
+__init13() {this.logger = new Logger({ name: "AiSdkLlm" })}
+/**
+* Constructor accepts a pre-configured LanguageModel instance
+* @param model - Pre-configured LanguageModel from provider(modelName)
+*/
+constructor(modelInstance) {
+super(modelInstance.modelId || "ai-sdk-model");_class7.prototype.__init13.call(this);;
+this.modelInstance = modelInstance;
+}
+/**
+* Returns empty array - following Python ADK pattern
+*/
+static supportedModels() {
+return [];
+}
+async *generateContentAsyncImpl(request, stream = false) {
+try {
+const messages = this.convertToAiSdkMessages(request);
+const systemMessage = request.getSystemInstructionText();
+const tools = this.convertToAiSdkTools(request);
+const requestParams = {
+model: this.modelInstance,
+messages,
+system: systemMessage,
+tools: Object.keys(tools).length > 0 ? tools : void 0,
+maxTokens: _optionalChain([request, 'access', _122 => _122.config, 'optionalAccess', _123 => _123.maxOutputTokens]),
+temperature: _optionalChain([request, 'access', _124 => _124.config, 'optionalAccess', _125 => _125.temperature]),
+topP: _optionalChain([request, 'access', _126 => _126.config, 'optionalAccess', _127 => _127.topP])
+};
+if (stream) {
+const result = _ai.streamText.call(void 0, requestParams);
+let accumulatedText = "";
+for await (const delta of result.textStream) {
+accumulatedText += delta;
+yield new LlmResponse({
+content: {
+role: "model",
+parts: [{ text: accumulatedText }]
+},
+partial: true
+});
+}
+const toolCalls = await result.toolCalls;
+const parts = [];
+if (accumulatedText) {
+parts.push({ text: accumulatedText });
+}
+if (toolCalls && toolCalls.length > 0) {
+for (const toolCall of toolCalls) {
+parts.push({
+functionCall: {
+id: toolCall.toolCallId,
+name: toolCall.toolName,
+args: toolCall.args
+}
+});
+}
+}
+const finalUsage = await result.usage;
+const finishReason = await result.finishReason;
+yield new LlmResponse({
+content: {
+role: "model",
+parts: parts.length > 0 ? parts : [{ text: "" }]
+},
+usageMetadata: finalUsage ? {
+promptTokenCount: finalUsage.promptTokens,
+candidatesTokenCount: finalUsage.completionTokens,
+totalTokenCount: finalUsage.totalTokens
+} : void 0,
+finishReason: this.mapFinishReason(finishReason),
+turnComplete: true
+});
+} else {
+const result = await _ai.generateText.call(void 0, requestParams);
+const parts = [];
+if (result.text) {
+parts.push({ text: result.text });
+}
+if (result.toolCalls && result.toolCalls.length > 0) {
+for (const toolCall of result.toolCalls) {
+parts.push({
+functionCall: {
+id: toolCall.toolCallId,
+name: toolCall.toolName,
+args: toolCall.args
+}
+});
+}
+}
+yield new LlmResponse({
+content: {
+role: "model",
+parts: parts.length > 0 ? parts : [{ text: "" }]
+},
+usageMetadata: result.usage ? {
+promptTokenCount: result.usage.promptTokens,
+candidatesTokenCount: result.usage.completionTokens,
+totalTokenCount: result.usage.totalTokens
+} : void 0,
+finishReason: this.mapFinishReason(result.finishReason),
+turnComplete: true
+});
+}
+} catch (error) {
+this.logger.error(`AI SDK Error: ${String(error)}`, { error, request });
+yield LlmResponse.fromError(error, {
+errorCode: "AI_SDK_ERROR",
+model: this.model
+});
+}
+}
+/**
+* Convert ADK LlmRequest to AI SDK CoreMessage format
+*/
+convertToAiSdkMessages(llmRequest) {
+const messages = [];
+for (const content of llmRequest.contents || []) {
+const message = this.contentToAiSdkMessage(content);
+if (message) {
+messages.push(message);
+}
+}
+return messages;
+}
+/**
+* Convert ADK tools to AI SDK tools format
+*/
+convertToAiSdkTools(llmRequest) {
+const tools = {};
+if (_optionalChain([llmRequest, 'access', _128 => _128.config, 'optionalAccess', _129 => _129.tools])) {
+for (const toolConfig of llmRequest.config.tools) {
+if ("functionDeclarations" in toolConfig) {
+for (const funcDecl of toolConfig.functionDeclarations) {
+tools[funcDecl.name] = {
+description: funcDecl.description,
+parameters: _ai.jsonSchema.call(void 0, funcDecl.parameters || {})
+};
+}
+}
+}
+}
+return tools;
+}
+/**
+* Convert ADK Content to AI SDK CoreMessage
+*/
+contentToAiSdkMessage(content) {
+const role = this.mapRole(content.role);
+if (!content.parts || content.parts.length === 0) {
+return null;
+}
+if (content.parts.length === 1 && content.parts[0].text) {
+const textContent = content.parts[0].text;
+if (role === "system") {
+return { role: "system", content: textContent };
+}
+if (role === "assistant") {
+return { role: "assistant", content: textContent };
+}
+return { role: "user", content: textContent };
+}
+if (_optionalChain([content, 'access', _130 => _130.parts, 'optionalAccess', _131 => _131.some, 'call', _132 => _132((part) => part.functionCall)])) {
+const textParts = content.parts.filter((part) => part.text);
+const functionCalls = content.parts.filter((part) => part.functionCall);
+const contentParts2 = [];
+for (const textPart of textParts) {
+if (textPart.text) {
+contentParts2.push({
+type: "text",
+text: textPart.text
+});
+}
+}
+for (const funcPart of functionCalls) {
+if (funcPart.functionCall) {
+contentParts2.push({
+type: "tool-call",
+toolCallId: funcPart.functionCall.id,
+toolName: funcPart.functionCall.name,
+args: funcPart.functionCall.args
+});
+}
+}
+return {
+role: "assistant",
+content: contentParts2
+};
+}
+if (_optionalChain([content, 'access', _133 => _133.parts, 'optionalAccess', _134 => _134.some, 'call', _135 => _135((part) => part.functionResponse)])) {
+const functionResponses = content.parts.filter(
+(part) => part.functionResponse
+);
+const contentParts2 = functionResponses.map((part) => ({
+type: "tool-result",
+toolCallId: part.functionResponse.id,
+toolName: part.functionResponse.name || "unknown",
+result: part.functionResponse.response
+}));
+return {
+role: "tool",
+content: contentParts2
+};
+}
+const contentParts = [];
+for (const part of content.parts) {
+if (part.text) {
+contentParts.push({
+type: "text",
+text: part.text
+});
+}
+}
+if (contentParts.length === 0) {
+return null;
+}
+if (contentParts.length === 1) {
+const textContent = contentParts[0].text;
+if (role === "system") {
+return { role: "system", content: textContent };
+}
+if (role === "assistant") {
+return { role: "assistant", content: textContent };
+}
+return { role: "user", content: textContent };
+}
+if (role === "system") {
+const combinedText = contentParts.map((p) => p.text).join("");
+return { role: "system", content: combinedText };
+}
+if (role === "assistant") {
+return { role: "assistant", content: contentParts };
+}
+return { role: "user", content: contentParts };
+}
+/**
+* Map ADK role to AI SDK role
+*/
+mapRole(role) {
+switch (role) {
+case "model":
+case "assistant":
+return "assistant";
+case "system":
+return "system";
+default:
+return "user";
+}
+}
+/**
+* Map AI SDK finish reason to ADK finish reason
+*/
+mapFinishReason(finishReason) {
+switch (finishReason) {
+case "stop":
+case "end_of_message":
+return "STOP";
+case "length":
+case "max_tokens":
+return "MAX_TOKENS";
+default:
+return "FINISH_REASON_UNSPECIFIED";
+}
+}
+}, _class7);
+
 // src/models/llm-registry.ts
 init_logger();
-var
-var LLMRegistry = (_class6 = class _LLMRegistry {
+var LLMRegistry = (_class8 = class _LLMRegistry {
 /**
 * Map of model name regex to LLM class
 */
 static __initStatic() {this.llmRegistry = /* @__PURE__ */ new Map()}
+static __initStatic2() {this.logger = new Logger({ name: "LLMRegistry" })}
 /**
 * Creates a new LLM instance
 *
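The largest addition in this release is `AiSdkLlm`, an adapter that drives a Vercel AI SDK `LanguageModel` through `generateText`/`streamText` and maps the result back onto `LlmResponse`. A hedged usage sketch; the `@ai-sdk/openai` provider and both import paths are illustrative assumptions, since the diff itself only requires "a pre-configured LanguageModel instance":

```ts
import { openai } from "@ai-sdk/openai"; // assumed provider package, not part of this diff
import { AiSdkLlm } from "@iqai/adk";    // newly listed in models_exports above

const llm = new AiSdkLlm(openai("gpt-4o-mini"));
// llm.model picks up the provider's modelId, falling back to "ai-sdk-model".
// generateContentAsyncImpl() converts ADK contents/tools to AI SDK messages/tools,
// yields partial LlmResponses while streaming, fills usageMetadata from result.usage,
// and on failure yields LlmResponse.fromError(error, { errorCode: "AI_SDK_ERROR", model: llm.model }).
```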
@@ -2415,12 +2634,12 @@ var LLMRegistry = (_class6 = class _LLMRegistry {
|
|
|
2415
2634
|
* Logs all registered models for debugging
|
|
2416
2635
|
*/
|
|
2417
2636
|
static logRegisteredModels() {
|
|
2418
|
-
|
|
2637
|
+
_LLMRegistry.logger.debug(
|
|
2419
2638
|
"Registered LLM models:",
|
|
2420
2639
|
[..._LLMRegistry.llmRegistry.entries()].map(([regex]) => regex.toString())
|
|
2421
2640
|
);
|
|
2422
2641
|
}
|
|
2423
|
-
},
|
|
2642
|
+
}, _class8.__initStatic(), _class8.__initStatic2(), _class8);
|
|
2424
2643
|
|
|
2425
2644
|
// src/models/registry.ts
|
|
2426
2645
|
function registerProviders() {
|
|
@@ -2618,7 +2837,7 @@ var OAuth2Credential = class extends AuthCredential {
|
|
|
2618
2837
|
"Cannot refresh token: no refresh token or refresh function"
|
|
2619
2838
|
);
|
|
2620
2839
|
}
|
|
2621
|
-
const result = await _optionalChain([this, 'access',
|
|
2840
|
+
const result = await _optionalChain([this, 'access', _136 => _136.refreshFunction, 'optionalCall', _137 => _137(this.refreshToken)]);
|
|
2622
2841
|
if (!result) {
|
|
2623
2842
|
throw new Error("Failed to refresh token");
|
|
2624
2843
|
}
|
|
@@ -2672,7 +2891,7 @@ var AuthHandler = class {
|
|
|
2672
2891
|
* Gets the authentication token
|
|
2673
2892
|
*/
|
|
2674
2893
|
getToken() {
|
|
2675
|
-
return _optionalChain([this, 'access',
|
|
2894
|
+
return _optionalChain([this, 'access', _138 => _138.credential, 'optionalAccess', _139 => _139.getToken, 'call', _140 => _140()]);
|
|
2676
2895
|
}
|
|
2677
2896
|
/**
|
|
2678
2897
|
* Gets headers for HTTP requests
|
|
@@ -2687,7 +2906,7 @@ var AuthHandler = class {
|
|
|
2687
2906
|
* Refreshes the token if necessary
|
|
2688
2907
|
*/
|
|
2689
2908
|
async refreshToken() {
|
|
2690
|
-
if (_optionalChain([this, 'access',
|
|
2909
|
+
if (_optionalChain([this, 'access', _141 => _141.credential, 'optionalAccess', _142 => _142.canRefresh, 'call', _143 => _143()])) {
|
|
2691
2910
|
await this.credential.refresh();
|
|
2692
2911
|
}
|
|
2693
2912
|
}
|
|
@@ -2794,10 +3013,10 @@ var OpenIdConnectScheme = class extends AuthScheme {
|
|
|
2794
3013
|
};
|
|
2795
3014
|
|
|
2796
3015
|
// src/sessions/state.ts
|
|
2797
|
-
var State = (
|
|
2798
|
-
static
|
|
2799
|
-
static
|
|
2800
|
-
static
|
|
3016
|
+
var State = (_class9 = class _State {
|
|
3017
|
+
static __initStatic3() {this.APP_PREFIX = "app:"}
|
|
3018
|
+
static __initStatic4() {this.USER_PREFIX = "user:"}
|
|
3019
|
+
static __initStatic5() {this.TEMP_PREFIX = "temp:"}
|
|
2801
3020
|
|
|
2802
3021
|
|
|
2803
3022
|
/**
|
|
@@ -2891,13 +3110,13 @@ var State = (_class7 = class _State {
     const state = new _State(value, delta);
     return _State.createProxy(state);
   }
- },
+ }, _class9.__initStatic3(), _class9.__initStatic4(), _class9.__initStatic5(), _class9);

 // src/events/event.ts
 var _uuid = require('uuid');

 // src/events/event-actions.ts
- var EventActions = (
+ var EventActions = (_class10 = class {
   /**
    * If true, it won't call model to summarize function response.
    * Only used for function_response event.
@@ -2906,12 +3125,12 @@ var EventActions = (_class8 = class {
   /**
    * Indicates that the event is updating the state with the given delta.
    */
-
+   __init14() {this.stateDelta = {}}
   /**
    * Indicates that the event is updating an artifact. key is the filename,
    * value is the version.
    */
-
+   __init15() {this.artifactDelta = {}}
   /**
    * If set, the event transfers to the specified agent.
    */
@@ -2927,7 +3146,7 @@ var EventActions = (_class8 = class {
   /**
    * Constructor for EventActions
    */
-  constructor(options = {}) {;
+  constructor(options = {}) {;_class10.prototype.__init14.call(this);_class10.prototype.__init15.call(this);
     this.skipSummarization = options.skipSummarization;
     this.stateDelta = options.stateDelta || {};
     this.artifactDelta = options.artifactDelta || {};
@@ -2935,16 +3154,16 @@ var EventActions = (_class8 = class {
     this.escalate = options.escalate;
     this.requestedAuthConfigs = options.requestedAuthConfigs;
   }
- },
+ }, _class10);

 // src/events/event.ts
- var Event = (
+ var Event = (_class11 = class _Event extends LlmResponse {
   /** The invocation ID of the event. */
-
+   __init16() {this.invocationId = ""}
   /** 'user' or the name of the agent, indicating who appended the event to the session. */

   /** The actions taken by the agent. */
-
+   __init17() {this.actions = new EventActions()}
   /**
    * Set of ids of the long running function calls.
    * Agent client will know from this field about which function call is long running.
@@ -2959,9 +3178,9 @@ var Event = (_class9 = class _Event extends LlmResponse {
    */

   /** The unique identifier of the event. */
-
+   __init18() {this.id = ""}
   /** The timestamp of the event (seconds since epoch). */
-
+   __init19() {this.timestamp = Math.floor(Date.now() / 1e3)}
   /**
    * Constructor for Event.
    */
@@ -2969,7 +3188,7 @@ var Event = (_class9 = class _Event extends LlmResponse {
     super({
       content: opts.content,
       partial: opts.partial
-    });
+    });_class11.prototype.__init16.call(this);_class11.prototype.__init17.call(this);_class11.prototype.__init18.call(this);_class11.prototype.__init19.call(this);;
     this.invocationId = _nullishCoalesce(opts.invocationId, () => ( ""));
     this.author = opts.author;
     this.actions = _nullishCoalesce(opts.actions, () => ( new EventActions()));
@@ -3030,7 +3249,7 @@ var Event = (_class9 = class _Event extends LlmResponse {
   static newId() {
     return _uuid.v4.call(void 0, ).replace(/-/g, "").substring(0, 8);
   }
- },
+ }, _class11);

 // src/agents/readonly-context.ts
 var ReadonlyContext = class {
@@ -3136,7 +3355,7 @@ var CallbackContext = class extends ReadonlyContext {
 };

 // src/agents/base-agent.ts
- var BaseAgent = (
+ var BaseAgent = (_class12 = class {
   /**
    * The agent's name.
    * Agent name must be a valid identifier and unique within the agent tree.
@@ -3148,7 +3367,7 @@ var BaseAgent = (_class10 = class {
    * The model uses this to determine whether to delegate control to the agent.
    * One-line description is enough and preferred.
    */
-
+   __init20() {this.description = ""}
   /**
    * The parent agent of this agent.
    * Note that an agent can ONLY be added as sub-agent once.
@@ -3160,7 +3379,7 @@ var BaseAgent = (_class10 = class {
   /**
    * The sub-agents of this agent.
    */
-
+   __init21() {this.subAgents = []}
   /**
    * Callback or list of callbacks to be invoked before the agent run.
    * When a list of callbacks is provided, the callbacks will be called in the
@@ -3192,7 +3411,7 @@ var BaseAgent = (_class10 = class {
   /**
    * Constructor for BaseAgent
    */
-  constructor(config) {;
+  constructor(config) {;_class12.prototype.__init20.call(this);_class12.prototype.__init21.call(this);
     this.name = config.name;
     this.description = config.description || "";
     this.subAgents = config.subAgents || [];
@@ -3449,7 +3668,7 @@ var BaseAgent = (_class10 = class {
       subAgent.parentAgent = this;
     }
   }
- },
+ }, _class12);

 // src/agents/llm-agent.ts
 init_logger();
@@ -3573,8 +3792,8 @@ init_function_utils();
 // src/tools/common/google-search.ts
 init_logger();
 init_base_tool();
- var GoogleSearch = (
-
+ var GoogleSearch = (_class13 = class extends BaseTool {
+   __init22() {this.logger = new Logger({ name: "GoogleSearch" })}
   /**
    * Constructor for GoogleSearch
    */
@@ -3582,7 +3801,7 @@ var GoogleSearch = (_class11 = class extends BaseTool {
     super({
       name: "google_search",
       description: "Search the web using Google"
-    });
+    });_class13.prototype.__init22.call(this);;
   }
   /**
    * Get the function declaration for the tool
@@ -3631,7 +3850,7 @@ var GoogleSearch = (_class11 = class extends BaseTool {
       ]
     };
   }
- },
+ }, _class13);

 // src/tools/common/http-request-tool.ts
 init_base_tool();
@@ -3763,7 +3982,7 @@ var FileOperationsTool = class extends BaseTool {
       name: "file_operations",
       description: "Perform file system operations like reading, writing, and managing files"
     });
-    this.basePath = _optionalChain([options, 'optionalAccess',
+    this.basePath = _optionalChain([options, 'optionalAccess', _144 => _144.basePath]) || process.cwd();
   }
   /**
    * Get the function declaration for the tool
@@ -4080,8 +4299,8 @@ var UserInteractionTool = class extends BaseTool {
 // src/tools/common/exit-loop-tool.ts
 init_logger();
 init_base_tool();
- var ExitLoopTool = (
-
+ var ExitLoopTool = (_class14 = class extends BaseTool {
+   __init23() {this.logger = new Logger({ name: "ExitLoopTool" })}
   /**
    * Constructor for ExitLoopTool
    */
@@ -4089,7 +4308,7 @@ var ExitLoopTool = (_class12 = class extends BaseTool {
     super({
       name: "exit_loop",
       description: "Exits the loop. Call this function only when you are instructed to do so."
-    });
+    });_class14.prototype.__init23.call(this);;
   }
   /**
    * Execute the exit loop action
@@ -4098,13 +4317,13 @@ var ExitLoopTool = (_class12 = class extends BaseTool {
     this.logger.debug("Executing exit loop tool");
     context.actions.escalate = true;
   }
- },
+ }, _class14);

 // src/tools/common/get-user-choice-tool.ts
 init_logger();
 init_base_tool();
- var GetUserChoiceTool = (
-
+ var GetUserChoiceTool = (_class15 = class extends BaseTool {
+   __init24() {this.logger = new Logger({ name: "GetUserChoiceTool" })}
   /**
    * Constructor for GetUserChoiceTool
    */
@@ -4113,7 +4332,7 @@ var GetUserChoiceTool = (_class13 = class extends BaseTool {
       name: "get_user_choice",
       description: "This tool provides the options to the user and asks them to choose one. Use this tool when you need the user to make a selection between multiple options. Do not list options in your response - use this tool instead.",
       isLongRunning: true
-    });
+    });_class15.prototype.__init24.call(this);;
   }
   /**
    * Get the function declaration for the tool
@@ -4156,13 +4375,13 @@ var GetUserChoiceTool = (_class13 = class extends BaseTool {
     context.actions.skipSummarization = true;
     return null;
   }
- },
+ }, _class15);

 // src/tools/common/transfer-to-agent-tool.ts
 init_logger();
 init_base_tool();
- var TransferToAgentTool = (
-
+ var TransferToAgentTool = (_class16 = class extends BaseTool {
+   __init25() {this.logger = new Logger({ name: "TransferToAgentTool" })}
   /**
    * Constructor for TransferToAgentTool
    */
@@ -4170,7 +4389,7 @@ var TransferToAgentTool = (_class14 = class extends BaseTool {
     super({
       name: "transfer_to_agent",
       description: "Transfer the question to another agent."
-    });
+    });_class16.prototype.__init25.call(this);;
   }
   /**
    * Execute the transfer to agent action
@@ -4179,13 +4398,13 @@ var TransferToAgentTool = (_class14 = class extends BaseTool {
     this.logger.debug(`Executing transfer to agent: ${args.agent_name}`);
     context.actions.transferToAgent = args.agent_name;
   }
- },
+ }, _class16);

 // src/tools/common/load-memory-tool.ts
 init_logger();
 init_base_tool();
- var LoadMemoryTool = (
-
+ var LoadMemoryTool = (_class17 = class extends BaseTool {
+   __init26() {this.logger = new Logger({ name: "LoadMemoryTool" })}
   /**
    * Constructor for LoadMemoryTool
    */
@@ -4193,7 +4412,7 @@ var LoadMemoryTool = (_class15 = class extends BaseTool {
     super({
       name: "load_memory",
       description: "Loads the memory for the current user based on a query."
-    });
+    });_class17.prototype.__init26.call(this);;
   }
   /**
    * Get the function declaration for the tool
@@ -4223,7 +4442,7 @@ var LoadMemoryTool = (_class15 = class extends BaseTool {
       const searchResult = await context.searchMemory(args.query);
       return {
         memories: searchResult.memories || [],
-        count: _optionalChain([searchResult, 'access',
+        count: _optionalChain([searchResult, 'access', _145 => _145.memories, 'optionalAccess', _146 => _146.length]) || 0
       };
     } catch (error) {
       console.error("Error searching memory:", error);
@@ -4233,7 +4452,7 @@ var LoadMemoryTool = (_class15 = class extends BaseTool {
       };
     }
   }
- },
+ }, _class17);

 // src/tools/common/load-artifacts-tool.ts
 init_base_tool();
@@ -4384,10 +4603,10 @@ var McpError = class extends Error {
 };

 // src/tools/mcp/sampling-handler.ts
- var McpSamplingHandler = (
-
+ var McpSamplingHandler = (_class18 = class {
+   __init27() {this.logger = new Logger({ name: "McpSamplingHandler" })}

-  constructor(samplingHandler) {;
+  constructor(samplingHandler) {;_class18.prototype.__init27.call(this);
     this.samplingHandler = samplingHandler;
   }
   /**
@@ -4568,7 +4787,7 @@ var McpSamplingHandler = (_class16 = class {
     this.samplingHandler = handler;
     this.logger.debug("ADK sampling handler updated");
   }
- },
+ }, _class18);
 function createSamplingHandler(handler) {
   return handler;
 }
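As the hunk above shows, createSamplingHandler is an identity helper: it returns the function it is given and exists mainly for readability at call sites. A sketch of wiring one up; the request handling inside the callback is an assumption and myLlmBackend is a hypothetical placeholder, not part of this diff:

  // Sketch only - the request/response shapes are not defined in this diff.
  const samplingHandler = createSamplingHandler(async (request) => {
    // Forward the MCP sampling request to whatever LLM backend the host app uses.
    return await myLlmBackend.complete(request); // hypothetical backend
  });
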
@@ -4602,14 +4821,14 @@ function withRetry(fn, instance, reinitMethod, maxRetries = 1) {
 }

 // src/tools/mcp/client.ts
- var McpClientService = (
-
-
-
-
-
-
-  constructor(config) {;
+ var McpClientService = (_class19 = class {
+
+   __init28() {this.client = null}
+   __init29() {this.transport = null}
+   __init30() {this.isClosing = false}
+   __init31() {this.mcpSamplingHandler = null}
+   __init32() {this.logger = new Logger({ name: "McpClientService" })}
+  constructor(config) {;_class19.prototype.__init28.call(this);_class19.prototype.__init29.call(this);_class19.prototype.__init30.call(this);_class19.prototype.__init31.call(this);_class19.prototype.__init32.call(this);
     this.config = config;
     if (config.samplingHandler) {
       this.mcpSamplingHandler = new McpSamplingHandler(config.samplingHandler);
@@ -4665,15 +4884,13 @@ var McpClientService = (_class17 = class {
         await connectPromise;
       }
       await this.setupSamplingHandler(client);
-
-      console.log("\u2705 MCP client connected successfully");
-      }
+      this.logger.debug("\u2705 MCP client connected successfully");
       this.client = client;
       return client;
     } catch (error) {
       await this.cleanupResources();
       if (!(error instanceof McpError)) {
-
+        this.logger.error("Failed to initialize MCP client:", error);
         throw new McpError(
           `Failed to initialize MCP client: ${error instanceof Error ? error.message : String(error)}`,
           "connection_error" /* CONNECTION_ERROR */,
@@ -4689,12 +4906,10 @@ var McpClientService = (_class17 = class {
   async createTransport() {
     try {
       if (this.config.transport.mode === "sse") {
-
-
-
-
-        );
-        }
+        this.logger.debug(
+          "\u{1F680} Initializing MCP client in SSE mode",
+          this.config.transport.serverUrl
+        );
         const headers = {
           ...this.config.transport.headers || {},
           ...this.config.headers || {}
@@ -4709,12 +4924,10 @@ var McpClientService = (_class17 = class {
           }
         );
       }
-
-
-
-
-      );
-      }
+      this.logger.debug(
+        "\u{1F680} Initializing MCP client in STDIO mode",
+        this.config.transport.command
+      );
       return new (0, _stdiojs.StdioClientTransport)({
         command: this.config.transport.command,
         args: this.config.transport.args,
@@ -4733,9 +4946,7 @@ var McpClientService = (_class17 = class {
    * Used by the retry mechanism.
    */
   async reinitialize() {
-
-    console.log("\u{1F504} Reinitializing MCP client after closed connection");
-    }
+    this.logger.debug("\u{1F504} Reinitializing MCP client after closed connection");
     await this.cleanupResources();
     this.client = null;
     this.transport = null;
@@ -4759,11 +4970,9 @@ var McpClientService = (_class17 = class {
       if (this.transport && typeof this.transport.close === "function") {
         await this.transport.close();
       }
-
-      console.log("\u{1F9F9} Cleaned up MCP client resources");
-      }
+      this.logger.debug("\u{1F9F9} Cleaned up MCP client resources");
     } catch (error) {
-
+      this.logger.error("Error cleaning up MCP resources:", error);
     } finally {
       this.client = null;
       this.transport = null;
@@ -4785,7 +4994,7 @@ var McpClientService = (_class17 = class {
         },
         this,
         async (instance) => await instance.reinitialize(),
-        _optionalChain([this, 'access',
+        _optionalChain([this, 'access', _147 => _147.config, 'access', _148 => _148.retryOptions, 'optionalAccess', _149 => _149.maxRetries]) || 2
       );
       return await wrappedCall();
     } catch (error) {
@@ -4805,9 +5014,7 @@ var McpClientService = (_class17 = class {
    * Similar to Python's close() method.
    */
   async close() {
-
-    console.log("\u{1F51A} Closing MCP client service");
-    }
+    this.logger.debug("\u{1F51A} Closing MCP client service");
     await this.cleanupResources();
   }
   /**
@@ -4818,11 +5025,9 @@ var McpClientService = (_class17 = class {
   }
   async setupSamplingHandler(client) {
     if (!this.mcpSamplingHandler) {
-
-
-
-      );
-      }
+      this.logger.debug(
+        "\u26A0\uFE0F No sampling handler provided - sampling requests will be rejected"
+      );
       return;
     }
     try {
@@ -4832,12 +5037,10 @@ var McpClientService = (_class17 = class {
         try {
           this.logger.debug("Received sampling request:", request);
           const response = await this.mcpSamplingHandler.handleSamplingRequest(request);
-
-          console.log("\u2705 Sampling request completed successfully");
-          }
+          this.logger.debug("\u2705 Sampling request completed successfully");
           return response;
         } catch (error) {
-
+          this.logger.error("\u274C Error handling sampling request:", error);
           if (error instanceof McpError) {
             throw error;
           }
@@ -4849,16 +5052,12 @@ var McpClientService = (_class17 = class {
           }
         }
       );
-
-      console.log("\u{1F3AF} Sampling handler registered successfully");
-      }
+      this.logger.debug("\u{1F3AF} Sampling handler registered successfully");
     } catch (error) {
-
-
-
-
-      );
-      }
+      this.logger.error("Failed to setup sampling handler:", error);
+      this.logger.debug(
+        "\u26A0\uFE0F Sampling handler registration failed, continuing without sampling support"
+      );
     }
   }
   /**
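The hunks above replace direct console.log calls in McpClientService with this.logger.debug / this.logger.error on the per-class Logger instance added in __init32. A rough sketch of the resulting pattern, assuming Logger gates its output on a debug setting; the Logger implementation itself is outside this diff and ExampleService is hypothetical:

  // Assumed shape of the pattern, not the library's actual code.
  class ExampleService {
    private logger = new Logger({ name: "ExampleService" });
    async connect() {
      this.logger.debug("connected"); // replaces console.log(...)
    }
  }
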
@@ -4868,7 +5067,7 @@ var McpClientService = (_class17 = class {
     this.mcpSamplingHandler = new McpSamplingHandler(handler);
     if (this.client) {
       this.setupSamplingHandler(this.client).catch((error) => {
-
+        this.logger.error("Failed to update ADK sampling handler:", error);
       });
     }
   }
@@ -4879,13 +5078,13 @@ var McpClientService = (_class17 = class {
     this.mcpSamplingHandler = null;
     if (this.client) {
       try {
-        _optionalChain([this, 'access',
+        _optionalChain([this, 'access', _150 => _150.client, 'access', _151 => _151.removeRequestHandler, 'optionalCall', _152 => _152("sampling/createMessage")]);
       } catch (error) {
-
+        this.logger.error("Failed to remove sampling handler:", error);
       }
     }
   }
- },
+ }, _class19);

 // src/tools/mcp/create-tool.ts
 init_logger();
@@ -5087,11 +5286,11 @@ async function createTool(mcpTool, client) {
     throw error;
   }
 }
- var McpToolAdapter = (
+ var McpToolAdapter = (_class20 = class extends BaseTool {


-
-
+   __init33() {this.clientService = null}
+   __init34() {this.logger = new Logger({ name: "McpToolAdapter" })}
   constructor(mcpTool, client) {
     const metadata = mcpTool.metadata || {};
     super({
@@ -5100,7 +5299,7 @@ var McpToolAdapter = (_class18 = class extends BaseTool {
       isLongRunning: _nullishCoalesce(metadata.isLongRunning, () => ( false)),
       shouldRetryOnFailure: _nullishCoalesce(metadata.shouldRetryOnFailure, () => ( false)),
       maxRetryAttempts: _nullishCoalesce(metadata.maxRetryAttempts, () => ( 3))
-    });
+    });_class20.prototype.__init33.call(this);_class20.prototype.__init34.call(this);;
     this.mcpTool = mcpTool;
     this.client = client;
     if (client.reinitialize && typeof client.reinitialize === "function") {
@@ -5173,11 +5372,17 @@ var McpToolAdapter = (_class18 = class extends BaseTool {
     throw error;
   }
 }
- },
+ }, _class20);

 // src/tools/mcp/servers.ts
 function createMcpConfig(name, packageName, config = {}) {
-  const {
+  const {
+    debug,
+    description,
+    retryOptions,
+    env: envVars = {},
+    samplingHandler
+  } = config;
   const env = {};
   for (const [key, value] of Object.entries(envVars)) {
     if (value !== void 0) {
@@ -5197,7 +5402,8 @@ function createMcpConfig(name, packageName, config = {}) {
       command: "npx",
       args: ["-y", packageName],
       env
-    }
+    },
+    samplingHandler
   };
 }
 function McpAbi(config = {}) {
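With the changes above, createMcpConfig now destructures samplingHandler (along with debug, description, retryOptions, and env) from the caller's config and forwards it next to the generated stdio transport settings. A usage sketch under those assumptions; the package name and env value below are placeholders, not taken from this diff:

  // Placeholder values - only the option names shown in the diff are assumed.
  const mcpConfig = createMcpConfig("Example MCP Client", "@iqai/example-mcp-package", {
    env: { API_KEY: process.env.API_KEY },
    samplingHandler, // now forwarded into the returned config
  });
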
@@ -5242,7 +5448,7 @@ function McpNearAgent(config = {}) {
 }
 function McpNearIntentSwaps(config = {}) {
   const mcpConfig = createMcpConfig(
-    "
+    "Near Intents Swaps MCP Client",
     "@iqai/mcp-near-intent-swaps",
     config
   );
@@ -5287,13 +5493,13 @@ function McpGeneric(packageName, config = {}, name) {
 }

 // src/tools/mcp/index.ts
- var McpToolset = (
+ var McpToolset = (_class21 = class {

-
-
-
-
-  constructor(config, toolFilter = null) {;
+   __init35() {this.clientService = null}
+   __init36() {this.toolFilter = null}
+   __init37() {this.tools = []}
+   __init38() {this.isClosing = false}
+  constructor(config, toolFilter = null) {;_class21.prototype.__init35.call(this);_class21.prototype.__init36.call(this);_class21.prototype.__init37.call(this);_class21.prototype.__init38.call(this);
     this.config = config;
     this.toolFilter = toolFilter;
     this.clientService = new McpClientService(config);
@@ -5368,7 +5574,7 @@ var McpToolset = (_class19 = class {
         "resource_closed_error" /* RESOURCE_CLOSED_ERROR */
       );
     }
-    if (this.tools.length > 0 && !_optionalChain([this, 'access',
+    if (this.tools.length > 0 && !_optionalChain([this, 'access', _153 => _153.config, 'access', _154 => _154.cacheConfig, 'optionalAccess', _155 => _155.enabled]) === false) {
       return this.tools;
     }
     if (!this.clientService) {
@@ -5394,7 +5600,7 @@ var McpToolset = (_class19 = class {
         }
       }
     }
-    if (_optionalChain([this, 'access',
+    if (_optionalChain([this, 'access', _156 => _156.config, 'access', _157 => _157.cacheConfig, 'optionalAccess', _158 => _158.enabled]) !== false) {
       this.tools = tools;
     }
     return tools;
@@ -5454,7 +5660,7 @@ var McpToolset = (_class19 = class {
   async dispose() {
     await this.close();
   }
- },
+ }, _class21);
 async function getMcpTools(config, toolFilter) {
   const toolset = new McpToolset(config, toolFilter);
   try {
@@ -5482,12 +5688,12 @@ function populateClientFunctionCallId(modelResponseEvent) {
   }
 }
 function removeClientFunctionCallId(content) {
-  if (_optionalChain([content, 'optionalAccess',
+  if (_optionalChain([content, 'optionalAccess', _159 => _159.parts])) {
     for (const part of content.parts) {
-      if (_optionalChain([part, 'access',
+      if (_optionalChain([part, 'access', _160 => _160.functionCall, 'optionalAccess', _161 => _161.id, 'optionalAccess', _162 => _162.startsWith, 'call', _163 => _163(AF_FUNCTION_CALL_ID_PREFIX)])) {
         part.functionCall.id = void 0;
       }
-      if (_optionalChain([part, 'access',
+      if (_optionalChain([part, 'access', _164 => _164.functionResponse, 'optionalAccess', _165 => _165.id, 'optionalAccess', _166 => _166.startsWith, 'call', _167 => _167(AF_FUNCTION_CALL_ID_PREFIX)])) {
         part.functionResponse.id = void 0;
       }
     }
@@ -5632,7 +5838,7 @@ function mergeParallelFunctionResponseEvents(functionResponseEvents) {
   }
   const mergedParts = [];
   for (const event of functionResponseEvents) {
-    if (_optionalChain([event, 'access',
+    if (_optionalChain([event, 'access', _168 => _168.content, 'optionalAccess', _169 => _169.parts])) {
       for (const part of event.content.parts) {
         mergedParts.push(part);
       }
@@ -5665,94 +5871,52 @@ function isLlmAgent(agent) {

 // src/flows/llm-flows/base-llm-flow.ts
 var _ADK_AGENT_NAME_LABEL_KEY = "adk_agent_name";
- var BaseLlmFlow = (
-
-
-
+ var BaseLlmFlow = (_class22 = class {constructor() { _class22.prototype.__init39.call(this);_class22.prototype.__init40.call(this);_class22.prototype.__init41.call(this); }
+   __init39() {this.requestProcessors = []}
+   __init40() {this.responseProcessors = []}
+   __init41() {this.logger = new Logger({ name: "BaseLlmFlow" })}
   async *runAsync(invocationContext) {
-    this.logger.
-      invocationId: invocationContext.invocationId,
-      agentName: invocationContext.agent.name,
-      branch: invocationContext.branch
-    });
+    this.logger.info(`Agent '${invocationContext.agent.name}' started.`);
     let stepCount = 0;
     while (true) {
       stepCount++;
-      this.logger.debug(`\u{1F4CB} Running step ${stepCount}`, {
-        invocationId: invocationContext.invocationId
-      });
       let lastEvent = null;
-      let eventCount = 0;
       for await (const event of this._runOneStepAsync(invocationContext)) {
-        eventCount++;
         lastEvent = event;
-        this.logger.debug(
-          `\u{1F4E4} Yielding event ${eventCount} from step ${stepCount}`,
-          {
-            eventId: event.id,
-            eventType: event.constructor.name,
-            hasContent: !!event.content,
-            isFinalResponse: event.isFinalResponse(),
-            partial: event.partial
-          }
-        );
         yield event;
       }
       if (!lastEvent || lastEvent.isFinalResponse()) {
-        this.logger.
-
-
-        });
+        this.logger.info(
+          `Agent '${invocationContext.agent.name}' finished after ${stepCount} steps.`
+        );
         break;
       }
       if (lastEvent.partial) {
-        this.logger.error(
-
-
-        });
+        this.logger.error(
+          "Partial event encountered. LLM max output limit may be reached."
+        );
         throw new Error(
           "Last event shouldn't be partial. LLM max output limit may be reached."
         );
       }
     }
-    this.logger.debug("\u{1F3C1} runAsync flow finished", {
-      totalSteps: stepCount,
-      invocationId: invocationContext.invocationId
-    });
   }
   async *runLive(invocationContext) {
-    this.logger.debug("\u{1F534} Starting runLive flow", {
-      invocationId: invocationContext.invocationId,
-      agentName: invocationContext.agent.name
-    });
     this.logger.warn("\u26A0\uFE0F runLive not fully implemented, delegating to runAsync");
     yield* this.runAsync(invocationContext);
   }
   async *_runOneStepAsync(invocationContext) {
-    this.logger.debug("\u{1F504} Starting one step execution", {
-      invocationId: invocationContext.invocationId
-    });
     const llmRequest = new LlmRequest();
-    this.logger.debug("\u{1F4DD} Created new LlmRequest", {
-      requestId: llmRequest.id || "unknown"
-    });
-    this.logger.debug("\u{1F527} Starting preprocessing phase");
     let preprocessEventCount = 0;
     for await (const event of this._preprocessAsync(
       invocationContext,
       llmRequest
     )) {
       preprocessEventCount++;
-      this.logger.debug(`\u{1F4E4} Preprocessing event ${preprocessEventCount}`, {
-        eventId: event.id
-      });
       yield event;
     }
-    this.logger.debug("\u2705 Preprocessing completed", {
-      eventCount: preprocessEventCount
-    });
     if (invocationContext.endInvocation) {
-      this.logger.
+      this.logger.info("Invocation ended during preprocessing.");
       return;
     }
     const modelResponseEvent = new Event({
@@ -5761,9 +5925,6 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
       author: invocationContext.agent.name,
       branch: invocationContext.branch
     });
-    this.logger.debug("\u{1F916} Starting LLM call phase", {
-      modelResponseEventId: modelResponseEvent.id
-    });
     let llmResponseCount = 0;
     for await (const llmResponse of this._callLlmAsync(
       invocationContext,
@@ -5771,12 +5932,6 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
       modelResponseEvent
     )) {
       llmResponseCount++;
-      this.logger.debug(`\u{1F504} Processing LLM response ${llmResponseCount}`, {
-        hasContent: !!llmResponse.content,
-        hasError: !!llmResponse.errorCode,
-        interrupted: !!llmResponse.interrupted,
-        partial: !!llmResponse.partial
-      });
       for await (const event of this._postprocessAsync(
         invocationContext,
         llmRequest,
@@ -5784,89 +5939,47 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
         modelResponseEvent
       )) {
         modelResponseEvent.id = Event.newId();
-        this.logger.debug("\u{1F4E4} Yielding postprocessed event", {
-          eventId: event.id,
-          hasFunctionCalls: !!event.getFunctionCalls()
-        });
         yield event;
       }
     }
-    this.logger.debug("\u2705 One step execution completed", {
-      llmResponseCount
-    });
   }
   async *_preprocessAsync(invocationContext, llmRequest) {
-    this.logger.debug("\u{1F527} Starting preprocessing", {
-      processorCount: this.requestProcessors.length
-    });
     const agent = invocationContext.agent;
     if (!("canonicalTools" in agent) || typeof agent.canonicalTools !== "function") {
-      this.logger.debug("\u2139\uFE0F Agent has no canonical tools");
       return;
     }
-    for (
-      const processor = this.requestProcessors[i];
-      this.logger.debug(`\u{1F504} Running request processor ${i + 1}`, {
-        processorName: _optionalChain([processor, 'access', _185 => _185.constructor, 'optionalAccess', _186 => _186.name]) || "unknown"
-      });
-      let processorEventCount = 0;
+    for (const processor of this.requestProcessors) {
       for await (const event of processor.runAsync(
         invocationContext,
         llmRequest
       )) {
-        processorEventCount++;
-        this.logger.debug(
-          `\u{1F4E4} Request processor ${i + 1} event ${processorEventCount}`,
-          {
-            eventId: event.id
-          }
-        );
         yield event;
       }
-      this.logger.debug(`\u2705 Request processor ${i + 1} completed`, {
-        eventCount: processorEventCount
-      });
     }
     const tools = await agent.canonicalTools(
       new ReadonlyContext(invocationContext)
     );
-
-      toolCount: tools.length
-    });
-    for (let i = 0; i < tools.length; i++) {
-      const tool = tools[i];
-      this.logger.debug(`\u{1F504} Processing tool ${i + 1}`, {
-        toolName: _optionalChain([tool, 'access', _187 => _187.constructor, 'optionalAccess', _188 => _188.name]) || "unknown"
-      });
+    for (const tool of tools) {
       const toolContext = new ToolContext(invocationContext);
       await tool.processLlmRequest(toolContext, llmRequest);
-      this.logger.debug(`\u2705 Tool ${i + 1} processed`);
     }
-
-
-
+    if (tools.length > 0) {
+      const toolsData = tools.map((tool) => ({
+        Name: tool.name,
+        Description: _optionalChain([tool, 'access', _170 => _170.description, 'optionalAccess', _171 => _171.substring, 'call', _172 => _172(0, 50)]) + (_optionalChain([tool, 'access', _173 => _173.description, 'optionalAccess', _174 => _174.length]) > 50 ? "..." : ""),
+        "Long Running": tool.isLongRunning ? "Yes" : "No"
+      }));
+      this.logger.debugArray("\u{1F6E0}\uFE0F Available Tools", toolsData);
+    }
   }
   async *_postprocessAsync(invocationContext, llmRequest, llmResponse, modelResponseEvent) {
-    this.logger.debug("\u{1F504} Starting postprocessing", {
-      hasContent: !!llmResponse.content,
-      hasError: !!llmResponse.errorCode,
-      interrupted: !!llmResponse.interrupted
-    });
-    let processorEventCount = 0;
     for await (const event of this._postprocessRunProcessorsAsync(
       invocationContext,
       llmResponse
     )) {
-      processorEventCount++;
-      this.logger.debug(`\u{1F4E4} Response processor event ${processorEventCount}`, {
-        eventId: event.id
-      });
       yield event;
     }
     if (!llmResponse.content && !llmResponse.errorCode && !llmResponse.interrupted) {
-      this.logger.debug(
-        "\u2139\uFE0F Skipping event creation - no content, error, or interruption"
-      );
       return;
     }
     const finalizedEvent = this._finalizeModelResponseEvent(
@@ -5874,54 +5987,32 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
       llmResponse,
       modelResponseEvent
     );
-    this.logger.debug("\u{1F4DD} Finalized model response event", {
-      eventId: finalizedEvent.id,
-      hasContent: !!finalizedEvent.content,
-      hasFunctionCalls: !!finalizedEvent.getFunctionCalls(),
-      longRunningToolIds: finalizedEvent.longRunningToolIds.entries.length || 0
-    });
     yield finalizedEvent;
     const functionCalls = finalizedEvent.getFunctionCalls();
-    if (functionCalls) {
-
-
-
-
+    if (functionCalls && functionCalls.length > 0) {
+      const functionCallsData = functionCalls.map((fc) => ({
+        Name: fc.name,
+        Arguments: JSON.stringify(fc.args).substring(0, 100) + (JSON.stringify(fc.args).length > 100 ? "..." : ""),
+        ID: fc.id || "auto"
+      }));
+      this.logger.debugArray("\u{1F527} Function Calls", functionCallsData);
       for await (const event of this._postprocessHandleFunctionCallsAsync(
         invocationContext,
         finalizedEvent,
         llmRequest
       )) {
-        functionEventCount++;
-        this.logger.debug(`\u{1F4E4} Function call event ${functionEventCount}`, {
-          eventId: event.id
-        });
         yield event;
       }
-      this.logger.debug("\u2705 Function calls processed", {
-        eventCount: functionEventCount
-      });
     }
-    this.logger.debug("\u2705 Postprocessing completed");
   }
   async *_postprocessLive(invocationContext, llmRequest, llmResponse, modelResponseEvent) {
-    this.logger.debug("\u{1F534} Starting live postprocessing", {
-      hasContent: !!llmResponse.content,
-      turnComplete: !!llmResponse.turnComplete
-    });
     for await (const event of this._postprocessRunProcessorsAsync(
       invocationContext,
       llmResponse
     )) {
-      this.logger.debug("\u{1F4E4} Live response processor event", {
-        eventId: event.id
-      });
       yield event;
     }
     if (!llmResponse.content && !llmResponse.errorCode && !llmResponse.interrupted && !llmResponse.turnComplete) {
-      this.logger.debug(
-        "\u2139\uFE0F Skipping live event - no content or completion signal"
-      );
       return;
     }
     const finalizedEvent = this._finalizeModelResponseEvent(
@@ -5929,165 +6020,83 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
       llmResponse,
       modelResponseEvent
     );
-    this.logger.debug("\u{1F4DD} Finalized live model response event", {
-      eventId: finalizedEvent.id,
-      hasFunctionCalls: !!finalizedEvent.getFunctionCalls()
-    });
     yield finalizedEvent;
     if (finalizedEvent.getFunctionCalls()) {
-      this.logger.debug("\u{1F527} Processing live function calls");
       const functionResponseEvent = await handleFunctionCallsAsync(
         invocationContext,
         finalizedEvent,
         llmRequest.toolsDict || {}
       );
       if (functionResponseEvent) {
-        this.logger.debug("\u{1F4E4} Live function response event", {
-          eventId: functionResponseEvent.id,
-          hasTransfer: !!_optionalChain([functionResponseEvent, 'access', _189 => _189.actions, 'optionalAccess', _190 => _190.transferToAgent])
-        });
         yield functionResponseEvent;
-        const transferToAgent = _optionalChain([functionResponseEvent, 'access',
+        const transferToAgent = _optionalChain([functionResponseEvent, 'access', _175 => _175.actions, 'optionalAccess', _176 => _176.transferToAgent]);
         if (transferToAgent) {
-          this.logger.
-            targetAgent: transferToAgent
-          });
+          this.logger.info(`\u{1F504} Live transfer to agent '${transferToAgent}'`);
           const agentToRun = this._getAgentToRun(
             invocationContext,
             transferToAgent
           );
-
-          for await (const event of _optionalChain([agentToRun, 'access', _193 => _193.runLive, 'optionalCall', _194 => _194(invocationContext)]) || agentToRun.runAsync(invocationContext)) {
-            transferEventCount++;
-            this.logger.debug(`\u{1F4E4} Transfer agent event ${transferEventCount}`, {
-              eventId: event.id
-            });
+          for await (const event of _optionalChain([agentToRun, 'access', _177 => _177.runLive, 'optionalCall', _178 => _178(invocationContext)]) || agentToRun.runAsync(invocationContext)) {
             yield event;
           }
-          this.logger.debug("\u2705 Agent transfer completed", {
-            eventCount: transferEventCount
-          });
         }
       }
     }
-    this.logger.debug("\u2705 Live postprocessing completed");
   }
   async *_postprocessRunProcessorsAsync(invocationContext, llmResponse) {
-
-      processorCount: this.responseProcessors.length
-    });
-    for (let i = 0; i < this.responseProcessors.length; i++) {
-      const processor = this.responseProcessors[i];
-      this.logger.debug(`\u{1F504} Running response processor ${i + 1}`, {
-        processorName: _optionalChain([processor, 'access', _195 => _195.constructor, 'optionalAccess', _196 => _196.name]) || "unknown"
-      });
-      let processorEventCount = 0;
+    for (const processor of this.responseProcessors) {
      for await (const event of processor.runAsync(
        invocationContext,
        llmResponse
      )) {
-        processorEventCount++;
-        this.logger.debug(
-          `\u{1F4E4} Response processor ${i + 1} event ${processorEventCount}`,
-          {
-            eventId: event.id
-          }
-        );
        yield event;
      }
-      this.logger;
-      this.logger.debug(`\u2705 Response processor ${i + 1} completed`, {
-        eventCount: processorEventCount
-      });
    }
-    this.logger.debug("\u2705 All response processors completed");
  }
  async *_postprocessHandleFunctionCallsAsync(invocationContext, functionCallEvent, llmRequest) {
-    this.logger.debug("\u{1F527} Handling function calls", {
-      eventId: functionCallEvent.id,
-      toolsDictSize: Object.keys(llmRequest.toolsDict || {}).length
-    });
    const functionResponseEvent = await handleFunctionCallsAsync(
      invocationContext,
      functionCallEvent,
      llmRequest.toolsDict || {}
    );
    if (functionResponseEvent) {
-      this.logger.debug("\u{1F4CB} Function calls executed", {
-        responseEventId: functionResponseEvent.id,
-        hasActions: !!functionResponseEvent.actions
-      });
      const authEvent = generateAuthEvent(
        invocationContext,
        functionResponseEvent
      );
      if (authEvent) {
-        this.logger.debug("\u{1F510} Generated auth event", {
-          authEventId: authEvent.id
-        });
        yield authEvent;
      }
      yield functionResponseEvent;
-      const transferToAgent = _optionalChain([functionResponseEvent, 'access',
+      const transferToAgent = _optionalChain([functionResponseEvent, 'access', _179 => _179.actions, 'optionalAccess', _180 => _180.transferToAgent]);
      if (transferToAgent) {
-        this.logger.
-          targetAgent: transferToAgent
-        });
+        this.logger.info(`\u{1F504} Transferring to agent '${transferToAgent}'`);
        const agentToRun = this._getAgentToRun(
          invocationContext,
          transferToAgent
        );
-        let transferEventCount = 0;
        for await (const event of agentToRun.runAsync(invocationContext)) {
-          transferEventCount++;
-          this.logger.debug(`\u{1F4E4} Transfer agent event ${transferEventCount}`, {
-            eventId: event.id
-          });
          yield event;
        }
-        this.logger.debug("\u2705 Agent transfer completed", {
-          eventCount: transferEventCount
-        });
      }
-    } else {
-      this.logger.debug("\u2139\uFE0F No function response event generated");
    }
  }
  _getAgentToRun(invocationContext, agentName) {
-    this.logger.debug("\u{1F50D} Finding agent to run", {
-      targetAgent: agentName,
-      currentAgent: invocationContext.agent.name
-    });
    const rootAgent = invocationContext.agent.rootAgent;
    const agentToRun = rootAgent.findAgent(agentName);
    if (!agentToRun) {
-      this.logger.error(
-        targetAgent: agentName,
-        rootAgent: rootAgent.name
-      });
+      this.logger.error(`Agent '${agentName}' not found in the agent tree.`);
      throw new Error(`Agent ${agentName} not found in the agent tree.`);
    }
-    this.logger.debug("\u2705 Agent found", {
-      targetAgent: agentName,
-      agentType: agentToRun.constructor.name
-    });
    return agentToRun;
  }
  async *_callLlmAsync(invocationContext, llmRequest, modelResponseEvent) {
-    this.logger.debug("\u{1F916} Starting LLM call", {
-      model: llmRequest.model || "default",
-      eventId: modelResponseEvent.id
-    });
-    this.logger.debug("\u{1F504} Processing before model callbacks");
    const beforeModelCallbackContent = await this._handleBeforeModelCallback(
      invocationContext,
      llmRequest,
      modelResponseEvent
    );
    if (beforeModelCallbackContent) {
-      this.logger.debug("\u{1F4CB} Before model callback returned content", {
-        hasContent: !!beforeModelCallbackContent.content
-      });
      yield beforeModelCallbackContent;
      return;
    }
@@ -6095,27 +6104,38 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
    llmRequest.config.labels = llmRequest.config.labels || {};
    if (!(_ADK_AGENT_NAME_LABEL_KEY in llmRequest.config.labels)) {
      llmRequest.config.labels[_ADK_AGENT_NAME_LABEL_KEY] = invocationContext.agent.name;
-      this.logger.debug("\u{1F3F7}\uFE0F Added agent name label", {
-        agentName: invocationContext.agent.name
-      });
    }
    const llm = this.__getLlm(invocationContext);
-    this.logger.debug("\u{1F527} Retrieved LLM instance", {
-      llmModel: llm.model,
-      llmType: llm.constructor.name
-    });
    const runConfig = invocationContext.runConfig;
    if (runConfig.supportCfc) {
      this.logger.warn(
-        "
+        "CFC (supportCfc) not fully implemented, using standard flow."
      );
    }
    invocationContext.incrementLlmCallCount();
-    this.logger.debug("\u{1F4C8} Incremented LLM call count");
    const isStreaming = invocationContext.runConfig.streamingMode === "sse" /* SSE */;
-
-
-
+    const tools = _optionalChain([llmRequest, 'access', _181 => _181.config, 'optionalAccess', _182 => _182.tools]) || [];
+    const toolNames = tools.map((tool) => {
+      if (tool.functionDeclarations && Array.isArray(tool.functionDeclarations)) {
+        return tool.functionDeclarations.map((fn) => fn.name).join(", ");
+      }
+      if (tool.name) return tool.name;
+      if (_optionalChain([tool, 'access', _183 => _183.function, 'optionalAccess', _184 => _184.name])) return tool.function.name;
+      if (_optionalChain([tool, 'access', _185 => _185.function, 'optionalAccess', _186 => _186.function, 'optionalAccess', _187 => _187.name])) return tool.function.function.name;
+      return "unknown";
+    }).join(", ");
+    const systemInstruction = llmRequest.getSystemInstructionText() || "";
+    const truncatedSystemInstruction = systemInstruction.length > 100 ? `${systemInstruction.substring(0, 100)}...` : systemInstruction;
+    const contentPreview = _optionalChain([llmRequest, 'access', _188 => _188.contents, 'optionalAccess', _189 => _189.length]) > 0 ? this._formatContentPreview(llmRequest.contents[0]) : "none";
+    this.logger.debugStructured("\u{1F4E4} LLM Request", {
+      Model: llm.model,
+      Agent: invocationContext.agent.name,
+      "Content Items": _optionalChain([llmRequest, 'access', _190 => _190.contents, 'optionalAccess', _191 => _191.length]) || 0,
+      "Content Preview": contentPreview,
+      "System Instruction": truncatedSystemInstruction || "none",
+      "Available Tools": toolNames || "none",
+      "Tool Count": _optionalChain([llmRequest, 'access', _192 => _192.config, 'optionalAccess', _193 => _193.tools, 'optionalAccess', _194 => _194.length]) || 0,
+      Streaming: isStreaming ? "Yes" : "No"
    });
    let responseCount = 0;
    for await (const llmResponse of llm.generateContentAsync(
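The replacement block above condenses the verbose per-request debug logging into a single logger.debugStructured call (with debugArray used elsewhere for tabular lists), taking a title plus a flat key/value record. A sketch of the assumed call shape with placeholder values; the Logger methods' exact signatures are inferred from these call sites only:

  // Field names mirror the diff; the values here are placeholders.
  this.logger.debugStructured("\u{1F4E4} LLM Request", {
    Model: "example-model",
    Agent: "example_agent",
    "Tool Count": 2,
    Streaming: "No",
  });
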
@@ -6123,59 +6143,46 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
6123
6143
|
isStreaming
|
|
6124
6144
|
)) {
|
|
6125
6145
|
responseCount++;
|
|
6126
|
-
this.logger.debug(`\u{1F4E5} Received LLM response ${responseCount}`, {
|
|
6127
|
-
hasContent: !!llmResponse.content,
|
|
6128
|
-
hasError: !!llmResponse.errorCode,
|
|
6129
|
-
interrupted: !!llmResponse.interrupted,
|
|
6130
|
-
partial: !!llmResponse.partial,
|
|
6131
|
-
finishReason: llmResponse.finishReason,
|
|
6132
|
-
usage: llmResponse.usageMetadata ? {
|
|
6133
|
-
promptTokens: llmResponse.usageMetadata.promptTokenCount,
|
|
6134
|
-
completionTokens: llmResponse.usageMetadata.candidatesTokenCount,
|
|
6135
|
-
totalTokens: llmResponse.usageMetadata.totalTokenCount
|
|
6136
|
-
} : null
|
|
6137
|
-
});
|
|
6138
6146
|
traceLlmCall(
|
|
6139
6147
|
invocationContext,
|
|
6140
6148
|
modelResponseEvent.id,
|
|
6141
6149
|
llmRequest,
|
|
6142
6150
|
llmResponse
|
|
6143
6151
|
);
|
|
6144
|
-
|
|
6152
|
+
const tokenCount = _optionalChain([llmResponse, 'access', _195 => _195.usageMetadata, 'optionalAccess', _196 => _196.totalTokenCount]) || "unknown";
|
|
6153
|
+
const functionCallCount = _optionalChain([llmResponse, 'access', _197 => _197.content, 'optionalAccess', _198 => _198.parts, 'optionalAccess', _199 => _199.filter, 'call', _200 => _200((part) => part.functionCall), 'access', _201 => _201.length]) || 0;
|
|
6154
|
+
const responsePreview = this._formatResponsePreview(llmResponse);
|
|
6155
|
+
this.logger.debugStructured("\u{1F4E5} LLM Response", {
|
|
6156
|
+
Model: llm.model,
|
|
6157
|
+
"Token Count": tokenCount,
|
|
6158
|
+
"Function Calls": functionCallCount,
|
|
6159
|
+
"Response Preview": responsePreview,
|
|
6160
|
+
"Finish Reason": llmResponse.finishReason || "unknown",
|
|
6161
|
+
"Response #": responseCount,
|
|
6162
|
+
Partial: llmResponse.partial ? "Yes" : "No",
|
|
6163
|
+
Error: llmResponse.errorCode || "none"
|
|
6164
|
+
});
|
|
6145
6165
|
const alteredLlmResponse = await this._handleAfterModelCallback(
|
|
6146
6166
|
invocationContext,
|
|
6147
6167
|
llmResponse,
|
|
6148
6168
|
modelResponseEvent
|
|
6149
6169
|
);
|
|
6150
|
-
if (alteredLlmResponse) {
|
|
6151
|
-
this.logger.debug("\u{1F4CB} After model callback altered response");
|
|
6152
|
-
}
|
|
6153
6170
|
yield alteredLlmResponse || llmResponse;
|
|
6154
6171
|
}
|
|
6155
|
-
this.logger.debug("\u2705 LLM call completed", {
|
|
6156
|
-
totalResponses: responseCount
|
|
6157
|
-
});
|
|
6158
6172
|
}
|
|
6159
6173
|
async _handleBeforeModelCallback(invocationContext, llmRequest, modelResponseEvent) {
|
|
6160
6174
|
const agent = invocationContext.agent;
|
|
6161
6175
|
if (!("canonicalBeforeModelCallbacks" in agent)) {
|
|
6162
|
-
this.logger.debug("\u2139\uFE0F Agent has no before model callbacks");
|
|
6163
6176
|
return;
|
|
6164
6177
|
}
|
|
6165
6178
|
const beforeCallbacks = agent.canonicalBeforeModelCallbacks;
|
|
6166
6179
|
if (!beforeCallbacks) {
|
|
6167
|
-
this.logger.debug("\u2139\uFE0F Before model callbacks is null/undefined");
|
|
6168
6180
|
return;
|
|
6169
6181
|
}
|
|
6170
|
-
this.logger.debug("\u{1F504} Processing before model callbacks", {
|
|
6171
|
-
callbackCount: beforeCallbacks.length
|
|
6172
|
-
});
|
|
6173
6182
|
const callbackContext = new CallbackContext(invocationContext, {
|
|
6174
6183
|
eventActions: modelResponseEvent.actions
|
|
6175
6184
|
});
|
|
6176
|
-
for (
|
|
6177
|
-
const callback = beforeCallbacks[i];
|
|
6178
|
-
this.logger.debug(`\u{1F504} Running before model callback ${i + 1}`);
|
|
6185
|
+
for (const callback of beforeCallbacks) {
|
|
6179
6186
|
let beforeModelCallbackContent = callback({
|
|
6180
6187
|
callbackContext,
|
|
6181
6188
|
llmRequest
|
|
@@ -6184,35 +6191,23 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
6184
6191
|
beforeModelCallbackContent = await beforeModelCallbackContent;
|
|
6185
6192
|
}
|
|
6186
6193
|
if (beforeModelCallbackContent) {
|
|
6187
|
-
this.logger.debug(`\u2705 Before model callback ${i + 1} returned content`);
|
|
6188
6194
|
return beforeModelCallbackContent;
|
|
6189
6195
|
}
|
|
6190
|
-
this.logger.debug(
|
|
6191
|
-
`\u2705 Before model callback ${i + 1} completed (no content)`
|
|
6192
|
-
);
|
|
6193
6196
|
}
|
|
6194
|
-
this.logger.debug("\u2705 All before model callbacks completed");
|
|
6195
6197
|
}
|
|
6196
6198
|
async _handleAfterModelCallback(invocationContext, llmResponse, modelResponseEvent) {
|
|
6197
6199
|
const agent = invocationContext.agent;
|
|
6198
6200
|
if (!("canonicalAfterModelCallbacks" in agent)) {
|
|
6199
|
-
this.logger.debug("\u2139\uFE0F Agent has no after model callbacks");
|
|
6200
6201
|
return;
|
|
6201
6202
|
}
|
|
6202
6203
|
const afterCallbacks = agent.canonicalAfterModelCallbacks;
|
|
6203
6204
|
if (!afterCallbacks) {
|
|
6204
|
-
this.logger.debug("\u2139\uFE0F After model callbacks is null/undefined");
|
|
6205
6205
|
return;
|
|
6206
6206
|
}
|
|
6207
|
-
this.logger.debug("\u{1F504} Processing after model callbacks", {
|
|
6208
|
-
callbackCount: afterCallbacks.length
|
|
6209
|
-
});
|
|
6210
6207
|
const callbackContext = new CallbackContext(invocationContext, {
|
|
6211
6208
|
eventActions: modelResponseEvent.actions
|
|
6212
6209
|
});
|
|
6213
|
-
for (
|
|
6214
|
-
const callback = afterCallbacks[i];
|
|
6215
|
-
this.logger.debug(`\u{1F504} Running after model callback ${i + 1}`);
|
|
6210
|
+
for (const callback of afterCallbacks) {
|
|
6216
6211
|
let afterModelCallbackContent = callback({
|
|
6217
6212
|
callbackContext,
|
|
6218
6213
|
llmResponse
|
|
@@ -6221,21 +6216,11 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
6221
6216
|
afterModelCallbackContent = await afterModelCallbackContent;
|
|
6222
6217
|
}
|
|
6223
6218
|
if (afterModelCallbackContent) {
|
|
6224
|
-
this.logger.debug(`\u2705 After model callback ${i + 1} returned content`);
|
|
6225
6219
|
return afterModelCallbackContent;
|
|
6226
6220
|
}
|
|
6227
|
-
this.logger.debug(
|
|
6228
|
-
`\u2705 After model callback ${i + 1} completed (no content)`
|
|
6229
|
-
);
|
|
6230
6221
|
}
|
|
6231
|
-
this.logger.debug("\u2705 All after model callbacks completed");
|
|
6232
6222
|
}
|
|
6233
6223
|
_finalizeModelResponseEvent(llmRequest, llmResponse, modelResponseEvent) {
|
|
6234
|
-
this.logger.debug("\u{1F4DD} Finalizing model response event", {
|
|
6235
|
-
requestModel: llmRequest.model,
|
|
6236
|
-
responseHasContent: !!llmResponse.content,
|
|
6237
|
-
eventId: modelResponseEvent.id
|
|
6238
|
-
});
|
|
6239
6224
|
const eventData = { ...modelResponseEvent };
|
|
6240
6225
|
const responseData = { ...llmResponse };
|
|
6241
6226
|
Object.keys(responseData).forEach((key) => {
|
|
@@ -6247,38 +6232,48 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
|
|
|
6247
6232
|
if (event.content) {
|
|
6248
6233
|
const functionCalls = event.getFunctionCalls();
|
|
6249
6234
|
if (functionCalls) {
|
|
6250
|
-
this.logger.debug("\u{1F527} Processing function calls in event", {
|
|
6251
|
-
functionCallCount: functionCalls.length
|
|
6252
|
-
});
|
|
6253
6235
|
populateClientFunctionCallId(event);
|
|
6254
6236
|
event.longRunningToolIds = getLongRunningFunctionCalls(
|
|
6255
6237
|
functionCalls,
|
|
6256
6238
|
llmRequest.toolsDict || {}
|
|
6257
6239
|
);
|
|
6258
|
-
this.logger.debug("\u2705 Function calls processed", {
|
|
6259
|
-
longRunningToolCount: event.longRunningToolIds.entries.length || 0
|
|
6260
|
-
});
|
|
6261
6240
|
}
|
|
6262
6241
|
}
|
|
6263
|
-
this.logger.debug("\u2705 Model response event finalized", {
|
|
6264
|
-
finalEventId: event.id,
|
|
6265
|
-
hasContent: !!event.content,
|
|
6266
|
-
hasFunctionCalls: !!event.getFunctionCalls()
|
|
6267
|
-
});
|
|
6268
6242
|
return event;
|
|
6269
6243
|
}
|
|
6244 +   /**
6245 +    * Logs data in a visually appealing format that works well in any terminal size.
6246 +    * Uses vertical layout for better readability and respects debug settings.
6247 +    */
6248 +   _formatContentPreview(content) {
6249 +     if (!content) return "none";
6250 +     if (content.parts && Array.isArray(content.parts)) {
6251 +       const textParts = content.parts.filter((part) => part.text).map((part) => part.text).join(" ");
6252 +       return textParts.length > 80 ? `${textParts.substring(0, 80)}...` : textParts || "no text content";
6253 +     }
6254 +     if (typeof content === "string") {
6255 +       return content.length > 80 ? `${content.substring(0, 80)}...` : content;
6256 +     }
6257 +     const stringified = JSON.stringify(content);
6258 +     return stringified.length > 80 ? `${stringified.substring(0, 80)}...` : stringified;
6259 +   }
6260 +   /**
6261 +    * Formats response content preview for debug logging
6262 +    */
6263 +   _formatResponsePreview(llmResponse) {
6264 +     if (!llmResponse.content) return "none";
6265 +     if (llmResponse.content.parts && Array.isArray(llmResponse.content.parts)) {
6266 +       const textParts = llmResponse.content.parts.filter((part) => part.text).map((part) => part.text).join(" ");
6267 +       return textParts.length > 80 ? `${textParts.substring(0, 80)}...` : textParts || "no text content";
6268 +     }
6269 +     const stringified = JSON.stringify(llmResponse.content);
6270 +     return stringified.length > 80 ? `${stringified.substring(0, 80)}...` : stringified;
6271 +   }
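A quick illustration of the 80-character preview rule implemented by the two helpers above (the inputs here are hypothetical):

// Sketch only: mirrors _formatContentPreview / _formatResponsePreview above.
const longText = "x".repeat(200);
// _formatContentPreview(longText)      -> first 80 characters + "..."
// _formatContentPreview(undefined)     -> "none"
// _formatContentPreview({ parts: [] }) -> "no text content"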
|
|
6270
6272
|
__getLlm(invocationContext) {
|
|
6271
6273
|
const llm = invocationContext.agent.canonicalModel;
|
|
6272
|
-
this.logger.debug("\u{1F527} Retrieved canonical model", {
|
|
6273
|
-
model: _optionalChain([llm, 'optionalAccess', _199 => _199.model]) || "unknown",
|
|
6274
|
-
llmType: _optionalChain([llm, 'optionalAccess', _200 => _200.constructor, 'optionalAccess', _201 => _201.name]) || "unknown"
|
|
6275
|
-
});
|
|
6276
6274
|
return llm;
|
|
6277
6275
|
}
|
|
6278
|
-
},
|
|
6279
|
-
|
|
6280
|
-
// src/flows/llm-flows/single-flow.ts
|
|
6281
|
-
init_logger();
|
|
6276
|
+
}, _class22);
|
|
6282
6277
|
|
|
6283
6278
|
// src/flows/llm-flows/base-llm-processor.ts
|
|
6284
6279
|
var BaseLlmRequestProcessor = class {
|
|
@@ -6286,52 +6281,6 @@ var BaseLlmRequestProcessor = class {
|
|
|
6286
6281
|
var BaseLlmResponseProcessor = class {
|
|
6287
6282
|
};
|
|
6288
6283
|
|
|
6289
|
-
// src/flows/llm-flows/basic.ts
|
|
6290
|
-
var BasicLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
6291
|
-
async *runAsync(invocationContext, llmRequest) {
|
|
6292
|
-
const agent = invocationContext.agent;
|
|
6293
|
-
if (!this.isLlmAgent(agent)) {
|
|
6294
|
-
return;
|
|
6295
|
-
}
|
|
6296
|
-
llmRequest.model = typeof agent.canonicalModel === "string" ? agent.canonicalModel : agent.canonicalModel.model;
|
|
6297
|
-
if (agent.generateContentConfig) {
|
|
6298
|
-
llmRequest.config = JSON.parse(
|
|
6299
|
-
JSON.stringify(agent.generateContentConfig)
|
|
6300
|
-
);
|
|
6301
|
-
} else {
|
|
6302
|
-
llmRequest.config = {};
|
|
6303
|
-
}
|
|
6304
|
-
if (agent.outputSchema) {
|
|
6305
|
-
llmRequest.setOutputSchema(agent.outputSchema);
|
|
6306
|
-
}
|
|
6307
|
-
const runConfig = invocationContext.runConfig;
|
|
6308
|
-
if (!llmRequest.liveConnectConfig) {
|
|
6309
|
-
llmRequest.liveConnectConfig = {};
|
|
6310
|
-
}
|
|
6311
|
-
if (runConfig.responseModalities) {
|
|
6312
|
-
llmRequest.liveConnectConfig.responseModalities = runConfig.responseModalities;
|
|
6313
|
-
}
|
|
6314
|
-
llmRequest.liveConnectConfig.speechConfig = runConfig.speechConfig;
|
|
6315
|
-
llmRequest.liveConnectConfig.outputAudioTranscription = runConfig.outputAudioTranscription;
|
|
6316
|
-
llmRequest.liveConnectConfig.inputAudioTranscription = runConfig.inputAudioTranscription;
|
|
6317
|
-
llmRequest.liveConnectConfig.realtimeInputConfig = runConfig.realtimeInputConfig;
|
|
6318
|
-
llmRequest.liveConnectConfig.enableAffectiveDialog = runConfig.enableAffectiveDialog;
|
|
6319
|
-
llmRequest.liveConnectConfig.proactivity = runConfig.proactivity;
|
|
6320
|
-
const tools = await agent.canonicalTools();
|
|
6321
|
-
llmRequest.appendTools(tools);
|
|
6322
|
-
for await (const _ of []) {
|
|
6323
|
-
yield _;
|
|
6324
|
-
}
|
|
6325
|
-
}
|
|
6326
|
-
/**
|
|
6327
|
-
* Type guard to check if agent is an LlmAgent
|
|
6328
|
-
*/
|
|
6329
|
-
isLlmAgent(agent) {
|
|
6330
|
-
return agent && typeof agent === "object" && "canonicalModel" in agent;
|
|
6331
|
-
}
|
|
6332
|
-
};
|
|
6333
|
-
var requestProcessor = new BasicLlmRequestProcessor();
|
|
6334
|
-
|
|
6335
6284
|
// src/auth/auth-tool.ts
|
|
6336
6285
|
var EnhancedAuthConfig = class {
|
|
6337
6286
|
/**
|
|
@@ -6539,152 +6488,738 @@ var AuthLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
|
6539
6488
|
}
|
|
6540
6489
|
}
|
|
6541
6490
|
};
|
|
6542
|
-
var
|
|
6491
|
+
var requestProcessor = new AuthLlmRequestProcessor();
|
|
6543
6492
|
|
|
6544
|
-
// src/flows/llm-flows/
|
|
6545
|
-
var
|
|
6493
|
+
// src/flows/llm-flows/basic.ts
|
|
6494
|
+
var BasicLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
6546
6495
|
async *runAsync(invocationContext, llmRequest) {
|
|
6547
6496
|
const agent = invocationContext.agent;
|
|
6548
|
-
|
|
6549
|
-
|
|
6550
|
-
];
|
|
6551
|
-
if (agent.description) {
|
|
6552
|
-
instructions.push(` The description about you is "${agent.description}"`);
|
|
6497
|
+
if (!this.isLlmAgent(agent)) {
|
|
6498
|
+
return;
|
|
6553
6499
|
}
|
|
6554
|
-
llmRequest.
|
|
6500
|
+
llmRequest.model = typeof agent.canonicalModel === "string" ? agent.canonicalModel : agent.canonicalModel.model;
|
|
6501
|
+
if (agent.generateContentConfig) {
|
|
6502
|
+
llmRequest.config = JSON.parse(
|
|
6503
|
+
JSON.stringify(agent.generateContentConfig)
|
|
6504
|
+
);
|
|
6505
|
+
} else {
|
|
6506
|
+
llmRequest.config = {};
|
|
6507
|
+
}
|
|
6508
|
+
if (agent.outputSchema) {
|
|
6509
|
+
llmRequest.setOutputSchema(agent.outputSchema);
|
|
6510
|
+
}
|
|
6511
|
+
const runConfig = invocationContext.runConfig;
|
|
6512
|
+
if (!llmRequest.liveConnectConfig) {
|
|
6513
|
+
llmRequest.liveConnectConfig = {};
|
|
6514
|
+
}
|
|
6515
|
+
if (runConfig.responseModalities) {
|
|
6516
|
+
llmRequest.liveConnectConfig.responseModalities = runConfig.responseModalities;
|
|
6517
|
+
}
|
|
6518
|
+
llmRequest.liveConnectConfig.speechConfig = runConfig.speechConfig;
|
|
6519
|
+
llmRequest.liveConnectConfig.outputAudioTranscription = runConfig.outputAudioTranscription;
|
|
6520
|
+
llmRequest.liveConnectConfig.inputAudioTranscription = runConfig.inputAudioTranscription;
|
|
6521
|
+
llmRequest.liveConnectConfig.realtimeInputConfig = runConfig.realtimeInputConfig;
|
|
6522
|
+
llmRequest.liveConnectConfig.enableAffectiveDialog = runConfig.enableAffectiveDialog;
|
|
6523
|
+
llmRequest.liveConnectConfig.proactivity = runConfig.proactivity;
|
|
6524
|
+
const tools = await agent.canonicalTools();
|
|
6525
|
+
llmRequest.appendTools(tools);
|
|
6555
6526
|
for await (const _ of []) {
|
|
6556
6527
|
yield _;
|
|
6557
6528
|
}
|
|
6558
6529
|
}
|
|
6530
|
+
/**
|
|
6531
|
+
* Type guard to check if agent is an LlmAgent
|
|
6532
|
+
*/
|
|
6533
|
+
isLlmAgent(agent) {
|
|
6534
|
+
return agent && typeof agent === "object" && "canonicalModel" in agent;
|
|
6535
|
+
}
|
|
6536
|
+
};
|
|
6537
|
+
var requestProcessor2 = new BasicLlmRequestProcessor();
|
|
6538
|
+
|
|
6539
|
+
// src/code-executors/base-code-executor.ts
|
|
6540
|
+
var BaseCodeExecutor = class {
|
|
6541
|
+
|
|
6542
|
+
constructor(config = {}) {
|
|
6543
|
+
this.config = {
|
|
6544
|
+
optimizeDataFile: _nullishCoalesce(config.optimizeDataFile, () => ( false)),
|
|
6545
|
+
stateful: _nullishCoalesce(config.stateful, () => ( false)),
|
|
6546
|
+
errorRetryAttempts: _nullishCoalesce(config.errorRetryAttempts, () => ( 2)),
|
|
6547
|
+
codeBlockDelimiters: _nullishCoalesce(config.codeBlockDelimiters, () => ( [
|
|
6548
|
+
["`tool_code\n", "\n`"],
|
|
6549
|
+
["`python\n", "\n`"]
|
|
6550
|
+
])),
|
|
6551
|
+
executionResultDelimiters: _nullishCoalesce(config.executionResultDelimiters, () => ( [
|
|
6552
|
+
"`tool_output\n",
|
|
6553
|
+
"\n`"
|
|
6554
|
+
]))
|
|
6555
|
+
};
|
|
6556
|
+
}
|
|
6557
|
+
// Getters for configuration
|
|
6558
|
+
get optimizeDataFile() {
|
|
6559
|
+
return this.config.optimizeDataFile;
|
|
6560
|
+
}
|
|
6561
|
+
get stateful() {
|
|
6562
|
+
return this.config.stateful;
|
|
6563
|
+
}
|
|
6564
|
+
get errorRetryAttempts() {
|
|
6565
|
+
return this.config.errorRetryAttempts;
|
|
6566
|
+
}
|
|
6567
|
+
get codeBlockDelimiters() {
|
|
6568
|
+
return this.config.codeBlockDelimiters;
|
|
6569
|
+
}
|
|
6570
|
+
get executionResultDelimiters() {
|
|
6571
|
+
return this.config.executionResultDelimiters;
|
|
6572
|
+
}
|
|
6559
6573
|
};
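The constructor above gives every executor the same configuration surface (optimizeDataFile, stateful, errorRetryAttempts, code block and result delimiters), so a concrete executor only has to supply executeCode. A minimal sketch, assuming BaseCodeExecutor is exported from @iqai/adk and that the result fields match the ones consumed later in this diff (stdout, stderr, outputFiles):

import { BaseCodeExecutor } from "@iqai/adk"; // assumption: exported by the package

// Hypothetical executor that "runs" code by echoing it back on stdout.
class EchoCodeExecutor extends BaseCodeExecutor {
  async executeCode(_invocationContext: unknown, input: { code: string }) {
    return { stdout: `ran:\n${input.code}`, stderr: "", outputFiles: [] };
  }
}

const executor = new EchoCodeExecutor({ errorRetryAttempts: 1 });
console.log(executor.errorRetryAttempts); // 1 (defaults to 2)
console.log(executor.stateful);           // false unless configured otherwise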
|
|
6560
|
-
var requestProcessor3 = new IdentityLlmRequestProcessor();
|
|
6561
6574
|
|
|
6562
|
-
// src/
|
|
6563
|
-
|
|
6564
|
-
|
|
6565
|
-
|
|
6566
|
-
const result = [];
|
|
6567
|
-
let lastEnd = 0;
|
|
6568
|
-
const matches = Array.from(string.matchAll(pattern));
|
|
6569
|
-
for (const match of matches) {
|
|
6570
|
-
result.push(string.slice(lastEnd, match.index));
|
|
6571
|
-
const replacement = await replaceAsyncFn(match);
|
|
6572
|
-
result.push(replacement);
|
|
6573
|
-
lastEnd = (match.index || 0) + match[0].length;
|
|
6574
|
-
}
|
|
6575
|
-
result.push(string.slice(lastEnd));
|
|
6576
|
-
return result.join("");
|
|
6575
|
+
// src/code-executors/built-in-code-executor.ts
|
|
6576
|
+
var BuiltInCodeExecutor = class extends BaseCodeExecutor {
|
|
6577
|
+
constructor(config = {}) {
|
|
6578
|
+
super(config);
|
|
6577
6579
|
}
|
|
6578
|
-
async
|
|
6579
|
-
|
|
6580
|
-
|
|
6581
|
-
|
|
6582
|
-
|
|
6583
|
-
|
|
6580
|
+
async executeCode(invocationContext, codeExecutionInput) {
|
|
6581
|
+
throw new Error(
|
|
6582
|
+
"BuiltInCodeExecutor.executeCode should not be called directly"
|
|
6583
|
+
);
|
|
6584
|
+
}
|
|
6585
|
+
/**
|
|
6586
|
+
* Pre-process the LLM request for Gemini 2.0+ models to use the code execution tool
|
|
6587
|
+
*/
|
|
6588
|
+
processLlmRequest(llmRequest) {
|
|
6589
|
+
if (!_optionalChain([llmRequest, 'access', _207 => _207.model, 'optionalAccess', _208 => _208.startsWith, 'call', _209 => _209("gemini-2")])) {
|
|
6590
|
+
throw new Error(
|
|
6591
|
+
`Gemini code execution tool is not supported for model ${llmRequest.model}`
|
|
6592
|
+
);
|
|
6584
6593
|
}
|
|
6585
|
-
if (
|
|
6586
|
-
|
|
6587
|
-
if (!invocationContext.artifactService) {
|
|
6588
|
-
throw new Error("Artifact service is not initialized.");
|
|
6589
|
-
}
|
|
6590
|
-
try {
|
|
6591
|
-
const artifact = await invocationContext.artifactService.loadArtifact({
|
|
6592
|
-
appName: invocationContext.session.appName,
|
|
6593
|
-
userId: invocationContext.session.userId,
|
|
6594
|
-
sessionId: invocationContext.session.id,
|
|
6595
|
-
filename: varName
|
|
6596
|
-
});
|
|
6597
|
-
if (!artifact) {
|
|
6598
|
-
throw new Error(`Artifact ${varName} not found.`);
|
|
6599
|
-
}
|
|
6600
|
-
return String(artifact);
|
|
6601
|
-
} catch (error) {
|
|
6602
|
-
if (optional) {
|
|
6603
|
-
return "";
|
|
6604
|
-
}
|
|
6605
|
-
throw error;
|
|
6606
|
-
}
|
|
6607
|
-
} else {
|
|
6608
|
-
if (!isValidStateName(varName)) {
|
|
6609
|
-
return match[0];
|
|
6610
|
-
}
|
|
6611
|
-
const sessionState = invocationContext.session.state;
|
|
6612
|
-
if (varName in sessionState) {
|
|
6613
|
-
return String(sessionState[varName]);
|
|
6614
|
-
}
|
|
6615
|
-
if (optional) {
|
|
6616
|
-
return "";
|
|
6617
|
-
}
|
|
6618
|
-
throw new Error(`Context variable not found: \`${varName}\`.`);
|
|
6594
|
+
if (!llmRequest.config) {
|
|
6595
|
+
llmRequest.config = {};
|
|
6619
6596
|
}
|
|
6597
|
+
if (!llmRequest.config.tools) {
|
|
6598
|
+
llmRequest.config.tools = [];
|
|
6599
|
+
}
|
|
6600
|
+
const codeExecutionTool = {
|
|
6601
|
+
codeExecution: {}
|
|
6602
|
+
};
|
|
6603
|
+
llmRequest.config.tools.push(codeExecutionTool);
|
|
6620
6604
|
}
|
|
6621
|
-
|
|
6622
|
-
|
|
6623
|
-
|
|
6624
|
-
|
|
6625
|
-
|
|
6626
|
-
|
|
6605
|
+
};
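processLlmRequest above only does two things: reject models that do not start with "gemini-2" and append a code-execution tool to the request config. A small sketch of that effect (the request object here is hypothetical):

// Hypothetical LLM request; only `model` and `config.tools` matter here.
const llmRequest: { model: string; config?: { tools?: object[] } } = {
  model: "gemini-2.0-flash",
};

new BuiltInCodeExecutor().processLlmRequest(llmRequest as any);
// llmRequest.config is now { tools: [{ codeExecution: {} }] }.
// A model that does not start with "gemini-2" would throw:
// "Gemini code execution tool is not supported for model ..."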
|
|
6606
|
+
|
|
6607
|
+
// src/code-executors/code-execution-utils.ts
|
|
6608
|
+
|
|
6609
|
+
var CodeExecutionUtils = class _CodeExecutionUtils {
|
|
6610
|
+
/**
|
|
6611
|
+
* Gets the file content as a base64-encoded string
|
|
6612
|
+
*/
|
|
6613
|
+
static getEncodedFileContent(data) {
|
|
6614
|
+
let decodedData;
|
|
6615
|
+
if (data instanceof ArrayBuffer) {
|
|
6616
|
+
decodedData = new TextDecoder().decode(data);
|
|
6617
|
+
}
|
|
6618
|
+
if (_CodeExecutionUtils.isBase64Encoded(decodedData)) {
|
|
6619
|
+
return decodedData;
|
|
6620
|
+
}
|
|
6621
|
+
return btoa(decodedData);
|
|
6627
6622
|
}
|
|
6628
|
-
|
|
6629
|
-
|
|
6630
|
-
|
|
6631
|
-
|
|
6632
|
-
return
|
|
6623
|
+
static isBase64Encoded(str) {
|
|
6624
|
+
try {
|
|
6625
|
+
return btoa(atob(str)) === str;
|
|
6626
|
+
} catch (e3) {
|
|
6627
|
+
return false;
|
|
6633
6628
|
}
|
|
6634
6629
|
}
|
|
6635
|
-
|
|
6636
|
-
|
|
6637
|
-
|
|
6638
|
-
|
|
6639
|
-
|
|
6640
|
-
|
|
6630
|
+
/**
|
|
6631
|
+
* Extracts the first code block from the content and truncates everything after it
|
|
6632
|
+
*/
|
|
6633
|
+
static extractCodeAndTruncateContent(content, codeBlockDelimiters) {
|
|
6634
|
+
if (!_optionalChain([content, 'optionalAccess', _210 => _210.parts, 'optionalAccess', _211 => _211.length])) {
|
|
6635
|
+
return null;
|
|
6636
|
+
}
|
|
6637
|
+
for (let idx = 0; idx < content.parts.length; idx++) {
|
|
6638
|
+
const part = content.parts[idx];
|
|
6639
|
+
if (part.executableCode && (idx === content.parts.length - 1 || !content.parts[idx + 1].codeExecutionResult)) {
|
|
6640
|
+
content.parts = content.parts.slice(0, idx + 1);
|
|
6641
|
+
return part.executableCode.code;
|
|
6642
|
+
}
|
|
6643
|
+
}
|
|
6644
|
+
const textParts = content.parts.filter((p) => p.text);
|
|
6645
|
+
if (!textParts.length) {
|
|
6646
|
+
return null;
|
|
6647
|
+
}
|
|
6648
|
+
const responseText = textParts.map((p) => p.text).join("\n");
|
|
6649
|
+
const leadingDelimiterPattern = codeBlockDelimiters.map(([start]) => _CodeExecutionUtils.escapeRegex(start)).join("|");
|
|
6650
|
+
const trailingDelimiterPattern = codeBlockDelimiters.map(([, end]) => _CodeExecutionUtils.escapeRegex(end)).join("|");
|
|
6651
|
+
const pattern = new RegExp(
|
|
6652
|
+
`(.*?)(${leadingDelimiterPattern})(.*?)(${trailingDelimiterPattern})(.*?)$`,
|
|
6653
|
+
"s"
|
|
6654
|
+
);
|
|
6655
|
+
const match = responseText.match(pattern);
|
|
6656
|
+
if (!match) {
|
|
6657
|
+
return null;
|
|
6658
|
+
}
|
|
6659
|
+
const [, prefix, , code, , suffix] = match;
|
|
6660
|
+
if (!code) {
|
|
6661
|
+
return null;
|
|
6662
|
+
}
|
|
6663
|
+
content.parts = [];
|
|
6664
|
+
if (prefix) {
|
|
6665
|
+
content.parts.push({ text: prefix });
|
|
6666
|
+
}
|
|
6667
|
+
content.parts.push(_CodeExecutionUtils.buildExecutableCodePart(code));
|
|
6668
|
+
return code;
|
|
6669
|
+
}
|
|
6670
|
+
static escapeRegex(str) {
|
|
6671
|
+
return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
|
6672
|
+
}
|
|
6673
|
+
/**
|
|
6674
|
+
* Builds an executable code part with code string
|
|
6675
|
+
*/
|
|
6676
|
+
static buildExecutableCodePart(code) {
|
|
6677
|
+
return {
|
|
6678
|
+
executableCode: {
|
|
6679
|
+
code,
|
|
6680
|
+
language: _genai.Language.PYTHON
|
|
6681
|
+
}
|
|
6682
|
+
};
|
|
6683
|
+
}
|
|
6684
|
+
/**
|
|
6685
|
+
* Builds the code execution result part from the code execution result
|
|
6686
|
+
*/
|
|
6687
|
+
static buildCodeExecutionResultPart(codeExecutionResult) {
|
|
6688
|
+
if (codeExecutionResult.stderr) {
|
|
6689
|
+
return {
|
|
6690
|
+
codeExecutionResult: {
|
|
6691
|
+
outcome: _genai.Outcome.OUTCOME_FAILED,
|
|
6692
|
+
output: codeExecutionResult.stderr
|
|
6693
|
+
}
|
|
6694
|
+
};
|
|
6695
|
+
}
|
|
6696
|
+
const finalResult = [];
|
|
6697
|
+
if (codeExecutionResult.stdout || !codeExecutionResult.outputFiles.length) {
|
|
6698
|
+
finalResult.push(
|
|
6699
|
+
`Code execution result:
|
|
6700
|
+
${codeExecutionResult.stdout}
|
|
6701
|
+
`
|
|
6702
|
+
);
|
|
6703
|
+
}
|
|
6704
|
+
if (codeExecutionResult.outputFiles.length) {
|
|
6705
|
+
const fileNames = codeExecutionResult.outputFiles.map((f) => `\`${f.name}\``).join(",");
|
|
6706
|
+
finalResult.push(`Saved artifacts:
|
|
6707
|
+
${fileNames}`);
|
|
6708
|
+
}
|
|
6709
|
+
return {
|
|
6710
|
+
codeExecutionResult: {
|
|
6711
|
+
outcome: _genai.Outcome.OUTCOME_OK,
|
|
6712
|
+
output: finalResult.join("\n\n")
|
|
6713
|
+
}
|
|
6714
|
+
};
|
|
6715
|
+
}
|
|
6716
|
+
/**
|
|
6717
|
+
* Converts the code execution parts to text parts in a Content
|
|
6718
|
+
*/
|
|
6719
|
+
static convertCodeExecutionParts(content, codeBlockDelimiter, executionResultDelimiters) {
|
|
6720
|
+
if (!_optionalChain([content, 'access', _212 => _212.parts, 'optionalAccess', _213 => _213.length])) {
|
|
6721
|
+
return;
|
|
6722
|
+
}
|
|
6723
|
+
const lastPart = content.parts[content.parts.length - 1];
|
|
6724
|
+
if (lastPart.executableCode) {
|
|
6725
|
+
content.parts[content.parts.length - 1] = {
|
|
6726
|
+
text: `${codeBlockDelimiter[0]}${lastPart.executableCode.code}${codeBlockDelimiter[1]}`
|
|
6727
|
+
};
|
|
6728
|
+
} else if (content.parts.length === 1 && lastPart.codeExecutionResult) {
|
|
6729
|
+
content.parts[content.parts.length - 1] = {
|
|
6730
|
+
text: `${executionResultDelimiters[0]}${lastPart.codeExecutionResult.output}${executionResultDelimiters[1]}`
|
|
6731
|
+
};
|
|
6732
|
+
content.role = "user";
|
|
6733
|
+
}
|
|
6734
|
+
}
|
|
6735
|
+
};
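A sketch of extractCodeAndTruncateContent on a text-only model response. The delimiter pair below is illustrative (the executor's real codeBlockDelimiters appear with their backticks collapsed in this rendering); the Content/Part shapes follow @google/genai:

// Hypothetical model output containing one fenced code block.
const content = {
  role: "model",
  parts: [{ text: "Plan first.\n```python\nprint('hi')\n```\nThen stop." }],
};

const code = CodeExecutionUtils.extractCodeAndTruncateContent(content as any, [
  ["```python\n", "\n```"],
]);
// code === "print('hi')"
// content.parts is rewritten to the prefix text plus an executableCode part,
// and everything after the code block ("Then stop.") is truncated away.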
|
|
6641
6736
|
|
|
6642
|
-
// src/
|
|
6643
|
-
var
|
|
6737
|
+
// src/code-executors/code-executor-context.ts
|
|
6738
|
+
var CONTEXT_KEY = "_code_execution_context";
|
|
6739
|
+
var SESSION_ID_KEY = "execution_session_id";
|
|
6740
|
+
var PROCESSED_FILE_NAMES_KEY = "processed_input_files";
|
|
6741
|
+
var INPUT_FILE_KEY = "_code_executor_input_files";
|
|
6742
|
+
var ERROR_COUNT_KEY = "_code_executor_error_counts";
|
|
6743
|
+
var CODE_EXECUTION_RESULTS_KEY = "_code_execution_results";
|
|
6744
|
+
var CodeExecutorContext = class {
|
|
6745
|
+
|
|
6746
|
+
|
|
6747
|
+
constructor(sessionState) {
|
|
6748
|
+
this.sessionState = sessionState;
|
|
6749
|
+
this.context = this.getCodeExecutorContext(sessionState);
|
|
6750
|
+
}
|
|
6751
|
+
/**
|
|
6752
|
+
* Gets the state delta to update in the persistent session state.
|
|
6753
|
+
*/
|
|
6754
|
+
getStateDelta() {
|
|
6755
|
+
const contextToUpdate = JSON.parse(JSON.stringify(this.context));
|
|
6756
|
+
return { [CONTEXT_KEY]: contextToUpdate };
|
|
6757
|
+
}
|
|
6758
|
+
/**
|
|
6759
|
+
* Gets the session ID for the code executor.
|
|
6760
|
+
*/
|
|
6761
|
+
getExecutionId() {
|
|
6762
|
+
if (!(SESSION_ID_KEY in this.context)) {
|
|
6763
|
+
return null;
|
|
6764
|
+
}
|
|
6765
|
+
return this.context[SESSION_ID_KEY];
|
|
6766
|
+
}
|
|
6767
|
+
/**
|
|
6768
|
+
* Sets the session ID for the code executor.
|
|
6769
|
+
*/
|
|
6770
|
+
setExecutionId(sessionId) {
|
|
6771
|
+
this.context[SESSION_ID_KEY] = sessionId;
|
|
6772
|
+
}
|
|
6773
|
+
/**
|
|
6774
|
+
* Gets the processed file names from the session state.
|
|
6775
|
+
*/
|
|
6776
|
+
getProcessedFileNames() {
|
|
6777
|
+
if (!(PROCESSED_FILE_NAMES_KEY in this.context)) {
|
|
6778
|
+
return [];
|
|
6779
|
+
}
|
|
6780
|
+
return this.context[PROCESSED_FILE_NAMES_KEY];
|
|
6781
|
+
}
|
|
6782
|
+
/**
|
|
6783
|
+
* Adds the processed file names to the session state.
|
|
6784
|
+
*/
|
|
6785
|
+
addProcessedFileNames(fileNames) {
|
|
6786
|
+
if (!(PROCESSED_FILE_NAMES_KEY in this.context)) {
|
|
6787
|
+
this.context[PROCESSED_FILE_NAMES_KEY] = [];
|
|
6788
|
+
}
|
|
6789
|
+
this.context[PROCESSED_FILE_NAMES_KEY].push(...fileNames);
|
|
6790
|
+
}
|
|
6791
|
+
/**
|
|
6792
|
+
* Gets the code executor input files from the session state.
|
|
6793
|
+
*/
|
|
6794
|
+
getInputFiles() {
|
|
6795
|
+
if (!(INPUT_FILE_KEY in this.sessionState)) {
|
|
6796
|
+
return [];
|
|
6797
|
+
}
|
|
6798
|
+
return this.sessionState[INPUT_FILE_KEY].map(
|
|
6799
|
+
(file) => file
|
|
6800
|
+
);
|
|
6801
|
+
}
|
|
6802
|
+
/**
|
|
6803
|
+
* Adds the input files to the code executor context.
|
|
6804
|
+
*/
|
|
6805
|
+
addInputFiles(inputFiles) {
|
|
6806
|
+
if (!(INPUT_FILE_KEY in this.sessionState)) {
|
|
6807
|
+
this.sessionState[INPUT_FILE_KEY] = [];
|
|
6808
|
+
}
|
|
6809
|
+
const fileArray = this.sessionState[INPUT_FILE_KEY];
|
|
6810
|
+
for (const inputFile of inputFiles) {
|
|
6811
|
+
fileArray.push({
|
|
6812
|
+
name: inputFile.name,
|
|
6813
|
+
content: inputFile.content,
|
|
6814
|
+
mimeType: inputFile.mimeType
|
|
6815
|
+
});
|
|
6816
|
+
}
|
|
6817
|
+
}
|
|
6818
|
+
/**
|
|
6819
|
+
* Removes the input files and processed file names from the code executor context.
|
|
6820
|
+
*/
|
|
6821
|
+
clearInputFiles() {
|
|
6822
|
+
if (INPUT_FILE_KEY in this.sessionState) {
|
|
6823
|
+
this.sessionState[INPUT_FILE_KEY] = [];
|
|
6824
|
+
}
|
|
6825
|
+
if (PROCESSED_FILE_NAMES_KEY in this.context) {
|
|
6826
|
+
this.context[PROCESSED_FILE_NAMES_KEY] = [];
|
|
6827
|
+
}
|
|
6828
|
+
}
|
|
6829
|
+
/**
|
|
6830
|
+
* Gets the error count from the session state.
|
|
6831
|
+
*/
|
|
6832
|
+
getErrorCount(invocationId) {
|
|
6833
|
+
if (!(ERROR_COUNT_KEY in this.sessionState)) {
|
|
6834
|
+
return 0;
|
|
6835
|
+
}
|
|
6836
|
+
const errorCounts = this.sessionState[ERROR_COUNT_KEY];
|
|
6837
|
+
return _nullishCoalesce(errorCounts[invocationId], () => ( 0));
|
|
6838
|
+
}
|
|
6839
|
+
/**
|
|
6840
|
+
* Increments the error count for the given invocation ID.
|
|
6841
|
+
*/
|
|
6842
|
+
incrementErrorCount(invocationId) {
|
|
6843
|
+
if (!(ERROR_COUNT_KEY in this.sessionState)) {
|
|
6844
|
+
this.sessionState[ERROR_COUNT_KEY] = {};
|
|
6845
|
+
}
|
|
6846
|
+
const errorCounts = this.sessionState[ERROR_COUNT_KEY];
|
|
6847
|
+
errorCounts[invocationId] = this.getErrorCount(invocationId) + 1;
|
|
6848
|
+
}
|
|
6849
|
+
/**
|
|
6850
|
+
* Resets the error count for the given invocation ID.
|
|
6851
|
+
*/
|
|
6852
|
+
resetErrorCount(invocationId) {
|
|
6853
|
+
if (!(ERROR_COUNT_KEY in this.sessionState)) {
|
|
6854
|
+
return;
|
|
6855
|
+
}
|
|
6856
|
+
const errorCounts = this.sessionState[ERROR_COUNT_KEY];
|
|
6857
|
+
if (invocationId in errorCounts) {
|
|
6858
|
+
delete errorCounts[invocationId];
|
|
6859
|
+
}
|
|
6860
|
+
}
|
|
6861
|
+
/**
|
|
6862
|
+
* Updates the code execution result.
|
|
6863
|
+
*/
|
|
6864
|
+
updateCodeExecutionResult(invocationId, code, resultStdout, resultStderr) {
|
|
6865
|
+
if (!(CODE_EXECUTION_RESULTS_KEY in this.sessionState)) {
|
|
6866
|
+
this.sessionState[CODE_EXECUTION_RESULTS_KEY] = {};
|
|
6867
|
+
}
|
|
6868
|
+
const results = this.sessionState[CODE_EXECUTION_RESULTS_KEY];
|
|
6869
|
+
if (!(invocationId in results)) {
|
|
6870
|
+
results[invocationId] = [];
|
|
6871
|
+
}
|
|
6872
|
+
results[invocationId].push({
|
|
6873
|
+
code,
|
|
6874
|
+
resultStdout,
|
|
6875
|
+
resultStderr,
|
|
6876
|
+
timestamp: Math.floor(Date.now() / 1e3)
|
|
6877
|
+
});
|
|
6878
|
+
}
|
|
6879
|
+
/**
|
|
6880
|
+
* Gets the code executor context from the session state.
|
|
6881
|
+
*/
|
|
6882
|
+
getCodeExecutorContext(sessionState) {
|
|
6883
|
+
if (!(CONTEXT_KEY in sessionState)) {
|
|
6884
|
+
sessionState[CONTEXT_KEY] = {};
|
|
6885
|
+
}
|
|
6886
|
+
return sessionState[CONTEXT_KEY];
|
|
6887
|
+
}
|
|
6888
|
+
};
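CodeExecutorContext above is a thin wrapper over plain session state: everything it tracks lives under the string keys declared at the top of this section. A usage sketch, assuming the class is exported from @iqai/adk (the state object is hypothetical):

import { CodeExecutorContext } from "@iqai/adk"; // assumption: exported by the package

const sessionState: Record<string, any> = {};
const ctx = new CodeExecutorContext(sessionState);

ctx.setExecutionId("session-123");
ctx.addProcessedFileNames(["data_1_1.csv"]);
ctx.incrementErrorCount("invocation-1");

console.log(ctx.getErrorCount("invocation-1")); // 1 (kept under _code_executor_error_counts)
console.log(ctx.getStateDelta());
// { _code_execution_context: { execution_session_id: "session-123",
//                              processed_input_files: ["data_1_1.csv"] } }
// Note: error counts live directly on the session state, so they are not
// part of the state delta above.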
|
|
6889
|
+
|
|
6890
|
+
// src/flows/llm-flows/code-execution.ts
|
|
6891
|
+
var DATA_FILE_UTIL_MAP = {
|
|
6892
|
+
"text/csv": {
|
|
6893
|
+
extension: ".csv",
|
|
6894
|
+
loaderCodeTemplate: "pd.read_csv('{filename}')"
|
|
6895
|
+
}
|
|
6896
|
+
};
|
|
6897
|
+
var DATA_FILE_HELPER_LIB = `
|
|
6898
|
+
import pandas as pd
|
|
6899
|
+
|
|
6900
|
+
def explore_df(df: pd.DataFrame) -> None:
|
|
6901
|
+
"""Prints some information about a pandas DataFrame."""
|
|
6902
|
+
|
|
6903
|
+
with pd.option_context(
|
|
6904
|
+
'display.max_columns', None, 'display.expand_frame_repr', False
|
|
6905
|
+
):
|
|
6906
|
+
# Print the column names to never encounter KeyError when selecting one.
|
|
6907
|
+
df_dtypes = df.dtypes
|
|
6908
|
+
|
|
6909
|
+
# Obtain information about data types and missing values.
|
|
6910
|
+
df_nulls = (len(df) - df.isnull().sum()).apply(
|
|
6911
|
+
lambda x: f'{x} / {df.shape[0]} non-null'
|
|
6912
|
+
)
|
|
6913
|
+
|
|
6914
|
+
# Explore unique total values in columns using \`.unique()\`.
|
|
6915
|
+
df_unique_count = df.apply(lambda x: len(x.unique()))
|
|
6916
|
+
|
|
6917
|
+
# Explore unique values in columns using \`.unique()\`.
|
|
6918
|
+
df_unique = df.apply(lambda x: crop(str(list(x.unique()))))
|
|
6919
|
+
|
|
6920
|
+
df_info = pd.concat(
|
|
6921
|
+
(
|
|
6922
|
+
df_dtypes.rename('Dtype'),
|
|
6923
|
+
df_nulls.rename('Non-Null Count'),
|
|
6924
|
+
df_unique_count.rename('Unique Values Count'),
|
|
6925
|
+
df_unique.rename('Unique Values'),
|
|
6926
|
+
),
|
|
6927
|
+
axis=1,
|
|
6928
|
+
)
|
|
6929
|
+
df_info.index.name = 'Columns'
|
|
6930
|
+
print(f"""Total rows: {df.shape[0]}
|
|
6931
|
+
Total columns: {df.shape[1]}
|
|
6932
|
+
|
|
6933
|
+
{df_info}""")
|
|
6934
|
+
|
|
6935
|
+
def crop(text: str, max_length: int = 100) -> str:
|
|
6936
|
+
"""Crop text to maximum length with ellipsis."""
|
|
6937
|
+
return text if len(text) <= max_length else text[:max_length] + "..."
|
|
6938
|
+
`;
|
|
6939
|
+
function hasCodeExecutor(agent) {
|
|
6940
|
+
return agent && typeof agent === "object" && "codeExecutor" in agent;
|
|
6941
|
+
}
|
|
6942
|
+
var CodeExecutionRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
6644
6943
|
async *runAsync(invocationContext, llmRequest) {
|
|
6645
6944
|
const agent = invocationContext.agent;
|
|
6646
|
-
if (!
|
|
6945
|
+
if (!hasCodeExecutor(agent)) {
|
|
6647
6946
|
return;
|
|
6648
6947
|
}
|
|
6649
|
-
|
|
6650
|
-
|
|
6651
|
-
|
|
6652
|
-
|
|
6948
|
+
if (!(agent instanceof LlmAgent) || !agent.codeExecutor) {
|
|
6949
|
+
return;
|
|
6950
|
+
}
|
|
6951
|
+
yield* runPreProcessor(invocationContext, llmRequest);
|
|
6952
|
+
if (!(agent.codeExecutor instanceof BaseCodeExecutor)) {
|
|
6953
|
+
return;
|
|
6954
|
+
}
|
|
6955
|
+
for (const content of llmRequest.contents || []) {
|
|
6956
|
+
CodeExecutionUtils.convertCodeExecutionParts(
|
|
6957
|
+
content,
|
|
6958
|
+
agent.codeExecutor.codeBlockDelimiters[0] || ["", ""],
|
|
6959
|
+
agent.codeExecutor.executionResultDelimiters
|
|
6653
6960
|
);
|
|
6654
|
-
|
|
6655
|
-
|
|
6656
|
-
|
|
6657
|
-
|
|
6658
|
-
|
|
6659
|
-
|
|
6961
|
+
}
|
|
6962
|
+
}
|
|
6963
|
+
};
|
|
6964
|
+
var CodeExecutionResponseProcessor = class extends BaseLlmResponseProcessor {
|
|
6965
|
+
async *runAsync(invocationContext, llmResponse) {
|
|
6966
|
+
if (llmResponse.partial) {
|
|
6967
|
+
return;
|
|
6968
|
+
}
|
|
6969
|
+
yield* runPostProcessor(invocationContext, llmResponse);
|
|
6970
|
+
}
|
|
6971
|
+
};
|
|
6972
|
+
async function* runPreProcessor(invocationContext, llmRequest) {
|
|
6973
|
+
const agent = invocationContext.agent;
|
|
6974
|
+
if (!hasCodeExecutor(agent)) {
|
|
6975
|
+
return;
|
|
6976
|
+
}
|
|
6977
|
+
const codeExecutor = agent.codeExecutor;
|
|
6978
|
+
if (!codeExecutor || !(codeExecutor instanceof BaseCodeExecutor)) {
|
|
6979
|
+
return;
|
|
6980
|
+
}
|
|
6981
|
+
if (codeExecutor instanceof BuiltInCodeExecutor) {
|
|
6982
|
+
codeExecutor.processLlmRequest(llmRequest);
|
|
6983
|
+
return;
|
|
6984
|
+
}
|
|
6985
|
+
if (!codeExecutor.optimizeDataFile) {
|
|
6986
|
+
return;
|
|
6987
|
+
}
|
|
6988
|
+
const codeExecutorContext = new CodeExecutorContext(
|
|
6989
|
+
invocationContext.session.state
|
|
6990
|
+
// Type assertion for State compatibility
|
|
6991
|
+
);
|
|
6992
|
+
if (codeExecutorContext.getErrorCount(invocationContext.invocationId) >= codeExecutor.errorRetryAttempts) {
|
|
6993
|
+
return;
|
|
6994
|
+
}
|
|
6995
|
+
const allInputFiles = extractAndReplaceInlineFiles(
|
|
6996
|
+
codeExecutorContext,
|
|
6997
|
+
llmRequest
|
|
6998
|
+
);
|
|
6999
|
+
const processedFileNames = new Set(
|
|
7000
|
+
codeExecutorContext.getProcessedFileNames()
|
|
7001
|
+
);
|
|
7002
|
+
const filesToProcess = allInputFiles.filter(
|
|
7003
|
+
(f) => !processedFileNames.has(f.name)
|
|
7004
|
+
);
|
|
7005
|
+
for (const file of filesToProcess) {
|
|
7006
|
+
const codeStr = getDataFilePreprocessingCode(file);
|
|
7007
|
+
if (!codeStr) {
|
|
7008
|
+
continue;
|
|
7009
|
+
}
|
|
7010
|
+
const codeContent = {
|
|
7011
|
+
role: "model",
|
|
7012
|
+
parts: [
|
|
7013
|
+
{ text: `Processing input file: \`${file.name}\`` },
|
|
7014
|
+
CodeExecutionUtils.buildExecutableCodePart(codeStr)
|
|
7015
|
+
]
|
|
7016
|
+
};
|
|
7017
|
+
llmRequest.contents = llmRequest.contents || [];
|
|
7018
|
+
llmRequest.contents.push(structuredClone(codeContent));
|
|
7019
|
+
yield new Event({
|
|
7020
|
+
invocationId: invocationContext.invocationId,
|
|
7021
|
+
author: agent.name,
|
|
7022
|
+
branch: invocationContext.branch,
|
|
7023
|
+
content: codeContent
|
|
7024
|
+
});
|
|
7025
|
+
const codeExecutionResult = await codeExecutor.executeCode(
|
|
7026
|
+
invocationContext,
|
|
7027
|
+
{
|
|
7028
|
+
code: codeStr,
|
|
7029
|
+
inputFiles: [file],
|
|
7030
|
+
executionId: getOrSetExecutionId(
|
|
7031
|
+
invocationContext,
|
|
7032
|
+
codeExecutorContext
|
|
7033
|
+
)
|
|
6660
7034
|
}
|
|
6661
|
-
|
|
7035
|
+
);
|
|
7036
|
+
codeExecutorContext.updateCodeExecutionResult(
|
|
7037
|
+
invocationContext.invocationId,
|
|
7038
|
+
codeStr,
|
|
7039
|
+
codeExecutionResult.stdout,
|
|
7040
|
+
codeExecutionResult.stderr
|
|
7041
|
+
);
|
|
7042
|
+
codeExecutorContext.addProcessedFileNames([file.name]);
|
|
7043
|
+
const executionResultEvent = await postProcessCodeExecutionResult(
|
|
7044
|
+
invocationContext,
|
|
7045
|
+
codeExecutorContext,
|
|
7046
|
+
codeExecutionResult
|
|
7047
|
+
);
|
|
7048
|
+
yield executionResultEvent;
|
|
7049
|
+
llmRequest.contents.push(structuredClone(executionResultEvent.content));
|
|
7050
|
+
}
|
|
7051
|
+
}
|
|
7052
|
+
async function* runPostProcessor(invocationContext, llmResponse) {
|
|
7053
|
+
const agent = invocationContext.agent;
|
|
7054
|
+
if (!hasCodeExecutor(agent)) {
|
|
7055
|
+
return;
|
|
7056
|
+
}
|
|
7057
|
+
const codeExecutor = agent.codeExecutor;
|
|
7058
|
+
if (!(codeExecutor instanceof BaseCodeExecutor)) {
|
|
7059
|
+
return;
|
|
7060
|
+
}
|
|
7061
|
+
if (!llmResponse || !llmResponse.content) {
|
|
7062
|
+
return;
|
|
7063
|
+
}
|
|
7064
|
+
if (codeExecutor instanceof BuiltInCodeExecutor) {
|
|
7065
|
+
return;
|
|
7066
|
+
}
|
|
7067
|
+
const codeExecutorContext = new CodeExecutorContext(
|
|
7068
|
+
invocationContext.session.state
|
|
7069
|
+
// Type assertion for State compatibility
|
|
7070
|
+
);
|
|
7071
|
+
if (codeExecutorContext.getErrorCount(invocationContext.invocationId) >= codeExecutor.errorRetryAttempts) {
|
|
7072
|
+
return;
|
|
7073
|
+
}
|
|
7074
|
+
const responseContent = llmResponse.content;
|
|
7075
|
+
const codeStr = CodeExecutionUtils.extractCodeAndTruncateContent(
|
|
7076
|
+
responseContent,
|
|
7077
|
+
codeExecutor.codeBlockDelimiters
|
|
7078
|
+
);
|
|
7079
|
+
if (!codeStr) {
|
|
7080
|
+
return;
|
|
7081
|
+
}
|
|
7082
|
+
yield new Event({
|
|
7083
|
+
invocationId: invocationContext.invocationId,
|
|
7084
|
+
author: agent.name,
|
|
7085
|
+
branch: invocationContext.branch,
|
|
7086
|
+
content: responseContent,
|
|
7087
|
+
actions: new EventActions()
|
|
7088
|
+
});
|
|
7089
|
+
const codeExecutionResult = await codeExecutor.executeCode(
|
|
7090
|
+
invocationContext,
|
|
7091
|
+
{
|
|
7092
|
+
code: codeStr,
|
|
7093
|
+
inputFiles: codeExecutorContext.getInputFiles(),
|
|
7094
|
+
executionId: getOrSetExecutionId(invocationContext, codeExecutorContext)
|
|
6662
7095
|
}
|
|
6663
|
-
|
|
6664
|
-
|
|
6665
|
-
|
|
6666
|
-
|
|
6667
|
-
|
|
6668
|
-
|
|
6669
|
-
|
|
6670
|
-
|
|
6671
|
-
|
|
6672
|
-
|
|
7096
|
+
);
|
|
7097
|
+
codeExecutorContext.updateCodeExecutionResult(
|
|
7098
|
+
invocationContext.invocationId,
|
|
7099
|
+
codeStr,
|
|
7100
|
+
codeExecutionResult.stdout,
|
|
7101
|
+
codeExecutionResult.stderr
|
|
7102
|
+
);
|
|
7103
|
+
yield await postProcessCodeExecutionResult(
|
|
7104
|
+
invocationContext,
|
|
7105
|
+
codeExecutorContext,
|
|
7106
|
+
codeExecutionResult
|
|
7107
|
+
);
|
|
7108
|
+
llmResponse.content = void 0;
|
|
7109
|
+
}
|
|
7110
|
+
function extractAndReplaceInlineFiles(codeExecutorContext, llmRequest) {
|
|
7111
|
+
const allInputFiles = codeExecutorContext.getInputFiles();
|
|
7112
|
+
const savedFileNames = new Set(allInputFiles.map((f) => f.name));
|
|
7113
|
+
for (let i = 0; i < (_optionalChain([llmRequest, 'access', _214 => _214.contents, 'optionalAccess', _215 => _215.length]) || 0); i++) {
|
|
7114
|
+
const content = llmRequest.contents[i];
|
|
7115
|
+
if (content.role !== "user" || !content.parts) {
|
|
7116
|
+
continue;
|
|
7117
|
+
}
|
|
7118
|
+
for (let j = 0; j < content.parts.length; j++) {
|
|
7119
|
+
const part = content.parts[j];
|
|
7120
|
+
if (!part.inlineData || !(part.inlineData.mimeType in DATA_FILE_UTIL_MAP)) {
|
|
7121
|
+
continue;
|
|
7122
|
+
}
|
|
7123
|
+
const mimeType = part.inlineData.mimeType;
|
|
7124
|
+
const fileName = `data_${i + 1}_${j + 1}${DATA_FILE_UTIL_MAP[mimeType].extension}`;
|
|
7125
|
+
llmRequest.contents[i].parts[j] = {
|
|
7126
|
+
text: `
|
|
7127
|
+
Available file: \`${fileName}\`
|
|
7128
|
+
`
|
|
7129
|
+
};
|
|
7130
|
+
const file = {
|
|
7131
|
+
name: fileName,
|
|
7132
|
+
content: CodeExecutionUtils.getEncodedFileContent(part.inlineData.data),
|
|
7133
|
+
mimeType
|
|
7134
|
+
};
|
|
7135
|
+
if (!savedFileNames.has(fileName)) {
|
|
7136
|
+
codeExecutorContext.addInputFiles([file]);
|
|
7137
|
+
allInputFiles.push(file);
|
|
6673
7138
|
}
|
|
6674
|
-
llmRequest.appendInstructions([instruction]);
|
|
6675
7139
|
}
|
|
6676
|
-
|
|
6677
|
-
|
|
7140
|
+
}
|
|
7141
|
+
return allInputFiles;
|
|
7142
|
+
}
|
|
7143
|
+
function getOrSetExecutionId(invocationContext, codeExecutorContext) {
|
|
7144
|
+
const agent = invocationContext.agent;
|
|
7145
|
+
if (!hasCodeExecutor(agent) || !_optionalChain([agent, 'access', _216 => _216.codeExecutor, 'optionalAccess', _217 => _217.stateful])) {
|
|
7146
|
+
return void 0;
|
|
7147
|
+
}
|
|
7148
|
+
let executionId = codeExecutorContext.getExecutionId();
|
|
7149
|
+
if (!executionId) {
|
|
7150
|
+
executionId = invocationContext.session.id;
|
|
7151
|
+
codeExecutorContext.setExecutionId(executionId);
|
|
7152
|
+
}
|
|
7153
|
+
return executionId;
|
|
7154
|
+
}
|
|
7155
|
+
async function postProcessCodeExecutionResult(invocationContext, codeExecutorContext, codeExecutionResult) {
|
|
7156
|
+
if (!invocationContext.artifactService) {
|
|
7157
|
+
throw new Error("Artifact service is not initialized.");
|
|
7158
|
+
}
|
|
7159
|
+
const resultContent = {
|
|
7160
|
+
role: "model",
|
|
7161
|
+
parts: [
|
|
7162
|
+
CodeExecutionUtils.buildCodeExecutionResultPart(codeExecutionResult)
|
|
7163
|
+
]
|
|
7164
|
+
};
|
|
7165
|
+
const eventActions = new EventActions({
|
|
7166
|
+
stateDelta: codeExecutorContext.getStateDelta()
|
|
7167
|
+
});
|
|
7168
|
+
if (codeExecutionResult.stderr) {
|
|
7169
|
+
codeExecutorContext.incrementErrorCount(invocationContext.invocationId);
|
|
7170
|
+
} else {
|
|
7171
|
+
codeExecutorContext.resetErrorCount(invocationContext.invocationId);
|
|
7172
|
+
}
|
|
7173
|
+
for (const outputFile of codeExecutionResult.outputFiles) {
|
|
7174
|
+
const version = await invocationContext.artifactService.saveArtifact({
|
|
7175
|
+
appName: invocationContext.appName,
|
|
7176
|
+
userId: invocationContext.userId,
|
|
7177
|
+
sessionId: invocationContext.session.id,
|
|
7178
|
+
filename: outputFile.name,
|
|
7179
|
+
artifact: {
|
|
7180
|
+
inlineData: {
|
|
7181
|
+
data: atob(outputFile.content),
|
|
7182
|
+
// Convert from base64
|
|
7183
|
+
mimeType: outputFile.mimeType
|
|
7184
|
+
}
|
|
7185
|
+
}
|
|
7186
|
+
});
|
|
7187
|
+
eventActions.artifactDelta[outputFile.name] = version;
|
|
7188
|
+
}
|
|
7189
|
+
return new Event({
|
|
7190
|
+
invocationId: invocationContext.invocationId,
|
|
7191
|
+
author: invocationContext.agent.name,
|
|
7192
|
+
branch: invocationContext.branch,
|
|
7193
|
+
content: resultContent,
|
|
7194
|
+
actions: eventActions
|
|
7195
|
+
});
|
|
7196
|
+
}
|
|
7197
|
+
function getDataFilePreprocessingCode(file) {
|
|
7198
|
+
function getNormalizedFileName(fileName) {
|
|
7199
|
+
const baseName = fileName.split(".")[0];
|
|
7200
|
+
let varName2 = baseName.replace(/[^a-zA-Z0-9_]/g, "_");
|
|
7201
|
+
if (/^\d/.test(varName2)) {
|
|
7202
|
+
varName2 = `_${varName2}`;
|
|
6678
7203
|
}
|
|
7204
|
+
return varName2;
|
|
6679
7205
|
}
|
|
6680
|
-
|
|
6681
|
-
|
|
6682
|
-
*/
|
|
6683
|
-
isLlmAgent(agent) {
|
|
6684
|
-
return agent && typeof agent === "object" && "canonicalModel" in agent;
|
|
7206
|
+
if (!(file.mimeType in DATA_FILE_UTIL_MAP)) {
|
|
7207
|
+
return void 0;
|
|
6685
7208
|
}
|
|
6686
|
-
|
|
6687
|
-
|
|
7209
|
+
const varName = getNormalizedFileName(file.name);
|
|
7210
|
+
const loaderCode = DATA_FILE_UTIL_MAP[file.mimeType].loaderCodeTemplate.replace("{filename}", file.name);
|
|
7211
|
+
return `
|
|
7212
|
+
${DATA_FILE_HELPER_LIB}
|
|
7213
|
+
|
|
7214
|
+
# Load the dataframe.
|
|
7215
|
+
${varName} = ${loaderCode}
|
|
7216
|
+
|
|
7217
|
+
# Use \`explore_df\` to guide my analysis.
|
|
7218
|
+
explore_df(${varName})
|
|
7219
|
+
`;
|
|
7220
|
+
}
|
|
7221
|
+
var requestProcessor3 = new CodeExecutionRequestProcessor();
|
|
7222
|
+
var responseProcessor = new CodeExecutionResponseProcessor();
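For CSV attachments, the pre-processor above rewrites the inline file into a text note ("Available file: `data_<i>_<j>.csv`") and calls getDataFilePreprocessingCode, which wraps DATA_FILE_HELPER_LIB around a pandas loader. A sketch of what that helper generates for a hypothetical upload (the function is module-internal in the compiled bundle, so this is for illustration only):

// Hypothetical input file; only name and mimeType drive the generated code.
const file = { name: "sales report.csv", content: "", mimeType: "text/csv" };
const code = getDataFilePreprocessingCode(file);
// `code` is DATA_FILE_HELPER_LIB followed by roughly:
//   # Load the dataframe.
//   sales_report = pd.read_csv('sales report.csv')
//   # Use `explore_df` to guide my analysis.
//   explore_df(sales_report)
// ("sales report" is normalized to the identifier sales_report by
//  getNormalizedFileName.)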
|
|
6688
7223
|
|
|
6689
7224
|
// src/flows/llm-flows/contents.ts
|
|
6690
7225
|
var ContentLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
@@ -6717,7 +7252,7 @@ var ContentLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
|
6717
7252
|
return agent && typeof agent === "object" && "canonicalModel" in agent;
|
|
6718
7253
|
}
|
|
6719
7254
|
};
|
|
6720
|
-
var
|
|
7255
|
+
var requestProcessor4 = new ContentLlmRequestProcessor();
|
|
6721
7256
|
function rearrangeEventsForAsyncFunctionResponsesInHistory(events) {
|
|
6722
7257
|
const functionCallIdToResponseEventsIndex = {};
|
|
6723
7258
|
for (let i = 0; i < events.length; i++) {
|
|
@@ -6838,7 +7373,7 @@ function rearrangeEventsForLatestFunctionResponse(events) {
|
|
|
6838
7373
|
continue;
|
|
6839
7374
|
}
|
|
6840
7375
|
const functionResponses2 = event.getFunctionResponses();
|
|
6841
|
-
if (_optionalChain([functionResponses2, 'optionalAccess',
|
|
7376
|
+
if (_optionalChain([functionResponses2, 'optionalAccess', _218 => _218.some, 'call', _219 => _219((fr) => fr.id && functionResponsesIds.has(fr.id))])) {
|
|
6842
7377
|
functionResponseEvents.push(event);
|
|
6843
7378
|
}
|
|
6844
7379
|
}
|
|
@@ -6937,7 +7472,7 @@ function mergeFunctionResponseEvents(functionResponseEvents) {
|
|
|
6937
7472
|
const partIndicesInMergedEvent = {};
|
|
6938
7473
|
for (let idx = 0; idx < partsInMergedEvent.length; idx++) {
|
|
6939
7474
|
const part = partsInMergedEvent[idx];
|
|
6940
|
-
if (_optionalChain([part, 'access',
|
|
7475
|
+
if (_optionalChain([part, 'access', _220 => _220.functionResponse, 'optionalAccess', _221 => _221.id])) {
|
|
6941
7476
|
partIndicesInMergedEvent[part.functionResponse.id] = idx;
|
|
6942
7477
|
}
|
|
6943
7478
|
}
|
|
@@ -6946,7 +7481,7 @@ function mergeFunctionResponseEvents(functionResponseEvents) {
|
|
|
6946
7481
|
throw new Error("There should be at least one function_response part.");
|
|
6947
7482
|
}
|
|
6948
7483
|
for (const part of event.content.parts) {
|
|
6949
|
-
if (_optionalChain([part, 'access',
|
|
7484
|
+
if (_optionalChain([part, 'access', _222 => _222.functionResponse, 'optionalAccess', _223 => _223.id])) {
|
|
6950
7485
|
const functionCallId = part.functionResponse.id;
|
|
6951
7486
|
if (functionCallId in partIndicesInMergedEvent) {
|
|
6952
7487
|
partsInMergedEvent[partIndicesInMergedEvent[functionCallId]] = part;
|
|
@@ -6982,6 +7517,151 @@ function isAuthEvent(event) {
|
|
|
6982
7517
|
return false;
|
|
6983
7518
|
}
|
|
6984
7519
|
|
|
7520
|
+
// src/flows/llm-flows/identity.ts
|
|
7521
|
+
var IdentityLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
7522
|
+
async *runAsync(invocationContext, llmRequest) {
|
|
7523
|
+
const agent = invocationContext.agent;
|
|
7524
|
+
const instructions = [
|
|
7525
|
+
`You are an agent. Your internal name is "${agent.name}".`
|
|
7526
|
+
];
|
|
7527
|
+
if (agent.description) {
|
|
7528
|
+
instructions.push(` The description about you is "${agent.description}"`);
|
|
7529
|
+
}
|
|
7530
|
+
llmRequest.appendInstructions(instructions);
|
|
7531
|
+
for await (const _ of []) {
|
|
7532
|
+
yield _;
|
|
7533
|
+
}
|
|
7534
|
+
}
|
|
7535
|
+
};
|
|
7536
|
+
var requestProcessor5 = new IdentityLlmRequestProcessor();
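The identity processor above contributes a fixed self-description instruction built from the agent's name and description. For a hypothetical agent:

// Hypothetical agent fields; the strings below mirror the templates above.
const agent = { name: "helper", description: "Answers math questions" };
// llmRequest.appendInstructions([
//   'You are an agent. Your internal name is "helper".',
//   ' The description about you is "Answers math questions"'
// ]);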
|
|
7537
|
+
|
|
7538
|
+
// src/utils/instructions-utils.ts
|
|
7539
|
+
async function injectSessionState(template, readonlyContext) {
|
|
7540
|
+
const invocationContext = readonlyContext._invocationContext;
|
|
7541
|
+
async function asyncReplace(pattern, replaceAsyncFn, string) {
|
|
7542
|
+
const result = [];
|
|
7543
|
+
let lastEnd = 0;
|
|
7544
|
+
const matches = Array.from(string.matchAll(pattern));
|
|
7545
|
+
for (const match of matches) {
|
|
7546
|
+
result.push(string.slice(lastEnd, match.index));
|
|
7547
|
+
const replacement = await replaceAsyncFn(match);
|
|
7548
|
+
result.push(replacement);
|
|
7549
|
+
lastEnd = (match.index || 0) + match[0].length;
|
|
7550
|
+
}
|
|
7551
|
+
result.push(string.slice(lastEnd));
|
|
7552
|
+
return result.join("");
|
|
7553
|
+
}
|
|
7554
|
+
async function replaceMatch(match) {
|
|
7555
|
+
let varName = match[0].replace(/[{}]/g, "").trim();
|
|
7556
|
+
let optional = false;
|
|
7557
|
+
if (varName.endsWith("?")) {
|
|
7558
|
+
optional = true;
|
|
7559
|
+
varName = varName.slice(0, -1);
|
|
7560
|
+
}
|
|
7561
|
+
if (varName.startsWith("artifact.")) {
|
|
7562
|
+
varName = varName.replace("artifact.", "");
|
|
7563
|
+
if (!invocationContext.artifactService) {
|
|
7564
|
+
throw new Error("Artifact service is not initialized.");
|
|
7565
|
+
}
|
|
7566
|
+
try {
|
|
7567
|
+
const artifact = await invocationContext.artifactService.loadArtifact({
|
|
7568
|
+
appName: invocationContext.session.appName,
|
|
7569
|
+
userId: invocationContext.session.userId,
|
|
7570
|
+
sessionId: invocationContext.session.id,
|
|
7571
|
+
filename: varName
|
|
7572
|
+
});
|
|
7573
|
+
if (!artifact) {
|
|
7574
|
+
throw new Error(`Artifact ${varName} not found.`);
|
|
7575
|
+
}
|
|
7576
|
+
return String(artifact);
|
|
7577
|
+
} catch (error) {
|
|
7578
|
+
if (optional) {
|
|
7579
|
+
return "";
|
|
7580
|
+
}
|
|
7581
|
+
throw error;
|
|
7582
|
+
}
|
|
7583
|
+
} else {
|
|
7584
|
+
if (!isValidStateName(varName)) {
|
|
7585
|
+
return match[0];
|
|
7586
|
+
}
|
|
7587
|
+
const sessionState = invocationContext.session.state;
|
|
7588
|
+
if (varName in sessionState) {
|
|
7589
|
+
return String(sessionState[varName]);
|
|
7590
|
+
}
|
|
7591
|
+
if (optional) {
|
|
7592
|
+
return "";
|
|
7593
|
+
}
|
|
7594
|
+
throw new Error(`Context variable not found: \`${varName}\`.`);
|
|
7595
|
+
}
|
|
7596
|
+
}
|
|
7597
|
+
return await asyncReplace(/{[^{}]*}/g, replaceMatch, template);
|
|
7598
|
+
}
|
|
7599
|
+
function isValidStateName(varName) {
|
|
7600
|
+
const parts = varName.split(":");
|
|
7601
|
+
if (parts.length === 1) {
|
|
7602
|
+
return isValidIdentifier(varName);
|
|
7603
|
+
}
|
|
7604
|
+
if (parts.length === 2) {
|
|
7605
|
+
const validPrefixes = ["app:", "user:", "temp:"];
|
|
7606
|
+
const prefix = `${parts[0]}:`;
|
|
7607
|
+
if (validPrefixes.includes(prefix)) {
|
|
7608
|
+
return isValidIdentifier(parts[1]);
|
|
7609
|
+
}
|
|
7610
|
+
}
|
|
7611
|
+
return false;
|
|
7612
|
+
}
|
|
7613
|
+
function isValidIdentifier(name) {
|
|
7614
|
+
const identifierRegex = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/;
|
|
7615
|
+
return identifierRegex.test(name);
|
|
7616
|
+
}
|
|
7617
|
+
|
|
7618
|
+
// src/flows/llm-flows/instructions.ts
|
|
7619
|
+
var InstructionsLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
7620
|
+
async *runAsync(invocationContext, llmRequest) {
|
|
7621
|
+
const agent = invocationContext.agent;
|
|
7622
|
+
if (!this.isLlmAgent(agent)) {
|
|
7623
|
+
return;
|
|
7624
|
+
}
|
|
7625
|
+
const rootAgent = agent.rootAgent;
|
|
7626
|
+
if (this.isLlmAgent(rootAgent) && rootAgent.globalInstruction) {
|
|
7627
|
+
const [rawInstruction, bypassStateInjection] = await rootAgent.canonicalGlobalInstruction(
|
|
7628
|
+
new ReadonlyContext(invocationContext)
|
|
7629
|
+
);
|
|
7630
|
+
let instruction = rawInstruction;
|
|
7631
|
+
if (!bypassStateInjection) {
|
|
7632
|
+
instruction = await injectSessionState(
|
|
7633
|
+
rawInstruction,
|
|
7634
|
+
new ReadonlyContext(invocationContext)
|
|
7635
|
+
);
|
|
7636
|
+
}
|
|
7637
|
+
llmRequest.appendInstructions([instruction]);
|
|
7638
|
+
}
|
|
7639
|
+
if (agent.instruction) {
|
|
7640
|
+
const [rawInstruction, bypassStateInjection] = await agent.canonicalInstruction(
|
|
7641
|
+
new ReadonlyContext(invocationContext)
|
|
7642
|
+
);
|
|
7643
|
+
let instruction = rawInstruction;
|
|
7644
|
+
if (!bypassStateInjection) {
|
|
7645
|
+
instruction = await injectSessionState(
|
|
7646
|
+
rawInstruction,
|
|
7647
|
+
new ReadonlyContext(invocationContext)
|
|
7648
|
+
);
|
|
7649
|
+
}
|
|
7650
|
+
llmRequest.appendInstructions([instruction]);
|
|
7651
|
+
}
|
|
7652
|
+
for await (const _ of []) {
|
|
7653
|
+
yield _;
|
|
7654
|
+
}
|
|
7655
|
+
}
|
|
7656
|
+
/**
|
|
7657
|
+
* Type guard to check if agent is an LlmAgent
|
|
7658
|
+
*/
|
|
7659
|
+
isLlmAgent(agent) {
|
|
7660
|
+
return agent && typeof agent === "object" && "canonicalModel" in agent;
|
|
7661
|
+
}
|
|
7662
|
+
};
|
|
7663
|
+
var requestProcessor6 = new InstructionsLlmRequestProcessor();
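Both instruction paths above run through injectSessionState (src/utils/instructions-utils.ts, earlier in this diff), which resolves {variable} placeholders from session state, treats a trailing ? as optional, accepts the app:/user:/temp: prefixes, and loads {artifact.<filename>} from the artifact service. A sketch with hypothetical values, assuming the helper is reachable from the package:

// Hypothetical session state behind the readonly context:
//   { topic: "llamas", "user:name": "Ada" }
declare const readonlyContext: any; // assumed to wrap the session above

const rendered = await injectSessionState(
  "Talk about {topic} with {user:name}.{nickname?}",
  readonlyContext
);
// rendered === "Talk about llamas with Ada."
// {nickname?} is optional, so a missing value becomes "";
// without the "?", a missing variable throws
//   Context variable not found: `nickname`.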
|
|
7664
|
+
|
|
6985
7665
|
// src/planners/base-planner.ts
|
|
6986
7666
|
var BasePlanner = class {
|
|
6987
7667
|
};
|
|
@@ -7051,7 +7731,7 @@ var PlanReActPlanner = class extends BasePlanner {
|
|
|
7051
7731
|
let firstFcPartIndex = -1;
|
|
7052
7732
|
for (let i = 0; i < responseParts.length; i++) {
|
|
7053
7733
|
if (responseParts[i].functionCall) {
|
|
7054
|
-
if (!_optionalChain([responseParts, 'access',
|
|
7734
|
+
if (!_optionalChain([responseParts, 'access', _224 => _224[i], 'access', _225 => _225.functionCall, 'optionalAccess', _226 => _226.name])) {
|
|
7055
7735
|
continue;
|
|
7056
7736
|
}
|
|
7057
7737
|
preservedParts.push(responseParts[i]);
|
|
@@ -7090,7 +7770,7 @@ var PlanReActPlanner = class extends BasePlanner {
|
|
|
7090
7770
|
* Handles non-function-call parts of the response
|
|
7091
7771
|
*/
|
|
7092
7772
|
_handleNonFunctionCallParts(responsePart, preservedParts) {
|
|
7093
|
-
if (_optionalChain([responsePart, 'access',
|
|
7773
|
+
if (_optionalChain([responsePart, 'access', _227 => _227.text, 'optionalAccess', _228 => _228.includes, 'call', _229 => _229(FINAL_ANSWER_TAG)])) {
|
|
7094
7774
|
const [reasoningText, finalAnswerText] = this._splitByLastPattern(
|
|
7095
7775
|
responsePart.text,
|
|
7096
7776
|
FINAL_ANSWER_TAG
|
|
@@ -7257,66 +7937,10 @@ function removeThoughtFromRequest(llmRequest) {
|
|
|
7257
7937
|
}
|
|
7258
7938
|
}
|
|
7259
7939
|
}
|
|
7260
|
-
var
|
|
7261
|
-
var
|
|
7262
|
-
|
|
7263
|
-
// src/flows/llm-flows/code-execution.ts
|
|
7264
|
-
var CodeExecutionRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
7265
|
-
async *runAsync(invocationContext, llmRequest) {
|
|
7266
|
-
const agent = invocationContext.agent;
|
|
7267
|
-
if (!("codeExecutor" in agent) || !agent.codeExecutor) {
|
|
7268
|
-
return;
|
|
7269
|
-
}
|
|
7270
|
-
console.log(
|
|
7271
|
-
"Code execution request processing - TODO: Implement when code-executors module is ready"
|
|
7272
|
-
);
|
|
7273
|
-
for await (const _ of []) {
|
|
7274
|
-
yield _;
|
|
7275
|
-
}
|
|
7276
|
-
}
|
|
7277
|
-
/**
|
|
7278
|
-
* Placeholder for pre-processor logic
|
|
7279
|
-
* TODO: Implement when code-executors are ready
|
|
7280
|
-
*/
|
|
7281
|
-
async *runPreProcessor(invocationContext, llmRequest) {
|
|
7282
|
-
console.log("Code execution pre-processor - placeholder");
|
|
7283
|
-
for await (const _ of []) {
|
|
7284
|
-
yield _;
|
|
7285
|
-
}
|
|
7286
|
-
}
|
|
7287
|
-
};
|
|
7288
|
-
var CodeExecutionResponseProcessor = class extends BaseLlmResponseProcessor {
|
|
7289
|
-
async *runAsync(invocationContext, llmResponse) {
|
|
7290
|
-
if (llmResponse.partial) {
|
|
7291
|
-
return;
|
|
7292
|
-
}
|
|
7293
|
-
const agent = invocationContext.agent;
|
|
7294
|
-
if (!("codeExecutor" in agent) || !agent.codeExecutor) {
|
|
7295
|
-
return;
|
|
7296
|
-
}
|
|
7297
|
-
console.log(
|
|
7298
|
-
"Code execution response processing - TODO: Implement when code-executors module is ready"
|
|
7299
|
-
);
|
|
7300
|
-
for await (const _ of []) {
|
|
7301
|
-
yield _;
|
|
7302
|
-
}
|
|
7303
|
-
}
|
|
7304
|
-
/**
|
|
7305
|
-
* Placeholder for post-processor logic
|
|
7306
|
-
* TODO: Implement when code-executors are ready
|
|
7307
|
-
*/
|
|
7308
|
-
async *runPostProcessor(invocationContext, llmResponse) {
|
|
7309
|
-
console.log("Code execution post-processor - placeholder");
|
|
7310
|
-
for await (const _ of []) {
|
|
7311
|
-
yield _;
|
|
7312
|
-
}
|
|
7313
|
-
}
|
|
7314
|
-
};
|
|
7315
|
-
var requestProcessor7 = new CodeExecutionRequestProcessor();
|
|
7316
|
-
var responseProcessor2 = new CodeExecutionResponseProcessor();
|
|
7940
|
+
var requestProcessor7 = new NlPlanningRequestProcessor();
|
|
7941
|
+
var responseProcessor2 = new NlPlanningResponseProcessor();
|
|
7317
7942
|
|
|
7318
7943
|
// src/flows/llm-flows/single-flow.ts
|
|
7319
|
-
var logger7 = new Logger({ name: "SingleFlow" });
|
|
7320
7944
|
var SingleFlow = class extends BaseLlmFlow {
|
|
7321
7945
|
/**
|
|
7322
7946
|
* Constructor for SingleFlow
|
|
@@ -7324,35 +7948,32 @@ var SingleFlow = class extends BaseLlmFlow {
|
|
|
7324
7948
|
constructor() {
|
|
7325
7949
|
super();
|
|
7326
7950
|
this.requestProcessors.push(
|
|
7327
|
-
requestProcessor,
|
|
7328
7951
|
requestProcessor2,
|
|
7952
|
+
requestProcessor,
|
|
7329
7953
|
// Phase 3: Auth preprocessor
|
|
7330
|
-
|
|
7331
|
-
requestProcessor3,
|
|
7954
|
+
requestProcessor6,
|
|
7332
7955
|
requestProcessor5,
|
|
7956
|
+
requestProcessor4,
|
|
7333
7957
|
// Some implementations of NL Planning mark planning contents as thoughts
|
|
7334
7958
|
// in the post processor. Since these need to be unmarked, NL Planning
|
|
7335
7959
|
// should be after contents.
|
|
7336
|
-
|
|
7960
|
+
requestProcessor7,
|
|
7337
7961
|
// Phase 5: NL Planning
|
|
7338
7962
|
// Code execution should be after the contents as it mutates the contents
|
|
7339
7963
|
// to optimize data files.
|
|
7340
|
-
|
|
7964
|
+
requestProcessor3
|
|
7341
7965
|
// Phase 5: Code Execution (placeholder)
|
|
7342
7966
|
);
|
|
7343
7967
|
this.responseProcessors.push(
|
|
7344
|
-
|
|
7968
|
+
responseProcessor2,
|
|
7345
7969
|
// Phase 5: NL Planning
|
|
7346
|
-
|
|
7970
|
+
responseProcessor
|
|
7347
7971
|
// Phase 5: Code Execution (placeholder)
|
|
7348
7972
|
);
|
|
7349
|
-
|
|
7973
|
+
this.logger.debug("SingleFlow initialized with processors");
|
|
7350
7974
|
}
|
|
7351
7975
|
};
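The minified requestProcessorN variables pushed in the constructor above map to processors defined earlier in this diff, so the SingleFlow pipeline now runs in this order:

// Request processors, in push order:
//   requestProcessor2 -> BasicLlmRequestProcessor
//   requestProcessor  -> AuthLlmRequestProcessor
//   requestProcessor6 -> InstructionsLlmRequestProcessor
//   requestProcessor5 -> IdentityLlmRequestProcessor
//   requestProcessor4 -> ContentLlmRequestProcessor
//   requestProcessor7 -> NlPlanningRequestProcessor
//   requestProcessor3 -> CodeExecutionRequestProcessor
// Response processors:
//   responseProcessor2 -> NlPlanningResponseProcessor
//   responseProcessor  -> CodeExecutionResponseProcessor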
|
|
7352
7976
|
|
|
7353
|
-
// src/flows/llm-flows/auto-flow.ts
|
|
7354
|
-
init_logger();
|
|
7355
|
-
|
|
7356
7977
|
// src/flows/llm-flows/agent-transfer.ts
|
|
7357
7978
|
var AgentTransferLlmRequestProcessor = class extends BaseLlmRequestProcessor {
|
|
7358
7979
|
/**
|
|
@@ -7442,7 +8063,6 @@ function getTransferTargets(agent) {
|
|
|
7442
8063
|
var requestProcessor8 = new AgentTransferLlmRequestProcessor();
|
|
7443
8064
|
|
|
7444
8065
|
// src/flows/llm-flows/auto-flow.ts
|
|
7445
|
-
var logger8 = new Logger({ name: "AutoFlow" });
|
|
7446
8066
|
var AutoFlow = class extends SingleFlow {
|
|
7447
8067
|
/**
|
|
7448
8068
|
* Constructor for AutoFlow
|
|
@@ -7450,13 +8070,13 @@ var AutoFlow = class extends SingleFlow {
|
|
|
7450
8070
|
constructor() {
|
|
7451
8071
|
super();
|
|
7452
8072
|
this.requestProcessors.push(requestProcessor8);
|
|
7453
|
-
|
|
8073
|
+
this.logger.debug("AutoFlow initialized with agent transfer capability");
|
|
7454
8074
|
}
|
|
7455
8075
|
};
|
|
7456
8076
|
|
|
7457
8077
|
// src/agents/llm-agent.ts
|
|
7458
8078
|
init_function_tool();
|
|
7459
|
-
var LlmAgent = (
|
|
8079
|
+
var LlmAgent = (_class23 = class _LlmAgent extends BaseAgent {
|
|
7460
8080
|
/**
|
|
7461
8081
|
* The model to use for the agent
|
|
7462
8082
|
* When not set, the agent will inherit the model from its ancestor
|
|
@@ -7475,6 +8095,10 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
|
|
|
7475
8095
|
* Tools available to this agent
|
|
7476
8096
|
*/
|
|
7477
8097
|
|
|
8098
|
+
/**
|
|
8099
|
+
* Code executor for this agent
|
|
8100
|
+
*/
|
|
8101
|
+
|
|
7478
8102
|
/**
|
|
7479
8103
|
* Disallows LLM-controlled transferring to the parent agent
|
|
7480
8104
|
*/
|
|
@@ -7529,7 +8153,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
|
|
|
7529
8153
|
*/
|
|
7530
8154
|
|
|
7531
8155
|
// Schema type - depends on specific implementation
|
|
7532
|
-
|
|
8156
|
+
__init42() {this.logger = new Logger({ name: "LlmAgent" })}
|
|
7533
8157
|
/**
|
|
7534
8158
|
* Constructor for LlmAgent
|
|
7535
8159
|
*/
|
|
@@ -7537,11 +8161,12 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
|
|
|
7537
8161
|
super({
|
|
7538
8162
|
name: config.name,
|
|
7539
8163
|
description: config.description
|
|
7540
|
-
});
|
|
8164
|
+
});_class23.prototype.__init42.call(this);;
|
|
7541
8165
|
this.model = config.model || "";
|
|
7542
8166
|
this.instruction = config.instruction || "";
|
|
7543
8167
|
this.globalInstruction = config.globalInstruction || "";
|
|
7544
8168
|
this.tools = config.tools || [];
|
|
8169
|
+
this.codeExecutor = config.codeExecutor;
|
|
7545
8170
|
this.disallowTransferToParent = config.disallowTransferToParent || false;
|
|
7546
8171
|
this.disallowTransferToPeers = config.disallowTransferToPeers || false;
|
|
7547
8172
|
this.includeContents = config.includeContents || "default";
|
|
@@ -7561,11 +8186,14 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
|
|
|
7561
8186
|
* This method is only for use by Agent Development Kit
|
|
7562
8187
|
*/
|
|
7563
8188
|
get canonicalModel() {
|
|
7564
|
-
if (typeof this.model
|
|
8189
|
+
if (typeof this.model === "string") {
|
|
8190
|
+
if (this.model) {
|
|
8191
|
+
return LLMRegistry.newLLM(this.model);
|
|
8192
|
+
}
|
|
8193
|
+
} else if (this.model instanceof BaseLlm) {
|
|
7565
8194
|
return this.model;
|
|
7566
|
-
}
|
|
7567
|
-
|
|
7568
|
-
return LLMRegistry.newLLM(this.model);
|
|
8195
|
+
} else if (this.model) {
|
|
8196
|
+
return new AiSdkLlm(this.model);
|
|
7569
8197
|
}
|
|
7570
8198
|
let ancestorAgent = this.parentAgent;
|
|
7571
8199
|
while (ancestorAgent !== null) {
|
|
@@ -7619,7 +8247,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
|
|
|
7619
8247
|
* This matches the Python implementation's _llm_flow property
|
|
7620
8248
|
*/
|
|
7621
8249
|
get llmFlow() {
|
|
7622
|
-
if (this.disallowTransferToParent && this.disallowTransferToPeers && !_optionalChain([this, 'access',
|
|
8250
|
+
if (this.disallowTransferToParent && this.disallowTransferToPeers && !_optionalChain([this, 'access', _230 => _230.subAgents, 'optionalAccess', _231 => _231.length])) {
|
|
7623
8251
|
return new SingleFlow();
|
|
7624
8252
|
}
|
|
7625
8253
|
return new AutoFlow();
|
|
@@ -7629,7 +8257,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
|
|
|
7629
8257
|
* This matches the Python implementation's __maybe_save_output_to_state
|
|
7630
8258
|
*/
|
|
7631
8259
|
maybeSaveOutputToState(event) {
|
|
7632
|
-
if (this.outputKey && event.isFinalResponse() && _optionalChain([event, 'access',
|
|
8260
|
+
if (this.outputKey && event.isFinalResponse() && _optionalChain([event, 'access', _232 => _232.content, 'optionalAccess', _233 => _233.parts])) {
|
|
7633
8261
|
const result = event.content.parts.map((part) => part.text || "").join("");
|
|
7634
8262
|
if (result) {
|
|
7635
8263
|
if (!event.actions.stateDelta) {
|
|
@@ -7669,7 +8297,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
|
|
|
7669
8297
|
yield errorEvent;
|
|
7670
8298
|
}
|
|
7671
8299
|
}
|
|
7672
|
-
},
|
|
8300
|
+
}, _class23);
|
|
7673
8301
|
|
|
7674
8302
|
// src/agents/sequential-agent.ts
|
|
7675
8303
|
var SequentialAgent = class extends BaseAgent {
|
|
@@ -7735,11 +8363,11 @@ var LlmCallsLimitExceededError = class extends Error {
|
|
|
7735
8363
|
this.name = "LlmCallsLimitExceededError";
|
|
7736
8364
|
}
|
|
7737
8365
|
};
|
|
7738
|
-
var InvocationCostManager = (
|
|
8366
|
+
var InvocationCostManager = (_class24 = class {constructor() { _class24.prototype.__init43.call(this); }
|
|
7739
8367
|
/**
|
|
7740
8368
|
* A counter that keeps track of number of llm calls made.
|
|
7741
8369
|
*/
|
|
7742
|
-
|
|
8370
|
+
__init43() {this._numberOfLlmCalls = 0}
|
|
7743
8371
|
/**
|
|
7744
8372
|
* Increments _numberOfLlmCalls and enforces the limit.
|
|
7745
8373
|
*/
|
|
@@ -7751,11 +8379,11 @@ var InvocationCostManager = (_class22 = class {constructor() { _class22.prototyp
|
|
|
7751
8379
|
);
|
|
7752
8380
|
}
|
|
7753
8381
|
}
|
|
7754
|
-
},
|
|
8382
|
+
}, _class24);
|
|
7755
8383
|
function newInvocationContextId() {
|
|
7756
8384
|
return `e-${crypto.randomUUID()}`;
|
|
7757
8385
|
}
|
|
7758
|
-
var InvocationContext = (
|
|
8386
|
+
var InvocationContext = (_class25 = class _InvocationContext {
|
|
7759
8387
|
|
|
7760
8388
|
|
|
7761
8389
|
|
|
@@ -7790,7 +8418,7 @@ var InvocationContext = (_class23 = class _InvocationContext {
|
|
|
7790
8418
|
*
|
|
7791
8419
|
* Set to True in callbacks or tools to terminate this invocation.
|
|
7792
8420
|
*/
|
|
7793
|
-
|
|
8421
|
+
__init44() {this.endInvocation = false}
|
|
7794
8422
|
/**
|
|
7795
8423
|
* The queue to receive live requests.
|
|
7796
8424
|
*/
|
|
@@ -7811,11 +8439,11 @@ var InvocationContext = (_class23 = class _InvocationContext {
|
|
|
7811
8439
|
* A container to keep track of different kinds of costs incurred as a part
|
|
7812
8440
|
* of this invocation.
|
|
7813
8441
|
*/
|
|
7814
|
-
|
|
8442
|
+
__init45() {this._invocationCostManager = new InvocationCostManager()}
|
|
7815
8443
|
/**
|
|
7816
8444
|
* Constructor for InvocationContext
|
|
7817
8445
|
*/
|
|
7818
|
-
constructor(options) {;
|
|
8446
|
+
constructor(options) {;_class25.prototype.__init44.call(this);_class25.prototype.__init45.call(this);
|
|
7819
8447
|
this.artifactService = options.artifactService;
|
|
7820
8448
|
this.sessionService = options.sessionService;
|
|
7821
8449
|
this.memoryService = options.memoryService;
|
|
@@ -7875,7 +8503,7 @@ var InvocationContext = (_class23 = class _InvocationContext {
|
|
|
7875
8503
|
runConfig: this.runConfig
|
|
7876
8504
|
});
|
|
7877
8505
|
}
|
|
7878
|
-
},
|
|
8506
|
+
}, _class25);
|
|
7879
8507
|
|
|
7880
8508
|
// src/agents/parallel-agent.ts
|
|
7881
8509
|
function createBranchContextForSubAgent(agent, subAgent, invocationContext) {
|
|
@@ -7992,7 +8620,7 @@ var LoopAgent = class extends BaseAgent {
|
|
|
7992
8620
|
for (const subAgent of this.subAgents) {
|
|
7993
8621
|
for await (const event of subAgent.runAsync(ctx)) {
|
|
7994
8622
|
yield event;
|
|
7995
|
-
if (_optionalChain([event, 'access',
|
|
8623
|
+
if (_optionalChain([event, 'access', _234 => _234.actions, 'optionalAccess', _235 => _235.escalate])) {
|
|
7996
8624
|
return;
|
|
7997
8625
|
}
|
|
7998
8626
|
}
|
|
@@ -8010,7 +8638,7 @@ var LoopAgent = class extends BaseAgent {
|
|
|
8010
8638
|
|
|
8011
8639
|
// src/agents/lang-graph-agent.ts
|
|
8012
8640
|
init_logger();
|
|
8013
|
-
var LangGraphAgent = (
|
|
8641
|
+
var LangGraphAgent = (_class26 = class extends BaseAgent {
|
|
8014
8642
|
/**
|
|
8015
8643
|
* Graph nodes (agents and their connections)
|
|
8016
8644
|
*/
|
|
@@ -8026,8 +8654,8 @@ var LangGraphAgent = (_class24 = class extends BaseAgent {
|
|
|
8026
8654
|
/**
|
|
8027
8655
|
* Results from node executions
|
|
8028
8656
|
*/
|
|
8029
|
-
|
|
8030
|
-
|
|
8657
|
+
__init46() {this.results = []}
|
|
8658
|
+
__init47() {this.logger = new Logger({ name: "LangGraphAgent" })}
|
|
8031
8659
|
/**
|
|
8032
8660
|
* Constructor for LangGraphAgent
|
|
8033
8661
|
*/
|
|
@@ -8035,7 +8663,7 @@ var LangGraphAgent = (_class24 = class extends BaseAgent {
|
|
|
8035
8663
|
super({
|
|
8036
8664
|
name: config.name,
|
|
8037
8665
|
description: config.description
|
|
8038
|
-
});
|
|
8666
|
+
});_class26.prototype.__init46.call(this);_class26.prototype.__init47.call(this);;
|
|
8039
8667
|
this.nodes = /* @__PURE__ */ new Map();
|
|
8040
8668
|
for (const node of config.nodes) {
|
|
8041
8669
|
if (this.nodes.has(node.name)) {
|
|
@@ -8231,7 +8859,7 @@ var LangGraphAgent = (_class24 = class extends BaseAgent {
|
|
|
8231
8859
|
}
|
|
8232
8860
|
this.maxSteps = maxSteps;
|
|
8233
8861
|
}
|
|
8234
|
-
},
|
|
8862
|
+
}, _class26);
|
|
8235
8863
|
|
|
8236
8864
|
// src/runners.ts
|
|
8237
8865
|
|
|
@@ -8300,17 +8928,17 @@ var RunConfig = class {
|
|
|
8300
8928
|
*/
|
|
8301
8929
|
|
|
8302
8930
|
constructor(config) {
|
|
8303
|
-
this.speechConfig = _optionalChain([config, 'optionalAccess',
|
|
8304
|
-
this.responseModalities = _optionalChain([config, 'optionalAccess',
|
|
8305
|
-
this.saveInputBlobsAsArtifacts = _optionalChain([config, 'optionalAccess',
|
|
8306
|
-
this.supportCFC = _optionalChain([config, 'optionalAccess',
|
|
8307
|
-
this.streamingMode = _optionalChain([config, 'optionalAccess',
|
|
8308
|
-
this.outputAudioTranscription = _optionalChain([config, 'optionalAccess',
|
|
8309
|
-
this.inputAudioTranscription = _optionalChain([config, 'optionalAccess',
|
|
8310
|
-
this.realtimeInputConfig = _optionalChain([config, 'optionalAccess',
|
|
8311
|
-
this.enableAffectiveDialog = _optionalChain([config, 'optionalAccess',
|
|
8312
|
-
this.proactivity = _optionalChain([config, 'optionalAccess',
|
|
8313
|
-
this.maxLlmCalls = _nullishCoalesce(_optionalChain([config, 'optionalAccess',
|
|
8931
|
+
this.speechConfig = _optionalChain([config, 'optionalAccess', _236 => _236.speechConfig]);
|
|
8932
|
+
this.responseModalities = _optionalChain([config, 'optionalAccess', _237 => _237.responseModalities]);
|
|
8933
|
+
this.saveInputBlobsAsArtifacts = _optionalChain([config, 'optionalAccess', _238 => _238.saveInputBlobsAsArtifacts]) || false;
|
|
8934
|
+
this.supportCFC = _optionalChain([config, 'optionalAccess', _239 => _239.supportCFC]) || false;
|
|
8935
|
+
this.streamingMode = _optionalChain([config, 'optionalAccess', _240 => _240.streamingMode]) || "NONE" /* NONE */;
|
|
8936
|
+
this.outputAudioTranscription = _optionalChain([config, 'optionalAccess', _241 => _241.outputAudioTranscription]);
|
|
8937
|
+
this.inputAudioTranscription = _optionalChain([config, 'optionalAccess', _242 => _242.inputAudioTranscription]);
|
|
8938
|
+
this.realtimeInputConfig = _optionalChain([config, 'optionalAccess', _243 => _243.realtimeInputConfig]);
|
|
8939
|
+
this.enableAffectiveDialog = _optionalChain([config, 'optionalAccess', _244 => _244.enableAffectiveDialog]);
|
|
8940
|
+
this.proactivity = _optionalChain([config, 'optionalAccess', _245 => _245.proactivity]);
|
|
8941
|
+
this.maxLlmCalls = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _246 => _246.maxLlmCalls]), () => ( 500));
|
|
8314
8942
|
this.validateMaxLlmCalls();
|
|
8315
8943
|
}
|
|
8316
8944
|
/**
|
|
@@ -8331,10 +8959,8 @@ var RunConfig = class {
|
|
|
8331
8959
|
};
|
|
8332
8960
|
|
|
8333
8961
|
// src/artifacts/in-memory-artifact-service.ts
|
|
8334
|
-
|
|
8335
|
-
|
|
8336
|
-
var InMemoryArtifactService = (_class25 = class {constructor() { _class25.prototype.__init45.call(this); }
|
|
8337
|
-
__init45() {this.artifacts = /* @__PURE__ */ new Map()}
|
|
8962
|
+
var InMemoryArtifactService = (_class27 = class {constructor() { _class27.prototype.__init48.call(this); }
|
|
8963
|
+
__init48() {this.artifacts = /* @__PURE__ */ new Map()}
|
|
8338
8964
|
fileHasUserNamespace(filename) {
|
|
8339
8965
|
return filename.startsWith("user:");
|
|
8340
8966
|
}
|
|
@@ -8407,7 +9033,7 @@ var InMemoryArtifactService = (_class25 = class {constructor() { _class25.protot
|
|
|
8407
9033
|
}
|
|
8408
9034
|
return Array.from({ length: versions.length }, (_, i) => i);
|
|
8409
9035
|
}
|
|
8410
|
-
},
|
|
9036
|
+
}, _class27);
|
|
8411
9037
|
|
|
8412
9038
|
// src/runners.ts
|
|
8413
9039
|
init_logger();
|
|
@@ -8434,15 +9060,15 @@ function _extractWordsLower(text) {
|
|
|
8434
9060
|
const words = text.match(/[A-Za-z]+/g) || [];
|
|
8435
9061
|
return new Set(words.map((word) => word.toLowerCase()));
|
|
8436
9062
|
}
|
|
8437
|
-
var InMemoryMemoryService = (
|
|
9063
|
+
var InMemoryMemoryService = (_class28 = class {
|
|
8438
9064
|
/**
|
|
8439
9065
|
* Keys are app_name/user_id, session_id. Values are session event lists.
|
|
8440
9066
|
*/
|
|
8441
|
-
|
|
9067
|
+
__init49() {this._sessionEvents = /* @__PURE__ */ new Map()}
|
|
8442
9068
|
/**
|
|
8443
9069
|
* Constructor for InMemoryMemoryService
|
|
8444
9070
|
*/
|
|
8445
|
-
constructor() {;
|
|
9071
|
+
constructor() {;_class28.prototype.__init49.call(this);
|
|
8446
9072
|
this._sessionEvents = /* @__PURE__ */ new Map();
|
|
8447
9073
|
}
|
|
8448
9074
|
/**
|
|
@@ -8456,7 +9082,7 @@ var InMemoryMemoryService = (_class26 = class {
|
|
|
8456
9082
|
}
|
|
8457
9083
|
const userSessions = this._sessionEvents.get(userKey);
|
|
8458
9084
|
const filteredEvents = session.events.filter(
|
|
8459
|
-
(event) => _optionalChain([event, 'access',
|
|
9085
|
+
(event) => _optionalChain([event, 'access', _247 => _247.content, 'optionalAccess', _248 => _248.parts])
|
|
8460
9086
|
);
|
|
8461
9087
|
userSessions.set(session.id, filteredEvents);
|
|
8462
9088
|
}
|
|
@@ -8526,7 +9152,7 @@ var InMemoryMemoryService = (_class26 = class {
|
|
|
8526
9152
|
clear() {
|
|
8527
9153
|
this._sessionEvents.clear();
|
|
8528
9154
|
}
|
|
8529
|
-
},
|
|
9155
|
+
}, _class28);
|
|
8530
9156
|
|
|
8531
9157
|
// src/sessions/in-memory-session-service.ts
|
|
8532
9158
|
var _crypto = require('crypto');
|
|
@@ -8568,19 +9194,19 @@ var BaseSessionService = class {
|
|
|
8568
9194
|
};
|
|
8569
9195
|
|
|
8570
9196
|
// src/sessions/in-memory-session-service.ts
|
|
8571
|
-
var InMemorySessionService = (
|
|
9197
|
+
var InMemorySessionService = (_class29 = class extends BaseSessionService {constructor(...args2) { super(...args2); _class29.prototype.__init50.call(this);_class29.prototype.__init51.call(this);_class29.prototype.__init52.call(this); }
|
|
8572
9198
|
/**
|
|
8573
9199
|
* A map from app name to a map from user ID to a map from session ID to session.
|
|
8574
9200
|
*/
|
|
8575
|
-
|
|
9201
|
+
__init50() {this.sessions = /* @__PURE__ */ new Map()}
|
|
8576
9202
|
/**
|
|
8577
9203
|
* A map from app name to a map from user ID to a map from key to the value.
|
|
8578
9204
|
*/
|
|
8579
|
-
|
|
9205
|
+
__init51() {this.userState = /* @__PURE__ */ new Map()}
|
|
8580
9206
|
/**
|
|
8581
9207
|
* A map from app name to a map from key to the value.
|
|
8582
9208
|
*/
|
|
8583
|
-
|
|
9209
|
+
__init52() {this.appState = /* @__PURE__ */ new Map()}
|
|
8584
9210
|
/**
|
|
8585
9211
|
* Creates a new session.
|
|
8586
9212
|
*/
|
|
@@ -8595,7 +9221,7 @@ var InMemorySessionService = (_class27 = class extends BaseSessionService {const
|
|
|
8595
9221
|
return this.createSessionImpl(appName, userId, state, sessionId);
|
|
8596
9222
|
}
|
|
8597
9223
|
createSessionImpl(appName, userId, state, sessionId) {
|
|
8598
|
-
const finalSessionId = _optionalChain([sessionId, 'optionalAccess',
|
|
9224
|
+
const finalSessionId = _optionalChain([sessionId, 'optionalAccess', _249 => _249.trim, 'call', _250 => _250()]) || _crypto.randomUUID.call(void 0, );
|
|
8599
9225
|
const session = {
|
|
8600
9226
|
appName,
|
|
8601
9227
|
userId,
|
|
@@ -8752,7 +9378,7 @@ var InMemorySessionService = (_class27 = class extends BaseSessionService {const
|
|
|
8752
9378
|
warning(`sessionId ${sessionId} not in sessions[appName][userId]`);
|
|
8753
9379
|
return event;
|
|
8754
9380
|
}
|
|
8755
|
-
if (_optionalChain([event, 'access',
|
|
9381
|
+
if (_optionalChain([event, 'access', _251 => _251.actions, 'optionalAccess', _252 => _252.stateDelta])) {
|
|
8756
9382
|
for (const key in event.actions.stateDelta) {
|
|
8757
9383
|
const value = event.actions.stateDelta[key];
|
|
8758
9384
|
if (key.startsWith(State.APP_PREFIX)) {
|
|
@@ -8777,24 +9403,23 @@ var InMemorySessionService = (_class27 = class extends BaseSessionService {const
|
|
|
8777
9403
|
storageSession.lastUpdateTime = event.timestamp;
|
|
8778
9404
|
return event;
|
|
8779
9405
|
}
|
|
8780
|
-
},
|
|
9406
|
+
}, _class29);
|
|
8781
9407
|
|
|
8782
9408
|
// src/runners.ts
|
|
8783
|
-
var logger10 = new Logger({ name: "Runner" });
|
|
8784
9409
|
function _findFunctionCallEventIfLastEventIsFunctionResponse(session) {
|
|
8785
9410
|
const events = session.events;
|
|
8786
9411
|
if (!events || events.length === 0) {
|
|
8787
9412
|
return null;
|
|
8788
9413
|
}
|
|
8789
9414
|
const lastEvent = events[events.length - 1];
|
|
8790
|
-
if (_optionalChain([lastEvent, 'access',
|
|
8791
|
-
const functionCallId = _optionalChain([lastEvent, 'access',
|
|
9415
|
+
if (_optionalChain([lastEvent, 'access', _253 => _253.content, 'optionalAccess', _254 => _254.parts, 'optionalAccess', _255 => _255.some, 'call', _256 => _256((part) => part.functionResponse)])) {
|
|
9416
|
+
const functionCallId = _optionalChain([lastEvent, 'access', _257 => _257.content, 'access', _258 => _258.parts, 'access', _259 => _259.find, 'call', _260 => _260(
|
|
8792
9417
|
(part) => part.functionResponse
|
|
8793
|
-
), 'optionalAccess',
|
|
9418
|
+
), 'optionalAccess', _261 => _261.functionResponse, 'optionalAccess', _262 => _262.id]);
|
|
8794
9419
|
if (!functionCallId) return null;
|
|
8795
9420
|
for (let i = events.length - 2; i >= 0; i--) {
|
|
8796
9421
|
const event = events[i];
|
|
8797
|
-
const functionCalls = _optionalChain([event, 'access',
|
|
9422
|
+
const functionCalls = _optionalChain([event, 'access', _263 => _263.getFunctionCalls, 'optionalCall', _264 => _264()]) || [];
|
|
8798
9423
|
for (const functionCall of functionCalls) {
|
|
8799
9424
|
if (functionCall.id === functionCallId) {
|
|
8800
9425
|
return event;
|
|
@@ -8804,7 +9429,7 @@ function _findFunctionCallEventIfLastEventIsFunctionResponse(session) {
|
|
|
8804
9429
|
}
|
|
8805
9430
|
return null;
|
|
8806
9431
|
}
|
|
8807
|
-
var Runner = class {
|
|
9432
|
+
var Runner = (_class30 = class {
|
|
8808
9433
|
/**
|
|
8809
9434
|
* The app name of the runner.
|
|
8810
9435
|
*/
|
|
@@ -8825,6 +9450,7 @@ var Runner = class {
|
|
|
8825
9450
|
* The memory service for the runner.
|
|
8826
9451
|
*/
|
|
8827
9452
|
|
|
9453
|
+
__init53() {this.logger = new Logger({ name: "Runner" })}
|
|
8828
9454
|
/**
|
|
8829
9455
|
* Initializes the Runner.
|
|
8830
9456
|
*/
|
|
@@ -8834,7 +9460,7 @@ var Runner = class {
|
|
|
8834
9460
|
artifactService,
|
|
8835
9461
|
sessionService,
|
|
8836
9462
|
memoryService
|
|
8837
|
-
}) {
|
|
9463
|
+
}) {;_class30.prototype.__init53.call(this);
|
|
8838
9464
|
this.appName = appName;
|
|
8839
9465
|
this.agent = agent;
|
|
8840
9466
|
this.artifactService = artifactService;
|
|
@@ -8927,7 +9553,7 @@ var Runner = class {
|
|
|
8927
9553
|
yield event;
|
|
8928
9554
|
}
|
|
8929
9555
|
} catch (error) {
|
|
8930
|
-
|
|
9556
|
+
this.logger.debug("Error running agent:", error);
|
|
8931
9557
|
span.recordException(error);
|
|
8932
9558
|
span.setStatus({
|
|
8933
9559
|
code: _api.SpanStatusCode.ERROR,
|
|
@@ -8981,17 +9607,17 @@ var Runner = class {
|
|
|
8981
9607
|
*/
|
|
8982
9608
|
_findAgentToRun(session, rootAgent) {
|
|
8983
9609
|
const event = _findFunctionCallEventIfLastEventIsFunctionResponse(session);
|
|
8984
|
-
if (_optionalChain([event, 'optionalAccess',
|
|
9610
|
+
if (_optionalChain([event, 'optionalAccess', _265 => _265.author])) {
|
|
8985
9611
|
return rootAgent.findAgent(event.author);
|
|
8986
9612
|
}
|
|
8987
|
-
const nonUserEvents = _optionalChain([session, 'access',
|
|
9613
|
+
const nonUserEvents = _optionalChain([session, 'access', _266 => _266.events, 'optionalAccess', _267 => _267.filter, 'call', _268 => _268((e) => e.author !== "user"), 'access', _269 => _269.reverse, 'call', _270 => _270()]) || [];
|
|
8988
9614
|
for (const event2 of nonUserEvents) {
|
|
8989
9615
|
if (event2.author === rootAgent.name) {
|
|
8990
9616
|
return rootAgent;
|
|
8991
9617
|
}
|
|
8992
|
-
const agent = _optionalChain([rootAgent, 'access',
|
|
9618
|
+
const agent = _optionalChain([rootAgent, 'access', _271 => _271.findSubAgent, 'optionalCall', _272 => _272(event2.author)]);
|
|
8993
9619
|
if (!agent) {
|
|
8994
|
-
|
|
9620
|
+
this.logger.debug(
|
|
8995
9621
|
`Event from an unknown agent: ${event2.author}, event id: ${event2.id}`
|
|
8996
9622
|
);
|
|
8997
9623
|
continue;
|
|
@@ -9038,7 +9664,7 @@ var Runner = class {
|
|
|
9038
9664
|
runConfig
|
|
9039
9665
|
});
|
|
9040
9666
|
}
|
|
9041
|
-
};
|
|
9667
|
+
}, _class30);
|
|
9042
9668
|
var InMemoryRunner = class extends Runner {
|
|
9043
9669
|
/**
|
|
9044
9670
|
* Deprecated. Please don't use. The in-memory session service for the runner.
|
|
@@ -9061,14 +9687,14 @@ var InMemoryRunner = class extends Runner {
|
|
|
9061
9687
|
};
|
|
9062
9688
|
|
|
9063
9689
|
// src/agents/agent-builder.ts
|
|
9064
|
-
var AgentBuilder = (
|
|
9690
|
+
var AgentBuilder = (_class31 = class _AgentBuilder {
|
|
9065
9691
|
|
|
9066
9692
|
|
|
9067
|
-
|
|
9693
|
+
__init54() {this.agentType = "llm"}
|
|
9068
9694
|
/**
|
|
9069
9695
|
* Private constructor - use static create() method
|
|
9070
9696
|
*/
|
|
9071
|
-
constructor(name) {;
|
|
9697
|
+
constructor(name) {;_class31.prototype.__init54.call(this);
|
|
9072
9698
|
this.config = { name };
|
|
9073
9699
|
}
|
|
9074
9700
|
/**
|
|
@@ -9255,7 +9881,7 @@ var AgentBuilder = (_class28 = class _AgentBuilder {
|
|
|
9255
9881
|
parts: [{ text: message }]
|
|
9256
9882
|
}
|
|
9257
9883
|
})) {
|
|
9258
|
-
if (_optionalChain([event, 'access',
|
|
9884
|
+
if (_optionalChain([event, 'access', _273 => _273.content, 'optionalAccess', _274 => _274.parts])) {
|
|
9259
9885
|
const content = event.content.parts.map((part) => part.text || "").join("");
|
|
9260
9886
|
if (content) {
|
|
9261
9887
|
response += content;
|
|
@@ -9270,6 +9896,20 @@ var AgentBuilder = (_class28 = class _AgentBuilder {
|
|
|
9270
9896
|
*/
|
|
9271
9897
|
createAgent() {
|
|
9272
9898
|
switch (this.agentType) {
|
|
9899
|
+
case "llm": {
|
|
9900
|
+
if (!this.config.model) {
|
|
9901
|
+
throw new Error("Model is required for LLM agent");
|
|
9902
|
+
}
|
|
9903
|
+
const model = this.config.model;
|
|
9904
|
+
return new LlmAgent({
|
|
9905
|
+
name: this.config.name,
|
|
9906
|
+
model,
|
|
9907
|
+
description: this.config.description,
|
|
9908
|
+
instruction: this.config.instruction,
|
|
9909
|
+
tools: this.config.tools,
|
|
9910
|
+
planner: this.config.planner
|
|
9911
|
+
});
|
|
9912
|
+
}
|
|
9273
9913
|
case "sequential":
|
|
9274
9914
|
if (!this.config.subAgents) {
|
|
9275
9915
|
throw new Error("Sub-agents required for sequential agent");
|
|
@@ -9308,18 +9948,9 @@ var AgentBuilder = (_class28 = class _AgentBuilder {
|
|
|
9308
9948
|
nodes: this.config.nodes,
|
|
9309
9949
|
rootNode: this.config.rootNode
|
|
9310
9950
|
});
|
|
9311
|
-
default:
|
|
9312
|
-
return new LlmAgent({
|
|
9313
|
-
name: this.config.name,
|
|
9314
|
-
model: this.config.model,
|
|
9315
|
-
description: this.config.description,
|
|
9316
|
-
instruction: this.config.instruction,
|
|
9317
|
-
tools: this.config.tools,
|
|
9318
|
-
planner: this.config.planner
|
|
9319
|
-
});
|
|
9320
9951
|
}
|
|
9321
9952
|
}
|
|
9322
|
-
},
|
|
9953
|
+
}, _class31);
|
|
9323
9954
|
|
|
9324
9955
|
// src/memory/index.ts
|
|
9325
9956
|
var memory_exports = {};
|
|
@@ -9383,7 +10014,7 @@ var VertexAiSessionService = class extends BaseSessionService {
|
|
|
9383
10014
|
path: `operations/${operationId}`,
|
|
9384
10015
|
request_dict: {}
|
|
9385
10016
|
});
|
|
9386
|
-
if (_optionalChain([lroResponse, 'optionalAccess',
|
|
10017
|
+
if (_optionalChain([lroResponse, 'optionalAccess', _275 => _275.done])) {
|
|
9387
10018
|
break;
|
|
9388
10019
|
}
|
|
9389
10020
|
await new Promise((resolve) => setTimeout(resolve, 1e3));
|
|
@@ -9653,11 +10284,11 @@ var VertexAiSessionService = class extends BaseSessionService {
|
|
|
9653
10284
|
|
|
9654
10285
|
// src/sessions/database-session-service.ts
|
|
9655
10286
|
var _kysely = require('kysely');
|
|
9656
|
-
var DatabaseSessionService = (
|
|
10287
|
+
var DatabaseSessionService = (_class32 = class extends BaseSessionService {
|
|
9657
10288
|
|
|
9658
|
-
|
|
10289
|
+
__init55() {this.initialized = false}
|
|
9659
10290
|
constructor(config) {
|
|
9660
|
-
super();
|
|
10291
|
+
super();_class32.prototype.__init55.call(this);;
|
|
9661
10292
|
this.db = config.db;
|
|
9662
10293
|
if (!config.skipTableCreation) {
|
|
9663
10294
|
this.initializeDatabase().catch((error) => {
|
|
@@ -9732,7 +10363,7 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
|
|
|
9732
10363
|
if (!jsonString) return defaultValue;
|
|
9733
10364
|
try {
|
|
9734
10365
|
return JSON.parse(jsonString);
|
|
9735
|
-
} catch (
|
|
10366
|
+
} catch (e4) {
|
|
9736
10367
|
return defaultValue;
|
|
9737
10368
|
}
|
|
9738
10369
|
}
|
|
@@ -9754,12 +10385,12 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
|
|
|
9754
10385
|
}
|
|
9755
10386
|
async createSession(appName, userId, state, sessionId) {
|
|
9756
10387
|
await this.ensureInitialized();
|
|
9757
|
-
const id = _optionalChain([sessionId, 'optionalAccess',
|
|
10388
|
+
const id = _optionalChain([sessionId, 'optionalAccess', _276 => _276.trim, 'call', _277 => _277()]) || this.generateSessionId();
|
|
9758
10389
|
return await this.db.transaction().execute(async (trx) => {
|
|
9759
10390
|
const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", appName).executeTakeFirst();
|
|
9760
10391
|
const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", appName).where("user_id", "=", userId).executeTakeFirst();
|
|
9761
|
-
let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess',
|
|
9762
|
-
let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess',
|
|
10392
|
+
let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _278 => _278.state]), {});
|
|
10393
|
+
let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _279 => _279.state]), {});
|
|
9763
10394
|
if (!appState) {
|
|
9764
10395
|
await trx.insertInto("app_states").values({
|
|
9765
10396
|
app_name: appName,
|
|
@@ -9818,21 +10449,21 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
|
|
|
9818
10449
|
return void 0;
|
|
9819
10450
|
}
|
|
9820
10451
|
let eventQuery = trx.selectFrom("events").selectAll().where("session_id", "=", sessionId).orderBy("timestamp", "desc");
|
|
9821
|
-
if (_optionalChain([config, 'optionalAccess',
|
|
10452
|
+
if (_optionalChain([config, 'optionalAccess', _280 => _280.afterTimestamp])) {
|
|
9822
10453
|
eventQuery = eventQuery.where(
|
|
9823
10454
|
"timestamp",
|
|
9824
10455
|
">=",
|
|
9825
10456
|
new Date(config.afterTimestamp * 1e3)
|
|
9826
10457
|
);
|
|
9827
10458
|
}
|
|
9828
|
-
if (_optionalChain([config, 'optionalAccess',
|
|
10459
|
+
if (_optionalChain([config, 'optionalAccess', _281 => _281.numRecentEvents])) {
|
|
9829
10460
|
eventQuery = eventQuery.limit(config.numRecentEvents);
|
|
9830
10461
|
}
|
|
9831
10462
|
const storageEvents = await eventQuery.execute();
|
|
9832
10463
|
const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", appName).executeTakeFirst();
|
|
9833
10464
|
const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", appName).where("user_id", "=", userId).executeTakeFirst();
|
|
9834
|
-
const currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess',
|
|
9835
|
-
const currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess',
|
|
10465
|
+
const currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _282 => _282.state]), {});
|
|
10466
|
+
const currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _283 => _283.state]), {});
|
|
9836
10467
|
const sessionState = this.parseJsonSafely(storageSession.state, {});
|
|
9837
10468
|
const mergedState = this.mergeState(
|
|
9838
10469
|
currentAppState,
|
|
@@ -9890,13 +10521,13 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
|
|
|
9890
10521
|
}
|
|
9891
10522
|
const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", session.appName).executeTakeFirst();
|
|
9892
10523
|
const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", session.appName).where("user_id", "=", session.userId).executeTakeFirst();
|
|
9893
|
-
let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess',
|
|
9894
|
-
let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess',
|
|
10524
|
+
let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _284 => _284.state]), {});
|
|
10525
|
+
let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _285 => _285.state]), {});
|
|
9895
10526
|
let sessionState = this.parseJsonSafely(storageSession.state, {});
|
|
9896
10527
|
let appStateDelta = {};
|
|
9897
10528
|
let userStateDelta = {};
|
|
9898
10529
|
let sessionStateDelta = {};
|
|
9899
|
-
if (_optionalChain([event, 'access',
|
|
10530
|
+
if (_optionalChain([event, 'access', _286 => _286.actions, 'optionalAccess', _287 => _287.stateDelta])) {
|
|
9900
10531
|
const deltas = this.extractStateDelta(event.actions.stateDelta);
|
|
9901
10532
|
appStateDelta = deltas.appStateDelta;
|
|
9902
10533
|
userStateDelta = deltas.userStateDelta;
|
|
@@ -10042,7 +10673,7 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
|
|
|
10042
10673
|
* Overrides the base class method to work with plain object state.
|
|
10043
10674
|
*/
|
|
10044
10675
|
updateSessionState(session, event) {
|
|
10045
|
-
if (!_optionalChain([event, 'access',
|
|
10676
|
+
if (!_optionalChain([event, 'access', _288 => _288.actions, 'optionalAccess', _289 => _289.stateDelta])) {
|
|
10046
10677
|
return;
|
|
10047
10678
|
}
|
|
10048
10679
|
for (const [key, value] of Object.entries(event.actions.stateDelta)) {
|
|
@@ -10051,10 +10682,10 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
|
|
|
10051
10682
|
}
|
|
10052
10683
|
}
|
|
10053
10684
|
}
|
|
10054
|
-
},
|
|
10685
|
+
}, _class32);
|
|
10055
10686
|
|
|
10056
10687
|
// src/sessions/database-factories.ts
|
|
10057
|
-
|
|
10688
|
+
var _dedent = require('dedent'); var _dedent2 = _interopRequireDefault(_dedent);
|
|
10058
10689
|
|
|
10059
10690
|
function createDependencyError(packageName, dbType) {
|
|
10060
10691
|
return new Error(
|
|
@@ -10131,11 +10762,9 @@ function createDatabaseSessionService(databaseUrl, options) {
|
|
|
10131
10762
|
}
|
|
10132
10763
|
|
|
10133
10764
|
// src/artifacts/gcs-artifact-service.ts
|
|
10134
|
-
init_logger();
|
|
10135
10765
|
|
|
10136
10766
|
|
|
10137
10767
|
var _storage = require('@google-cloud/storage');
|
|
10138
|
-
var logger11 = new Logger({ name: "GcsArtifactService" });
|
|
10139
10768
|
var GcsArtifactService = class {
|
|
10140
10769
|
|
|
10141
10770
|
|
|
@@ -10214,7 +10843,7 @@ var GcsArtifactService = class {
|
|
|
10214
10843
|
};
|
|
10215
10844
|
return part;
|
|
10216
10845
|
} catch (error) {
|
|
10217
|
-
if (_optionalChain([error, 'optionalAccess',
|
|
10846
|
+
if (_optionalChain([error, 'optionalAccess', _290 => _290.code]) === 404) {
|
|
10218
10847
|
return null;
|
|
10219
10848
|
}
|
|
10220
10849
|
throw error;
|
|
@@ -10290,20 +10919,20 @@ __export(flows_exports, {
|
|
|
10290
10919
|
REQUEST_EUC_FUNCTION_CALL_NAME: () => REQUEST_EUC_FUNCTION_CALL_NAME,
|
|
10291
10920
|
SingleFlow: () => SingleFlow,
|
|
10292
10921
|
agentTransferRequestProcessor: () => requestProcessor8,
|
|
10293
|
-
basicRequestProcessor: () =>
|
|
10294
|
-
codeExecutionRequestProcessor: () =>
|
|
10295
|
-
codeExecutionResponseProcessor: () =>
|
|
10296
|
-
contentRequestProcessor: () =>
|
|
10922
|
+
basicRequestProcessor: () => requestProcessor2,
|
|
10923
|
+
codeExecutionRequestProcessor: () => requestProcessor3,
|
|
10924
|
+
codeExecutionResponseProcessor: () => responseProcessor,
|
|
10925
|
+
contentRequestProcessor: () => requestProcessor4,
|
|
10297
10926
|
generateAuthEvent: () => generateAuthEvent,
|
|
10298
10927
|
generateClientFunctionCallId: () => generateClientFunctionCallId,
|
|
10299
10928
|
getLongRunningFunctionCalls: () => getLongRunningFunctionCalls,
|
|
10300
10929
|
handleFunctionCallsAsync: () => handleFunctionCallsAsync,
|
|
10301
10930
|
handleFunctionCallsLive: () => handleFunctionCallsLive,
|
|
10302
|
-
identityRequestProcessor: () =>
|
|
10303
|
-
instructionsRequestProcessor: () =>
|
|
10931
|
+
identityRequestProcessor: () => requestProcessor5,
|
|
10932
|
+
instructionsRequestProcessor: () => requestProcessor6,
|
|
10304
10933
|
mergeParallelFunctionResponseEvents: () => mergeParallelFunctionResponseEvents,
|
|
10305
|
-
nlPlanningRequestProcessor: () =>
|
|
10306
|
-
nlPlanningResponseProcessor: () =>
|
|
10934
|
+
nlPlanningRequestProcessor: () => requestProcessor7,
|
|
10935
|
+
nlPlanningResponseProcessor: () => responseProcessor2,
|
|
10307
10936
|
populateClientFunctionCallId: () => populateClientFunctionCallId,
|
|
10308
10937
|
removeClientFunctionCallId: () => removeClientFunctionCallId
|
|
10309
10938
|
});
|
|
@@ -10449,4 +11078,9 @@ var VERSION = "0.1.0";
|
|
|
10449
11078
|
|
|
10450
11079
|
|
|
10451
11080
|
|
|
10452
|
-
|
|
11081
|
+
|
|
11082
|
+
|
|
11083
|
+
|
|
11084
|
+
|
|
11085
|
+
|
|
11086
|
+
+exports.AF_FUNCTION_CALL_ID_PREFIX = AF_FUNCTION_CALL_ID_PREFIX; exports.Agent = LlmAgent; exports.AgentBuilder = AgentBuilder; exports.Agents = agents_exports; exports.AiSdkLlm = AiSdkLlm; exports.AnthropicLlm = AnthropicLlm; exports.ApiKeyCredential = ApiKeyCredential; exports.ApiKeyScheme = ApiKeyScheme; exports.AuthConfig = AuthConfig; exports.AuthCredential = AuthCredential; exports.AuthCredentialType = AuthCredentialType; exports.AuthHandler = AuthHandler; exports.AuthScheme = AuthScheme; exports.AuthSchemeType = AuthSchemeType; exports.AuthTool = AuthTool; exports.AutoFlow = AutoFlow; exports.BaseAgent = BaseAgent; exports.BaseCodeExecutor = BaseCodeExecutor; exports.BaseLLMConnection = BaseLLMConnection; exports.BaseLlm = BaseLlm; exports.BaseLlmFlow = BaseLlmFlow; exports.BaseLlmRequestProcessor = BaseLlmRequestProcessor; exports.BaseLlmResponseProcessor = BaseLlmResponseProcessor; exports.BasePlanner = BasePlanner; exports.BaseSessionService = BaseSessionService; exports.BaseTool = BaseTool; exports.BasicAuthCredential = BasicAuthCredential; exports.BearerTokenCredential = BearerTokenCredential; exports.BuiltInCodeExecutor = BuiltInCodeExecutor; exports.BuiltInPlanner = BuiltInPlanner; exports.CallbackContext = CallbackContext; exports.CodeExecutionUtils = CodeExecutionUtils; exports.CodeExecutorContext = CodeExecutorContext; exports.DatabaseSessionService = DatabaseSessionService; exports.EnhancedAuthConfig = EnhancedAuthConfig; exports.Event = Event; exports.EventActions = EventActions; exports.Events = events_exports; exports.ExitLoopTool = ExitLoopTool; exports.FileOperationsTool = FileOperationsTool; exports.Flows = flows_exports; exports.FunctionTool = FunctionTool; exports.GcsArtifactService = GcsArtifactService; exports.GetUserChoiceTool = GetUserChoiceTool; exports.GoogleLlm = GoogleLlm; exports.GoogleSearch = GoogleSearch; exports.HttpRequestTool = HttpRequestTool; exports.HttpScheme = HttpScheme; exports.InMemoryArtifactService = InMemoryArtifactService; exports.InMemoryMemoryService = InMemoryMemoryService; exports.InMemoryRunner = InMemoryRunner; exports.InMemorySessionService = InMemorySessionService; exports.InvocationContext = InvocationContext; exports.LLMRegistry = LLMRegistry; exports.LangGraphAgent = LangGraphAgent; exports.LlmAgent = LlmAgent; exports.LlmCallsLimitExceededError = LlmCallsLimitExceededError; exports.LlmRequest = LlmRequest; exports.LlmResponse = LlmResponse; exports.LoadArtifactsTool = LoadArtifactsTool; exports.LoadMemoryTool = LoadMemoryTool; exports.LoopAgent = LoopAgent; exports.McpAbi = McpAbi; exports.McpAtp = McpAtp; exports.McpBamm = McpBamm; exports.McpError = McpError; exports.McpErrorType = McpErrorType; exports.McpFilesystem = McpFilesystem; exports.McpFraxlend = McpFraxlend; exports.McpGeneric = McpGeneric; exports.McpIqWiki = McpIqWiki; exports.McpMemory = McpMemory; exports.McpNearAgent = McpNearAgent; exports.McpNearIntentSwaps = McpNearIntentSwaps; exports.McpOdos = McpOdos; exports.McpSamplingHandler = McpSamplingHandler; exports.McpTelegram = McpTelegram; exports.McpToolset = McpToolset; exports.Memory = memory_exports; exports.Models = models_exports; exports.OAuth2Credential = OAuth2Credential; exports.OAuth2Scheme = OAuth2Scheme; exports.OpenAiLlm = OpenAiLlm; exports.OpenIdConnectScheme = OpenIdConnectScheme; exports.ParallelAgent = ParallelAgent; exports.PlanReActPlanner = PlanReActPlanner; exports.REQUEST_EUC_FUNCTION_CALL_NAME = REQUEST_EUC_FUNCTION_CALL_NAME; exports.ReadonlyContext = ReadonlyContext; exports.RunConfig = RunConfig; exports.Runner = Runner; exports.SequentialAgent = SequentialAgent; exports.Sessions = sessions_exports; exports.SingleFlow = SingleFlow; exports.State = State; exports.StreamingMode = StreamingMode; exports.TelemetryService = TelemetryService; exports.ToolContext = ToolContext; exports.Tools = tools_exports; exports.TransferToAgentTool = TransferToAgentTool; exports.UserInteractionTool = UserInteractionTool; exports.VERSION = VERSION; exports.VertexAiSessionService = VertexAiSessionService; exports.adkToMcpToolType = adkToMcpToolType; exports.agentTransferRequestProcessor = requestProcessor8; exports.basicRequestProcessor = requestProcessor2; exports.buildFunctionDeclaration = buildFunctionDeclaration; exports.codeExecutionRequestProcessor = requestProcessor3; exports.codeExecutionResponseProcessor = responseProcessor; exports.contentRequestProcessor = requestProcessor4; exports.createAuthToolArguments = createAuthToolArguments; exports.createDatabaseSessionService = createDatabaseSessionService; exports.createFunctionTool = createFunctionTool; exports.createMysqlSessionService = createMysqlSessionService; exports.createPostgresSessionService = createPostgresSessionService; exports.createSamplingHandler = createSamplingHandler; exports.createSqliteSessionService = createSqliteSessionService; exports.generateAuthEvent = generateAuthEvent; exports.generateClientFunctionCallId = generateClientFunctionCallId; exports.getLongRunningFunctionCalls = getLongRunningFunctionCalls; exports.getMcpTools = getMcpTools; exports.handleFunctionCallsAsync = handleFunctionCallsAsync; exports.handleFunctionCallsLive = handleFunctionCallsLive; exports.identityRequestProcessor = requestProcessor5; exports.initializeTelemetry = initializeTelemetry; exports.injectSessionState = injectSessionState; exports.instructionsRequestProcessor = requestProcessor6; exports.isEnhancedAuthConfig = isEnhancedAuthConfig; exports.jsonSchemaToDeclaration = jsonSchemaToDeclaration; exports.mcpSchemaToParameters = mcpSchemaToParameters; exports.mergeParallelFunctionResponseEvents = mergeParallelFunctionResponseEvents; exports.newInvocationContextId = newInvocationContextId; exports.nlPlanningRequestProcessor = requestProcessor7; exports.nlPlanningResponseProcessor = responseProcessor2; exports.normalizeJsonSchema = normalizeJsonSchema; exports.populateClientFunctionCallId = populateClientFunctionCallId; exports.registerProviders = registerProviders; exports.removeClientFunctionCallId = removeClientFunctionCallId; exports.requestProcessor = requestProcessor; exports.shutdownTelemetry = shutdownTelemetry; exports.telemetryService = telemetryService; exports.traceLlmCall = traceLlmCall; exports.traceToolCall = traceToolCall; exports.tracer = tracer;