@iqai/adk 0.1.4 → 0.1.5

This diff compares the contents of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
1
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15; var _class16; var _class17; var _class18; var _class19; var _class20; var _class21; var _class22; var _class23; var _class24; var _class25; var _class26; var _class27; var _class28; var _class29;var __defProp = Object.defineProperty;
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15; var _class16; var _class17; var _class18; var _class19; var _class20; var _class21; var _class22; var _class23; var _class24; var _class25; var _class26; var _class27; var _class28; var _class29; var _class30; var _class31; var _class32;var __defProp = Object.defineProperty;
2
2
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
3
3
  var __getOwnPropNames = Object.getOwnPropertyNames;
4
4
  var __hasOwnProp = Object.prototype.hasOwnProperty;
@@ -26,8 +26,9 @@ var __copyProps = (to, from, except, desc) => {
26
26
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
27
27
 
28
28
  // src/helpers/logger.ts
29
+ var _chalk = require('chalk'); var _chalk2 = _interopRequireDefault(_chalk);
29
30
  function isDebugEnabled() {
30
- return process.env.NODE_ENV === "development" || process.env.DEBUG === "true";
31
+ return process.env.NODE_ENV === "development" || process.env.ADK_DEBUG === "true";
31
32
  }
32
33
  var Logger;
33
34
  var init_logger = __esm({
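The hunk above adds a hard dependency on chalk for colorized log output and switches the debug gate from the generic DEBUG variable to ADK_DEBUG. A minimal sketch of enabling the new flag after upgrading (the entry-point filename is hypothetical):

// Debug logging in 0.1.5 is enabled via ADK_DEBUG (or NODE_ENV=development);
// DEBUG=true no longer has any effect.
//   ADK_DEBUG=true node agent.js
// or programmatically, before any ADK loggers are constructed:
process.env.ADK_DEBUG = "true";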
@@ -38,34 +39,99 @@ var init_logger = __esm({
38
39
  constructor({ name }) {;_class.prototype.__init2.call(this);
39
40
  this.name = name;
40
41
  }
42
+ colorize(message) {
43
+ return _chalk2.default.blue(message);
44
+ }
41
45
  debug(message, ...args) {
42
- const time = (/* @__PURE__ */ new Date()).toISOString();
43
46
  if (this.isDebugEnabled) {
44
- console.log(`[${time}] \u{1F41B} [DEBUG] \u2728 [${this.name}] ${message}`, ...args);
47
+ const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
48
+ console.log(
49
+ this.colorize(`[${time}] \u{1F41B} [${this.name}] ${message}`),
50
+ ...args
51
+ );
45
52
  }
46
53
  }
47
54
  info(message, ...args) {
48
- const time = (/* @__PURE__ */ new Date()).toISOString();
49
- console.info(`[${time}] \u2139\uFE0F [INFO] \u2728 [${this.name}] ${message}`, ...args);
55
+ const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
56
+ console.info(
57
+ this.colorize(`[${time}] \u2139\uFE0F [${this.name}] ${message}`),
58
+ ...args
59
+ );
50
60
  }
51
61
  warn(message, ...args) {
52
- const time = (/* @__PURE__ */ new Date()).toISOString();
53
- console.warn(`[${time}] \u{1F6A7} [WARN] \u2728 [${this.name}] ${message}`, ...args);
62
+ const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
63
+ console.warn(
64
+ this.colorize(`[${time}] \u{1F6A7} [${this.name}] ${message}`),
65
+ ...args
66
+ );
54
67
  }
55
68
  error(message, ...args) {
56
- const time = (/* @__PURE__ */ new Date()).toISOString();
57
- console.error(`[${time}] \u274C [ERROR] \u2728 [${this.name}] ${message}`, ...args);
69
+ const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
70
+ console.error(
71
+ this.colorize(`[${time}] \u274C [${this.name}] ${message}`),
72
+ ...args
73
+ );
74
+ }
75
+ /**
76
+ * Logs structured data in a visually appealing table format.
77
+ * Uses vertical layout for better readability and respects debug settings.
78
+ */
79
+ debugStructured(title, data) {
80
+ if (!this.isDebugEnabled) return;
81
+ const terminalWidth = process.stdout.columns || 60;
82
+ const width = Math.min(terminalWidth, 100);
83
+ const contentWidth = width - 4;
84
+ const topBorder = `\u250C${"\u2500".repeat(width - 2)}\u2510`;
85
+ const bottomBorder = `\u2514${"\u2500".repeat(width - 2)}\u2518`;
86
+ const middleBorder = `\u251C${"\u2500".repeat(width - 2)}\u2524`;
87
+ console.log(this.colorize(topBorder));
88
+ console.log(this.colorize(`\u2502 ${title.padEnd(contentWidth)} \u2502`));
89
+ console.log(this.colorize(middleBorder));
90
+ Object.entries(data).forEach(([key, value]) => {
91
+ const formattedKey = key.padEnd(20);
92
+ const formattedValue = String(value);
93
+ const availableValueSpace = contentWidth - 20 - 2;
94
+ const truncatedValue = formattedValue.length > availableValueSpace ? `${formattedValue.substring(0, availableValueSpace - 3)}...` : formattedValue;
95
+ const content = `${formattedKey}: ${truncatedValue}`;
96
+ const paddedContent = content.padEnd(contentWidth);
97
+ console.log(this.colorize(`\u2502 ${paddedContent} \u2502`));
98
+ });
99
+ console.log(this.colorize(bottomBorder));
100
+ }
101
+ /**
102
+ * Logs array data in a compact, readable format.
103
+ */
104
+ debugArray(title, items) {
105
+ if (!this.isDebugEnabled) return;
106
+ const terminalWidth = process.stdout.columns || 78;
107
+ const width = Math.min(terminalWidth, 120);
108
+ const contentWidth = width - 4;
109
+ const topBorder = `\u250C${"\u2500".repeat(width - 2)}\u2510`;
110
+ const bottomBorder = `\u2514${"\u2500".repeat(width - 2)}\u2518`;
111
+ const middleBorder = `\u251C${"\u2500".repeat(width - 2)}\u2524`;
112
+ console.log(this.colorize(topBorder));
113
+ console.log(this.colorize(`\u2502 ${title.padEnd(contentWidth)} \u2502`));
114
+ console.log(this.colorize(middleBorder));
115
+ items.forEach((item, index) => {
116
+ const itemStr = Object.entries(item).map(([k, v]) => `${k}: ${v}`).join(" \u2022 ");
117
+ const indexPart = `[${index + 1}] `;
118
+ const availableSpace = contentWidth - indexPart.length;
119
+ const truncatedItem = itemStr.length > availableSpace ? `${itemStr.substring(0, availableSpace - 3)}...` : itemStr;
120
+ const content = `${indexPart}${truncatedItem}`;
121
+ const paddedContent = content.padEnd(contentWidth);
122
+ console.log(this.colorize(`\u2502 ${paddedContent} \u2502`));
123
+ });
124
+ console.log(this.colorize(bottomBorder));
58
125
  }
59
126
  }, _class);
60
127
  }
61
128
  });
62
129
 
63
130
  // src/tools/base/base-tool.ts
64
- var logger6, BaseTool;
131
+ var BaseTool;
65
132
  var init_base_tool = __esm({
66
133
  "src/tools/base/base-tool.ts"() {
67
134
  init_logger();
68
- logger6 = new Logger({ name: "BaseTool" });
69
135
  BaseTool = exports.BaseTool = (_class2 = class {
70
136
  /**
71
137
  * Name of the tool
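The logger hunk above also introduces colorize() plus two structured helpers, debugStructured(title, data) and debugArray(title, items), which render boxed key/value tables only when debug logging is enabled. A minimal usage sketch, assuming the internal Logger class is importable from the package (only its internal use is visible in this diff):

import { Logger } from "@iqai/adk"; // assumption: Logger may not be part of the public API

const logger = new Logger({ name: "MyAgent" });

// One key/value pair per row, truncated to the terminal width (capped at 100 columns).
logger.debugStructured("LLM Request", {
  model: "gemini-1.5-flash",
  streaming: false,
  contentCount: 3,
});

// Each element renders as "[n] key: value • key: value" (capped at 120 columns).
logger.debugArray("Registered tools", [
  { name: "google_search", isLongRunning: false },
  { name: "http_request", isLongRunning: true },
]);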
@@ -96,10 +162,11 @@ var init_base_tool = __esm({
96
162
  * Maximum delay for retry in ms
97
163
  */
98
164
  __init4() {this.maxRetryDelay = 1e4}
165
+ __init5() {this.logger = new Logger({ name: "BaseTool" })}
99
166
  /**
100
167
  * Constructor for BaseTool
101
168
  */
102
- constructor(config) {;_class2.prototype.__init3.call(this);_class2.prototype.__init4.call(this);
169
+ constructor(config) {;_class2.prototype.__init3.call(this);_class2.prototype.__init4.call(this);_class2.prototype.__init5.call(this);
103
170
  this.name = config.name;
104
171
  this.description = config.description;
105
172
  this.isLongRunning = config.isLongRunning || false;
@@ -226,7 +293,7 @@ var init_base_tool = __esm({
226
293
  while (attempts <= (this.shouldRetryOnFailure ? this.maxRetryAttempts : 0)) {
227
294
  try {
228
295
  if (attempts > 0) {
229
- logger6.debug(
296
+ this.logger.debug(
230
297
  `Retrying tool ${this.name} (attempt ${attempts} of ${this.maxRetryAttempts})...`
231
298
  );
232
299
  const delay = Math.min(
@@ -406,8 +473,8 @@ var init_function_tool = __esm({
406
473
  init_function_utils();
407
474
  FunctionTool = exports.FunctionTool = (_class3 = class extends BaseTool {
408
475
 
409
- __init5() {this.mandatoryArgs = []}
410
- __init6() {this.parameterTypes = {}}
476
+ __init6() {this.mandatoryArgs = []}
477
+ __init7() {this.parameterTypes = {}}
411
478
  /**
412
479
  * Creates a new FunctionTool wrapping the provided function.
413
480
  *
@@ -423,7 +490,7 @@ var init_function_tool = __esm({
423
490
  isLongRunning: _optionalChain([options, 'optionalAccess', _8 => _8.isLongRunning]) || false,
424
491
  shouldRetryOnFailure: _optionalChain([options, 'optionalAccess', _9 => _9.shouldRetryOnFailure]) || false,
425
492
  maxRetryAttempts: _optionalChain([options, 'optionalAccess', _10 => _10.maxRetryAttempts]) || 3
426
- });_class3.prototype.__init5.call(this);_class3.prototype.__init6.call(this);;
493
+ });_class3.prototype.__init6.call(this);_class3.prototype.__init7.call(this);;
427
494
  this.func = func;
428
495
  this.mandatoryArgs = this.getMandatoryArgs(func);
429
496
  this.parameterTypes = _optionalChain([options, 'optionalAccess', _11 => _11.parameterTypes]) || {};
@@ -613,6 +680,7 @@ __export(agents_exports, {
613
680
  // src/models/index.ts
614
681
  var models_exports = {};
615
682
  __export(models_exports, {
683
+ AiSdkLlm: () => AiSdkLlm,
616
684
  AnthropicLlm: () => AnthropicLlm,
617
685
  ApiKeyCredential: () => ApiKeyCredential,
618
686
  ApiKeyScheme: () => ApiKeyScheme,
@@ -640,8 +708,6 @@ __export(models_exports, {
640
708
  });
641
709
 
642
710
  // src/models/llm-request.ts
643
- init_logger();
644
- var logger = new Logger({ name: "LlmRequest" });
645
711
  var LlmRequest = class {
646
712
  /**
647
713
  * The model name.
@@ -805,6 +871,10 @@ var LlmResponse = class _LlmResponse {
805
871
  * Reason why the model finished generating.
806
872
  */
807
873
 
874
+ /**
875
+ * Error object if the response is an error.
876
+ */
877
+
808
878
  /**
809
879
  * Creates a new LlmResponse.
810
880
  */
@@ -848,6 +918,29 @@ var LlmResponse = class _LlmResponse {
848
918
  usageMetadata
849
919
  });
850
920
  }
921
+ /**
922
+ * Creates an LlmResponse from an error.
923
+ *
924
+ * @param error The error object or message.
925
+ * @param options Additional options for the error response.
926
+ * @param options.errorCode A specific error code for the response.
927
+ * @param options.model The model that was being used when the error occurred.
928
+ * @returns The LlmResponse.
929
+ */
930
+ static fromError(error, options = {}) {
931
+ const errorMessage = error instanceof Error ? error.message : String(error);
932
+ const errorCode = options.errorCode || "UNKNOWN_ERROR";
933
+ return new _LlmResponse({
934
+ errorCode,
935
+ errorMessage: `LLM call failed for model ${options.model || "unknown"}: ${errorMessage}`,
936
+ content: {
937
+ role: "model",
938
+ parts: [{ text: `Error: ${errorMessage}` }]
939
+ },
940
+ finishReason: "STOP",
941
+ error: error instanceof Error ? error : new Error(errorMessage)
942
+ });
943
+ }
851
944
  };
852
945
 
853
946
  // src/models/base-llm.ts
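The hunk above adds a static LlmResponse.fromError(error, options) factory that normalizes a thrown error into a response carrying errorCode, a formatted errorMessage, and a model-role text part with finishReason "STOP". A brief sketch, assuming LlmResponse remains exported from @iqai/adk; the error code and model name below are illustrative:

import { LlmResponse } from "@iqai/adk";

const response = LlmResponse.fromError(new Error("Rate limit exceeded"), {
  errorCode: "RATE_LIMIT", // omitted -> "UNKNOWN_ERROR"
  model: "gemini-1.5-flash",
});

// response.errorMessage === "LLM call failed for model gemini-1.5-flash: Rate limit exceeded"
// response.content.parts[0].text === "Error: Rate limit exceeded"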
@@ -869,11 +962,11 @@ var _sdknode = require('@opentelemetry/sdk-node');
869
962
 
870
963
  var _semanticconventions = require('@opentelemetry/semantic-conventions');
871
964
  var TelemetryService = (_class4 = class {
872
- __init7() {this.sdk = null}
873
- __init8() {this.isInitialized = false}
965
+ __init8() {this.sdk = null}
966
+ __init9() {this.isInitialized = false}
874
967
 
875
- __init9() {this.config = null}
876
- constructor() {;_class4.prototype.__init7.call(this);_class4.prototype.__init8.call(this);_class4.prototype.__init9.call(this);
968
+ __init10() {this.config = null}
969
+ constructor() {;_class4.prototype.__init8.call(this);_class4.prototype.__init9.call(this);_class4.prototype.__init10.call(this);
877
970
  this.tracer = _api.trace.getTracer("iqai-adk", "0.1.0");
878
971
  }
879
972
  /**
@@ -1127,16 +1220,16 @@ var traceLlmCall = (invocationContext, eventId, llmRequest, llmResponse) => tele
1127
1220
  );
1128
1221
 
1129
1222
  // src/models/base-llm.ts
1130
- var logger2 = new Logger({ name: "BaseLlm" });
1131
- var BaseLlm = class {
1223
+ var BaseLlm = (_class5 = class {
1132
1224
  /**
1133
1225
  * The name of the LLM, e.g. gemini-1.5-flash or gemini-1.5-flash-001.
1134
1226
  */
1135
1227
 
1228
+ __init11() {this.logger = new Logger({ name: "BaseLlm" })}
1136
1229
  /**
1137
1230
  * Constructor for BaseLlm
1138
1231
  */
1139
- constructor(model) {
1232
+ constructor(model) {;_class5.prototype.__init11.call(this);
1140
1233
  this.model = model;
1141
1234
  }
1142
1235
  /**
@@ -1183,12 +1276,6 @@ var BaseLlm = class {
1183
1276
  }),
1184
1277
  "adk.streaming": stream || false
1185
1278
  });
1186
- logger2.debug("ADK LLM Request:", {
1187
- model: this.model,
1188
- contentCount: _optionalChain([llmRequest, 'access', _45 => _45.contents, 'optionalAccess', _46 => _46.length]) || 0,
1189
- streaming: stream || false,
1190
- config: llmRequest.config
1191
- });
1192
1279
  let responseCount = 0;
1193
1280
  let totalTokens = 0;
1194
1281
  for await (const response of this.generateContentAsyncImpl(
@@ -1196,14 +1283,6 @@ var BaseLlm = class {
1196
1283
  stream
1197
1284
  )) {
1198
1285
  responseCount++;
1199
- logger2.debug(`ADK LLM Response ${responseCount}:`, {
1200
- model: this.model,
1201
- parts: _optionalChain([response, 'access', _47 => _47.parts, 'optionalAccess', _48 => _48.map, 'call', _49 => _49((part) => ({
1202
- text: typeof part.text === "string" ? part.text.substring(0, 200) + (part.text.length > 200 ? "..." : "") : "[non_text_content]"
1203
- }))]),
1204
- finishReason: response.finish_reason,
1205
- usage: response.usage
1206
- });
1207
1286
  if (response.usage) {
1208
1287
  totalTokens += response.usage.total_tokens || 0;
1209
1288
  span.setAttributes({
@@ -1224,7 +1303,7 @@ var BaseLlm = class {
1224
1303
  } catch (error) {
1225
1304
  span.recordException(error);
1226
1305
  span.setStatus({ code: 2, message: error.message });
1227
- console.error("\u274C ADK LLM Error:", {
1306
+ this.logger.error("\u274C ADK LLM Error:", {
1228
1307
  model: this.model,
1229
1308
  error: error.message
1230
1309
  });
@@ -1273,33 +1352,29 @@ var BaseLlm = class {
1273
1352
  connect(llmRequest) {
1274
1353
  throw new Error(`Live connection is not supported for ${this.model}.`);
1275
1354
  }
1276
- };
1355
+ }, _class5);
1277
1356
 
1278
1357
  // src/models/base-llm-connection.ts
1279
1358
  var BaseLLMConnection = class {
1280
1359
  };
1281
1360
 
1282
1361
  // src/models/google-llm.ts
1283
- init_logger();
1284
1362
 
1285
1363
 
1286
1364
 
1287
1365
  var _genai = require('@google/genai');
1288
- var _dedent = require('dedent'); var _dedent2 = _interopRequireDefault(_dedent);
1289
- var NEW_LINE = "\n";
1290
1366
  var AGENT_ENGINE_TELEMETRY_TAG = "remote_reasoning_engine";
1291
1367
  var AGENT_ENGINE_TELEMETRY_ENV_VARIABLE_NAME = "GOOGLE_CLOUD_AGENT_ENGINE_ID";
1292
- var GoogleLlm = (_class5 = class extends BaseLlm {
1368
+ var GoogleLlm = class extends BaseLlm {
1293
1369
 
1294
1370
 
1295
1371
 
1296
1372
 
1297
- __init10() {this.logger = new Logger({ name: "GoogleLlm" })}
1298
1373
  /**
1299
1374
  * Constructor for Gemini
1300
1375
  */
1301
1376
  constructor(model = "gemini-1.5-flash") {
1302
- super(model);_class5.prototype.__init10.call(this);;
1377
+ super(model);
1303
1378
  }
1304
1379
  /**
1305
1380
  * Provides the list of supported models.
@@ -1318,10 +1393,6 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
1318
1393
  */
1319
1394
  async *generateContentAsyncImpl(llmRequest, stream = false) {
1320
1395
  this.preprocessRequest(llmRequest);
1321
- this.logger.debug(
1322
- `Sending out request, model: ${llmRequest.model || this.model}, backend: ${this.apiBackend}, stream: ${stream}`
1323
- );
1324
- this.logger.debug(this.buildRequestLog(llmRequest));
1325
1396
  const model = llmRequest.model || this.model;
1326
1397
  const contents = this.convertContents(llmRequest.contents || []);
1327
1398
  const config = this.convertConfig(llmRequest.config);
@@ -1337,10 +1408,9 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
1337
1408
  let usageMetadata = null;
1338
1409
  for await (const resp of responses) {
1339
1410
  response = resp;
1340
- this.logger.debug(this.buildResponseLog(resp));
1341
1411
  const llmResponse = LlmResponse.create(resp);
1342
1412
  usageMetadata = llmResponse.usageMetadata;
1343
- if (_optionalChain([llmResponse, 'access', _50 => _50.content, 'optionalAccess', _51 => _51.parts, 'optionalAccess', _52 => _52[0], 'optionalAccess', _53 => _53.text])) {
1413
+ if (_optionalChain([llmResponse, 'access', _45 => _45.content, 'optionalAccess', _46 => _46.parts, 'optionalAccess', _47 => _47[0], 'optionalAccess', _48 => _48.text])) {
1344
1414
  const part0 = llmResponse.content.parts[0];
1345
1415
  if (part0.thought) {
1346
1416
  thoughtText += part0.text;
@@ -1368,7 +1438,7 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
1368
1438
  }
1369
1439
  yield llmResponse;
1370
1440
  }
1371
- if ((text || thoughtText) && response && response.candidates && _optionalChain([response, 'access', _54 => _54.candidates, 'access', _55 => _55[0], 'optionalAccess', _56 => _56.finishReason]) === _genai.FinishReason.STOP) {
1441
+ if ((text || thoughtText) && response && response.candidates && _optionalChain([response, 'access', _49 => _49.candidates, 'access', _50 => _50[0], 'optionalAccess', _51 => _51.finishReason]) === _genai.FinishReason.STOP) {
1372
1442
  const parts = [];
1373
1443
  if (thoughtText) {
1374
1444
  parts.push({ text: thoughtText, thought: true });
@@ -1390,8 +1460,11 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
1390
1460
  contents,
1391
1461
  config
1392
1462
  });
1393
- this.logger.debug(this.buildResponseLog(response));
1394
- yield LlmResponse.create(response);
1463
+ const llmResponse = LlmResponse.create(response);
1464
+ this.logger.debug(
1465
+ `Google response: ${_optionalChain([llmResponse, 'access', _52 => _52.usageMetadata, 'optionalAccess', _53 => _53.candidatesTokenCount]) || 0} tokens`
1466
+ );
1467
+ yield llmResponse;
1395
1468
  }
1396
1469
  }
1397
1470
  /**
@@ -1404,8 +1477,8 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
1404
1477
  * Check if response has inline data
1405
1478
  */
1406
1479
  hasInlineData(response) {
1407
- const parts = _optionalChain([response, 'access', _57 => _57.candidates, 'optionalAccess', _58 => _58[0], 'optionalAccess', _59 => _59.content, 'optionalAccess', _60 => _60.parts]);
1408
- return _optionalChain([parts, 'optionalAccess', _61 => _61.some, 'call', _62 => _62((part) => _optionalChain([part, 'optionalAccess', _63 => _63.inlineData]))]) || false;
1480
+ const parts = _optionalChain([response, 'access', _54 => _54.candidates, 'optionalAccess', _55 => _55[0], 'optionalAccess', _56 => _56.content, 'optionalAccess', _57 => _57.parts]);
1481
+ return _optionalChain([parts, 'optionalAccess', _58 => _58.some, 'call', _59 => _59((part) => _optionalChain([part, 'optionalAccess', _60 => _60.inlineData]))]) || false;
1409
1482
  }
1410
1483
  /**
1411
1484
  * Convert LlmRequest contents to GoogleGenAI format
@@ -1452,7 +1525,7 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
1452
1525
  * Sets display_name to null for the Gemini API (non-Vertex) backend.
1453
1526
  */
1454
1527
  removeDisplayNameIfPresent(dataObj) {
1455
- if (_optionalChain([dataObj, 'optionalAccess', _64 => _64.displayName])) {
1528
+ if (_optionalChain([dataObj, 'optionalAccess', _61 => _61.displayName])) {
1456
1529
  dataObj.displayName = null;
1457
1530
  }
1458
1531
  }
@@ -1461,65 +1534,11 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
1461
1534
  */
1462
1535
  buildFunctionDeclarationLog(funcDecl) {
1463
1536
  let paramStr = "{}";
1464
- if (_optionalChain([funcDecl, 'access', _65 => _65.parameters, 'optionalAccess', _66 => _66.properties])) {
1537
+ if (_optionalChain([funcDecl, 'access', _62 => _62.parameters, 'optionalAccess', _63 => _63.properties])) {
1465
1538
  paramStr = JSON.stringify(funcDecl.parameters.properties);
1466
1539
  }
1467
1540
  return `${funcDecl.name}: ${paramStr}`;
1468
1541
  }
1469
- /**
1470
- * Builds request log string.
1471
- */
1472
- buildRequestLog(req) {
1473
- const functionDecls = _optionalChain([req, 'access', _67 => _67.config, 'optionalAccess', _68 => _68.tools, 'optionalAccess', _69 => _69[0], 'optionalAccess', _70 => _70.functionDeclarations]) || [];
1474
- const functionLogs = functionDecls.length > 0 ? functionDecls.map(
1475
- (funcDecl) => this.buildFunctionDeclarationLog(funcDecl)
1476
- ) : [];
1477
- const contentsLogs = _optionalChain([req, 'access', _71 => _71.contents, 'optionalAccess', _72 => _72.map, 'call', _73 => _73(
1478
- (content) => JSON.stringify(content, (key, value) => {
1479
- if (key === "data" && typeof value === "string" && value.length > 100) {
1480
- return "[EXCLUDED]";
1481
- }
1482
- return value;
1483
- })
1484
- )]) || [];
1485
- return _dedent2.default`
1486
- LLM Request:
1487
- -----------------------------------------------------------
1488
- System Instruction:
1489
- ${_optionalChain([req, 'access', _74 => _74.config, 'optionalAccess', _75 => _75.systemInstruction]) || ""}
1490
- -----------------------------------------------------------
1491
- Contents:
1492
- ${contentsLogs.join(NEW_LINE)}
1493
- -----------------------------------------------------------
1494
- Functions:
1495
- ${functionLogs.join(NEW_LINE)}
1496
- -----------------------------------------------------------`;
1497
- }
1498
- /**
1499
- * Builds response log string.
1500
- */
1501
- buildResponseLog(resp) {
1502
- const functionCallsText = [];
1503
- if (resp.functionCalls) {
1504
- for (const funcCall of resp.functionCalls) {
1505
- functionCallsText.push(
1506
- `name: ${funcCall.name}, args: ${JSON.stringify(funcCall.args)}`
1507
- );
1508
- }
1509
- }
1510
- return _dedent2.default`
1511
- LLM Response:
1512
- -----------------------------------------------------------
1513
- Text:
1514
- ${resp.text || ""}
1515
- -----------------------------------------------------------
1516
- Function calls:
1517
- ${functionCallsText.join(NEW_LINE)}
1518
- -----------------------------------------------------------
1519
- Raw response:
1520
- ${JSON.stringify(resp, null, 2)}
1521
- -----------------------------------------------------------`;
1522
- }
1523
1542
  /**
1524
1543
  * Provides the api client.
1525
1544
  */
@@ -1608,20 +1627,20 @@ var GoogleLlm = (_class5 = class extends BaseLlm {
1608
1627
  }
1609
1628
  return this._liveApiClient;
1610
1629
  }
1611
- }, _class5);
1630
+ };
1612
1631
 
1613
1632
  // src/models/anthropic-llm.ts
1614
1633
  init_logger();
1615
1634
  var _sdk = require('@anthropic-ai/sdk'); var _sdk2 = _interopRequireDefault(_sdk);
1616
- var logger3 = new Logger({ name: "AnthropicLlm" });
1617
1635
  var MAX_TOKENS = 1024;
1618
- var AnthropicLlm = class extends BaseLlm {
1636
+ var AnthropicLlm = (_class6 = class extends BaseLlm {
1619
1637
 
1638
+ __init12() {this.logger = new Logger({ name: "AnthropicLlm" })}
1620
1639
  /**
1621
1640
  * Constructor for Anthropic LLM
1622
1641
  */
1623
1642
  constructor(model = "claude-3-5-sonnet-20241022") {
1624
- super(model);
1643
+ super(model);_class6.prototype.__init12.call(this);;
1625
1644
  }
1626
1645
  /**
1627
1646
  * Provides the list of supported models
@@ -1633,15 +1652,12 @@ var AnthropicLlm = class extends BaseLlm {
1633
1652
  * Main content generation method - handles both streaming and non-streaming
1634
1653
  */
1635
1654
  async *generateContentAsyncImpl(llmRequest, stream = false) {
1636
- logger3.debug(
1637
- `Sending Anthropic request, model: ${llmRequest.model || this.model}, stream: ${stream}`
1638
- );
1639
1655
  const model = llmRequest.model || this.model;
1640
1656
  const messages = (llmRequest.contents || []).map(
1641
1657
  (content) => this.contentToAnthropicMessage(content)
1642
1658
  );
1643
1659
  let tools;
1644
- if (_optionalChain([llmRequest, 'access', _76 => _76.config, 'optionalAccess', _77 => _77.tools, 'optionalAccess', _78 => _78[0], 'optionalAccess', _79 => _79.functionDeclarations])) {
1660
+ if (_optionalChain([llmRequest, 'access', _64 => _64.config, 'optionalAccess', _65 => _65.tools, 'optionalAccess', _66 => _66[0], 'optionalAccess', _67 => _67.functionDeclarations])) {
1645
1661
  tools = llmRequest.config.tools[0].functionDeclarations.map(
1646
1662
  (decl) => this.functionDeclarationToAnthropicTool(decl)
1647
1663
  );
@@ -1663,9 +1679,9 @@ var AnthropicLlm = class extends BaseLlm {
1663
1679
  messages: anthropicMessages,
1664
1680
  tools,
1665
1681
  tool_choice: tools ? { type: "auto" } : void 0,
1666
- max_tokens: _optionalChain([llmRequest, 'access', _80 => _80.config, 'optionalAccess', _81 => _81.maxOutputTokens]) || MAX_TOKENS,
1667
- temperature: _optionalChain([llmRequest, 'access', _82 => _82.config, 'optionalAccess', _83 => _83.temperature]),
1668
- top_p: _optionalChain([llmRequest, 'access', _84 => _84.config, 'optionalAccess', _85 => _85.topP])
1682
+ max_tokens: _optionalChain([llmRequest, 'access', _68 => _68.config, 'optionalAccess', _69 => _69.maxOutputTokens]) || MAX_TOKENS,
1683
+ temperature: _optionalChain([llmRequest, 'access', _70 => _70.config, 'optionalAccess', _71 => _71.temperature]),
1684
+ top_p: _optionalChain([llmRequest, 'access', _72 => _72.config, 'optionalAccess', _73 => _73.topP])
1669
1685
  });
1670
1686
  yield this.anthropicMessageToLlmResponse(message);
1671
1687
  }
@@ -1679,7 +1695,9 @@ var AnthropicLlm = class extends BaseLlm {
1679
1695
  * Convert Anthropic Message to ADK LlmResponse
1680
1696
  */
1681
1697
  anthropicMessageToLlmResponse(message) {
1682
- logger3.debug("Anthropic response:", JSON.stringify(message, null, 2));
1698
+ this.logger.debug(
1699
+ `Anthropic response: ${message.usage.output_tokens} tokens, ${message.stop_reason}`
1700
+ );
1683
1701
  return new LlmResponse({
1684
1702
  content: {
1685
1703
  role: "model",
@@ -1724,7 +1742,7 @@ var AnthropicLlm = class extends BaseLlm {
1724
1742
  }
1725
1743
  if (part.function_response) {
1726
1744
  let content = "";
1727
- if (_optionalChain([part, 'access', _86 => _86.function_response, 'access', _87 => _87.response, 'optionalAccess', _88 => _88.result])) {
1745
+ if (_optionalChain([part, 'access', _74 => _74.function_response, 'access', _75 => _75.response, 'optionalAccess', _76 => _76.result])) {
1728
1746
  content = String(part.function_response.response.result);
1729
1747
  }
1730
1748
  return {
@@ -1759,7 +1777,7 @@ var AnthropicLlm = class extends BaseLlm {
1759
1777
  */
1760
1778
  functionDeclarationToAnthropicTool(functionDeclaration) {
1761
1779
  const properties = {};
1762
- if (_optionalChain([functionDeclaration, 'access', _89 => _89.parameters, 'optionalAccess', _90 => _90.properties])) {
1780
+ if (_optionalChain([functionDeclaration, 'access', _77 => _77.parameters, 'optionalAccess', _78 => _78.properties])) {
1763
1781
  for (const [key, value] of Object.entries(
1764
1782
  functionDeclaration.parameters.properties
1765
1783
  )) {
@@ -1833,14 +1851,10 @@ var AnthropicLlm = class extends BaseLlm {
1833
1851
  }
1834
1852
  return this._client;
1835
1853
  }
1836
- };
1854
+ }, _class6);
1837
1855
 
1838
1856
  // src/models/openai-llm.ts
1839
- init_logger();
1840
-
1841
1857
  var _openai = require('openai'); var _openai2 = _interopRequireDefault(_openai);
1842
- var logger4 = new Logger({ name: "OpenAiLlm" });
1843
- var NEW_LINE2 = "\n";
1844
1858
  var OpenAiLlm = class extends BaseLlm {
1845
1859
 
1846
1860
  /**
@@ -1860,16 +1874,12 @@ var OpenAiLlm = class extends BaseLlm {
1860
1874
  */
1861
1875
  async *generateContentAsyncImpl(llmRequest, stream = false) {
1862
1876
  this.preprocessRequest(llmRequest);
1863
- logger4.debug(
1864
- `Sending OpenAI request, model: ${llmRequest.model || this.model}, stream: ${stream}`
1865
- );
1866
- logger4.debug(this.buildRequestLog(llmRequest));
1867
1877
  const model = llmRequest.model || this.model;
1868
1878
  const messages = (llmRequest.contents || []).map(
1869
1879
  (content) => this.contentToOpenAiMessage(content)
1870
1880
  );
1871
1881
  let tools;
1872
- if (_optionalChain([llmRequest, 'access', _91 => _91.config, 'optionalAccess', _92 => _92.tools, 'optionalAccess', _93 => _93[0], 'optionalAccess', _94 => _94.functionDeclarations])) {
1882
+ if (_optionalChain([llmRequest, 'access', _79 => _79.config, 'optionalAccess', _80 => _80.tools, 'optionalAccess', _81 => _81[0], 'optionalAccess', _82 => _82.functionDeclarations])) {
1873
1883
  tools = llmRequest.config.tools[0].functionDeclarations.map(
1874
1884
  (funcDecl) => this.functionDeclarationToOpenAiTool(funcDecl)
1875
1885
  );
@@ -1887,9 +1897,9 @@ var OpenAiLlm = class extends BaseLlm {
1887
1897
  messages: openAiMessages,
1888
1898
  tools,
1889
1899
  tool_choice: tools ? "auto" : void 0,
1890
- max_tokens: _optionalChain([llmRequest, 'access', _95 => _95.config, 'optionalAccess', _96 => _96.maxOutputTokens]),
1891
- temperature: _optionalChain([llmRequest, 'access', _97 => _97.config, 'optionalAccess', _98 => _98.temperature]),
1892
- top_p: _optionalChain([llmRequest, 'access', _99 => _99.config, 'optionalAccess', _100 => _100.topP]),
1900
+ max_tokens: _optionalChain([llmRequest, 'access', _83 => _83.config, 'optionalAccess', _84 => _84.maxOutputTokens]),
1901
+ temperature: _optionalChain([llmRequest, 'access', _85 => _85.config, 'optionalAccess', _86 => _86.temperature]),
1902
+ top_p: _optionalChain([llmRequest, 'access', _87 => _87.config, 'optionalAccess', _88 => _88.topP]),
1893
1903
  stream
1894
1904
  };
1895
1905
  if (stream) {
@@ -1905,13 +1915,11 @@ var OpenAiLlm = class extends BaseLlm {
1905
1915
  const choice = chunk.choices[0];
1906
1916
  if (!choice) continue;
1907
1917
  const delta = choice.delta;
1908
- logger4.debug("Delta content:", delta.content);
1909
1918
  const llmResponse = this.createChunkResponse(delta, chunk.usage);
1910
1919
  if (chunk.usage) {
1911
1920
  usageMetadata = chunk.usage;
1912
1921
  }
1913
- logger4.debug(this.buildResponseLog(llmResponse));
1914
- if (_optionalChain([llmResponse, 'access', _101 => _101.content, 'optionalAccess', _102 => _102.parts, 'optionalAccess', _103 => _103[0], 'optionalAccess', _104 => _104.text])) {
1922
+ if (_optionalChain([llmResponse, 'access', _89 => _89.content, 'optionalAccess', _90 => _90.parts, 'optionalAccess', _91 => _91[0], 'optionalAccess', _92 => _92.text])) {
1915
1923
  const part0 = llmResponse.content.parts[0];
1916
1924
  if (part0.thought) {
1917
1925
  thoughtText += part0.text;
@@ -1952,10 +1960,10 @@ var OpenAiLlm = class extends BaseLlm {
1952
1960
  function: { name: "", arguments: "" }
1953
1961
  };
1954
1962
  }
1955
- if (_optionalChain([toolCall, 'access', _105 => _105.function, 'optionalAccess', _106 => _106.name])) {
1963
+ if (_optionalChain([toolCall, 'access', _93 => _93.function, 'optionalAccess', _94 => _94.name])) {
1956
1964
  accumulatedToolCalls[index].function.name += toolCall.function.name;
1957
1965
  }
1958
- if (_optionalChain([toolCall, 'access', _107 => _107.function, 'optionalAccess', _108 => _108.arguments])) {
1966
+ if (_optionalChain([toolCall, 'access', _95 => _95.function, 'optionalAccess', _96 => _96.arguments])) {
1959
1967
  accumulatedToolCalls[index].function.arguments += toolCall.function.arguments;
1960
1968
  }
1961
1969
  }
@@ -1970,7 +1978,7 @@ var OpenAiLlm = class extends BaseLlm {
1970
1978
  }
1971
1979
  if (accumulatedToolCalls.length > 0) {
1972
1980
  for (const toolCall of accumulatedToolCalls) {
1973
- if (_optionalChain([toolCall, 'access', _109 => _109.function, 'optionalAccess', _110 => _110.name])) {
1981
+ if (_optionalChain([toolCall, 'access', _97 => _97.function, 'optionalAccess', _98 => _98.name])) {
1974
1982
  parts.push({
1975
1983
  functionCall: {
1976
1984
  id: toolCall.id,
@@ -1993,7 +2001,6 @@ var OpenAiLlm = class extends BaseLlm {
1993
2001
  } : void 0,
1994
2002
  finishReason: this.toAdkFinishReason(choice.finish_reason)
1995
2003
  });
1996
- logger4.debug(this.buildResponseLog(finalResponse));
1997
2004
  yield finalResponse;
1998
2005
  } else {
1999
2006
  yield llmResponse;
@@ -2030,7 +2037,9 @@ var OpenAiLlm = class extends BaseLlm {
2030
2037
  choice,
2031
2038
  response.usage
2032
2039
  );
2033
- logger4.debug(this.buildResponseLog(llmResponse));
2040
+ this.logger.debug(
2041
+ `OpenAI response: ${_optionalChain([response, 'access', _99 => _99.usage, 'optionalAccess', _100 => _100.completion_tokens]) || 0} tokens`
2042
+ );
2034
2043
  yield llmResponse;
2035
2044
  }
2036
2045
  }
@@ -2056,7 +2065,7 @@ var OpenAiLlm = class extends BaseLlm {
2056
2065
  }
2057
2066
  if (delta.tool_calls) {
2058
2067
  for (const toolCall of delta.tool_calls) {
2059
- if (toolCall.type === "function" && _optionalChain([toolCall, 'access', _111 => _111.function, 'optionalAccess', _112 => _112.name])) {
2068
+ if (toolCall.type === "function" && _optionalChain([toolCall, 'access', _101 => _101.function, 'optionalAccess', _102 => _102.name])) {
2060
2069
  parts.push({
2061
2070
  functionCall: {
2062
2071
  id: toolCall.id || "",
@@ -2084,10 +2093,6 @@ var OpenAiLlm = class extends BaseLlm {
2084
2093
  */
2085
2094
  openAiMessageToLlmResponse(choice, usage) {
2086
2095
  const message = choice.message;
2087
- logger4.debug(
2088
- "OpenAI response:",
2089
- JSON.stringify({ message, usage }, null, 2)
2090
- );
2091
2096
  const parts = [];
2092
2097
  if (message.content) {
2093
2098
  parts.push({ text: message.content });
@@ -2126,10 +2131,10 @@ var OpenAiLlm = class extends BaseLlm {
2126
2131
  if (role === "system") {
2127
2132
  return {
2128
2133
  role: "system",
2129
- content: _optionalChain([content, 'access', _113 => _113.parts, 'optionalAccess', _114 => _114[0], 'optionalAccess', _115 => _115.text]) || ""
2134
+ content: _optionalChain([content, 'access', _103 => _103.parts, 'optionalAccess', _104 => _104[0], 'optionalAccess', _105 => _105.text]) || ""
2130
2135
  };
2131
2136
  }
2132
- if (_optionalChain([content, 'access', _116 => _116.parts, 'optionalAccess', _117 => _117.some, 'call', _118 => _118((part) => part.functionCall)])) {
2137
+ if (_optionalChain([content, 'access', _106 => _106.parts, 'optionalAccess', _107 => _107.some, 'call', _108 => _108((part) => part.functionCall)])) {
2133
2138
  const functionCallPart = content.parts.find(
2134
2139
  (part) => part.functionCall
2135
2140
  );
@@ -2149,7 +2154,7 @@ var OpenAiLlm = class extends BaseLlm {
2149
2154
  ]
2150
2155
  };
2151
2156
  }
2152
- if (_optionalChain([content, 'access', _119 => _119.parts, 'optionalAccess', _120 => _120.some, 'call', _121 => _121((part) => part.functionResponse)])) {
2157
+ if (_optionalChain([content, 'access', _109 => _109.parts, 'optionalAccess', _110 => _110.some, 'call', _111 => _111((part) => part.functionResponse)])) {
2153
2158
  const functionResponsePart = content.parts.find(
2154
2159
  (part) => part.functionResponse
2155
2160
  );
@@ -2161,7 +2166,7 @@ var OpenAiLlm = class extends BaseLlm {
2161
2166
  )
2162
2167
  };
2163
2168
  }
2164
- if (_optionalChain([content, 'access', _122 => _122.parts, 'optionalAccess', _123 => _123.length]) === 1 && content.parts[0].text) {
2169
+ if (_optionalChain([content, 'access', _112 => _112.parts, 'optionalAccess', _113 => _113.length]) === 1 && content.parts[0].text) {
2165
2170
  return {
2166
2171
  role,
2167
2172
  content: content.parts[0].text
@@ -2184,7 +2189,7 @@ var OpenAiLlm = class extends BaseLlm {
2184
2189
  text: part.text
2185
2190
  };
2186
2191
  }
2187
- if (_optionalChain([part, 'access', _124 => _124.inline_data, 'optionalAccess', _125 => _125.mime_type]) && _optionalChain([part, 'access', _126 => _126.inline_data, 'optionalAccess', _127 => _127.data])) {
2192
+ if (_optionalChain([part, 'access', _114 => _114.inline_data, 'optionalAccess', _115 => _115.mime_type]) && _optionalChain([part, 'access', _116 => _116.inline_data, 'optionalAccess', _117 => _117.data])) {
2188
2193
  return {
2189
2194
  type: "image_url",
2190
2195
  image_url: {
@@ -2273,69 +2278,8 @@ var OpenAiLlm = class extends BaseLlm {
2273
2278
  * Check if response has inline data (similar to Google LLM)
2274
2279
  */
2275
2280
  hasInlineData(response) {
2276
- const parts = _optionalChain([response, 'access', _128 => _128.content, 'optionalAccess', _129 => _129.parts]);
2277
- return _optionalChain([parts, 'optionalAccess', _130 => _130.some, 'call', _131 => _131((part) => part.inlineData)]) || false;
2278
- }
2279
- /**
2280
- * Build request log string for debugging (similar to Google LLM)
2281
- */
2282
- buildRequestLog(req) {
2283
- const functionDecls = _optionalChain([req, 'access', _132 => _132.config, 'optionalAccess', _133 => _133.tools, 'optionalAccess', _134 => _134[0], 'optionalAccess', _135 => _135.functionDeclarations]) || [];
2284
- const functionLogs = functionDecls.length > 0 ? functionDecls.map(
2285
- (funcDecl) => `${funcDecl.name}: ${JSON.stringify(_optionalChain([funcDecl, 'access', _136 => _136.parameters, 'optionalAccess', _137 => _137.properties]) || {})}`
2286
- ) : [];
2287
- const contentsLogs = _optionalChain([req, 'access', _138 => _138.contents, 'optionalAccess', _139 => _139.map, 'call', _140 => _140(
2288
- (content) => JSON.stringify(content, (key, value) => {
2289
- if (key === "data" && typeof value === "string" && value.length > 100) {
2290
- return "[EXCLUDED]";
2291
- }
2292
- return value;
2293
- })
2294
- )]) || [];
2295
- return _dedent2.default`
2296
- LLM Request:
2297
- -----------------------------------------------------------
2298
- System Instruction:
2299
- ${req.getSystemInstructionText() || ""}
2300
- -----------------------------------------------------------
2301
- Contents:
2302
- ${contentsLogs.join(NEW_LINE2)}
2303
- -----------------------------------------------------------
2304
- Functions:
2305
- ${functionLogs.join(NEW_LINE2)}
2306
- -----------------------------------------------------------`;
2307
- }
2308
- /**
2309
- * Build response log string for debugging (similar to Google LLM)
2310
- */
2311
- buildResponseLog(response) {
2312
- const functionCallsText = [];
2313
- if (_optionalChain([response, 'access', _141 => _141.content, 'optionalAccess', _142 => _142.parts])) {
2314
- for (const part of response.content.parts) {
2315
- if (part.functionCall) {
2316
- const funcCall = part.functionCall;
2317
- functionCallsText.push(
2318
- `name: ${funcCall.name}, args: ${JSON.stringify(funcCall.args)}`
2319
- );
2320
- }
2321
- }
2322
- }
2323
- const text = _optionalChain([response, 'access', _143 => _143.content, 'optionalAccess', _144 => _144.parts, 'optionalAccess', _145 => _145.filter, 'call', _146 => _146((part) => part.text), 'optionalAccess', _147 => _147.map, 'call', _148 => _148((part) => part.text), 'optionalAccess', _149 => _149.join, 'call', _150 => _150("")]) || "";
2324
- return _dedent2.default`
2325
- LLM Response:
2326
- -----------------------------------------------------------
2327
- Text:
2328
- ${text}
2329
- -----------------------------------------------------------
2330
- Function calls:
2331
- ${functionCallsText.join(NEW_LINE2)}
2332
- -----------------------------------------------------------
2333
- Usage:
2334
- ${JSON.stringify(response.usageMetadata, null, 2)}
2335
- -----------------------------------------------------------
2336
- Finish Reason:
2337
- ${response.finishReason}
2338
- -----------------------------------------------------------`;
2281
+ const parts = _optionalChain([response, 'access', _118 => _118.content, 'optionalAccess', _119 => _119.parts]);
2282
+ return _optionalChain([parts, 'optionalAccess', _120 => _120.some, 'call', _121 => _121((part) => part.inlineData)]) || false;
2339
2283
  }
2340
2284
  /**
2341
2285
  * Gets the OpenAI client
@@ -2356,14 +2300,289 @@ var OpenAiLlm = class extends BaseLlm {
2356
2300
  }
2357
2301
  };
2358
2302
 
2303
+ // src/models/ai-sdk.ts
2304
+ init_logger();
2305
+
2306
+
2307
+
2308
+
2309
+ var _ai = require('ai');
2310
+ var AiSdkLlm = (_class7 = class extends BaseLlm {
2311
+
2312
+ __init13() {this.logger = new Logger({ name: "AiSdkLlm" })}
2313
+ /**
2314
+ * Constructor accepts a pre-configured LanguageModel instance
2315
+ * @param model - Pre-configured LanguageModel from provider(modelName)
2316
+ */
2317
+ constructor(modelInstance) {
2318
+ super(modelInstance.modelId || "ai-sdk-model");_class7.prototype.__init13.call(this);;
2319
+ this.modelInstance = modelInstance;
2320
+ }
2321
+ /**
2322
+ * Returns empty array - following Python ADK pattern
2323
+ */
2324
+ static supportedModels() {
2325
+ return [];
2326
+ }
2327
+ async *generateContentAsyncImpl(request, stream = false) {
2328
+ try {
2329
+ const messages = this.convertToAiSdkMessages(request);
2330
+ const systemMessage = request.getSystemInstructionText();
2331
+ const tools = this.convertToAiSdkTools(request);
2332
+ const requestParams = {
2333
+ model: this.modelInstance,
2334
+ messages,
2335
+ system: systemMessage,
2336
+ tools: Object.keys(tools).length > 0 ? tools : void 0,
2337
+ maxTokens: _optionalChain([request, 'access', _122 => _122.config, 'optionalAccess', _123 => _123.maxOutputTokens]),
2338
+ temperature: _optionalChain([request, 'access', _124 => _124.config, 'optionalAccess', _125 => _125.temperature]),
2339
+ topP: _optionalChain([request, 'access', _126 => _126.config, 'optionalAccess', _127 => _127.topP])
2340
+ };
2341
+ if (stream) {
2342
+ const result = _ai.streamText.call(void 0, requestParams);
2343
+ let accumulatedText = "";
2344
+ for await (const delta of result.textStream) {
2345
+ accumulatedText += delta;
2346
+ yield new LlmResponse({
2347
+ content: {
2348
+ role: "model",
2349
+ parts: [{ text: accumulatedText }]
2350
+ },
2351
+ partial: true
2352
+ });
2353
+ }
2354
+ const toolCalls = await result.toolCalls;
2355
+ const parts = [];
2356
+ if (accumulatedText) {
2357
+ parts.push({ text: accumulatedText });
2358
+ }
2359
+ if (toolCalls && toolCalls.length > 0) {
2360
+ for (const toolCall of toolCalls) {
2361
+ parts.push({
2362
+ functionCall: {
2363
+ id: toolCall.toolCallId,
2364
+ name: toolCall.toolName,
2365
+ args: toolCall.args
2366
+ }
2367
+ });
2368
+ }
2369
+ }
2370
+ const finalUsage = await result.usage;
2371
+ const finishReason = await result.finishReason;
2372
+ yield new LlmResponse({
2373
+ content: {
2374
+ role: "model",
2375
+ parts: parts.length > 0 ? parts : [{ text: "" }]
2376
+ },
2377
+ usageMetadata: finalUsage ? {
2378
+ promptTokenCount: finalUsage.promptTokens,
2379
+ candidatesTokenCount: finalUsage.completionTokens,
2380
+ totalTokenCount: finalUsage.totalTokens
2381
+ } : void 0,
2382
+ finishReason: this.mapFinishReason(finishReason),
2383
+ turnComplete: true
2384
+ });
2385
+ } else {
2386
+ const result = await _ai.generateText.call(void 0, requestParams);
2387
+ const parts = [];
2388
+ if (result.text) {
2389
+ parts.push({ text: result.text });
2390
+ }
2391
+ if (result.toolCalls && result.toolCalls.length > 0) {
2392
+ for (const toolCall of result.toolCalls) {
2393
+ parts.push({
2394
+ functionCall: {
2395
+ id: toolCall.toolCallId,
2396
+ name: toolCall.toolName,
2397
+ args: toolCall.args
2398
+ }
2399
+ });
2400
+ }
2401
+ }
2402
+ yield new LlmResponse({
2403
+ content: {
2404
+ role: "model",
2405
+ parts: parts.length > 0 ? parts : [{ text: "" }]
2406
+ },
2407
+ usageMetadata: result.usage ? {
2408
+ promptTokenCount: result.usage.promptTokens,
2409
+ candidatesTokenCount: result.usage.completionTokens,
2410
+ totalTokenCount: result.usage.totalTokens
2411
+ } : void 0,
2412
+ finishReason: this.mapFinishReason(result.finishReason),
2413
+ turnComplete: true
2414
+ });
2415
+ }
2416
+ } catch (error) {
2417
+ this.logger.error(`AI SDK Error: ${String(error)}`, { error, request });
2418
+ yield LlmResponse.fromError(error, {
2419
+ errorCode: "AI_SDK_ERROR",
2420
+ model: this.model
2421
+ });
2422
+ }
2423
+ }
2424
+ /**
2425
+ * Convert ADK LlmRequest to AI SDK CoreMessage format
2426
+ */
2427
+ convertToAiSdkMessages(llmRequest) {
2428
+ const messages = [];
2429
+ for (const content of llmRequest.contents || []) {
2430
+ const message = this.contentToAiSdkMessage(content);
2431
+ if (message) {
2432
+ messages.push(message);
2433
+ }
2434
+ }
2435
+ return messages;
2436
+ }
2437
+ /**
2438
+ * Convert ADK tools to AI SDK tools format
2439
+ */
2440
+ convertToAiSdkTools(llmRequest) {
2441
+ const tools = {};
2442
+ if (_optionalChain([llmRequest, 'access', _128 => _128.config, 'optionalAccess', _129 => _129.tools])) {
2443
+ for (const toolConfig of llmRequest.config.tools) {
2444
+ if ("functionDeclarations" in toolConfig) {
2445
+ for (const funcDecl of toolConfig.functionDeclarations) {
2446
+ tools[funcDecl.name] = {
2447
+ description: funcDecl.description,
2448
+ parameters: _ai.jsonSchema.call(void 0, funcDecl.parameters || {})
2449
+ };
2450
+ }
2451
+ }
2452
+ }
2453
+ }
2454
+ return tools;
2455
+ }
2456
+ /**
2457
+ * Convert ADK Content to AI SDK CoreMessage
2458
+ */
2459
+ contentToAiSdkMessage(content) {
2460
+ const role = this.mapRole(content.role);
2461
+ if (!content.parts || content.parts.length === 0) {
2462
+ return null;
2463
+ }
2464
+ if (content.parts.length === 1 && content.parts[0].text) {
2465
+ const textContent = content.parts[0].text;
2466
+ if (role === "system") {
2467
+ return { role: "system", content: textContent };
2468
+ }
2469
+ if (role === "assistant") {
2470
+ return { role: "assistant", content: textContent };
2471
+ }
2472
+ return { role: "user", content: textContent };
2473
+ }
2474
+ if (_optionalChain([content, 'access', _130 => _130.parts, 'optionalAccess', _131 => _131.some, 'call', _132 => _132((part) => part.functionCall)])) {
2475
+ const textParts = content.parts.filter((part) => part.text);
2476
+ const functionCalls = content.parts.filter((part) => part.functionCall);
2477
+ const contentParts2 = [];
2478
+ for (const textPart of textParts) {
2479
+ if (textPart.text) {
2480
+ contentParts2.push({
2481
+ type: "text",
2482
+ text: textPart.text
2483
+ });
2484
+ }
2485
+ }
2486
+ for (const funcPart of functionCalls) {
2487
+ if (funcPart.functionCall) {
2488
+ contentParts2.push({
2489
+ type: "tool-call",
2490
+ toolCallId: funcPart.functionCall.id,
2491
+ toolName: funcPart.functionCall.name,
2492
+ args: funcPart.functionCall.args
2493
+ });
2494
+ }
2495
+ }
2496
+ return {
2497
+ role: "assistant",
2498
+ content: contentParts2
2499
+ };
2500
+ }
2501
+ if (_optionalChain([content, 'access', _133 => _133.parts, 'optionalAccess', _134 => _134.some, 'call', _135 => _135((part) => part.functionResponse)])) {
2502
+ const functionResponses = content.parts.filter(
2503
+ (part) => part.functionResponse
2504
+ );
2505
+ const contentParts2 = functionResponses.map((part) => ({
2506
+ type: "tool-result",
2507
+ toolCallId: part.functionResponse.id,
2508
+ toolName: part.functionResponse.name || "unknown",
2509
+ result: part.functionResponse.response
2510
+ }));
2511
+ return {
2512
+ role: "tool",
2513
+ content: contentParts2
2514
+ };
2515
+ }
2516
+ const contentParts = [];
2517
+ for (const part of content.parts) {
2518
+ if (part.text) {
2519
+ contentParts.push({
2520
+ type: "text",
2521
+ text: part.text
2522
+ });
2523
+ }
2524
+ }
2525
+ if (contentParts.length === 0) {
2526
+ return null;
2527
+ }
2528
+ if (contentParts.length === 1) {
2529
+ const textContent = contentParts[0].text;
2530
+ if (role === "system") {
2531
+ return { role: "system", content: textContent };
2532
+ }
2533
+ if (role === "assistant") {
2534
+ return { role: "assistant", content: textContent };
2535
+ }
2536
+ return { role: "user", content: textContent };
2537
+ }
2538
+ if (role === "system") {
2539
+ const combinedText = contentParts.map((p) => p.text).join("");
2540
+ return { role: "system", content: combinedText };
2541
+ }
2542
+ if (role === "assistant") {
2543
+ return { role: "assistant", content: contentParts };
2544
+ }
2545
+ return { role: "user", content: contentParts };
2546
+ }
2547
+ /**
2548
+ * Map ADK role to AI SDK role
2549
+ */
2550
+ mapRole(role) {
2551
+ switch (role) {
2552
+ case "model":
2553
+ case "assistant":
2554
+ return "assistant";
2555
+ case "system":
2556
+ return "system";
2557
+ default:
2558
+ return "user";
2559
+ }
2560
+ }
2561
+ /**
2562
+ * Map AI SDK finish reason to ADK finish reason
2563
+ */
2564
+ mapFinishReason(finishReason) {
2565
+ switch (finishReason) {
2566
+ case "stop":
2567
+ case "end_of_message":
2568
+ return "STOP";
2569
+ case "length":
2570
+ case "max_tokens":
2571
+ return "MAX_TOKENS";
2572
+ default:
2573
+ return "FINISH_REASON_UNSPECIFIED";
2574
+ }
2575
+ }
2576
+ }, _class7);
2577
+
2359
2578
  // src/models/llm-registry.ts
2360
2579
  init_logger();
2361
- var logger5 = new Logger({ name: "LLMRegistry" });
2362
- var LLMRegistry = (_class6 = class _LLMRegistry {
2580
+ var LLMRegistry = (_class8 = class _LLMRegistry {
2363
2581
  /**
2364
2582
  * Map of model name regex to LLM class
2365
2583
  */
2366
2584
  static __initStatic() {this.llmRegistry = /* @__PURE__ */ new Map()}
2585
+ static __initStatic2() {this.logger = new Logger({ name: "LLMRegistry" })}
2367
2586
  /**
2368
2587
  * Creates a new LLM instance
2369
2588
  *
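The hunk above contains the main feature of 0.1.5: AiSdkLlm, a BaseLlm implementation that wraps a pre-configured Vercel AI SDK LanguageModel and is now exported from the models barrel; AI SDK failures are surfaced via LlmResponse.fromError with errorCode "AI_SDK_ERROR". A minimal construction sketch; the @ai-sdk/openai provider and model name are assumptions not shown in this diff:

import { AiSdkLlm } from "@iqai/adk";
import { openai } from "@ai-sdk/openai"; // assumed provider package

// Any AI SDK LanguageModel works; its modelId becomes the instance's model name.
const llm = new AiSdkLlm(openai("gpt-4o-mini"));
// llm can then be used anywhere the framework expects a BaseLlm instance.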
@@ -2415,12 +2634,12 @@ var LLMRegistry = (_class6 = class _LLMRegistry {
2415
2634
  * Logs all registered models for debugging
2416
2635
  */
2417
2636
  static logRegisteredModels() {
2418
- logger5.debug(
2637
+ _LLMRegistry.logger.debug(
2419
2638
  "Registered LLM models:",
2420
2639
  [..._LLMRegistry.llmRegistry.entries()].map(([regex]) => regex.toString())
2421
2640
  );
2422
2641
  }
2423
- }, _class6.__initStatic(), _class6);
2642
+ }, _class8.__initStatic(), _class8.__initStatic2(), _class8);
2424
2643
 
2425
2644
  // src/models/registry.ts
2426
2645
  function registerProviders() {
@@ -2618,7 +2837,7 @@ var OAuth2Credential = class extends AuthCredential {
2618
2837
  "Cannot refresh token: no refresh token or refresh function"
2619
2838
  );
2620
2839
  }
2621
- const result = await _optionalChain([this, 'access', _151 => _151.refreshFunction, 'optionalCall', _152 => _152(this.refreshToken)]);
2840
+ const result = await _optionalChain([this, 'access', _136 => _136.refreshFunction, 'optionalCall', _137 => _137(this.refreshToken)]);
2622
2841
  if (!result) {
2623
2842
  throw new Error("Failed to refresh token");
2624
2843
  }
@@ -2672,7 +2891,7 @@ var AuthHandler = class {
2672
2891
  * Gets the authentication token
2673
2892
  */
2674
2893
  getToken() {
2675
- return _optionalChain([this, 'access', _153 => _153.credential, 'optionalAccess', _154 => _154.getToken, 'call', _155 => _155()]);
2894
+ return _optionalChain([this, 'access', _138 => _138.credential, 'optionalAccess', _139 => _139.getToken, 'call', _140 => _140()]);
2676
2895
  }
2677
2896
  /**
2678
2897
  * Gets headers for HTTP requests
@@ -2687,7 +2906,7 @@ var AuthHandler = class {
2687
2906
  * Refreshes the token if necessary
2688
2907
  */
2689
2908
  async refreshToken() {
2690
- if (_optionalChain([this, 'access', _156 => _156.credential, 'optionalAccess', _157 => _157.canRefresh, 'call', _158 => _158()])) {
2909
+ if (_optionalChain([this, 'access', _141 => _141.credential, 'optionalAccess', _142 => _142.canRefresh, 'call', _143 => _143()])) {
2691
2910
  await this.credential.refresh();
2692
2911
  }
2693
2912
  }
@@ -2794,10 +3013,10 @@ var OpenIdConnectScheme = class extends AuthScheme {
2794
3013
  };
2795
3014
 
2796
3015
  // src/sessions/state.ts
2797
- var State = (_class7 = class _State {
2798
- static __initStatic2() {this.APP_PREFIX = "app:"}
2799
- static __initStatic3() {this.USER_PREFIX = "user:"}
2800
- static __initStatic4() {this.TEMP_PREFIX = "temp:"}
3016
+ var State = (_class9 = class _State {
3017
+ static __initStatic3() {this.APP_PREFIX = "app:"}
3018
+ static __initStatic4() {this.USER_PREFIX = "user:"}
3019
+ static __initStatic5() {this.TEMP_PREFIX = "temp:"}
2801
3020
 
2802
3021
 
2803
3022
  /**
@@ -2891,13 +3110,13 @@ var State = (_class7 = class _State {
2891
3110
  const state = new _State(value, delta);
2892
3111
  return _State.createProxy(state);
2893
3112
  }
2894
- }, _class7.__initStatic2(), _class7.__initStatic3(), _class7.__initStatic4(), _class7);
3113
+ }, _class9.__initStatic3(), _class9.__initStatic4(), _class9.__initStatic5(), _class9);
2895
3114
 
2896
3115
  // src/events/event.ts
2897
3116
  var _uuid = require('uuid');
2898
3117
 
2899
3118
  // src/events/event-actions.ts
2900
- var EventActions = (_class8 = class {
3119
+ var EventActions = (_class10 = class {
2901
3120
  /**
2902
3121
  * If true, it won't call model to summarize function response.
2903
3122
  * Only used for function_response event.
@@ -2906,12 +3125,12 @@ var EventActions = (_class8 = class {
2906
3125
  /**
2907
3126
  * Indicates that the event is updating the state with the given delta.
2908
3127
  */
2909
- __init11() {this.stateDelta = {}}
3128
+ __init14() {this.stateDelta = {}}
2910
3129
  /**
2911
3130
  * Indicates that the event is updating an artifact. key is the filename,
2912
3131
  * value is the version.
2913
3132
  */
2914
- __init12() {this.artifactDelta = {}}
3133
+ __init15() {this.artifactDelta = {}}
2915
3134
  /**
2916
3135
  * If set, the event transfers to the specified agent.
2917
3136
  */
@@ -2927,7 +3146,7 @@ var EventActions = (_class8 = class {
2927
3146
  /**
2928
3147
  * Constructor for EventActions
2929
3148
  */
2930
- constructor(options = {}) {;_class8.prototype.__init11.call(this);_class8.prototype.__init12.call(this);
3149
+ constructor(options = {}) {;_class10.prototype.__init14.call(this);_class10.prototype.__init15.call(this);
2931
3150
  this.skipSummarization = options.skipSummarization;
2932
3151
  this.stateDelta = options.stateDelta || {};
2933
3152
  this.artifactDelta = options.artifactDelta || {};
@@ -2935,16 +3154,16 @@ var EventActions = (_class8 = class {
2935
3154
  this.escalate = options.escalate;
2936
3155
  this.requestedAuthConfigs = options.requestedAuthConfigs;
2937
3156
  }
2938
- }, _class8);
3157
+ }, _class10);
2939
3158
 
2940
3159
  // src/events/event.ts
2941
- var Event = (_class9 = class _Event extends LlmResponse {
3160
+ var Event = (_class11 = class _Event extends LlmResponse {
2942
3161
  /** The invocation ID of the event. */
2943
- __init13() {this.invocationId = ""}
3162
+ __init16() {this.invocationId = ""}
2944
3163
  /** 'user' or the name of the agent, indicating who appended the event to the session. */
2945
3164
 
2946
3165
  /** The actions taken by the agent. */
2947
- __init14() {this.actions = new EventActions()}
3166
+ __init17() {this.actions = new EventActions()}
2948
3167
  /**
2949
3168
  * Set of ids of the long running function calls.
2950
3169
  * Agent client will know from this field about which function call is long running.
@@ -2959,9 +3178,9 @@ var Event = (_class9 = class _Event extends LlmResponse {
2959
3178
  */
2960
3179
 
2961
3180
  /** The unique identifier of the event. */
2962
- __init15() {this.id = ""}
3181
+ __init18() {this.id = ""}
2963
3182
  /** The timestamp of the event (seconds since epoch). */
2964
- __init16() {this.timestamp = Math.floor(Date.now() / 1e3)}
3183
+ __init19() {this.timestamp = Math.floor(Date.now() / 1e3)}
2965
3184
  /**
2966
3185
  * Constructor for Event.
2967
3186
  */
@@ -2969,7 +3188,7 @@ var Event = (_class9 = class _Event extends LlmResponse {
2969
3188
  super({
2970
3189
  content: opts.content,
2971
3190
  partial: opts.partial
2972
- });_class9.prototype.__init13.call(this);_class9.prototype.__init14.call(this);_class9.prototype.__init15.call(this);_class9.prototype.__init16.call(this);;
3191
+ });_class11.prototype.__init16.call(this);_class11.prototype.__init17.call(this);_class11.prototype.__init18.call(this);_class11.prototype.__init19.call(this);;
2973
3192
  this.invocationId = _nullishCoalesce(opts.invocationId, () => ( ""));
2974
3193
  this.author = opts.author;
2975
3194
  this.actions = _nullishCoalesce(opts.actions, () => ( new EventActions()));
@@ -3030,7 +3249,7 @@ var Event = (_class9 = class _Event extends LlmResponse {
3030
3249
  static newId() {
3031
3250
  return _uuid.v4.call(void 0, ).replace(/-/g, "").substring(0, 8);
3032
3251
  }
3033
- }, _class9);
3252
+ }, _class11);
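// --- Illustrative sketch (editor's example, not part of the package) ---
// The id/timestamp scheme used by the compiled Event class above: an
// 8-character id derived from a v4 UUID with dashes stripped, and a Unix
// timestamp in whole seconds. newEventId and newEventTimestamp are
// illustrative names, not package exports.
import { v4 as uuidv4 } from "uuid";

function newEventId(): string {
  return uuidv4().replace(/-/g, "").substring(0, 8);
}

function newEventTimestamp(): number {
  return Math.floor(Date.now() / 1000);
}

// e.g. newEventId() -> "a1b2c3d4", newEventTimestamp() -> 1718000000
// --- end sketch ---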
3034
3253
 
3035
3254
  // src/agents/readonly-context.ts
3036
3255
  var ReadonlyContext = class {
@@ -3136,7 +3355,7 @@ var CallbackContext = class extends ReadonlyContext {
3136
3355
  };
3137
3356
 
3138
3357
  // src/agents/base-agent.ts
3139
- var BaseAgent = (_class10 = class {
3358
+ var BaseAgent = (_class12 = class {
3140
3359
  /**
3141
3360
  * The agent's name.
3142
3361
  * Agent name must be a valid identifier and unique within the agent tree.
@@ -3148,7 +3367,7 @@ var BaseAgent = (_class10 = class {
3148
3367
  * The model uses this to determine whether to delegate control to the agent.
3149
3368
  * One-line description is enough and preferred.
3150
3369
  */
3151
- __init17() {this.description = ""}
3370
+ __init20() {this.description = ""}
3152
3371
  /**
3153
3372
  * The parent agent of this agent.
3154
3373
  * Note that an agent can ONLY be added as sub-agent once.
@@ -3160,7 +3379,7 @@ var BaseAgent = (_class10 = class {
3160
3379
  /**
3161
3380
  * The sub-agents of this agent.
3162
3381
  */
3163
- __init18() {this.subAgents = []}
3382
+ __init21() {this.subAgents = []}
3164
3383
  /**
3165
3384
  * Callback or list of callbacks to be invoked before the agent run.
3166
3385
  * When a list of callbacks is provided, the callbacks will be called in the
@@ -3192,7 +3411,7 @@ var BaseAgent = (_class10 = class {
3192
3411
  /**
3193
3412
  * Constructor for BaseAgent
3194
3413
  */
3195
- constructor(config) {;_class10.prototype.__init17.call(this);_class10.prototype.__init18.call(this);
3414
+ constructor(config) {;_class12.prototype.__init20.call(this);_class12.prototype.__init21.call(this);
3196
3415
  this.name = config.name;
3197
3416
  this.description = config.description || "";
3198
3417
  this.subAgents = config.subAgents || [];
@@ -3449,7 +3668,7 @@ var BaseAgent = (_class10 = class {
3449
3668
  subAgent.parentAgent = this;
3450
3669
  }
3451
3670
  }
3452
- }, _class10);
3671
+ }, _class12);
3453
3672
 
3454
3673
  // src/agents/llm-agent.ts
3455
3674
  init_logger();
@@ -3573,8 +3792,8 @@ init_function_utils();
3573
3792
  // src/tools/common/google-search.ts
3574
3793
  init_logger();
3575
3794
  init_base_tool();
3576
- var GoogleSearch = (_class11 = class extends BaseTool {
3577
- __init19() {this.logger = new Logger({ name: "GoogleSearch" })}
3795
+ var GoogleSearch = (_class13 = class extends BaseTool {
3796
+ __init22() {this.logger = new Logger({ name: "GoogleSearch" })}
3578
3797
  /**
3579
3798
  * Constructor for GoogleSearch
3580
3799
  */
@@ -3582,7 +3801,7 @@ var GoogleSearch = (_class11 = class extends BaseTool {
3582
3801
  super({
3583
3802
  name: "google_search",
3584
3803
  description: "Search the web using Google"
3585
- });_class11.prototype.__init19.call(this);;
3804
+ });_class13.prototype.__init22.call(this);;
3586
3805
  }
3587
3806
  /**
3588
3807
  * Get the function declaration for the tool
@@ -3631,7 +3850,7 @@ var GoogleSearch = (_class11 = class extends BaseTool {
3631
3850
  ]
3632
3851
  };
3633
3852
  }
3634
- }, _class11);
3853
+ }, _class13);
3635
3854
 
3636
3855
  // src/tools/common/http-request-tool.ts
3637
3856
  init_base_tool();
@@ -3763,7 +3982,7 @@ var FileOperationsTool = class extends BaseTool {
3763
3982
  name: "file_operations",
3764
3983
  description: "Perform file system operations like reading, writing, and managing files"
3765
3984
  });
3766
- this.basePath = _optionalChain([options, 'optionalAccess', _159 => _159.basePath]) || process.cwd();
3985
+ this.basePath = _optionalChain([options, 'optionalAccess', _144 => _144.basePath]) || process.cwd();
3767
3986
  }
3768
3987
  /**
3769
3988
  * Get the function declaration for the tool
@@ -4080,8 +4299,8 @@ var UserInteractionTool = class extends BaseTool {
4080
4299
  // src/tools/common/exit-loop-tool.ts
4081
4300
  init_logger();
4082
4301
  init_base_tool();
4083
- var ExitLoopTool = (_class12 = class extends BaseTool {
4084
- __init20() {this.logger = new Logger({ name: "ExitLoopTool" })}
4302
+ var ExitLoopTool = (_class14 = class extends BaseTool {
4303
+ __init23() {this.logger = new Logger({ name: "ExitLoopTool" })}
4085
4304
  /**
4086
4305
  * Constructor for ExitLoopTool
4087
4306
  */
@@ -4089,7 +4308,7 @@ var ExitLoopTool = (_class12 = class extends BaseTool {
4089
4308
  super({
4090
4309
  name: "exit_loop",
4091
4310
  description: "Exits the loop. Call this function only when you are instructed to do so."
4092
- });_class12.prototype.__init20.call(this);;
4311
+ });_class14.prototype.__init23.call(this);;
4093
4312
  }
4094
4313
  /**
4095
4314
  * Execute the exit loop action
@@ -4098,13 +4317,13 @@ var ExitLoopTool = (_class12 = class extends BaseTool {
4098
4317
  this.logger.debug("Executing exit loop tool");
4099
4318
  context.actions.escalate = true;
4100
4319
  }
4101
- }, _class12);
4320
+ }, _class14);
4102
4321
 
4103
4322
  // src/tools/common/get-user-choice-tool.ts
4104
4323
  init_logger();
4105
4324
  init_base_tool();
4106
- var GetUserChoiceTool = (_class13 = class extends BaseTool {
4107
- __init21() {this.logger = new Logger({ name: "GetUserChoiceTool" })}
4325
+ var GetUserChoiceTool = (_class15 = class extends BaseTool {
4326
+ __init24() {this.logger = new Logger({ name: "GetUserChoiceTool" })}
4108
4327
  /**
4109
4328
  * Constructor for GetUserChoiceTool
4110
4329
  */
@@ -4113,7 +4332,7 @@ var GetUserChoiceTool = (_class13 = class extends BaseTool {
4113
4332
  name: "get_user_choice",
4114
4333
  description: "This tool provides the options to the user and asks them to choose one. Use this tool when you need the user to make a selection between multiple options. Do not list options in your response - use this tool instead.",
4115
4334
  isLongRunning: true
4116
- });_class13.prototype.__init21.call(this);;
4335
+ });_class15.prototype.__init24.call(this);;
4117
4336
  }
4118
4337
  /**
4119
4338
  * Get the function declaration for the tool
@@ -4156,13 +4375,13 @@ var GetUserChoiceTool = (_class13 = class extends BaseTool {
4156
4375
  context.actions.skipSummarization = true;
4157
4376
  return null;
4158
4377
  }
4159
- }, _class13);
4378
+ }, _class15);
4160
4379
 
4161
4380
  // src/tools/common/transfer-to-agent-tool.ts
4162
4381
  init_logger();
4163
4382
  init_base_tool();
4164
- var TransferToAgentTool = (_class14 = class extends BaseTool {
4165
- __init22() {this.logger = new Logger({ name: "TransferToAgentTool" })}
4383
+ var TransferToAgentTool = (_class16 = class extends BaseTool {
4384
+ __init25() {this.logger = new Logger({ name: "TransferToAgentTool" })}
4166
4385
  /**
4167
4386
  * Constructor for TransferToAgentTool
4168
4387
  */
@@ -4170,7 +4389,7 @@ var TransferToAgentTool = (_class14 = class extends BaseTool {
4170
4389
  super({
4171
4390
  name: "transfer_to_agent",
4172
4391
  description: "Transfer the question to another agent."
4173
- });_class14.prototype.__init22.call(this);;
4392
+ });_class16.prototype.__init25.call(this);;
4174
4393
  }
4175
4394
  /**
4176
4395
  * Execute the transfer to agent action
@@ -4179,13 +4398,13 @@ var TransferToAgentTool = (_class14 = class extends BaseTool {
4179
4398
  this.logger.debug(`Executing transfer to agent: ${args.agent_name}`);
4180
4399
  context.actions.transferToAgent = args.agent_name;
4181
4400
  }
4182
- }, _class14);
4401
+ }, _class16);
4183
4402
 
4184
4403
  // src/tools/common/load-memory-tool.ts
4185
4404
  init_logger();
4186
4405
  init_base_tool();
4187
- var LoadMemoryTool = (_class15 = class extends BaseTool {
4188
- __init23() {this.logger = new Logger({ name: "LoadMemoryTool" })}
4406
+ var LoadMemoryTool = (_class17 = class extends BaseTool {
4407
+ __init26() {this.logger = new Logger({ name: "LoadMemoryTool" })}
4189
4408
  /**
4190
4409
  * Constructor for LoadMemoryTool
4191
4410
  */
@@ -4193,7 +4412,7 @@ var LoadMemoryTool = (_class15 = class extends BaseTool {
4193
4412
  super({
4194
4413
  name: "load_memory",
4195
4414
  description: "Loads the memory for the current user based on a query."
4196
- });_class15.prototype.__init23.call(this);;
4415
+ });_class17.prototype.__init26.call(this);;
4197
4416
  }
4198
4417
  /**
4199
4418
  * Get the function declaration for the tool
@@ -4223,7 +4442,7 @@ var LoadMemoryTool = (_class15 = class extends BaseTool {
4223
4442
  const searchResult = await context.searchMemory(args.query);
4224
4443
  return {
4225
4444
  memories: searchResult.memories || [],
4226
- count: _optionalChain([searchResult, 'access', _160 => _160.memories, 'optionalAccess', _161 => _161.length]) || 0
4445
+ count: _optionalChain([searchResult, 'access', _145 => _145.memories, 'optionalAccess', _146 => _146.length]) || 0
4227
4446
  };
4228
4447
  } catch (error) {
4229
4448
  console.error("Error searching memory:", error);
@@ -4233,7 +4452,7 @@ var LoadMemoryTool = (_class15 = class extends BaseTool {
4233
4452
  };
4234
4453
  }
4235
4454
  }
4236
- }, _class15);
4455
+ }, _class17);
4237
4456
 
4238
4457
  // src/tools/common/load-artifacts-tool.ts
4239
4458
  init_base_tool();
@@ -4384,10 +4603,10 @@ var McpError = class extends Error {
4384
4603
  };
4385
4604
 
4386
4605
  // src/tools/mcp/sampling-handler.ts
4387
- var McpSamplingHandler = (_class16 = class {
4388
- __init24() {this.logger = new Logger({ name: "McpSamplingHandler" })}
4606
+ var McpSamplingHandler = (_class18 = class {
4607
+ __init27() {this.logger = new Logger({ name: "McpSamplingHandler" })}
4389
4608
 
4390
- constructor(samplingHandler) {;_class16.prototype.__init24.call(this);
4609
+ constructor(samplingHandler) {;_class18.prototype.__init27.call(this);
4391
4610
  this.samplingHandler = samplingHandler;
4392
4611
  }
4393
4612
  /**
@@ -4568,7 +4787,7 @@ var McpSamplingHandler = (_class16 = class {
4568
4787
  this.samplingHandler = handler;
4569
4788
  this.logger.debug("ADK sampling handler updated");
4570
4789
  }
4571
- }, _class16);
4790
+ }, _class18);
4572
4791
  function createSamplingHandler(handler) {
4573
4792
  return handler;
4574
4793
  }
@@ -4602,14 +4821,14 @@ function withRetry(fn, instance, reinitMethod, maxRetries = 1) {
4602
4821
  }
4603
4822
 
4604
4823
  // src/tools/mcp/client.ts
4605
- var McpClientService = (_class17 = class {
4606
-
4607
- __init25() {this.client = null}
4608
- __init26() {this.transport = null}
4609
- __init27() {this.isClosing = false}
4610
- __init28() {this.mcpSamplingHandler = null}
4611
- __init29() {this.logger = new Logger({ name: "McpClientService" })}
4612
- constructor(config) {;_class17.prototype.__init25.call(this);_class17.prototype.__init26.call(this);_class17.prototype.__init27.call(this);_class17.prototype.__init28.call(this);_class17.prototype.__init29.call(this);
4824
+ var McpClientService = (_class19 = class {
4825
+
4826
+ __init28() {this.client = null}
4827
+ __init29() {this.transport = null}
4828
+ __init30() {this.isClosing = false}
4829
+ __init31() {this.mcpSamplingHandler = null}
4830
+ __init32() {this.logger = new Logger({ name: "McpClientService" })}
4831
+ constructor(config) {;_class19.prototype.__init28.call(this);_class19.prototype.__init29.call(this);_class19.prototype.__init30.call(this);_class19.prototype.__init31.call(this);_class19.prototype.__init32.call(this);
4613
4832
  this.config = config;
4614
4833
  if (config.samplingHandler) {
4615
4834
  this.mcpSamplingHandler = new McpSamplingHandler(config.samplingHandler);
@@ -4665,15 +4884,13 @@ var McpClientService = (_class17 = class {
4665
4884
  await connectPromise;
4666
4885
  }
4667
4886
  await this.setupSamplingHandler(client);
4668
- if (this.config.debug) {
4669
- console.log("\u2705 MCP client connected successfully");
4670
- }
4887
+ this.logger.debug("\u2705 MCP client connected successfully");
4671
4888
  this.client = client;
4672
4889
  return client;
4673
4890
  } catch (error) {
4674
4891
  await this.cleanupResources();
4675
4892
  if (!(error instanceof McpError)) {
4676
- console.error("Failed to initialize MCP client:", error);
4893
+ this.logger.error("Failed to initialize MCP client:", error);
4677
4894
  throw new McpError(
4678
4895
  `Failed to initialize MCP client: ${error instanceof Error ? error.message : String(error)}`,
4679
4896
  "connection_error" /* CONNECTION_ERROR */,
@@ -4689,12 +4906,10 @@ var McpClientService = (_class17 = class {
4689
4906
  async createTransport() {
4690
4907
  try {
4691
4908
  if (this.config.transport.mode === "sse") {
4692
- if (this.config.debug) {
4693
- console.log(
4694
- "\u{1F680} Initializing MCP client in SSE mode",
4695
- this.config.transport.serverUrl
4696
- );
4697
- }
4909
+ this.logger.debug(
4910
+ "\u{1F680} Initializing MCP client in SSE mode",
4911
+ this.config.transport.serverUrl
4912
+ );
4698
4913
  const headers = {
4699
4914
  ...this.config.transport.headers || {},
4700
4915
  ...this.config.headers || {}
@@ -4709,12 +4924,10 @@ var McpClientService = (_class17 = class {
4709
4924
  }
4710
4925
  );
4711
4926
  }
4712
- if (this.config.debug) {
4713
- console.log(
4714
- "\u{1F680} Initializing MCP client in STDIO mode",
4715
- this.config.transport.command
4716
- );
4717
- }
4927
+ this.logger.debug(
4928
+ "\u{1F680} Initializing MCP client in STDIO mode",
4929
+ this.config.transport.command
4930
+ );
4718
4931
  return new (0, _stdiojs.StdioClientTransport)({
4719
4932
  command: this.config.transport.command,
4720
4933
  args: this.config.transport.args,
@@ -4733,9 +4946,7 @@ var McpClientService = (_class17 = class {
4733
4946
  * Used by the retry mechanism.
4734
4947
  */
4735
4948
  async reinitialize() {
4736
- if (this.config.debug) {
4737
- console.log("\u{1F504} Reinitializing MCP client after closed connection");
4738
- }
4949
+ this.logger.debug("\u{1F504} Reinitializing MCP client after closed connection");
4739
4950
  await this.cleanupResources();
4740
4951
  this.client = null;
4741
4952
  this.transport = null;
@@ -4759,11 +4970,9 @@ var McpClientService = (_class17 = class {
4759
4970
  if (this.transport && typeof this.transport.close === "function") {
4760
4971
  await this.transport.close();
4761
4972
  }
4762
- if (this.config.debug) {
4763
- console.log("\u{1F9F9} Cleaned up MCP client resources");
4764
- }
4973
+ this.logger.debug("\u{1F9F9} Cleaned up MCP client resources");
4765
4974
  } catch (error) {
4766
- console.error("Error cleaning up MCP resources:", error);
4975
+ this.logger.error("Error cleaning up MCP resources:", error);
4767
4976
  } finally {
4768
4977
  this.client = null;
4769
4978
  this.transport = null;
@@ -4785,7 +4994,7 @@ var McpClientService = (_class17 = class {
4785
4994
  },
4786
4995
  this,
4787
4996
  async (instance) => await instance.reinitialize(),
4788
- _optionalChain([this, 'access', _162 => _162.config, 'access', _163 => _163.retryOptions, 'optionalAccess', _164 => _164.maxRetries]) || 2
4997
+ _optionalChain([this, 'access', _147 => _147.config, 'access', _148 => _148.retryOptions, 'optionalAccess', _149 => _149.maxRetries]) || 2
4789
4998
  );
4790
4999
  return await wrappedCall();
4791
5000
  } catch (error) {
@@ -4805,9 +5014,7 @@ var McpClientService = (_class17 = class {
4805
5014
  * Similar to Python's close() method.
4806
5015
  */
4807
5016
  async close() {
4808
- if (this.config.debug) {
4809
- console.log("\u{1F51A} Closing MCP client service");
4810
- }
5017
+ this.logger.debug("\u{1F51A} Closing MCP client service");
4811
5018
  await this.cleanupResources();
4812
5019
  }
4813
5020
  /**
@@ -4818,11 +5025,9 @@ var McpClientService = (_class17 = class {
4818
5025
  }
4819
5026
  async setupSamplingHandler(client) {
4820
5027
  if (!this.mcpSamplingHandler) {
4821
- if (this.config.debug) {
4822
- console.log(
4823
- "\u26A0\uFE0F No sampling handler provided - sampling requests will be rejected"
4824
- );
4825
- }
5028
+ this.logger.debug(
5029
+ "\u26A0\uFE0F No sampling handler provided - sampling requests will be rejected"
5030
+ );
4826
5031
  return;
4827
5032
  }
4828
5033
  try {
@@ -4832,12 +5037,10 @@ var McpClientService = (_class17 = class {
4832
5037
  try {
4833
5038
  this.logger.debug("Received sampling request:", request);
4834
5039
  const response = await this.mcpSamplingHandler.handleSamplingRequest(request);
4835
- if (this.config.debug) {
4836
- console.log("\u2705 Sampling request completed successfully");
4837
- }
5040
+ this.logger.debug("\u2705 Sampling request completed successfully");
4838
5041
  return response;
4839
5042
  } catch (error) {
4840
- console.error("\u274C Error handling sampling request:", error);
5043
+ this.logger.error("\u274C Error handling sampling request:", error);
4841
5044
  if (error instanceof McpError) {
4842
5045
  throw error;
4843
5046
  }
@@ -4849,16 +5052,12 @@ var McpClientService = (_class17 = class {
4849
5052
  }
4850
5053
  }
4851
5054
  );
4852
- if (this.config.debug) {
4853
- console.log("\u{1F3AF} Sampling handler registered successfully");
4854
- }
5055
+ this.logger.debug("\u{1F3AF} Sampling handler registered successfully");
4855
5056
  } catch (error) {
4856
- console.error("Failed to setup sampling handler:", error);
4857
- if (this.config.debug) {
4858
- console.log(
4859
- "\u26A0\uFE0F Sampling handler registration failed, continuing without sampling support"
4860
- );
4861
- }
5057
+ this.logger.error("Failed to setup sampling handler:", error);
5058
+ this.logger.debug(
5059
+ "\u26A0\uFE0F Sampling handler registration failed, continuing without sampling support"
5060
+ );
4862
5061
  }
4863
5062
  }
4864
5063
  /**
@@ -4868,7 +5067,7 @@ var McpClientService = (_class17 = class {
4868
5067
  this.mcpSamplingHandler = new McpSamplingHandler(handler);
4869
5068
  if (this.client) {
4870
5069
  this.setupSamplingHandler(this.client).catch((error) => {
4871
- console.error("Failed to update ADK sampling handler:", error);
5070
+ this.logger.error("Failed to update ADK sampling handler:", error);
4872
5071
  });
4873
5072
  }
4874
5073
  }
@@ -4879,13 +5078,13 @@ var McpClientService = (_class17 = class {
4879
5078
  this.mcpSamplingHandler = null;
4880
5079
  if (this.client) {
4881
5080
  try {
4882
- _optionalChain([this, 'access', _165 => _165.client, 'access', _166 => _166.removeRequestHandler, 'optionalCall', _167 => _167("sampling/createMessage")]);
5081
+ _optionalChain([this, 'access', _150 => _150.client, 'access', _151 => _151.removeRequestHandler, 'optionalCall', _152 => _152("sampling/createMessage")]);
4883
5082
  } catch (error) {
4884
- console.error("Failed to remove sampling handler:", error);
5083
+ this.logger.error("Failed to remove sampling handler:", error);
4885
5084
  }
4886
5085
  }
4887
5086
  }
4888
- }, _class17);
5087
+ }, _class19);
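// --- Illustrative sketch (editor's example, not part of the package) ---
// The hunks above migrate connection-lifecycle messages from ad-hoc
// `if (this.config.debug) console.log(...)` checks to logger.debug/logger.error
// calls. MiniLogger below is a hypothetical stand-in showing that pattern; it
// is not the package's Logger implementation.
class MiniLogger {
  constructor(
    private readonly name: string,
    private readonly debugEnabled: boolean,
  ) {}

  debug(...args: unknown[]): void {
    // Debug output is gated in one place instead of at every call site.
    if (this.debugEnabled) console.log(`[${this.name}]`, ...args);
  }

  error(...args: unknown[]): void {
    console.error(`[${this.name}]`, ...args);
  }
}

const exampleLogger = new MiniLogger("McpClientService", true);
exampleLogger.debug("MCP client connected successfully"); // printed only when debugging is enabled
// --- end sketch ---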
4889
5088
 
4890
5089
  // src/tools/mcp/create-tool.ts
4891
5090
  init_logger();
@@ -5087,11 +5286,11 @@ async function createTool(mcpTool, client) {
5087
5286
  throw error;
5088
5287
  }
5089
5288
  }
5090
- var McpToolAdapter = (_class18 = class extends BaseTool {
5289
+ var McpToolAdapter = (_class20 = class extends BaseTool {
5091
5290
 
5092
5291
 
5093
- __init30() {this.clientService = null}
5094
- __init31() {this.logger = new Logger({ name: "McpToolAdapter" })}
5292
+ __init33() {this.clientService = null}
5293
+ __init34() {this.logger = new Logger({ name: "McpToolAdapter" })}
5095
5294
  constructor(mcpTool, client) {
5096
5295
  const metadata = mcpTool.metadata || {};
5097
5296
  super({
@@ -5100,7 +5299,7 @@ var McpToolAdapter = (_class18 = class extends BaseTool {
5100
5299
  isLongRunning: _nullishCoalesce(metadata.isLongRunning, () => ( false)),
5101
5300
  shouldRetryOnFailure: _nullishCoalesce(metadata.shouldRetryOnFailure, () => ( false)),
5102
5301
  maxRetryAttempts: _nullishCoalesce(metadata.maxRetryAttempts, () => ( 3))
5103
- });_class18.prototype.__init30.call(this);_class18.prototype.__init31.call(this);;
5302
+ });_class20.prototype.__init33.call(this);_class20.prototype.__init34.call(this);;
5104
5303
  this.mcpTool = mcpTool;
5105
5304
  this.client = client;
5106
5305
  if (client.reinitialize && typeof client.reinitialize === "function") {
@@ -5173,7 +5372,7 @@ var McpToolAdapter = (_class18 = class extends BaseTool {
5173
5372
  throw error;
5174
5373
  }
5175
5374
  }
5176
- }, _class18);
5375
+ }, _class20);
5177
5376
 
5178
5377
  // src/tools/mcp/servers.ts
5179
5378
  function createMcpConfig(name, packageName, config = {}) {
@@ -5249,7 +5448,7 @@ function McpNearAgent(config = {}) {
5249
5448
  }
5250
5449
  function McpNearIntentSwaps(config = {}) {
5251
5450
  const mcpConfig = createMcpConfig(
5252
- "NEAR Intent Swaps MCP Client",
5451
+ "Near Intents Swaps MCP Client",
5253
5452
  "@iqai/mcp-near-intent-swaps",
5254
5453
  config
5255
5454
  );
@@ -5294,13 +5493,13 @@ function McpGeneric(packageName, config = {}, name) {
5294
5493
  }
5295
5494
 
5296
5495
  // src/tools/mcp/index.ts
5297
- var McpToolset = (_class19 = class {
5496
+ var McpToolset = (_class21 = class {
5298
5497
 
5299
- __init32() {this.clientService = null}
5300
- __init33() {this.toolFilter = null}
5301
- __init34() {this.tools = []}
5302
- __init35() {this.isClosing = false}
5303
- constructor(config, toolFilter = null) {;_class19.prototype.__init32.call(this);_class19.prototype.__init33.call(this);_class19.prototype.__init34.call(this);_class19.prototype.__init35.call(this);
5498
+ __init35() {this.clientService = null}
5499
+ __init36() {this.toolFilter = null}
5500
+ __init37() {this.tools = []}
5501
+ __init38() {this.isClosing = false}
5502
+ constructor(config, toolFilter = null) {;_class21.prototype.__init35.call(this);_class21.prototype.__init36.call(this);_class21.prototype.__init37.call(this);_class21.prototype.__init38.call(this);
5304
5503
  this.config = config;
5305
5504
  this.toolFilter = toolFilter;
5306
5505
  this.clientService = new McpClientService(config);
@@ -5375,7 +5574,7 @@ var McpToolset = (_class19 = class {
5375
5574
  "resource_closed_error" /* RESOURCE_CLOSED_ERROR */
5376
5575
  );
5377
5576
  }
5378
- if (this.tools.length > 0 && !_optionalChain([this, 'access', _168 => _168.config, 'access', _169 => _169.cacheConfig, 'optionalAccess', _170 => _170.enabled]) === false) {
5577
+ if (this.tools.length > 0 && !_optionalChain([this, 'access', _153 => _153.config, 'access', _154 => _154.cacheConfig, 'optionalAccess', _155 => _155.enabled]) === false) {
5379
5578
  return this.tools;
5380
5579
  }
5381
5580
  if (!this.clientService) {
@@ -5401,7 +5600,7 @@ var McpToolset = (_class19 = class {
5401
5600
  }
5402
5601
  }
5403
5602
  }
5404
- if (_optionalChain([this, 'access', _171 => _171.config, 'access', _172 => _172.cacheConfig, 'optionalAccess', _173 => _173.enabled]) !== false) {
5603
+ if (_optionalChain([this, 'access', _156 => _156.config, 'access', _157 => _157.cacheConfig, 'optionalAccess', _158 => _158.enabled]) !== false) {
5405
5604
  this.tools = tools;
5406
5605
  }
5407
5606
  return tools;
@@ -5461,7 +5660,7 @@ var McpToolset = (_class19 = class {
5461
5660
  async dispose() {
5462
5661
  await this.close();
5463
5662
  }
5464
- }, _class19);
5663
+ }, _class21);
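// --- Illustrative sketch (editor's example, not part of the package) ---
// The McpToolset code above caches fetched tools unless cacheConfig.enabled is
// explicitly false (the compiled `cacheConfig?.enabled !== false` check), so an
// undefined config still means "cache". cacheEnabled is an illustrative helper,
// not a package export.
interface CacheConfigLike {
  enabled?: boolean;
}

function cacheEnabled(cacheConfig?: CacheConfigLike): boolean {
  return cacheConfig?.enabled !== false;
}

console.log(cacheEnabled(undefined));          // true  — default is to cache
console.log(cacheEnabled({}));                 // true  — unset still caches
console.log(cacheEnabled({ enabled: false })); // false — only an explicit opt-out disables it
// --- end sketch ---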
5465
5664
  async function getMcpTools(config, toolFilter) {
5466
5665
  const toolset = new McpToolset(config, toolFilter);
5467
5666
  try {
@@ -5489,12 +5688,12 @@ function populateClientFunctionCallId(modelResponseEvent) {
5489
5688
  }
5490
5689
  }
5491
5690
  function removeClientFunctionCallId(content) {
5492
- if (_optionalChain([content, 'optionalAccess', _174 => _174.parts])) {
5691
+ if (_optionalChain([content, 'optionalAccess', _159 => _159.parts])) {
5493
5692
  for (const part of content.parts) {
5494
- if (_optionalChain([part, 'access', _175 => _175.functionCall, 'optionalAccess', _176 => _176.id, 'optionalAccess', _177 => _177.startsWith, 'call', _178 => _178(AF_FUNCTION_CALL_ID_PREFIX)])) {
5693
+ if (_optionalChain([part, 'access', _160 => _160.functionCall, 'optionalAccess', _161 => _161.id, 'optionalAccess', _162 => _162.startsWith, 'call', _163 => _163(AF_FUNCTION_CALL_ID_PREFIX)])) {
5495
5694
  part.functionCall.id = void 0;
5496
5695
  }
5497
- if (_optionalChain([part, 'access', _179 => _179.functionResponse, 'optionalAccess', _180 => _180.id, 'optionalAccess', _181 => _181.startsWith, 'call', _182 => _182(AF_FUNCTION_CALL_ID_PREFIX)])) {
5696
+ if (_optionalChain([part, 'access', _164 => _164.functionResponse, 'optionalAccess', _165 => _165.id, 'optionalAccess', _166 => _166.startsWith, 'call', _167 => _167(AF_FUNCTION_CALL_ID_PREFIX)])) {
5498
5697
  part.functionResponse.id = void 0;
5499
5698
  }
5500
5699
  }
@@ -5639,7 +5838,7 @@ function mergeParallelFunctionResponseEvents(functionResponseEvents) {
5639
5838
  }
5640
5839
  const mergedParts = [];
5641
5840
  for (const event of functionResponseEvents) {
5642
- if (_optionalChain([event, 'access', _183 => _183.content, 'optionalAccess', _184 => _184.parts])) {
5841
+ if (_optionalChain([event, 'access', _168 => _168.content, 'optionalAccess', _169 => _169.parts])) {
5643
5842
  for (const part of event.content.parts) {
5644
5843
  mergedParts.push(part);
5645
5844
  }
@@ -5672,94 +5871,52 @@ function isLlmAgent(agent) {
5672
5871
 
5673
5872
  // src/flows/llm-flows/base-llm-flow.ts
5674
5873
  var _ADK_AGENT_NAME_LABEL_KEY = "adk_agent_name";
5675
- var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36.call(this);_class20.prototype.__init37.call(this);_class20.prototype.__init38.call(this); }
5676
- __init36() {this.requestProcessors = []}
5677
- __init37() {this.responseProcessors = []}
5678
- __init38() {this.logger = new Logger({ name: "BaseLlmFlow" })}
5874
+ var BaseLlmFlow = (_class22 = class {constructor() { _class22.prototype.__init39.call(this);_class22.prototype.__init40.call(this);_class22.prototype.__init41.call(this); }
5875
+ __init39() {this.requestProcessors = []}
5876
+ __init40() {this.responseProcessors = []}
5877
+ __init41() {this.logger = new Logger({ name: "BaseLlmFlow" })}
5679
5878
  async *runAsync(invocationContext) {
5680
- this.logger.debug("\u{1F680} Starting runAsync flow", {
5681
- invocationId: invocationContext.invocationId,
5682
- agentName: invocationContext.agent.name,
5683
- branch: invocationContext.branch
5684
- });
5879
+ this.logger.info(`Agent '${invocationContext.agent.name}' started.`);
5685
5880
  let stepCount = 0;
5686
5881
  while (true) {
5687
5882
  stepCount++;
5688
- this.logger.debug(`\u{1F4CB} Running step ${stepCount}`, {
5689
- invocationId: invocationContext.invocationId
5690
- });
5691
5883
  let lastEvent = null;
5692
- let eventCount = 0;
5693
5884
  for await (const event of this._runOneStepAsync(invocationContext)) {
5694
- eventCount++;
5695
5885
  lastEvent = event;
5696
- this.logger.debug(
5697
- `\u{1F4E4} Yielding event ${eventCount} from step ${stepCount}`,
5698
- {
5699
- eventId: event.id,
5700
- eventType: event.constructor.name,
5701
- hasContent: !!event.content,
5702
- isFinalResponse: event.isFinalResponse(),
5703
- partial: event.partial
5704
- }
5705
- );
5706
5886
  yield event;
5707
5887
  }
5708
5888
  if (!lastEvent || lastEvent.isFinalResponse()) {
5709
- this.logger.debug("\u2705 Flow completed", {
5710
- reason: !lastEvent ? "no_events" : "final_response",
5711
- totalSteps: stepCount
5712
- });
5889
+ this.logger.info(
5890
+ `Agent '${invocationContext.agent.name}' finished after ${stepCount} steps.`
5891
+ );
5713
5892
  break;
5714
5893
  }
5715
5894
  if (lastEvent.partial) {
5716
- this.logger.error("\u274C Flow error: Last event is partial", {
5717
- eventId: lastEvent.id,
5718
- stepCount
5719
- });
5895
+ this.logger.error(
5896
+ "Partial event encountered. LLM max output limit may be reached."
5897
+ );
5720
5898
  throw new Error(
5721
5899
  "Last event shouldn't be partial. LLM max output limit may be reached."
5722
5900
  );
5723
5901
  }
5724
5902
  }
5725
- this.logger.debug("\u{1F3C1} runAsync flow finished", {
5726
- totalSteps: stepCount,
5727
- invocationId: invocationContext.invocationId
5728
- });
5729
5903
  }
5730
5904
  async *runLive(invocationContext) {
5731
- this.logger.debug("\u{1F534} Starting runLive flow", {
5732
- invocationId: invocationContext.invocationId,
5733
- agentName: invocationContext.agent.name
5734
- });
5735
5905
  this.logger.warn("\u26A0\uFE0F runLive not fully implemented, delegating to runAsync");
5736
5906
  yield* this.runAsync(invocationContext);
5737
5907
  }
5738
5908
  async *_runOneStepAsync(invocationContext) {
5739
- this.logger.debug("\u{1F504} Starting one step execution", {
5740
- invocationId: invocationContext.invocationId
5741
- });
5742
5909
  const llmRequest = new LlmRequest();
5743
- this.logger.debug("\u{1F4DD} Created new LlmRequest", {
5744
- requestId: llmRequest.id || "unknown"
5745
- });
5746
- this.logger.debug("\u{1F527} Starting preprocessing phase");
5747
5910
  let preprocessEventCount = 0;
5748
5911
  for await (const event of this._preprocessAsync(
5749
5912
  invocationContext,
5750
5913
  llmRequest
5751
5914
  )) {
5752
5915
  preprocessEventCount++;
5753
- this.logger.debug(`\u{1F4E4} Preprocessing event ${preprocessEventCount}`, {
5754
- eventId: event.id
5755
- });
5756
5916
  yield event;
5757
5917
  }
5758
- this.logger.debug("\u2705 Preprocessing completed", {
5759
- eventCount: preprocessEventCount
5760
- });
5761
5918
  if (invocationContext.endInvocation) {
5762
- this.logger.debug("\u{1F6D1} Invocation ended during preprocessing");
5919
+ this.logger.info("Invocation ended during preprocessing.");
5763
5920
  return;
5764
5921
  }
5765
5922
  const modelResponseEvent = new Event({
@@ -5768,9 +5925,6 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
5768
5925
  author: invocationContext.agent.name,
5769
5926
  branch: invocationContext.branch
5770
5927
  });
5771
- this.logger.debug("\u{1F916} Starting LLM call phase", {
5772
- modelResponseEventId: modelResponseEvent.id
5773
- });
5774
5928
  let llmResponseCount = 0;
5775
5929
  for await (const llmResponse of this._callLlmAsync(
5776
5930
  invocationContext,
@@ -5778,12 +5932,6 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
5778
5932
  modelResponseEvent
5779
5933
  )) {
5780
5934
  llmResponseCount++;
5781
- this.logger.debug(`\u{1F504} Processing LLM response ${llmResponseCount}`, {
5782
- hasContent: !!llmResponse.content,
5783
- hasError: !!llmResponse.errorCode,
5784
- interrupted: !!llmResponse.interrupted,
5785
- partial: !!llmResponse.partial
5786
- });
5787
5935
  for await (const event of this._postprocessAsync(
5788
5936
  invocationContext,
5789
5937
  llmRequest,
@@ -5791,89 +5939,47 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
5791
5939
  modelResponseEvent
5792
5940
  )) {
5793
5941
  modelResponseEvent.id = Event.newId();
5794
- this.logger.debug("\u{1F4E4} Yielding postprocessed event", {
5795
- eventId: event.id,
5796
- hasFunctionCalls: !!event.getFunctionCalls()
5797
- });
5798
5942
  yield event;
5799
5943
  }
5800
5944
  }
5801
- this.logger.debug("\u2705 One step execution completed", {
5802
- llmResponseCount
5803
- });
5804
5945
  }
5805
5946
  async *_preprocessAsync(invocationContext, llmRequest) {
5806
- this.logger.debug("\u{1F527} Starting preprocessing", {
5807
- processorCount: this.requestProcessors.length
5808
- });
5809
5947
  const agent = invocationContext.agent;
5810
5948
  if (!("canonicalTools" in agent) || typeof agent.canonicalTools !== "function") {
5811
- this.logger.debug("\u2139\uFE0F Agent has no canonical tools");
5812
5949
  return;
5813
5950
  }
5814
- for (let i = 0; i < this.requestProcessors.length; i++) {
5815
- const processor = this.requestProcessors[i];
5816
- this.logger.debug(`\u{1F504} Running request processor ${i + 1}`, {
5817
- processorName: _optionalChain([processor, 'access', _185 => _185.constructor, 'optionalAccess', _186 => _186.name]) || "unknown"
5818
- });
5819
- let processorEventCount = 0;
5951
+ for (const processor of this.requestProcessors) {
5820
5952
  for await (const event of processor.runAsync(
5821
5953
  invocationContext,
5822
5954
  llmRequest
5823
5955
  )) {
5824
- processorEventCount++;
5825
- this.logger.debug(
5826
- `\u{1F4E4} Request processor ${i + 1} event ${processorEventCount}`,
5827
- {
5828
- eventId: event.id
5829
- }
5830
- );
5831
5956
  yield event;
5832
5957
  }
5833
- this.logger.debug(`\u2705 Request processor ${i + 1} completed`, {
5834
- eventCount: processorEventCount
5835
- });
5836
5958
  }
5837
5959
  const tools = await agent.canonicalTools(
5838
5960
  new ReadonlyContext(invocationContext)
5839
5961
  );
5840
- this.logger.debug("\u{1F6E0}\uFE0F Processing canonical tools", {
5841
- toolCount: tools.length
5842
- });
5843
- for (let i = 0; i < tools.length; i++) {
5844
- const tool = tools[i];
5845
- this.logger.debug(`\u{1F504} Processing tool ${i + 1}`, {
5846
- toolName: _optionalChain([tool, 'access', _187 => _187.constructor, 'optionalAccess', _188 => _188.name]) || "unknown"
5847
- });
5962
+ for (const tool of tools) {
5848
5963
  const toolContext = new ToolContext(invocationContext);
5849
5964
  await tool.processLlmRequest(toolContext, llmRequest);
5850
- this.logger.debug(`\u2705 Tool ${i + 1} processed`);
5851
5965
  }
5852
- this.logger.debug("\u2705 Preprocessing completed", {
5853
- totalTools: tools.length
5854
- });
5966
+ if (tools.length > 0) {
5967
+ const toolsData = tools.map((tool) => ({
5968
+ Name: tool.name,
5969
+ Description: _optionalChain([tool, 'access', _170 => _170.description, 'optionalAccess', _171 => _171.substring, 'call', _172 => _172(0, 50)]) + (_optionalChain([tool, 'access', _173 => _173.description, 'optionalAccess', _174 => _174.length]) > 50 ? "..." : ""),
5970
+ "Long Running": tool.isLongRunning ? "Yes" : "No"
5971
+ }));
5972
+ this.logger.debugArray("\u{1F6E0}\uFE0F Available Tools", toolsData);
5973
+ }
5855
5974
  }
5856
5975
  async *_postprocessAsync(invocationContext, llmRequest, llmResponse, modelResponseEvent) {
5857
- this.logger.debug("\u{1F504} Starting postprocessing", {
5858
- hasContent: !!llmResponse.content,
5859
- hasError: !!llmResponse.errorCode,
5860
- interrupted: !!llmResponse.interrupted
5861
- });
5862
- let processorEventCount = 0;
5863
5976
  for await (const event of this._postprocessRunProcessorsAsync(
5864
5977
  invocationContext,
5865
5978
  llmResponse
5866
5979
  )) {
5867
- processorEventCount++;
5868
- this.logger.debug(`\u{1F4E4} Response processor event ${processorEventCount}`, {
5869
- eventId: event.id
5870
- });
5871
5980
  yield event;
5872
5981
  }
5873
5982
  if (!llmResponse.content && !llmResponse.errorCode && !llmResponse.interrupted) {
5874
- this.logger.debug(
5875
- "\u2139\uFE0F Skipping event creation - no content, error, or interruption"
5876
- );
5877
5983
  return;
5878
5984
  }
5879
5985
  const finalizedEvent = this._finalizeModelResponseEvent(
@@ -5881,54 +5987,32 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
5881
5987
  llmResponse,
5882
5988
  modelResponseEvent
5883
5989
  );
5884
- this.logger.debug("\u{1F4DD} Finalized model response event", {
5885
- eventId: finalizedEvent.id,
5886
- hasContent: !!finalizedEvent.content,
5887
- hasFunctionCalls: !!finalizedEvent.getFunctionCalls(),
5888
- longRunningToolIds: finalizedEvent.longRunningToolIds.entries.length || 0
5889
- });
5890
5990
  yield finalizedEvent;
5891
5991
  const functionCalls = finalizedEvent.getFunctionCalls();
5892
- if (functionCalls) {
5893
- this.logger.debug("\u{1F527} Processing function calls", {
5894
- functionCallCount: functionCalls.length
5895
- });
5896
- let functionEventCount = 0;
5992
+ if (functionCalls && functionCalls.length > 0) {
5993
+ const functionCallsData = functionCalls.map((fc) => ({
5994
+ Name: fc.name,
5995
+ Arguments: JSON.stringify(fc.args).substring(0, 100) + (JSON.stringify(fc.args).length > 100 ? "..." : ""),
5996
+ ID: fc.id || "auto"
5997
+ }));
5998
+ this.logger.debugArray("\u{1F527} Function Calls", functionCallsData);
5897
5999
  for await (const event of this._postprocessHandleFunctionCallsAsync(
5898
6000
  invocationContext,
5899
6001
  finalizedEvent,
5900
6002
  llmRequest
5901
6003
  )) {
5902
- functionEventCount++;
5903
- this.logger.debug(`\u{1F4E4} Function call event ${functionEventCount}`, {
5904
- eventId: event.id
5905
- });
5906
6004
  yield event;
5907
6005
  }
5908
- this.logger.debug("\u2705 Function calls processed", {
5909
- eventCount: functionEventCount
5910
- });
5911
6006
  }
5912
- this.logger.debug("\u2705 Postprocessing completed");
5913
6007
  }
5914
6008
  async *_postprocessLive(invocationContext, llmRequest, llmResponse, modelResponseEvent) {
5915
- this.logger.debug("\u{1F534} Starting live postprocessing", {
5916
- hasContent: !!llmResponse.content,
5917
- turnComplete: !!llmResponse.turnComplete
5918
- });
5919
6009
  for await (const event of this._postprocessRunProcessorsAsync(
5920
6010
  invocationContext,
5921
6011
  llmResponse
5922
6012
  )) {
5923
- this.logger.debug("\u{1F4E4} Live response processor event", {
5924
- eventId: event.id
5925
- });
5926
6013
  yield event;
5927
6014
  }
5928
6015
  if (!llmResponse.content && !llmResponse.errorCode && !llmResponse.interrupted && !llmResponse.turnComplete) {
5929
- this.logger.debug(
5930
- "\u2139\uFE0F Skipping live event - no content or completion signal"
5931
- );
5932
6016
  return;
5933
6017
  }
5934
6018
  const finalizedEvent = this._finalizeModelResponseEvent(
@@ -5936,165 +6020,83 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
5936
6020
  llmResponse,
5937
6021
  modelResponseEvent
5938
6022
  );
5939
- this.logger.debug("\u{1F4DD} Finalized live model response event", {
5940
- eventId: finalizedEvent.id,
5941
- hasFunctionCalls: !!finalizedEvent.getFunctionCalls()
5942
- });
5943
6023
  yield finalizedEvent;
5944
6024
  if (finalizedEvent.getFunctionCalls()) {
5945
- this.logger.debug("\u{1F527} Processing live function calls");
5946
6025
  const functionResponseEvent = await handleFunctionCallsAsync(
5947
6026
  invocationContext,
5948
6027
  finalizedEvent,
5949
6028
  llmRequest.toolsDict || {}
5950
6029
  );
5951
6030
  if (functionResponseEvent) {
5952
- this.logger.debug("\u{1F4E4} Live function response event", {
5953
- eventId: functionResponseEvent.id,
5954
- hasTransfer: !!_optionalChain([functionResponseEvent, 'access', _189 => _189.actions, 'optionalAccess', _190 => _190.transferToAgent])
5955
- });
5956
6031
  yield functionResponseEvent;
5957
- const transferToAgent = _optionalChain([functionResponseEvent, 'access', _191 => _191.actions, 'optionalAccess', _192 => _192.transferToAgent]);
6032
+ const transferToAgent = _optionalChain([functionResponseEvent, 'access', _175 => _175.actions, 'optionalAccess', _176 => _176.transferToAgent]);
5958
6033
  if (transferToAgent) {
5959
- this.logger.debug("\u{1F504} Transferring to agent in live mode", {
5960
- targetAgent: transferToAgent
5961
- });
6034
+ this.logger.info(`\u{1F504} Live transfer to agent '${transferToAgent}'`);
5962
6035
  const agentToRun = this._getAgentToRun(
5963
6036
  invocationContext,
5964
6037
  transferToAgent
5965
6038
  );
5966
- let transferEventCount = 0;
5967
- for await (const event of _optionalChain([agentToRun, 'access', _193 => _193.runLive, 'optionalCall', _194 => _194(invocationContext)]) || agentToRun.runAsync(invocationContext)) {
5968
- transferEventCount++;
5969
- this.logger.debug(`\u{1F4E4} Transfer agent event ${transferEventCount}`, {
5970
- eventId: event.id
5971
- });
6039
+ for await (const event of _optionalChain([agentToRun, 'access', _177 => _177.runLive, 'optionalCall', _178 => _178(invocationContext)]) || agentToRun.runAsync(invocationContext)) {
5972
6040
  yield event;
5973
6041
  }
5974
- this.logger.debug("\u2705 Agent transfer completed", {
5975
- eventCount: transferEventCount
5976
- });
5977
6042
  }
5978
6043
  }
5979
6044
  }
5980
- this.logger.debug("\u2705 Live postprocessing completed");
5981
6045
  }
5982
6046
  async *_postprocessRunProcessorsAsync(invocationContext, llmResponse) {
5983
- this.logger.debug("\u{1F504} Running response processors", {
5984
- processorCount: this.responseProcessors.length
5985
- });
5986
- for (let i = 0; i < this.responseProcessors.length; i++) {
5987
- const processor = this.responseProcessors[i];
5988
- this.logger.debug(`\u{1F504} Running response processor ${i + 1}`, {
5989
- processorName: _optionalChain([processor, 'access', _195 => _195.constructor, 'optionalAccess', _196 => _196.name]) || "unknown"
5990
- });
5991
- let processorEventCount = 0;
6047
+ for (const processor of this.responseProcessors) {
5992
6048
  for await (const event of processor.runAsync(
5993
6049
  invocationContext,
5994
6050
  llmResponse
5995
6051
  )) {
5996
- processorEventCount++;
5997
- this.logger.debug(
5998
- `\u{1F4E4} Response processor ${i + 1} event ${processorEventCount}`,
5999
- {
6000
- eventId: event.id
6001
- }
6002
- );
6003
6052
  yield event;
6004
6053
  }
6005
- this.logger;
6006
- this.logger.debug(`\u2705 Response processor ${i + 1} completed`, {
6007
- eventCount: processorEventCount
6008
- });
6009
6054
  }
6010
- this.logger.debug("\u2705 All response processors completed");
6011
6055
  }
6012
6056
  async *_postprocessHandleFunctionCallsAsync(invocationContext, functionCallEvent, llmRequest) {
6013
- this.logger.debug("\u{1F527} Handling function calls", {
6014
- eventId: functionCallEvent.id,
6015
- toolsDictSize: Object.keys(llmRequest.toolsDict || {}).length
6016
- });
6017
6057
  const functionResponseEvent = await handleFunctionCallsAsync(
6018
6058
  invocationContext,
6019
6059
  functionCallEvent,
6020
6060
  llmRequest.toolsDict || {}
6021
6061
  );
6022
6062
  if (functionResponseEvent) {
6023
- this.logger.debug("\u{1F4CB} Function calls executed", {
6024
- responseEventId: functionResponseEvent.id,
6025
- hasActions: !!functionResponseEvent.actions
6026
- });
6027
6063
  const authEvent = generateAuthEvent(
6028
6064
  invocationContext,
6029
6065
  functionResponseEvent
6030
6066
  );
6031
6067
  if (authEvent) {
6032
- this.logger.debug("\u{1F510} Generated auth event", {
6033
- authEventId: authEvent.id
6034
- });
6035
6068
  yield authEvent;
6036
6069
  }
6037
6070
  yield functionResponseEvent;
6038
- const transferToAgent = _optionalChain([functionResponseEvent, 'access', _197 => _197.actions, 'optionalAccess', _198 => _198.transferToAgent]);
6071
+ const transferToAgent = _optionalChain([functionResponseEvent, 'access', _179 => _179.actions, 'optionalAccess', _180 => _180.transferToAgent]);
6039
6072
  if (transferToAgent) {
6040
- this.logger.debug("\u{1F504} Transferring to agent", {
6041
- targetAgent: transferToAgent
6042
- });
6073
+ this.logger.info(`\u{1F504} Transferring to agent '${transferToAgent}'`);
6043
6074
  const agentToRun = this._getAgentToRun(
6044
6075
  invocationContext,
6045
6076
  transferToAgent
6046
6077
  );
6047
- let transferEventCount = 0;
6048
6078
  for await (const event of agentToRun.runAsync(invocationContext)) {
6049
- transferEventCount++;
6050
- this.logger.debug(`\u{1F4E4} Transfer agent event ${transferEventCount}`, {
6051
- eventId: event.id
6052
- });
6053
6079
  yield event;
6054
6080
  }
6055
- this.logger.debug("\u2705 Agent transfer completed", {
6056
- eventCount: transferEventCount
6057
- });
6058
6081
  }
6059
- } else {
6060
- this.logger.debug("\u2139\uFE0F No function response event generated");
6061
6082
  }
6062
6083
  }
6063
6084
  _getAgentToRun(invocationContext, agentName) {
6064
- this.logger.debug("\u{1F50D} Finding agent to run", {
6065
- targetAgent: agentName,
6066
- currentAgent: invocationContext.agent.name
6067
- });
6068
6085
  const rootAgent = invocationContext.agent.rootAgent;
6069
6086
  const agentToRun = rootAgent.findAgent(agentName);
6070
6087
  if (!agentToRun) {
6071
- this.logger.error("\u274C Agent not found", {
6072
- targetAgent: agentName,
6073
- rootAgent: rootAgent.name
6074
- });
6088
+ this.logger.error(`Agent '${agentName}' not found in the agent tree.`);
6075
6089
  throw new Error(`Agent ${agentName} not found in the agent tree.`);
6076
6090
  }
6077
- this.logger.debug("\u2705 Agent found", {
6078
- targetAgent: agentName,
6079
- agentType: agentToRun.constructor.name
6080
- });
6081
6091
  return agentToRun;
6082
6092
  }
6083
6093
  async *_callLlmAsync(invocationContext, llmRequest, modelResponseEvent) {
6084
- this.logger.debug("\u{1F916} Starting LLM call", {
6085
- model: llmRequest.model || "default",
6086
- eventId: modelResponseEvent.id
6087
- });
6088
- this.logger.debug("\u{1F504} Processing before model callbacks");
6089
6094
  const beforeModelCallbackContent = await this._handleBeforeModelCallback(
6090
6095
  invocationContext,
6091
6096
  llmRequest,
6092
6097
  modelResponseEvent
6093
6098
  );
6094
6099
  if (beforeModelCallbackContent) {
6095
- this.logger.debug("\u{1F4CB} Before model callback returned content", {
6096
- hasContent: !!beforeModelCallbackContent.content
6097
- });
6098
6100
  yield beforeModelCallbackContent;
6099
6101
  return;
6100
6102
  }
@@ -6102,27 +6104,38 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
6102
6104
  llmRequest.config.labels = llmRequest.config.labels || {};
6103
6105
  if (!(_ADK_AGENT_NAME_LABEL_KEY in llmRequest.config.labels)) {
6104
6106
  llmRequest.config.labels[_ADK_AGENT_NAME_LABEL_KEY] = invocationContext.agent.name;
6105
- this.logger.debug("\u{1F3F7}\uFE0F Added agent name label", {
6106
- agentName: invocationContext.agent.name
6107
- });
6108
6107
  }
6109
6108
  const llm = this.__getLlm(invocationContext);
6110
- this.logger.debug("\u{1F527} Retrieved LLM instance", {
6111
- llmModel: llm.model,
6112
- llmType: llm.constructor.name
6113
- });
6114
6109
  const runConfig = invocationContext.runConfig;
6115
6110
  if (runConfig.supportCfc) {
6116
6111
  this.logger.warn(
6117
- "\u26A0\uFE0F CFC (supportCfc) not fully implemented, using standard flow"
6112
+ "CFC (supportCfc) not fully implemented, using standard flow."
6118
6113
  );
6119
6114
  }
6120
6115
  invocationContext.incrementLlmCallCount();
6121
- this.logger.debug("\u{1F4C8} Incremented LLM call count");
6122
6116
  const isStreaming = invocationContext.runConfig.streamingMode === "sse" /* SSE */;
6123
- this.logger.debug("\u{1F30A} LLM generation mode", {
6124
- streaming: isStreaming,
6125
- streamingMode: invocationContext.runConfig.streamingMode
6117
+ const tools = _optionalChain([llmRequest, 'access', _181 => _181.config, 'optionalAccess', _182 => _182.tools]) || [];
6118
+ const toolNames = tools.map((tool) => {
6119
+ if (tool.functionDeclarations && Array.isArray(tool.functionDeclarations)) {
6120
+ return tool.functionDeclarations.map((fn) => fn.name).join(", ");
6121
+ }
6122
+ if (tool.name) return tool.name;
6123
+ if (_optionalChain([tool, 'access', _183 => _183.function, 'optionalAccess', _184 => _184.name])) return tool.function.name;
6124
+ if (_optionalChain([tool, 'access', _185 => _185.function, 'optionalAccess', _186 => _186.function, 'optionalAccess', _187 => _187.name])) return tool.function.function.name;
6125
+ return "unknown";
6126
+ }).join(", ");
6127
+ const systemInstruction = llmRequest.getSystemInstructionText() || "";
6128
+ const truncatedSystemInstruction = systemInstruction.length > 100 ? `${systemInstruction.substring(0, 100)}...` : systemInstruction;
6129
+ const contentPreview = _optionalChain([llmRequest, 'access', _188 => _188.contents, 'optionalAccess', _189 => _189.length]) > 0 ? this._formatContentPreview(llmRequest.contents[0]) : "none";
6130
+ this.logger.debugStructured("\u{1F4E4} LLM Request", {
6131
+ Model: llm.model,
6132
+ Agent: invocationContext.agent.name,
6133
+ "Content Items": _optionalChain([llmRequest, 'access', _190 => _190.contents, 'optionalAccess', _191 => _191.length]) || 0,
6134
+ "Content Preview": contentPreview,
6135
+ "System Instruction": truncatedSystemInstruction || "none",
6136
+ "Available Tools": toolNames || "none",
6137
+ "Tool Count": _optionalChain([llmRequest, 'access', _192 => _192.config, 'optionalAccess', _193 => _193.tools, 'optionalAccess', _194 => _194.length]) || 0,
6138
+ Streaming: isStreaming ? "Yes" : "No"
6126
6139
  });
6127
6140
  let responseCount = 0;
6128
6141
  for await (const llmResponse of llm.generateContentAsync(
@@ -6130,59 +6143,46 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
6130
6143
  isStreaming
6131
6144
  )) {
6132
6145
  responseCount++;
6133
- this.logger.debug(`\u{1F4E5} Received LLM response ${responseCount}`, {
6134
- hasContent: !!llmResponse.content,
6135
- hasError: !!llmResponse.errorCode,
6136
- interrupted: !!llmResponse.interrupted,
6137
- partial: !!llmResponse.partial,
6138
- finishReason: llmResponse.finishReason,
6139
- usage: llmResponse.usageMetadata ? {
6140
- promptTokens: llmResponse.usageMetadata.promptTokenCount,
6141
- completionTokens: llmResponse.usageMetadata.candidatesTokenCount,
6142
- totalTokens: llmResponse.usageMetadata.totalTokenCount
6143
- } : null
6144
- });
6145
6146
  traceLlmCall(
6146
6147
  invocationContext,
6147
6148
  modelResponseEvent.id,
6148
6149
  llmRequest,
6149
6150
  llmResponse
6150
6151
  );
6151
- this.logger.debug("\u{1F504} Processing after model callbacks");
6152
+ const tokenCount = _optionalChain([llmResponse, 'access', _195 => _195.usageMetadata, 'optionalAccess', _196 => _196.totalTokenCount]) || "unknown";
6153
+ const functionCallCount = _optionalChain([llmResponse, 'access', _197 => _197.content, 'optionalAccess', _198 => _198.parts, 'optionalAccess', _199 => _199.filter, 'call', _200 => _200((part) => part.functionCall), 'access', _201 => _201.length]) || 0;
6154
+ const responsePreview = this._formatResponsePreview(llmResponse);
6155
+ this.logger.debugStructured("\u{1F4E5} LLM Response", {
6156
+ Model: llm.model,
6157
+ "Token Count": tokenCount,
6158
+ "Function Calls": functionCallCount,
6159
+ "Response Preview": responsePreview,
6160
+ "Finish Reason": llmResponse.finishReason || "unknown",
6161
+ "Response #": responseCount,
6162
+ Partial: llmResponse.partial ? "Yes" : "No",
6163
+ Error: llmResponse.errorCode || "none"
6164
+ });
6152
6165
  const alteredLlmResponse = await this._handleAfterModelCallback(
6153
6166
  invocationContext,
6154
6167
  llmResponse,
6155
6168
  modelResponseEvent
6156
6169
  );
6157
- if (alteredLlmResponse) {
6158
- this.logger.debug("\u{1F4CB} After model callback altered response");
6159
- }
6160
6170
  yield alteredLlmResponse || llmResponse;
6161
6171
  }
6162
- this.logger.debug("\u2705 LLM call completed", {
6163
- totalResponses: responseCount
6164
- });
6165
6172
  }
6166
6173
  async _handleBeforeModelCallback(invocationContext, llmRequest, modelResponseEvent) {
6167
6174
  const agent = invocationContext.agent;
6168
6175
  if (!("canonicalBeforeModelCallbacks" in agent)) {
6169
- this.logger.debug("\u2139\uFE0F Agent has no before model callbacks");
6170
6176
  return;
6171
6177
  }
6172
6178
  const beforeCallbacks = agent.canonicalBeforeModelCallbacks;
6173
6179
  if (!beforeCallbacks) {
6174
- this.logger.debug("\u2139\uFE0F Before model callbacks is null/undefined");
6175
6180
  return;
6176
6181
  }
6177
- this.logger.debug("\u{1F504} Processing before model callbacks", {
6178
- callbackCount: beforeCallbacks.length
6179
- });
6180
6182
  const callbackContext = new CallbackContext(invocationContext, {
6181
6183
  eventActions: modelResponseEvent.actions
6182
6184
  });
6183
- for (let i = 0; i < beforeCallbacks.length; i++) {
6184
- const callback = beforeCallbacks[i];
6185
- this.logger.debug(`\u{1F504} Running before model callback ${i + 1}`);
6185
+ for (const callback of beforeCallbacks) {
6186
6186
  let beforeModelCallbackContent = callback({
6187
6187
  callbackContext,
6188
6188
  llmRequest
@@ -6191,35 +6191,23 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
6191
6191
  beforeModelCallbackContent = await beforeModelCallbackContent;
6192
6192
  }
6193
6193
  if (beforeModelCallbackContent) {
6194
- this.logger.debug(`\u2705 Before model callback ${i + 1} returned content`);
6195
6194
  return beforeModelCallbackContent;
6196
6195
  }
6197
- this.logger.debug(
6198
- `\u2705 Before model callback ${i + 1} completed (no content)`
6199
- );
6200
6196
  }
6201
- this.logger.debug("\u2705 All before model callbacks completed");
6202
6197
  }
6203
6198
  async _handleAfterModelCallback(invocationContext, llmResponse, modelResponseEvent) {
6204
6199
  const agent = invocationContext.agent;
6205
6200
  if (!("canonicalAfterModelCallbacks" in agent)) {
6206
- this.logger.debug("\u2139\uFE0F Agent has no after model callbacks");
6207
6201
  return;
6208
6202
  }
6209
6203
  const afterCallbacks = agent.canonicalAfterModelCallbacks;
6210
6204
  if (!afterCallbacks) {
6211
- this.logger.debug("\u2139\uFE0F After model callbacks is null/undefined");
6212
6205
  return;
6213
6206
  }
6214
- this.logger.debug("\u{1F504} Processing after model callbacks", {
6215
- callbackCount: afterCallbacks.length
6216
- });
6217
6207
  const callbackContext = new CallbackContext(invocationContext, {
6218
6208
  eventActions: modelResponseEvent.actions
6219
6209
  });
6220
- for (let i = 0; i < afterCallbacks.length; i++) {
6221
- const callback = afterCallbacks[i];
6222
- this.logger.debug(`\u{1F504} Running after model callback ${i + 1}`);
6210
+ for (const callback of afterCallbacks) {
6223
6211
  let afterModelCallbackContent = callback({
6224
6212
  callbackContext,
6225
6213
  llmResponse
@@ -6228,21 +6216,11 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
6228
6216
  afterModelCallbackContent = await afterModelCallbackContent;
6229
6217
  }
6230
6218
  if (afterModelCallbackContent) {
6231
- this.logger.debug(`\u2705 After model callback ${i + 1} returned content`);
6232
6219
  return afterModelCallbackContent;
6233
6220
  }
6234
- this.logger.debug(
6235
- `\u2705 After model callback ${i + 1} completed (no content)`
6236
- );
6237
6221
  }
6238
- this.logger.debug("\u2705 All after model callbacks completed");
6239
6222
  }
6240
6223
  _finalizeModelResponseEvent(llmRequest, llmResponse, modelResponseEvent) {
6241
- this.logger.debug("\u{1F4DD} Finalizing model response event", {
6242
- requestModel: llmRequest.model,
6243
- responseHasContent: !!llmResponse.content,
6244
- eventId: modelResponseEvent.id
6245
- });
6246
6224
  const eventData = { ...modelResponseEvent };
6247
6225
  const responseData = { ...llmResponse };
6248
6226
  Object.keys(responseData).forEach((key) => {
@@ -6254,38 +6232,48 @@ var BaseLlmFlow = (_class20 = class {constructor() { _class20.prototype.__init36
6254
6232
  if (event.content) {
6255
6233
  const functionCalls = event.getFunctionCalls();
6256
6234
  if (functionCalls) {
6257
- this.logger.debug("\u{1F527} Processing function calls in event", {
6258
- functionCallCount: functionCalls.length
6259
- });
6260
6235
  populateClientFunctionCallId(event);
6261
6236
  event.longRunningToolIds = getLongRunningFunctionCalls(
6262
6237
  functionCalls,
6263
6238
  llmRequest.toolsDict || {}
6264
6239
  );
6265
- this.logger.debug("\u2705 Function calls processed", {
6266
- longRunningToolCount: event.longRunningToolIds.entries.length || 0
6267
- });
6268
6240
  }
6269
6241
  }
6270
- this.logger.debug("\u2705 Model response event finalized", {
6271
- finalEventId: event.id,
6272
- hasContent: !!event.content,
6273
- hasFunctionCalls: !!event.getFunctionCalls()
6274
- });
6275
6242
  return event;
6276
6243
  }
6244
+ /**
6245
+ * Builds a short text preview of a content object for debug logging.
6246
+ * Joins any text parts (or stringifies the value) and truncates the result to 80 characters.
6247
+ */
6248
+ _formatContentPreview(content) {
6249
+ if (!content) return "none";
6250
+ if (content.parts && Array.isArray(content.parts)) {
6251
+ const textParts = content.parts.filter((part) => part.text).map((part) => part.text).join(" ");
6252
+ return textParts.length > 80 ? `${textParts.substring(0, 80)}...` : textParts || "no text content";
6253
+ }
6254
+ if (typeof content === "string") {
6255
+ return content.length > 80 ? `${content.substring(0, 80)}...` : content;
6256
+ }
6257
+ const stringified = JSON.stringify(content);
6258
+ return stringified.length > 80 ? `${stringified.substring(0, 80)}...` : stringified;
6259
+ }
6260
+ /**
6261
+ * Formats response content preview for debug logging
6262
+ */
6263
+ _formatResponsePreview(llmResponse) {
6264
+ if (!llmResponse.content) return "none";
6265
+ if (llmResponse.content.parts && Array.isArray(llmResponse.content.parts)) {
6266
+ const textParts = llmResponse.content.parts.filter((part) => part.text).map((part) => part.text).join(" ");
6267
+ return textParts.length > 80 ? `${textParts.substring(0, 80)}...` : textParts || "no text content";
6268
+ }
6269
+ const stringified = JSON.stringify(llmResponse.content);
6270
+ return stringified.length > 80 ? `${stringified.substring(0, 80)}...` : stringified;
6271
+ }
6277
6272
  __getLlm(invocationContext) {
6278
6273
  const llm = invocationContext.agent.canonicalModel;
6279
- this.logger.debug("\u{1F527} Retrieved canonical model", {
6280
- model: _optionalChain([llm, 'optionalAccess', _199 => _199.model]) || "unknown",
6281
- llmType: _optionalChain([llm, 'optionalAccess', _200 => _200.constructor, 'optionalAccess', _201 => _201.name]) || "unknown"
6282
- });
6283
6274
  return llm;
6284
6275
  }
6285
- }, _class20);
6286
-
6287
- // src/flows/llm-flows/single-flow.ts
6288
- init_logger();
6276
+ }, _class22);
6289
6277
 
6290
6278
  // src/flows/llm-flows/base-llm-processor.ts
6291
6279
  var BaseLlmRequestProcessor = class {
@@ -6293,52 +6281,6 @@ var BaseLlmRequestProcessor = class {
6293
6281
  var BaseLlmResponseProcessor = class {
6294
6282
  };
6295
6283
 
6296
- // src/flows/llm-flows/basic.ts
6297
- var BasicLlmRequestProcessor = class extends BaseLlmRequestProcessor {
6298
- async *runAsync(invocationContext, llmRequest) {
6299
- const agent = invocationContext.agent;
6300
- if (!this.isLlmAgent(agent)) {
6301
- return;
6302
- }
6303
- llmRequest.model = typeof agent.canonicalModel === "string" ? agent.canonicalModel : agent.canonicalModel.model;
6304
- if (agent.generateContentConfig) {
6305
- llmRequest.config = JSON.parse(
6306
- JSON.stringify(agent.generateContentConfig)
6307
- );
6308
- } else {
6309
- llmRequest.config = {};
6310
- }
6311
- if (agent.outputSchema) {
6312
- llmRequest.setOutputSchema(agent.outputSchema);
6313
- }
6314
- const runConfig = invocationContext.runConfig;
6315
- if (!llmRequest.liveConnectConfig) {
6316
- llmRequest.liveConnectConfig = {};
6317
- }
6318
- if (runConfig.responseModalities) {
6319
- llmRequest.liveConnectConfig.responseModalities = runConfig.responseModalities;
6320
- }
6321
- llmRequest.liveConnectConfig.speechConfig = runConfig.speechConfig;
6322
- llmRequest.liveConnectConfig.outputAudioTranscription = runConfig.outputAudioTranscription;
6323
- llmRequest.liveConnectConfig.inputAudioTranscription = runConfig.inputAudioTranscription;
6324
- llmRequest.liveConnectConfig.realtimeInputConfig = runConfig.realtimeInputConfig;
6325
- llmRequest.liveConnectConfig.enableAffectiveDialog = runConfig.enableAffectiveDialog;
6326
- llmRequest.liveConnectConfig.proactivity = runConfig.proactivity;
6327
- const tools = await agent.canonicalTools();
6328
- llmRequest.appendTools(tools);
6329
- for await (const _ of []) {
6330
- yield _;
6331
- }
6332
- }
6333
- /**
6334
- * Type guard to check if agent is an LlmAgent
6335
- */
6336
- isLlmAgent(agent) {
6337
- return agent && typeof agent === "object" && "canonicalModel" in agent;
6338
- }
6339
- };
6340
- var requestProcessor = new BasicLlmRequestProcessor();
6341
-
6342
6284
  // src/auth/auth-tool.ts
6343
6285
  var EnhancedAuthConfig = class {
6344
6286
  /**
@@ -6546,152 +6488,738 @@ var AuthLlmRequestProcessor = class extends BaseLlmRequestProcessor {
6546
6488
  }
6547
6489
  }
6548
6490
  };
6549
- var requestProcessor2 = new AuthLlmRequestProcessor();
6491
+ var requestProcessor = new AuthLlmRequestProcessor();
6550
6492
 
6551
- // src/flows/llm-flows/identity.ts
6552
- var IdentityLlmRequestProcessor = class extends BaseLlmRequestProcessor {
6493
+ // src/flows/llm-flows/basic.ts
6494
+ var BasicLlmRequestProcessor = class extends BaseLlmRequestProcessor {
6553
6495
  async *runAsync(invocationContext, llmRequest) {
6554
6496
  const agent = invocationContext.agent;
6555
- const instructions = [
6556
- `You are an agent. Your internal name is "${agent.name}".`
6557
- ];
6558
- if (agent.description) {
6559
- instructions.push(` The description about you is "${agent.description}"`);
6497
+ if (!this.isLlmAgent(agent)) {
6498
+ return;
6560
6499
  }
6561
- llmRequest.appendInstructions(instructions);
6500
+ llmRequest.model = typeof agent.canonicalModel === "string" ? agent.canonicalModel : agent.canonicalModel.model;
6501
+ if (agent.generateContentConfig) {
6502
+ llmRequest.config = JSON.parse(
6503
+ JSON.stringify(agent.generateContentConfig)
6504
+ );
6505
+ } else {
6506
+ llmRequest.config = {};
6507
+ }
6508
+ if (agent.outputSchema) {
6509
+ llmRequest.setOutputSchema(agent.outputSchema);
6510
+ }
6511
+ const runConfig = invocationContext.runConfig;
6512
+ if (!llmRequest.liveConnectConfig) {
6513
+ llmRequest.liveConnectConfig = {};
6514
+ }
6515
+ if (runConfig.responseModalities) {
6516
+ llmRequest.liveConnectConfig.responseModalities = runConfig.responseModalities;
6517
+ }
6518
+ llmRequest.liveConnectConfig.speechConfig = runConfig.speechConfig;
6519
+ llmRequest.liveConnectConfig.outputAudioTranscription = runConfig.outputAudioTranscription;
6520
+ llmRequest.liveConnectConfig.inputAudioTranscription = runConfig.inputAudioTranscription;
6521
+ llmRequest.liveConnectConfig.realtimeInputConfig = runConfig.realtimeInputConfig;
6522
+ llmRequest.liveConnectConfig.enableAffectiveDialog = runConfig.enableAffectiveDialog;
6523
+ llmRequest.liveConnectConfig.proactivity = runConfig.proactivity;
6524
+ const tools = await agent.canonicalTools();
6525
+ llmRequest.appendTools(tools);
6562
6526
  for await (const _ of []) {
6563
6527
  yield _;
6564
6528
  }
6565
6529
  }
6530
+ /**
6531
+ * Type guard to check if agent is an LlmAgent
6532
+ */
6533
+ isLlmAgent(agent) {
6534
+ return agent && typeof agent === "object" && "canonicalModel" in agent;
6535
+ }
6536
+ };
6537
+ var requestProcessor2 = new BasicLlmRequestProcessor();
6538
+
6539
+ // src/code-executors/base-code-executor.ts
6540
+ var BaseCodeExecutor = class {
6541
+
6542
+ constructor(config = {}) {
6543
+ this.config = {
6544
+ optimizeDataFile: _nullishCoalesce(config.optimizeDataFile, () => ( false)),
6545
+ stateful: _nullishCoalesce(config.stateful, () => ( false)),
6546
+ errorRetryAttempts: _nullishCoalesce(config.errorRetryAttempts, () => ( 2)),
6547
+ codeBlockDelimiters: _nullishCoalesce(config.codeBlockDelimiters, () => ( [
6548
+ ["`tool_code\n", "\n`"],
6549
+ ["`python\n", "\n`"]
6550
+ ])),
6551
+ executionResultDelimiters: _nullishCoalesce(config.executionResultDelimiters, () => ( [
6552
+ "`tool_output\n",
6553
+ "\n`"
6554
+ ]))
6555
+ };
6556
+ }
6557
+ // Getters for configuration
6558
+ get optimizeDataFile() {
6559
+ return this.config.optimizeDataFile;
6560
+ }
6561
+ get stateful() {
6562
+ return this.config.stateful;
6563
+ }
6564
+ get errorRetryAttempts() {
6565
+ return this.config.errorRetryAttempts;
6566
+ }
6567
+ get codeBlockDelimiters() {
6568
+ return this.config.codeBlockDelimiters;
6569
+ }
6570
+ get executionResultDelimiters() {
6571
+ return this.config.executionResultDelimiters;
6572
+ }
6566
6573
  };
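// Illustrative sketch, not part of the diffed bundle: a minimal concrete executor
// built on the BaseCodeExecutor contract above. The import path and the fake
// "echo" execution are assumptions for the example; executeCode is expected to
// resolve to { stdout, stderr, outputFiles } as consumed by the flow code below.
const { BaseCodeExecutor } = require("@iqai/adk"); // assumed export

class EchoCodeExecutor extends BaseCodeExecutor {
  async executeCode(invocationContext, codeExecutionInput) {
    // Pretend to run the code and report what was received.
    return {
      stdout: `received ${codeExecutionInput.code.length} characters of code`,
      stderr: "",
      outputFiles: []
    };
  }
}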
6567
- var requestProcessor3 = new IdentityLlmRequestProcessor();
6568
6574
 
6569
- // src/utils/instructions-utils.ts
6570
- async function injectSessionState(template, readonlyContext) {
6571
- const invocationContext = readonlyContext._invocationContext;
6572
- async function asyncReplace(pattern, replaceAsyncFn, string) {
6573
- const result = [];
6574
- let lastEnd = 0;
6575
- const matches = Array.from(string.matchAll(pattern));
6576
- for (const match of matches) {
6577
- result.push(string.slice(lastEnd, match.index));
6578
- const replacement = await replaceAsyncFn(match);
6579
- result.push(replacement);
6580
- lastEnd = (match.index || 0) + match[0].length;
6575
+ // src/code-executors/built-in-code-executor.ts
6576
+ var BuiltInCodeExecutor = class extends BaseCodeExecutor {
6577
+ constructor(config = {}) {
6578
+ super(config);
6579
+ }
6580
+ async executeCode(invocationContext, codeExecutionInput) {
6581
+ throw new Error(
6582
+ "BuiltInCodeExecutor.executeCode should not be called directly"
6583
+ );
6584
+ }
6585
+ /**
6586
+ * Pre-process the LLM request for Gemini 2.0+ models to use the code execution tool
6587
+ */
6588
+ processLlmRequest(llmRequest) {
6589
+ if (!_optionalChain([llmRequest, 'access', _207 => _207.model, 'optionalAccess', _208 => _208.startsWith, 'call', _209 => _209("gemini-2")])) {
6590
+ throw new Error(
6591
+ `Gemini code execution tool is not supported for model ${llmRequest.model}`
6592
+ );
6581
6593
  }
6582
- result.push(string.slice(lastEnd));
6583
- return result.join("");
6594
+ if (!llmRequest.config) {
6595
+ llmRequest.config = {};
6596
+ }
6597
+ if (!llmRequest.config.tools) {
6598
+ llmRequest.config.tools = [];
6599
+ }
6600
+ const codeExecutionTool = {
6601
+ codeExecution: {}
6602
+ };
6603
+ llmRequest.config.tools.push(codeExecutionTool);
6584
6604
  }
6585
- async function replaceMatch(match) {
6586
- let varName = match[0].replace(/[{}]/g, "").trim();
6587
- let optional = false;
6588
- if (varName.endsWith("?")) {
6589
- optional = true;
6590
- varName = varName.slice(0, -1);
6605
+ };
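// Illustrative sketch, not part of the diffed bundle: the effect of
// processLlmRequest on a request aimed at a Gemini 2 model. The model name and
// the import path are example values/assumptions.
const { BuiltInCodeExecutor } = require("@iqai/adk"); // assumed export

const builtIn = new BuiltInCodeExecutor();
const llmRequest = { model: "gemini-2.0-flash", config: {} };
builtIn.processLlmRequest(llmRequest);
// llmRequest.config.tools is now [{ codeExecution: {} }]; a model whose name
// does not start with "gemini-2" would make processLlmRequest throw instead.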
6606
+
6607
+ // src/code-executors/code-execution-utils.ts
6608
+
6609
+ var CodeExecutionUtils = class _CodeExecutionUtils {
6610
+ /**
6611
+ * Gets the file content as a base64-encoded string
6612
+ */
6613
+ static getEncodedFileContent(data) {
6614
+ let decodedData;
6615
+ if (data instanceof ArrayBuffer) {
6616
+ decodedData = new TextDecoder().decode(data);
6591
6617
  }
6592
- if (varName.startsWith("artifact.")) {
6593
- varName = varName.replace("artifact.", "");
6594
- if (!invocationContext.artifactService) {
6595
- throw new Error("Artifact service is not initialized.");
6596
- }
6597
- try {
6598
- const artifact = await invocationContext.artifactService.loadArtifact({
6599
- appName: invocationContext.session.appName,
6600
- userId: invocationContext.session.userId,
6601
- sessionId: invocationContext.session.id,
6602
- filename: varName
6603
- });
6604
- if (!artifact) {
6605
- throw new Error(`Artifact ${varName} not found.`);
6606
- }
6607
- return String(artifact);
6608
- } catch (error) {
6609
- if (optional) {
6610
- return "";
6611
- }
6612
- throw error;
6613
- }
6614
- } else {
6615
- if (!isValidStateName(varName)) {
6616
- return match[0];
6618
+ if (_CodeExecutionUtils.isBase64Encoded(decodedData)) {
6619
+ return decodedData;
6620
+ }
6621
+ return btoa(decodedData);
6622
+ }
6623
+ static isBase64Encoded(str) {
6624
+ try {
6625
+ return btoa(atob(str)) === str;
6626
+ } catch (e3) {
6627
+ return false;
6628
+ }
6629
+ }
6630
+ /**
6631
+ * Extracts the first code block from the content and truncates everything after it
6632
+ */
6633
+ static extractCodeAndTruncateContent(content, codeBlockDelimiters) {
6634
+ if (!_optionalChain([content, 'optionalAccess', _210 => _210.parts, 'optionalAccess', _211 => _211.length])) {
6635
+ return null;
6636
+ }
6637
+ for (let idx = 0; idx < content.parts.length; idx++) {
6638
+ const part = content.parts[idx];
6639
+ if (part.executableCode && (idx === content.parts.length - 1 || !content.parts[idx + 1].codeExecutionResult)) {
6640
+ content.parts = content.parts.slice(0, idx + 1);
6641
+ return part.executableCode.code;
6617
6642
  }
6618
- const sessionState = invocationContext.session.state;
6619
- if (varName in sessionState) {
6620
- return String(sessionState[varName]);
6643
+ }
6644
+ const textParts = content.parts.filter((p) => p.text);
6645
+ if (!textParts.length) {
6646
+ return null;
6647
+ }
6648
+ const responseText = textParts.map((p) => p.text).join("\n");
6649
+ const leadingDelimiterPattern = codeBlockDelimiters.map(([start]) => _CodeExecutionUtils.escapeRegex(start)).join("|");
6650
+ const trailingDelimiterPattern = codeBlockDelimiters.map(([, end]) => _CodeExecutionUtils.escapeRegex(end)).join("|");
6651
+ const pattern = new RegExp(
6652
+ `(.*?)(${leadingDelimiterPattern})(.*?)(${trailingDelimiterPattern})(.*?)$`,
6653
+ "s"
6654
+ );
6655
+ const match = responseText.match(pattern);
6656
+ if (!match) {
6657
+ return null;
6658
+ }
6659
+ const [, prefix, , code, , suffix] = match;
6660
+ if (!code) {
6661
+ return null;
6662
+ }
6663
+ content.parts = [];
6664
+ if (prefix) {
6665
+ content.parts.push({ text: prefix });
6666
+ }
6667
+ content.parts.push(_CodeExecutionUtils.buildExecutableCodePart(code));
6668
+ return code;
6669
+ }
6670
+ static escapeRegex(str) {
6671
+ return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
6672
+ }
6673
+ /**
6674
+ * Builds an executable code part with code string
6675
+ */
6676
+ static buildExecutableCodePart(code) {
6677
+ return {
6678
+ executableCode: {
6679
+ code,
6680
+ language: _genai.Language.PYTHON
6621
6681
  }
6622
- if (optional) {
6623
- return "";
6682
+ };
6683
+ }
6684
+ /**
6685
+ * Builds the code execution result part from the code execution result
6686
+ */
6687
+ static buildCodeExecutionResultPart(codeExecutionResult) {
6688
+ if (codeExecutionResult.stderr) {
6689
+ return {
6690
+ codeExecutionResult: {
6691
+ outcome: _genai.Outcome.OUTCOME_FAILED,
6692
+ output: codeExecutionResult.stderr
6693
+ }
6694
+ };
6695
+ }
6696
+ const finalResult = [];
6697
+ if (codeExecutionResult.stdout || !codeExecutionResult.outputFiles.length) {
6698
+ finalResult.push(
6699
+ `Code execution result:
6700
+ ${codeExecutionResult.stdout}
6701
+ `
6702
+ );
6703
+ }
6704
+ if (codeExecutionResult.outputFiles.length) {
6705
+ const fileNames = codeExecutionResult.outputFiles.map((f) => `\`${f.name}\``).join(",");
6706
+ finalResult.push(`Saved artifacts:
6707
+ ${fileNames}`);
6708
+ }
6709
+ return {
6710
+ codeExecutionResult: {
6711
+ outcome: _genai.Outcome.OUTCOME_OK,
6712
+ output: finalResult.join("\n\n")
6624
6713
  }
6625
- throw new Error(`Context variable not found: \`${varName}\`.`);
6714
+ };
6715
+ }
6716
+ /**
6717
+ * Converts the code execution parts to text parts in a Content
6718
+ */
6719
+ static convertCodeExecutionParts(content, codeBlockDelimiter, executionResultDelimiters) {
6720
+ if (!_optionalChain([content, 'access', _212 => _212.parts, 'optionalAccess', _213 => _213.length])) {
6721
+ return;
6722
+ }
6723
+ const lastPart = content.parts[content.parts.length - 1];
6724
+ if (lastPart.executableCode) {
6725
+ content.parts[content.parts.length - 1] = {
6726
+ text: `${codeBlockDelimiter[0]}${lastPart.executableCode.code}${codeBlockDelimiter[1]}`
6727
+ };
6728
+ } else if (content.parts.length === 1 && lastPart.codeExecutionResult) {
6729
+ content.parts[content.parts.length - 1] = {
6730
+ text: `${executionResultDelimiters[0]}${lastPart.codeExecutionResult.output}${executionResultDelimiters[1]}`
6731
+ };
6732
+ content.role = "user";
6626
6733
  }
6627
6734
  }
6628
- return await asyncReplace(/{[^{}]*}/g, replaceMatch, template);
6629
- }
6630
- function isValidStateName(varName) {
6631
- const parts = varName.split(":");
6632
- if (parts.length === 1) {
6633
- return isValidIdentifier(varName);
6735
+ };
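// Illustrative sketch, not part of the diffed bundle: how
// buildCodeExecutionResultPart maps an execution result onto a content part.
// The import path and result values are assumptions for the example.
const { CodeExecutionUtils } = require("@iqai/adk"); // assumed export

const okPart = CodeExecutionUtils.buildCodeExecutionResultPart({
  stdout: "42\n",
  stderr: "",
  outputFiles: []
});
// -> a codeExecutionResult part with outcome OUTCOME_OK and the stdout embedded in its output

const failedPart = CodeExecutionUtils.buildCodeExecutionResultPart({
  stdout: "",
  stderr: "NameError: name 'x' is not defined",
  outputFiles: []
});
// -> a codeExecutionResult part with outcome OUTCOME_FAILED and the stderr as its output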
6736
+
6737
+ // src/code-executors/code-executor-context.ts
6738
+ var CONTEXT_KEY = "_code_execution_context";
6739
+ var SESSION_ID_KEY = "execution_session_id";
6740
+ var PROCESSED_FILE_NAMES_KEY = "processed_input_files";
6741
+ var INPUT_FILE_KEY = "_code_executor_input_files";
6742
+ var ERROR_COUNT_KEY = "_code_executor_error_counts";
6743
+ var CODE_EXECUTION_RESULTS_KEY = "_code_execution_results";
6744
+ var CodeExecutorContext = class {
6745
+
6746
+
6747
+ constructor(sessionState) {
6748
+ this.sessionState = sessionState;
6749
+ this.context = this.getCodeExecutorContext(sessionState);
6634
6750
  }
6635
- if (parts.length === 2) {
6636
- const validPrefixes = ["app:", "user:", "temp:"];
6637
- const prefix = `${parts[0]}:`;
6638
- if (validPrefixes.includes(prefix)) {
6639
- return isValidIdentifier(parts[1]);
6751
+ /**
6752
+ * Gets the state delta to update in the persistent session state.
6753
+ */
6754
+ getStateDelta() {
6755
+ const contextToUpdate = JSON.parse(JSON.stringify(this.context));
6756
+ return { [CONTEXT_KEY]: contextToUpdate };
6757
+ }
6758
+ /**
6759
+ * Gets the session ID for the code executor.
6760
+ */
6761
+ getExecutionId() {
6762
+ if (!(SESSION_ID_KEY in this.context)) {
6763
+ return null;
6640
6764
  }
6765
+ return this.context[SESSION_ID_KEY];
6641
6766
  }
6642
- return false;
6643
- }
6644
- function isValidIdentifier(name) {
6645
- const identifierRegex = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/;
6646
- return identifierRegex.test(name);
6647
- }
6767
+ /**
6768
+ * Sets the session ID for the code executor.
6769
+ */
6770
+ setExecutionId(sessionId) {
6771
+ this.context[SESSION_ID_KEY] = sessionId;
6772
+ }
6773
+ /**
6774
+ * Gets the processed file names from the session state.
6775
+ */
6776
+ getProcessedFileNames() {
6777
+ if (!(PROCESSED_FILE_NAMES_KEY in this.context)) {
6778
+ return [];
6779
+ }
6780
+ return this.context[PROCESSED_FILE_NAMES_KEY];
6781
+ }
6782
+ /**
6783
+ * Adds the processed file names to the session state.
6784
+ */
6785
+ addProcessedFileNames(fileNames) {
6786
+ if (!(PROCESSED_FILE_NAMES_KEY in this.context)) {
6787
+ this.context[PROCESSED_FILE_NAMES_KEY] = [];
6788
+ }
6789
+ this.context[PROCESSED_FILE_NAMES_KEY].push(...fileNames);
6790
+ }
6791
+ /**
6792
+ * Gets the code executor input files from the session state.
6793
+ */
6794
+ getInputFiles() {
6795
+ if (!(INPUT_FILE_KEY in this.sessionState)) {
6796
+ return [];
6797
+ }
6798
+ return this.sessionState[INPUT_FILE_KEY].map(
6799
+ (file) => file
6800
+ );
6801
+ }
6802
+ /**
6803
+ * Adds the input files to the code executor context.
6804
+ */
6805
+ addInputFiles(inputFiles) {
6806
+ if (!(INPUT_FILE_KEY in this.sessionState)) {
6807
+ this.sessionState[INPUT_FILE_KEY] = [];
6808
+ }
6809
+ const fileArray = this.sessionState[INPUT_FILE_KEY];
6810
+ for (const inputFile of inputFiles) {
6811
+ fileArray.push({
6812
+ name: inputFile.name,
6813
+ content: inputFile.content,
6814
+ mimeType: inputFile.mimeType
6815
+ });
6816
+ }
6817
+ }
6818
+ /**
6819
+ * Removes the input files and processed file names from the code executor context.
6820
+ */
6821
+ clearInputFiles() {
6822
+ if (INPUT_FILE_KEY in this.sessionState) {
6823
+ this.sessionState[INPUT_FILE_KEY] = [];
6824
+ }
6825
+ if (PROCESSED_FILE_NAMES_KEY in this.context) {
6826
+ this.context[PROCESSED_FILE_NAMES_KEY] = [];
6827
+ }
6828
+ }
6829
+ /**
6830
+ * Gets the error count from the session state.
6831
+ */
6832
+ getErrorCount(invocationId) {
6833
+ if (!(ERROR_COUNT_KEY in this.sessionState)) {
6834
+ return 0;
6835
+ }
6836
+ const errorCounts = this.sessionState[ERROR_COUNT_KEY];
6837
+ return _nullishCoalesce(errorCounts[invocationId], () => ( 0));
6838
+ }
6839
+ /**
6840
+ * Increments the error count for the given invocation ID.
6841
+ */
6842
+ incrementErrorCount(invocationId) {
6843
+ if (!(ERROR_COUNT_KEY in this.sessionState)) {
6844
+ this.sessionState[ERROR_COUNT_KEY] = {};
6845
+ }
6846
+ const errorCounts = this.sessionState[ERROR_COUNT_KEY];
6847
+ errorCounts[invocationId] = this.getErrorCount(invocationId) + 1;
6848
+ }
6849
+ /**
6850
+ * Resets the error count for the given invocation ID.
6851
+ */
6852
+ resetErrorCount(invocationId) {
6853
+ if (!(ERROR_COUNT_KEY in this.sessionState)) {
6854
+ return;
6855
+ }
6856
+ const errorCounts = this.sessionState[ERROR_COUNT_KEY];
6857
+ if (invocationId in errorCounts) {
6858
+ delete errorCounts[invocationId];
6859
+ }
6860
+ }
6861
+ /**
6862
+ * Updates the code execution result.
6863
+ */
6864
+ updateCodeExecutionResult(invocationId, code, resultStdout, resultStderr) {
6865
+ if (!(CODE_EXECUTION_RESULTS_KEY in this.sessionState)) {
6866
+ this.sessionState[CODE_EXECUTION_RESULTS_KEY] = {};
6867
+ }
6868
+ const results = this.sessionState[CODE_EXECUTION_RESULTS_KEY];
6869
+ if (!(invocationId in results)) {
6870
+ results[invocationId] = [];
6871
+ }
6872
+ results[invocationId].push({
6873
+ code,
6874
+ resultStdout,
6875
+ resultStderr,
6876
+ timestamp: Math.floor(Date.now() / 1e3)
6877
+ });
6878
+ }
6879
+ /**
6880
+ * Gets the code executor context from the session state.
6881
+ */
6882
+ getCodeExecutorContext(sessionState) {
6883
+ if (!(CONTEXT_KEY in sessionState)) {
6884
+ sessionState[CONTEXT_KEY] = {};
6885
+ }
6886
+ return sessionState[CONTEXT_KEY];
6887
+ }
6888
+ };
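// Illustrative sketch, not part of the diffed bundle: tracking a stateful
// execution session and per-invocation error counts with CodeExecutorContext.
// The session-state object, ids, and import path are placeholder assumptions.
const { CodeExecutorContext } = require("@iqai/adk"); // assumed export

const sessionState = {};
const execCtx = new CodeExecutorContext(sessionState);

execCtx.setExecutionId("session-1");
console.log(execCtx.getStateDelta());
// { _code_execution_context: { execution_session_id: "session-1" } }

execCtx.incrementErrorCount("inv-123");
console.log(execCtx.getErrorCount("inv-123")); // 1
execCtx.resetErrorCount("inv-123");
console.log(execCtx.getErrorCount("inv-123")); // 0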
6648
6889
 
6649
- // src/flows/llm-flows/instructions.ts
6650
- var InstructionsLlmRequestProcessor = class extends BaseLlmRequestProcessor {
6890
+ // src/flows/llm-flows/code-execution.ts
6891
+ var DATA_FILE_UTIL_MAP = {
6892
+ "text/csv": {
6893
+ extension: ".csv",
6894
+ loaderCodeTemplate: "pd.read_csv('{filename}')"
6895
+ }
6896
+ };
6897
+ var DATA_FILE_HELPER_LIB = `
6898
+ import pandas as pd
6899
+
6900
+ def explore_df(df: pd.DataFrame) -> None:
6901
+ """Prints some information about a pandas DataFrame."""
6902
+
6903
+ with pd.option_context(
6904
+ 'display.max_columns', None, 'display.expand_frame_repr', False
6905
+ ):
6906
+ # Print the column names to never encounter KeyError when selecting one.
6907
+ df_dtypes = df.dtypes
6908
+
6909
+ # Obtain information about data types and missing values.
6910
+ df_nulls = (len(df) - df.isnull().sum()).apply(
6911
+ lambda x: f'{x} / {df.shape[0]} non-null'
6912
+ )
6913
+
6914
+ # Explore unique total values in columns using \`.unique()\`.
6915
+ df_unique_count = df.apply(lambda x: len(x.unique()))
6916
+
6917
+ # Explore unique values in columns using \`.unique()\`.
6918
+ df_unique = df.apply(lambda x: crop(str(list(x.unique()))))
6919
+
6920
+ df_info = pd.concat(
6921
+ (
6922
+ df_dtypes.rename('Dtype'),
6923
+ df_nulls.rename('Non-Null Count'),
6924
+ df_unique_count.rename('Unique Values Count'),
6925
+ df_unique.rename('Unique Values'),
6926
+ ),
6927
+ axis=1,
6928
+ )
6929
+ df_info.index.name = 'Columns'
6930
+ print(f"""Total rows: {df.shape[0]}
6931
+ Total columns: {df.shape[1]}
6932
+
6933
+ {df_info}""")
6934
+
6935
+ def crop(text: str, max_length: int = 100) -> str:
6936
+ """Crop text to maximum length with ellipsis."""
6937
+ return text if len(text) <= max_length else text[:max_length] + "..."
6938
+ `;
6939
+ function hasCodeExecutor(agent) {
6940
+ return agent && typeof agent === "object" && "codeExecutor" in agent;
6941
+ }
6942
+ var CodeExecutionRequestProcessor = class extends BaseLlmRequestProcessor {
6651
6943
  async *runAsync(invocationContext, llmRequest) {
6652
6944
  const agent = invocationContext.agent;
6653
- if (!this.isLlmAgent(agent)) {
6945
+ if (!hasCodeExecutor(agent)) {
6654
6946
  return;
6655
6947
  }
6656
- const rootAgent = agent.rootAgent;
6657
- if (this.isLlmAgent(rootAgent) && rootAgent.globalInstruction) {
6658
- const [rawInstruction, bypassStateInjection] = await rootAgent.canonicalGlobalInstruction(
6659
- new ReadonlyContext(invocationContext)
6948
+ if (!(agent instanceof LlmAgent) || !agent.codeExecutor) {
6949
+ return;
6950
+ }
6951
+ yield* runPreProcessor(invocationContext, llmRequest);
6952
+ if (!(agent.codeExecutor instanceof BaseCodeExecutor)) {
6953
+ return;
6954
+ }
6955
+ for (const content of llmRequest.contents || []) {
6956
+ CodeExecutionUtils.convertCodeExecutionParts(
6957
+ content,
6958
+ agent.codeExecutor.codeBlockDelimiters[0] || ["", ""],
6959
+ agent.codeExecutor.executionResultDelimiters
6660
6960
  );
6661
- let instruction = rawInstruction;
6662
- if (!bypassStateInjection) {
6663
- instruction = await injectSessionState(
6664
- rawInstruction,
6665
- new ReadonlyContext(invocationContext)
6666
- );
6961
+ }
6962
+ }
6963
+ };
6964
+ var CodeExecutionResponseProcessor = class extends BaseLlmResponseProcessor {
6965
+ async *runAsync(invocationContext, llmResponse) {
6966
+ if (llmResponse.partial) {
6967
+ return;
6968
+ }
6969
+ yield* runPostProcessor(invocationContext, llmResponse);
6970
+ }
6971
+ };
6972
+ async function* runPreProcessor(invocationContext, llmRequest) {
6973
+ const agent = invocationContext.agent;
6974
+ if (!hasCodeExecutor(agent)) {
6975
+ return;
6976
+ }
6977
+ const codeExecutor = agent.codeExecutor;
6978
+ if (!codeExecutor || !(codeExecutor instanceof BaseCodeExecutor)) {
6979
+ return;
6980
+ }
6981
+ if (codeExecutor instanceof BuiltInCodeExecutor) {
6982
+ codeExecutor.processLlmRequest(llmRequest);
6983
+ return;
6984
+ }
6985
+ if (!codeExecutor.optimizeDataFile) {
6986
+ return;
6987
+ }
6988
+ const codeExecutorContext = new CodeExecutorContext(
6989
+ invocationContext.session.state
6990
+ // Type assertion for State compatibility
6991
+ );
6992
+ if (codeExecutorContext.getErrorCount(invocationContext.invocationId) >= codeExecutor.errorRetryAttempts) {
6993
+ return;
6994
+ }
6995
+ const allInputFiles = extractAndReplaceInlineFiles(
6996
+ codeExecutorContext,
6997
+ llmRequest
6998
+ );
6999
+ const processedFileNames = new Set(
7000
+ codeExecutorContext.getProcessedFileNames()
7001
+ );
7002
+ const filesToProcess = allInputFiles.filter(
7003
+ (f) => !processedFileNames.has(f.name)
7004
+ );
7005
+ for (const file of filesToProcess) {
7006
+ const codeStr = getDataFilePreprocessingCode(file);
7007
+ if (!codeStr) {
7008
+ continue;
7009
+ }
7010
+ const codeContent = {
7011
+ role: "model",
7012
+ parts: [
7013
+ { text: `Processing input file: \`${file.name}\`` },
7014
+ CodeExecutionUtils.buildExecutableCodePart(codeStr)
7015
+ ]
7016
+ };
7017
+ llmRequest.contents = llmRequest.contents || [];
7018
+ llmRequest.contents.push(structuredClone(codeContent));
7019
+ yield new Event({
7020
+ invocationId: invocationContext.invocationId,
7021
+ author: agent.name,
7022
+ branch: invocationContext.branch,
7023
+ content: codeContent
7024
+ });
7025
+ const codeExecutionResult = await codeExecutor.executeCode(
7026
+ invocationContext,
7027
+ {
7028
+ code: codeStr,
7029
+ inputFiles: [file],
7030
+ executionId: getOrSetExecutionId(
7031
+ invocationContext,
7032
+ codeExecutorContext
7033
+ )
6667
7034
  }
6668
- llmRequest.appendInstructions([instruction]);
7035
+ );
7036
+ codeExecutorContext.updateCodeExecutionResult(
7037
+ invocationContext.invocationId,
7038
+ codeStr,
7039
+ codeExecutionResult.stdout,
7040
+ codeExecutionResult.stderr
7041
+ );
7042
+ codeExecutorContext.addProcessedFileNames([file.name]);
7043
+ const executionResultEvent = await postProcessCodeExecutionResult(
7044
+ invocationContext,
7045
+ codeExecutorContext,
7046
+ codeExecutionResult
7047
+ );
7048
+ yield executionResultEvent;
7049
+ llmRequest.contents.push(structuredClone(executionResultEvent.content));
7050
+ }
7051
+ }
7052
+ async function* runPostProcessor(invocationContext, llmResponse) {
7053
+ const agent = invocationContext.agent;
7054
+ if (!hasCodeExecutor(agent)) {
7055
+ return;
7056
+ }
7057
+ const codeExecutor = agent.codeExecutor;
7058
+ if (!(codeExecutor instanceof BaseCodeExecutor)) {
7059
+ return;
7060
+ }
7061
+ if (!llmResponse || !llmResponse.content) {
7062
+ return;
7063
+ }
7064
+ if (codeExecutor instanceof BuiltInCodeExecutor) {
7065
+ return;
7066
+ }
7067
+ const codeExecutorContext = new CodeExecutorContext(
7068
+ invocationContext.session.state
7069
+ // Type assertion for State compatibility
7070
+ );
7071
+ if (codeExecutorContext.getErrorCount(invocationContext.invocationId) >= codeExecutor.errorRetryAttempts) {
7072
+ return;
7073
+ }
7074
+ const responseContent = llmResponse.content;
7075
+ const codeStr = CodeExecutionUtils.extractCodeAndTruncateContent(
7076
+ responseContent,
7077
+ codeExecutor.codeBlockDelimiters
7078
+ );
7079
+ if (!codeStr) {
7080
+ return;
7081
+ }
7082
+ yield new Event({
7083
+ invocationId: invocationContext.invocationId,
7084
+ author: agent.name,
7085
+ branch: invocationContext.branch,
7086
+ content: responseContent,
7087
+ actions: new EventActions()
7088
+ });
7089
+ const codeExecutionResult = await codeExecutor.executeCode(
7090
+ invocationContext,
7091
+ {
7092
+ code: codeStr,
7093
+ inputFiles: codeExecutorContext.getInputFiles(),
7094
+ executionId: getOrSetExecutionId(invocationContext, codeExecutorContext)
6669
7095
  }
6670
- if (agent.instruction) {
6671
- const [rawInstruction, bypassStateInjection] = await agent.canonicalInstruction(
6672
- new ReadonlyContext(invocationContext)
6673
- );
6674
- let instruction = rawInstruction;
6675
- if (!bypassStateInjection) {
6676
- instruction = await injectSessionState(
6677
- rawInstruction,
6678
- new ReadonlyContext(invocationContext)
6679
- );
7096
+ );
7097
+ codeExecutorContext.updateCodeExecutionResult(
7098
+ invocationContext.invocationId,
7099
+ codeStr,
7100
+ codeExecutionResult.stdout,
7101
+ codeExecutionResult.stderr
7102
+ );
7103
+ yield await postProcessCodeExecutionResult(
7104
+ invocationContext,
7105
+ codeExecutorContext,
7106
+ codeExecutionResult
7107
+ );
7108
+ llmResponse.content = void 0;
7109
+ }
7110
+ function extractAndReplaceInlineFiles(codeExecutorContext, llmRequest) {
7111
+ const allInputFiles = codeExecutorContext.getInputFiles();
7112
+ const savedFileNames = new Set(allInputFiles.map((f) => f.name));
7113
+ for (let i = 0; i < (_optionalChain([llmRequest, 'access', _214 => _214.contents, 'optionalAccess', _215 => _215.length]) || 0); i++) {
7114
+ const content = llmRequest.contents[i];
7115
+ if (content.role !== "user" || !content.parts) {
7116
+ continue;
7117
+ }
7118
+ for (let j = 0; j < content.parts.length; j++) {
7119
+ const part = content.parts[j];
7120
+ if (!part.inlineData || !(part.inlineData.mimeType in DATA_FILE_UTIL_MAP)) {
7121
+ continue;
7122
+ }
7123
+ const mimeType = part.inlineData.mimeType;
7124
+ const fileName = `data_${i + 1}_${j + 1}${DATA_FILE_UTIL_MAP[mimeType].extension}`;
7125
+ llmRequest.contents[i].parts[j] = {
7126
+ text: `
7127
+ Available file: \`${fileName}\`
7128
+ `
7129
+ };
7130
+ const file = {
7131
+ name: fileName,
7132
+ content: CodeExecutionUtils.getEncodedFileContent(part.inlineData.data),
7133
+ mimeType
7134
+ };
7135
+ if (!savedFileNames.has(fileName)) {
7136
+ codeExecutorContext.addInputFiles([file]);
7137
+ allInputFiles.push(file);
6680
7138
  }
6681
- llmRequest.appendInstructions([instruction]);
6682
7139
  }
6683
- for await (const _ of []) {
6684
- yield _;
7140
+ }
7141
+ return allInputFiles;
7142
+ }
7143
+ function getOrSetExecutionId(invocationContext, codeExecutorContext) {
7144
+ const agent = invocationContext.agent;
7145
+ if (!hasCodeExecutor(agent) || !_optionalChain([agent, 'access', _216 => _216.codeExecutor, 'optionalAccess', _217 => _217.stateful])) {
7146
+ return void 0;
7147
+ }
7148
+ let executionId = codeExecutorContext.getExecutionId();
7149
+ if (!executionId) {
7150
+ executionId = invocationContext.session.id;
7151
+ codeExecutorContext.setExecutionId(executionId);
7152
+ }
7153
+ return executionId;
7154
+ }
7155
+ async function postProcessCodeExecutionResult(invocationContext, codeExecutorContext, codeExecutionResult) {
7156
+ if (!invocationContext.artifactService) {
7157
+ throw new Error("Artifact service is not initialized.");
7158
+ }
7159
+ const resultContent = {
7160
+ role: "model",
7161
+ parts: [
7162
+ CodeExecutionUtils.buildCodeExecutionResultPart(codeExecutionResult)
7163
+ ]
7164
+ };
7165
+ const eventActions = new EventActions({
7166
+ stateDelta: codeExecutorContext.getStateDelta()
7167
+ });
7168
+ if (codeExecutionResult.stderr) {
7169
+ codeExecutorContext.incrementErrorCount(invocationContext.invocationId);
7170
+ } else {
7171
+ codeExecutorContext.resetErrorCount(invocationContext.invocationId);
7172
+ }
7173
+ for (const outputFile of codeExecutionResult.outputFiles) {
7174
+ const version = await invocationContext.artifactService.saveArtifact({
7175
+ appName: invocationContext.appName,
7176
+ userId: invocationContext.userId,
7177
+ sessionId: invocationContext.session.id,
7178
+ filename: outputFile.name,
7179
+ artifact: {
7180
+ inlineData: {
7181
+ data: atob(outputFile.content),
7182
+ // Convert from base64
7183
+ mimeType: outputFile.mimeType
7184
+ }
7185
+ }
7186
+ });
7187
+ eventActions.artifactDelta[outputFile.name] = version;
7188
+ }
7189
+ return new Event({
7190
+ invocationId: invocationContext.invocationId,
7191
+ author: invocationContext.agent.name,
7192
+ branch: invocationContext.branch,
7193
+ content: resultContent,
7194
+ actions: eventActions
7195
+ });
7196
+ }
7197
+ function getDataFilePreprocessingCode(file) {
7198
+ function getNormalizedFileName(fileName) {
7199
+ const baseName = fileName.split(".")[0];
7200
+ let varName2 = baseName.replace(/[^a-zA-Z0-9_]/g, "_");
7201
+ if (/^\d/.test(varName2)) {
7202
+ varName2 = `_${varName2}`;
6685
7203
  }
7204
+ return varName2;
6686
7205
  }
6687
- /**
6688
- * Type guard to check if agent is an LlmAgent
6689
- */
6690
- isLlmAgent(agent) {
6691
- return agent && typeof agent === "object" && "canonicalModel" in agent;
7206
+ if (!(file.mimeType in DATA_FILE_UTIL_MAP)) {
7207
+ return void 0;
6692
7208
  }
6693
- };
6694
- var requestProcessor4 = new InstructionsLlmRequestProcessor();
7209
+ const varName = getNormalizedFileName(file.name);
7210
+ const loaderCode = DATA_FILE_UTIL_MAP[file.mimeType].loaderCodeTemplate.replace("{filename}", file.name);
7211
+ return `
7212
+ ${DATA_FILE_HELPER_LIB}
7213
+
7214
+ # Load the dataframe.
7215
+ ${varName} = ${loaderCode}
7216
+
7217
+ # Use \`explore_df\` to guide my analysis.
7218
+ explore_df(${varName})
7219
+ `;
7220
+ }
7221
+ var requestProcessor3 = new CodeExecutionRequestProcessor();
7222
+ var responseProcessor = new CodeExecutionResponseProcessor();
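// Illustrative sketch, not part of the diffed bundle: the file-name
// normalization used by getDataFilePreprocessingCode above, reproduced
// standalone with a made-up file name to show the generated pandas loader.
function normalizeDataFileName(fileName) {
  const baseName = fileName.split(".")[0];
  let varName = baseName.replace(/[^a-zA-Z0-9_]/g, "_");
  if (/^\d/.test(varName)) varName = `_${varName}`;
  return varName;
}

console.log(normalizeDataFileName("2024 sales.csv")); // "_2024_sales"
// The generated Python then loads it as: _2024_sales = pd.read_csv('2024 sales.csv')
// and calls explore_df(_2024_sales) from DATA_FILE_HELPER_LIB.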
6695
7223
 
6696
7224
  // src/flows/llm-flows/contents.ts
6697
7225
  var ContentLlmRequestProcessor = class extends BaseLlmRequestProcessor {
@@ -6724,7 +7252,7 @@ var ContentLlmRequestProcessor = class extends BaseLlmRequestProcessor {
6724
7252
  return agent && typeof agent === "object" && "canonicalModel" in agent;
6725
7253
  }
6726
7254
  };
6727
- var requestProcessor5 = new ContentLlmRequestProcessor();
7255
+ var requestProcessor4 = new ContentLlmRequestProcessor();
6728
7256
  function rearrangeEventsForAsyncFunctionResponsesInHistory(events) {
6729
7257
  const functionCallIdToResponseEventsIndex = {};
6730
7258
  for (let i = 0; i < events.length; i++) {
@@ -6845,7 +7373,7 @@ function rearrangeEventsForLatestFunctionResponse(events) {
6845
7373
  continue;
6846
7374
  }
6847
7375
  const functionResponses2 = event.getFunctionResponses();
6848
- if (_optionalChain([functionResponses2, 'optionalAccess', _207 => _207.some, 'call', _208 => _208((fr) => fr.id && functionResponsesIds.has(fr.id))])) {
7376
+ if (_optionalChain([functionResponses2, 'optionalAccess', _218 => _218.some, 'call', _219 => _219((fr) => fr.id && functionResponsesIds.has(fr.id))])) {
6849
7377
  functionResponseEvents.push(event);
6850
7378
  }
6851
7379
  }
@@ -6944,7 +7472,7 @@ function mergeFunctionResponseEvents(functionResponseEvents) {
6944
7472
  const partIndicesInMergedEvent = {};
6945
7473
  for (let idx = 0; idx < partsInMergedEvent.length; idx++) {
6946
7474
  const part = partsInMergedEvent[idx];
6947
- if (_optionalChain([part, 'access', _209 => _209.functionResponse, 'optionalAccess', _210 => _210.id])) {
7475
+ if (_optionalChain([part, 'access', _220 => _220.functionResponse, 'optionalAccess', _221 => _221.id])) {
6948
7476
  partIndicesInMergedEvent[part.functionResponse.id] = idx;
6949
7477
  }
6950
7478
  }
@@ -6953,7 +7481,7 @@ function mergeFunctionResponseEvents(functionResponseEvents) {
6953
7481
  throw new Error("There should be at least one function_response part.");
6954
7482
  }
6955
7483
  for (const part of event.content.parts) {
6956
- if (_optionalChain([part, 'access', _211 => _211.functionResponse, 'optionalAccess', _212 => _212.id])) {
7484
+ if (_optionalChain([part, 'access', _222 => _222.functionResponse, 'optionalAccess', _223 => _223.id])) {
6957
7485
  const functionCallId = part.functionResponse.id;
6958
7486
  if (functionCallId in partIndicesInMergedEvent) {
6959
7487
  partsInMergedEvent[partIndicesInMergedEvent[functionCallId]] = part;
@@ -6989,6 +7517,151 @@ function isAuthEvent(event) {
6989
7517
  return false;
6990
7518
  }
6991
7519
 
7520
+ // src/flows/llm-flows/identity.ts
7521
+ var IdentityLlmRequestProcessor = class extends BaseLlmRequestProcessor {
7522
+ async *runAsync(invocationContext, llmRequest) {
7523
+ const agent = invocationContext.agent;
7524
+ const instructions = [
7525
+ `You are an agent. Your internal name is "${agent.name}".`
7526
+ ];
7527
+ if (agent.description) {
7528
+ instructions.push(` The description about you is "${agent.description}"`);
7529
+ }
7530
+ llmRequest.appendInstructions(instructions);
7531
+ for await (const _ of []) {
7532
+ yield _;
7533
+ }
7534
+ }
7535
+ };
7536
+ var requestProcessor5 = new IdentityLlmRequestProcessor();
7537
+
7538
+ // src/utils/instructions-utils.ts
7539
+ async function injectSessionState(template, readonlyContext) {
7540
+ const invocationContext = readonlyContext._invocationContext;
7541
+ async function asyncReplace(pattern, replaceAsyncFn, string) {
7542
+ const result = [];
7543
+ let lastEnd = 0;
7544
+ const matches = Array.from(string.matchAll(pattern));
7545
+ for (const match of matches) {
7546
+ result.push(string.slice(lastEnd, match.index));
7547
+ const replacement = await replaceAsyncFn(match);
7548
+ result.push(replacement);
7549
+ lastEnd = (match.index || 0) + match[0].length;
7550
+ }
7551
+ result.push(string.slice(lastEnd));
7552
+ return result.join("");
7553
+ }
7554
+ async function replaceMatch(match) {
7555
+ let varName = match[0].replace(/[{}]/g, "").trim();
7556
+ let optional = false;
7557
+ if (varName.endsWith("?")) {
7558
+ optional = true;
7559
+ varName = varName.slice(0, -1);
7560
+ }
7561
+ if (varName.startsWith("artifact.")) {
7562
+ varName = varName.replace("artifact.", "");
7563
+ if (!invocationContext.artifactService) {
7564
+ throw new Error("Artifact service is not initialized.");
7565
+ }
7566
+ try {
7567
+ const artifact = await invocationContext.artifactService.loadArtifact({
7568
+ appName: invocationContext.session.appName,
7569
+ userId: invocationContext.session.userId,
7570
+ sessionId: invocationContext.session.id,
7571
+ filename: varName
7572
+ });
7573
+ if (!artifact) {
7574
+ throw new Error(`Artifact ${varName} not found.`);
7575
+ }
7576
+ return String(artifact);
7577
+ } catch (error) {
7578
+ if (optional) {
7579
+ return "";
7580
+ }
7581
+ throw error;
7582
+ }
7583
+ } else {
7584
+ if (!isValidStateName(varName)) {
7585
+ return match[0];
7586
+ }
7587
+ const sessionState = invocationContext.session.state;
7588
+ if (varName in sessionState) {
7589
+ return String(sessionState[varName]);
7590
+ }
7591
+ if (optional) {
7592
+ return "";
7593
+ }
7594
+ throw new Error(`Context variable not found: \`${varName}\`.`);
7595
+ }
7596
+ }
7597
+ return await asyncReplace(/{[^{}]*}/g, replaceMatch, template);
7598
+ }
7599
+ function isValidStateName(varName) {
7600
+ const parts = varName.split(":");
7601
+ if (parts.length === 1) {
7602
+ return isValidIdentifier(varName);
7603
+ }
7604
+ if (parts.length === 2) {
7605
+ const validPrefixes = ["app:", "user:", "temp:"];
7606
+ const prefix = `${parts[0]}:`;
7607
+ if (validPrefixes.includes(prefix)) {
7608
+ return isValidIdentifier(parts[1]);
7609
+ }
7610
+ }
7611
+ return false;
7612
+ }
7613
+ function isValidIdentifier(name) {
7614
+ const identifierRegex = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/;
7615
+ return identifierRegex.test(name);
7616
+ }
7617
+
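// Illustrative sketch, not part of the diffed bundle: the template syntax that
// injectSessionState resolves above. The instruction text and key names are
// made-up examples.
const exampleInstruction = [
  "Greet the user as {userName}.",                  // plain session-state key
  "Match the preferred tone {user:tone?} if set.",  // prefixed key, optional via "?"
  "Use the uploaded brief: {artifact.brief.txt}"    // loaded through the artifact service
].join("\n");
// A missing non-optional key throws 'Context variable not found: `userName`.',
// while {user:tone?} resolves to an empty string when absent; names that are not
// valid state keys are left untouched in the output.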
7618
+ // src/flows/llm-flows/instructions.ts
7619
+ var InstructionsLlmRequestProcessor = class extends BaseLlmRequestProcessor {
7620
+ async *runAsync(invocationContext, llmRequest) {
7621
+ const agent = invocationContext.agent;
7622
+ if (!this.isLlmAgent(agent)) {
7623
+ return;
7624
+ }
7625
+ const rootAgent = agent.rootAgent;
7626
+ if (this.isLlmAgent(rootAgent) && rootAgent.globalInstruction) {
7627
+ const [rawInstruction, bypassStateInjection] = await rootAgent.canonicalGlobalInstruction(
7628
+ new ReadonlyContext(invocationContext)
7629
+ );
7630
+ let instruction = rawInstruction;
7631
+ if (!bypassStateInjection) {
7632
+ instruction = await injectSessionState(
7633
+ rawInstruction,
7634
+ new ReadonlyContext(invocationContext)
7635
+ );
7636
+ }
7637
+ llmRequest.appendInstructions([instruction]);
7638
+ }
7639
+ if (agent.instruction) {
7640
+ const [rawInstruction, bypassStateInjection] = await agent.canonicalInstruction(
7641
+ new ReadonlyContext(invocationContext)
7642
+ );
7643
+ let instruction = rawInstruction;
7644
+ if (!bypassStateInjection) {
7645
+ instruction = await injectSessionState(
7646
+ rawInstruction,
7647
+ new ReadonlyContext(invocationContext)
7648
+ );
7649
+ }
7650
+ llmRequest.appendInstructions([instruction]);
7651
+ }
7652
+ for await (const _ of []) {
7653
+ yield _;
7654
+ }
7655
+ }
7656
+ /**
7657
+ * Type guard to check if agent is an LlmAgent
7658
+ */
7659
+ isLlmAgent(agent) {
7660
+ return agent && typeof agent === "object" && "canonicalModel" in agent;
7661
+ }
7662
+ };
7663
+ var requestProcessor6 = new InstructionsLlmRequestProcessor();
7664
+
6992
7665
  // src/planners/base-planner.ts
6993
7666
  var BasePlanner = class {
6994
7667
  };
@@ -7058,7 +7731,7 @@ var PlanReActPlanner = class extends BasePlanner {
7058
7731
  let firstFcPartIndex = -1;
7059
7732
  for (let i = 0; i < responseParts.length; i++) {
7060
7733
  if (responseParts[i].functionCall) {
7061
- if (!_optionalChain([responseParts, 'access', _213 => _213[i], 'access', _214 => _214.functionCall, 'optionalAccess', _215 => _215.name])) {
7734
+ if (!_optionalChain([responseParts, 'access', _224 => _224[i], 'access', _225 => _225.functionCall, 'optionalAccess', _226 => _226.name])) {
7062
7735
  continue;
7063
7736
  }
7064
7737
  preservedParts.push(responseParts[i]);
@@ -7097,7 +7770,7 @@ var PlanReActPlanner = class extends BasePlanner {
7097
7770
  * Handles non-function-call parts of the response
7098
7771
  */
7099
7772
  _handleNonFunctionCallParts(responsePart, preservedParts) {
7100
- if (_optionalChain([responsePart, 'access', _216 => _216.text, 'optionalAccess', _217 => _217.includes, 'call', _218 => _218(FINAL_ANSWER_TAG)])) {
7773
+ if (_optionalChain([responsePart, 'access', _227 => _227.text, 'optionalAccess', _228 => _228.includes, 'call', _229 => _229(FINAL_ANSWER_TAG)])) {
7101
7774
  const [reasoningText, finalAnswerText] = this._splitByLastPattern(
7102
7775
  responsePart.text,
7103
7776
  FINAL_ANSWER_TAG
@@ -7264,66 +7937,10 @@ function removeThoughtFromRequest(llmRequest) {
7264
7937
  }
7265
7938
  }
7266
7939
  }
7267
- var requestProcessor6 = new NlPlanningRequestProcessor();
7268
- var responseProcessor = new NlPlanningResponseProcessor();
7269
-
7270
- // src/flows/llm-flows/code-execution.ts
7271
- var CodeExecutionRequestProcessor = class extends BaseLlmRequestProcessor {
7272
- async *runAsync(invocationContext, llmRequest) {
7273
- const agent = invocationContext.agent;
7274
- if (!("codeExecutor" in agent) || !agent.codeExecutor) {
7275
- return;
7276
- }
7277
- console.log(
7278
- "Code execution request processing - TODO: Implement when code-executors module is ready"
7279
- );
7280
- for await (const _ of []) {
7281
- yield _;
7282
- }
7283
- }
7284
- /**
7285
- * Placeholder for pre-processor logic
7286
- * TODO: Implement when code-executors are ready
7287
- */
7288
- async *runPreProcessor(invocationContext, llmRequest) {
7289
- console.log("Code execution pre-processor - placeholder");
7290
- for await (const _ of []) {
7291
- yield _;
7292
- }
7293
- }
7294
- };
7295
- var CodeExecutionResponseProcessor = class extends BaseLlmResponseProcessor {
7296
- async *runAsync(invocationContext, llmResponse) {
7297
- if (llmResponse.partial) {
7298
- return;
7299
- }
7300
- const agent = invocationContext.agent;
7301
- if (!("codeExecutor" in agent) || !agent.codeExecutor) {
7302
- return;
7303
- }
7304
- console.log(
7305
- "Code execution response processing - TODO: Implement when code-executors module is ready"
7306
- );
7307
- for await (const _ of []) {
7308
- yield _;
7309
- }
7310
- }
7311
- /**
7312
- * Placeholder for post-processor logic
7313
- * TODO: Implement when code-executors are ready
7314
- */
7315
- async *runPostProcessor(invocationContext, llmResponse) {
7316
- console.log("Code execution post-processor - placeholder");
7317
- for await (const _ of []) {
7318
- yield _;
7319
- }
7320
- }
7321
- };
7322
- var requestProcessor7 = new CodeExecutionRequestProcessor();
7323
- var responseProcessor2 = new CodeExecutionResponseProcessor();
7940
+ var requestProcessor7 = new NlPlanningRequestProcessor();
7941
+ var responseProcessor2 = new NlPlanningResponseProcessor();
7324
7942
 
7325
7943
  // src/flows/llm-flows/single-flow.ts
7326
- var logger7 = new Logger({ name: "SingleFlow" });
7327
7944
  var SingleFlow = class extends BaseLlmFlow {
7328
7945
  /**
7329
7946
  * Constructor for SingleFlow
@@ -7331,35 +7948,32 @@ var SingleFlow = class extends BaseLlmFlow {
7331
7948
  constructor() {
7332
7949
  super();
7333
7950
  this.requestProcessors.push(
7334
- requestProcessor,
7335
7951
  requestProcessor2,
7952
+ requestProcessor,
7336
7953
  // Phase 3: Auth preprocessor
7337
- requestProcessor4,
7338
- requestProcessor3,
7954
+ requestProcessor6,
7339
7955
  requestProcessor5,
7956
+ requestProcessor4,
7340
7957
  // Some implementations of NL Planning mark planning contents as thoughts
7341
7958
  // in the post processor. Since these need to be unmarked, NL Planning
7342
7959
  // should be after contents.
7343
- requestProcessor6,
7960
+ requestProcessor7,
7344
7961
  // Phase 5: NL Planning
7345
7962
  // Code execution should be after the contents as it mutates the contents
7346
7963
  // to optimize data files.
7347
- requestProcessor7
7964
+ requestProcessor3
7348
7965
  // Phase 5: Code Execution (placeholder)
7349
7966
  );
7350
7967
  this.responseProcessors.push(
7351
- responseProcessor,
7968
+ responseProcessor2,
7352
7969
  // Phase 5: NL Planning
7353
- responseProcessor2
7970
+ responseProcessor
7354
7971
  // Phase 5: Code Execution (placeholder)
7355
7972
  );
7356
- logger7.debug("SingleFlow initialized with processors");
7973
+ this.logger.debug("SingleFlow initialized with processors");
7357
7974
  }
7358
7975
  };
7359
7976
 
7360
- // src/flows/llm-flows/auto-flow.ts
7361
- init_logger();
7362
-
7363
7977
  // src/flows/llm-flows/agent-transfer.ts
7364
7978
  var AgentTransferLlmRequestProcessor = class extends BaseLlmRequestProcessor {
7365
7979
  /**
@@ -7449,7 +8063,6 @@ function getTransferTargets(agent) {
7449
8063
  var requestProcessor8 = new AgentTransferLlmRequestProcessor();
7450
8064
 
7451
8065
  // src/flows/llm-flows/auto-flow.ts
7452
- var logger8 = new Logger({ name: "AutoFlow" });
7453
8066
  var AutoFlow = class extends SingleFlow {
7454
8067
  /**
7455
8068
  * Constructor for AutoFlow
@@ -7457,13 +8070,13 @@ var AutoFlow = class extends SingleFlow {
7457
8070
  constructor() {
7458
8071
  super();
7459
8072
  this.requestProcessors.push(requestProcessor8);
7460
- logger8.debug("AutoFlow initialized with agent transfer capability");
8073
+ this.logger.debug("AutoFlow initialized with agent transfer capability");
7461
8074
  }
7462
8075
  };
7463
8076
 
7464
8077
  // src/agents/llm-agent.ts
7465
8078
  init_function_tool();
7466
- var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
8079
+ var LlmAgent = (_class23 = class _LlmAgent extends BaseAgent {
7467
8080
  /**
7468
8081
  * The model to use for the agent
7469
8082
  * When not set, the agent will inherit the model from its ancestor
@@ -7482,6 +8095,10 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
7482
8095
  * Tools available to this agent
7483
8096
  */
7484
8097
 
8098
+ /**
8099
+ * Code executor for this agent
8100
+ */
8101
+
7485
8102
  /**
7486
8103
  * Disallows LLM-controlled transferring to the parent agent
7487
8104
  */
@@ -7536,7 +8153,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
7536
8153
  */
7537
8154
 
7538
8155
  // Schema type - depends on specific implementation
7539
- __init39() {this.logger = new Logger({ name: "LlmAgent" })}
8156
+ __init42() {this.logger = new Logger({ name: "LlmAgent" })}
7540
8157
  /**
7541
8158
  * Constructor for LlmAgent
7542
8159
  */
@@ -7544,11 +8161,12 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
7544
8161
  super({
7545
8162
  name: config.name,
7546
8163
  description: config.description
7547
- });_class21.prototype.__init39.call(this);;
8164
+ });_class23.prototype.__init42.call(this);;
7548
8165
  this.model = config.model || "";
7549
8166
  this.instruction = config.instruction || "";
7550
8167
  this.globalInstruction = config.globalInstruction || "";
7551
8168
  this.tools = config.tools || [];
8169
+ this.codeExecutor = config.codeExecutor;
7552
8170
  this.disallowTransferToParent = config.disallowTransferToParent || false;
7553
8171
  this.disallowTransferToPeers = config.disallowTransferToPeers || false;
7554
8172
  this.includeContents = config.includeContents || "default";
@@ -7568,11 +8186,14 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
7568
8186
  * This method is only for use by Agent Development Kit
7569
8187
  */
7570
8188
  get canonicalModel() {
7571
- if (typeof this.model !== "string") {
8189
+ if (typeof this.model === "string") {
8190
+ if (this.model) {
8191
+ return LLMRegistry.newLLM(this.model);
8192
+ }
8193
+ } else if (this.model instanceof BaseLlm) {
7572
8194
  return this.model;
7573
- }
7574
- if (this.model) {
7575
- return LLMRegistry.newLLM(this.model);
8195
+ } else if (this.model) {
8196
+ return new AiSdkLlm(this.model);
7576
8197
  }
7577
8198
  let ancestorAgent = this.parentAgent;
7578
8199
  while (ancestorAgent !== null) {
@@ -7626,7 +8247,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
7626
8247
  * This matches the Python implementation's _llm_flow property
7627
8248
  */
7628
8249
  get llmFlow() {
7629
- if (this.disallowTransferToParent && this.disallowTransferToPeers && !_optionalChain([this, 'access', _219 => _219.subAgents, 'optionalAccess', _220 => _220.length])) {
8250
+ if (this.disallowTransferToParent && this.disallowTransferToPeers && !_optionalChain([this, 'access', _230 => _230.subAgents, 'optionalAccess', _231 => _231.length])) {
7630
8251
  return new SingleFlow();
7631
8252
  }
7632
8253
  return new AutoFlow();
@@ -7636,7 +8257,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
7636
8257
  * This matches the Python implementation's __maybe_save_output_to_state
7637
8258
  */
7638
8259
  maybeSaveOutputToState(event) {
7639
- if (this.outputKey && event.isFinalResponse() && _optionalChain([event, 'access', _221 => _221.content, 'optionalAccess', _222 => _222.parts])) {
8260
+ if (this.outputKey && event.isFinalResponse() && _optionalChain([event, 'access', _232 => _232.content, 'optionalAccess', _233 => _233.parts])) {
7640
8261
  const result = event.content.parts.map((part) => part.text || "").join("");
7641
8262
  if (result) {
7642
8263
  if (!event.actions.stateDelta) {
@@ -7676,7 +8297,7 @@ var LlmAgent = (_class21 = class _LlmAgent extends BaseAgent {
7676
8297
  yield errorEvent;
7677
8298
  }
7678
8299
  }
7679
- }, _class21);
8300
+ }, _class23);
7680
8301
 
7681
8302
  // src/agents/sequential-agent.ts
7682
8303
  var SequentialAgent = class extends BaseAgent {
@@ -7742,11 +8363,11 @@ var LlmCallsLimitExceededError = class extends Error {
7742
8363
  this.name = "LlmCallsLimitExceededError";
7743
8364
  }
7744
8365
  };
7745
- var InvocationCostManager = (_class22 = class {constructor() { _class22.prototype.__init40.call(this); }
8366
+ var InvocationCostManager = (_class24 = class {constructor() { _class24.prototype.__init43.call(this); }
7746
8367
  /**
7747
8368
  * A counter that keeps track of number of llm calls made.
7748
8369
  */
7749
- __init40() {this._numberOfLlmCalls = 0}
8370
+ __init43() {this._numberOfLlmCalls = 0}
7750
8371
  /**
7751
8372
  * Increments _numberOfLlmCalls and enforces the limit.
7752
8373
  */
@@ -7758,11 +8379,11 @@ var InvocationCostManager = (_class22 = class {constructor() { _class22.prototyp
7758
8379
  );
7759
8380
  }
7760
8381
  }
7761
- }, _class22);
8382
+ }, _class24);
7762
8383
  function newInvocationContextId() {
7763
8384
  return `e-${crypto.randomUUID()}`;
7764
8385
  }
7765
- var InvocationContext = (_class23 = class _InvocationContext {
8386
+ var InvocationContext = (_class25 = class _InvocationContext {
7766
8387
 
7767
8388
 
7768
8389
 
@@ -7797,7 +8418,7 @@ var InvocationContext = (_class23 = class _InvocationContext {
7797
8418
  *
7798
8419
  * Set to True in callbacks or tools to terminate this invocation.
7799
8420
  */
7800
- __init41() {this.endInvocation = false}
8421
+ __init44() {this.endInvocation = false}
7801
8422
  /**
7802
8423
  * The queue to receive live requests.
7803
8424
  */
@@ -7818,11 +8439,11 @@ var InvocationContext = (_class23 = class _InvocationContext {
7818
8439
  * A container to keep track of different kinds of costs incurred as a part
7819
8440
  * of this invocation.
7820
8441
  */
7821
- __init42() {this._invocationCostManager = new InvocationCostManager()}
8442
+ __init45() {this._invocationCostManager = new InvocationCostManager()}
7822
8443
  /**
7823
8444
  * Constructor for InvocationContext
7824
8445
  */
7825
- constructor(options) {;_class23.prototype.__init41.call(this);_class23.prototype.__init42.call(this);
8446
+ constructor(options) {;_class25.prototype.__init44.call(this);_class25.prototype.__init45.call(this);
7826
8447
  this.artifactService = options.artifactService;
7827
8448
  this.sessionService = options.sessionService;
7828
8449
  this.memoryService = options.memoryService;
@@ -7882,7 +8503,7 @@ var InvocationContext = (_class23 = class _InvocationContext {
7882
8503
  runConfig: this.runConfig
7883
8504
  });
7884
8505
  }
7885
- }, _class23);
8506
+ }, _class25);
7886
8507
 
7887
8508
  // src/agents/parallel-agent.ts
7888
8509
  function createBranchContextForSubAgent(agent, subAgent, invocationContext) {
@@ -7999,7 +8620,7 @@ var LoopAgent = class extends BaseAgent {
  for (const subAgent of this.subAgents) {
  for await (const event of subAgent.runAsync(ctx)) {
  yield event;
- if (_optionalChain([event, 'access', _223 => _223.actions, 'optionalAccess', _224 => _224.escalate])) {
+ if (_optionalChain([event, 'access', _234 => _234.actions, 'optionalAccess', _235 => _235.escalate])) {
  return;
  }
  }
@@ -8017,7 +8638,7 @@ var LoopAgent = class extends BaseAgent {
 
  // src/agents/lang-graph-agent.ts
  init_logger();
- var LangGraphAgent = (_class24 = class extends BaseAgent {
+ var LangGraphAgent = (_class26 = class extends BaseAgent {
  /**
  * Graph nodes (agents and their connections)
  */
@@ -8033,8 +8654,8 @@ var LangGraphAgent = (_class24 = class extends BaseAgent {
  /**
  * Results from node executions
  */
- __init43() {this.results = []}
- __init44() {this.logger = new Logger({ name: "LangGraphAgent" })}
+ __init46() {this.results = []}
+ __init47() {this.logger = new Logger({ name: "LangGraphAgent" })}
  /**
  * Constructor for LangGraphAgent
  */
@@ -8042,7 +8663,7 @@ var LangGraphAgent = (_class24 = class extends BaseAgent {
  super({
  name: config.name,
  description: config.description
- });_class24.prototype.__init43.call(this);_class24.prototype.__init44.call(this);;
+ });_class26.prototype.__init46.call(this);_class26.prototype.__init47.call(this);;
  this.nodes = /* @__PURE__ */ new Map();
  for (const node of config.nodes) {
  if (this.nodes.has(node.name)) {
@@ -8238,7 +8859,7 @@ var LangGraphAgent = (_class24 = class extends BaseAgent {
  }
  this.maxSteps = maxSteps;
  }
- }, _class24);
+ }, _class26);
 
  // src/runners.ts
 
@@ -8307,17 +8928,17 @@ var RunConfig = class {
  */
 
  constructor(config) {
- this.speechConfig = _optionalChain([config, 'optionalAccess', _225 => _225.speechConfig]);
- this.responseModalities = _optionalChain([config, 'optionalAccess', _226 => _226.responseModalities]);
- this.saveInputBlobsAsArtifacts = _optionalChain([config, 'optionalAccess', _227 => _227.saveInputBlobsAsArtifacts]) || false;
- this.supportCFC = _optionalChain([config, 'optionalAccess', _228 => _228.supportCFC]) || false;
- this.streamingMode = _optionalChain([config, 'optionalAccess', _229 => _229.streamingMode]) || "NONE" /* NONE */;
- this.outputAudioTranscription = _optionalChain([config, 'optionalAccess', _230 => _230.outputAudioTranscription]);
- this.inputAudioTranscription = _optionalChain([config, 'optionalAccess', _231 => _231.inputAudioTranscription]);
- this.realtimeInputConfig = _optionalChain([config, 'optionalAccess', _232 => _232.realtimeInputConfig]);
- this.enableAffectiveDialog = _optionalChain([config, 'optionalAccess', _233 => _233.enableAffectiveDialog]);
- this.proactivity = _optionalChain([config, 'optionalAccess', _234 => _234.proactivity]);
- this.maxLlmCalls = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _235 => _235.maxLlmCalls]), () => ( 500));
+ this.speechConfig = _optionalChain([config, 'optionalAccess', _236 => _236.speechConfig]);
+ this.responseModalities = _optionalChain([config, 'optionalAccess', _237 => _237.responseModalities]);
+ this.saveInputBlobsAsArtifacts = _optionalChain([config, 'optionalAccess', _238 => _238.saveInputBlobsAsArtifacts]) || false;
+ this.supportCFC = _optionalChain([config, 'optionalAccess', _239 => _239.supportCFC]) || false;
+ this.streamingMode = _optionalChain([config, 'optionalAccess', _240 => _240.streamingMode]) || "NONE" /* NONE */;
+ this.outputAudioTranscription = _optionalChain([config, 'optionalAccess', _241 => _241.outputAudioTranscription]);
+ this.inputAudioTranscription = _optionalChain([config, 'optionalAccess', _242 => _242.inputAudioTranscription]);
+ this.realtimeInputConfig = _optionalChain([config, 'optionalAccess', _243 => _243.realtimeInputConfig]);
+ this.enableAffectiveDialog = _optionalChain([config, 'optionalAccess', _244 => _244.enableAffectiveDialog]);
+ this.proactivity = _optionalChain([config, 'optionalAccess', _245 => _245.proactivity]);
+ this.maxLlmCalls = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _246 => _246.maxLlmCalls]), () => ( 500));
  this.validateMaxLlmCalls();
  }
  /**
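The renumbered _class/_optionalChain bindings in this hunk are bundler output; the consumer-visible behavior is the defaulting in the RunConfig constructor: maxLlmCalls falls back to 500, streamingMode to "NONE", and saveInputBlobsAsArtifacts/supportCFC to false, with validateMaxLlmCalls() run afterwards and the limit later enforced by InvocationCostManager (which appears to throw LlmCallsLimitExceededError once exceeded). A minimal TypeScript sketch of that defaulting; only the names shown in the hunk are taken from the source, and passing an empty object is an assumption about the typings:

// Hedged sketch of RunConfig defaulting as compiled above, not authoritative API documentation.
import { RunConfig } from "@iqai/adk";

const defaults = new RunConfig({});                 // maxLlmCalls -> 500, streamingMode -> "NONE"
const capped = new RunConfig({ maxLlmCalls: 25 });  // explicit cap, still passed through validateMaxLlmCalls()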
@@ -8338,10 +8959,8 @@ var RunConfig = class {
  };
 
  // src/artifacts/in-memory-artifact-service.ts
- init_logger();
- var logger9 = new Logger({ name: "InMemoryArtifactService" });
- var InMemoryArtifactService = (_class25 = class {constructor() { _class25.prototype.__init45.call(this); }
- __init45() {this.artifacts = /* @__PURE__ */ new Map()}
+ var InMemoryArtifactService = (_class27 = class {constructor() { _class27.prototype.__init48.call(this); }
+ __init48() {this.artifacts = /* @__PURE__ */ new Map()}
  fileHasUserNamespace(filename) {
  return filename.startsWith("user:");
  }
@@ -8414,7 +9033,7 @@ var InMemoryArtifactService = (_class25 = class {constructor() { _class25.protot
  }
  return Array.from({ length: versions.length }, (_, i) => i);
  }
- }, _class25);
+ }, _class27);
 
  // src/runners.ts
  init_logger();
@@ -8441,15 +9060,15 @@ function _extractWordsLower(text) {
  const words = text.match(/[A-Za-z]+/g) || [];
  return new Set(words.map((word) => word.toLowerCase()));
  }
- var InMemoryMemoryService = (_class26 = class {
+ var InMemoryMemoryService = (_class28 = class {
  /**
  * Keys are app_name/user_id, session_id. Values are session event lists.
  */
- __init46() {this._sessionEvents = /* @__PURE__ */ new Map()}
+ __init49() {this._sessionEvents = /* @__PURE__ */ new Map()}
  /**
  * Constructor for InMemoryMemoryService
  */
- constructor() {;_class26.prototype.__init46.call(this);
+ constructor() {;_class28.prototype.__init49.call(this);
  this._sessionEvents = /* @__PURE__ */ new Map();
  }
  /**
@@ -8463,7 +9082,7 @@ var InMemoryMemoryService = (_class26 = class {
  }
  const userSessions = this._sessionEvents.get(userKey);
  const filteredEvents = session.events.filter(
- (event) => _optionalChain([event, 'access', _236 => _236.content, 'optionalAccess', _237 => _237.parts])
+ (event) => _optionalChain([event, 'access', _247 => _247.content, 'optionalAccess', _248 => _248.parts])
  );
  userSessions.set(session.id, filteredEvents);
  }
@@ -8533,7 +9152,7 @@ var InMemoryMemoryService = (_class26 = class {
  clear() {
  this._sessionEvents.clear();
  }
- }, _class26);
+ }, _class28);
 
  // src/sessions/in-memory-session-service.ts
  var _crypto = require('crypto');
@@ -8575,19 +9194,19 @@ var BaseSessionService = class {
  };
 
  // src/sessions/in-memory-session-service.ts
- var InMemorySessionService = (_class27 = class extends BaseSessionService {constructor(...args2) { super(...args2); _class27.prototype.__init47.call(this);_class27.prototype.__init48.call(this);_class27.prototype.__init49.call(this); }
+ var InMemorySessionService = (_class29 = class extends BaseSessionService {constructor(...args2) { super(...args2); _class29.prototype.__init50.call(this);_class29.prototype.__init51.call(this);_class29.prototype.__init52.call(this); }
  /**
  * A map from app name to a map from user ID to a map from session ID to session.
  */
- __init47() {this.sessions = /* @__PURE__ */ new Map()}
+ __init50() {this.sessions = /* @__PURE__ */ new Map()}
  /**
  * A map from app name to a map from user ID to a map from key to the value.
  */
- __init48() {this.userState = /* @__PURE__ */ new Map()}
+ __init51() {this.userState = /* @__PURE__ */ new Map()}
  /**
  * A map from app name to a map from key to the value.
  */
- __init49() {this.appState = /* @__PURE__ */ new Map()}
+ __init52() {this.appState = /* @__PURE__ */ new Map()}
  /**
  * Creates a new session.
  */
@@ -8602,7 +9221,7 @@ var InMemorySessionService = (_class27 = class extends BaseSessionService {const
  return this.createSessionImpl(appName, userId, state, sessionId);
  }
  createSessionImpl(appName, userId, state, sessionId) {
- const finalSessionId = _optionalChain([sessionId, 'optionalAccess', _238 => _238.trim, 'call', _239 => _239()]) || _crypto.randomUUID.call(void 0, );
+ const finalSessionId = _optionalChain([sessionId, 'optionalAccess', _249 => _249.trim, 'call', _250 => _250()]) || _crypto.randomUUID.call(void 0, );
  const session = {
  appName,
  userId,
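The compiled expression above (sessionId?.trim() || randomUUID()) means an omitted, empty, or whitespace-only session id results in a generated UUID, while a non-blank id is kept as given. Illustrative TypeScript, assuming the createSession(appName, userId, state?, sessionId?) argument order that the surrounding context lines suggest:

// Hedged sketch of session-id handling; the signature is inferred from the createSessionImpl call above.
import { InMemorySessionService } from "@iqai/adk";

async function demo() {
  const sessions = new InMemorySessionService();
  const generated = await sessions.createSession("my-app", "user-1");               // id: random UUID
  const alsoGenerated = await sessions.createSession("my-app", "user-1", {}, "  "); // blank id -> random UUID
  const explicit = await sessions.createSession("my-app", "user-1", {}, "chat-42"); // id kept: "chat-42"
  return { generated, alsoGenerated, explicit };
}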
@@ -8759,7 +9378,7 @@ var InMemorySessionService = (_class27 = class extends BaseSessionService {const
  warning(`sessionId ${sessionId} not in sessions[appName][userId]`);
  return event;
  }
- if (_optionalChain([event, 'access', _240 => _240.actions, 'optionalAccess', _241 => _241.stateDelta])) {
+ if (_optionalChain([event, 'access', _251 => _251.actions, 'optionalAccess', _252 => _252.stateDelta])) {
  for (const key in event.actions.stateDelta) {
  const value = event.actions.stateDelta[key];
  if (key.startsWith(State.APP_PREFIX)) {
@@ -8784,24 +9403,23 @@ var InMemorySessionService = (_class27 = class extends BaseSessionService {const
  storageSession.lastUpdateTime = event.timestamp;
  return event;
  }
- }, _class27);
+ }, _class29);
 
  // src/runners.ts
- var logger10 = new Logger({ name: "Runner" });
  function _findFunctionCallEventIfLastEventIsFunctionResponse(session) {
  const events = session.events;
  if (!events || events.length === 0) {
  return null;
  }
  const lastEvent = events[events.length - 1];
- if (_optionalChain([lastEvent, 'access', _242 => _242.content, 'optionalAccess', _243 => _243.parts, 'optionalAccess', _244 => _244.some, 'call', _245 => _245((part) => part.functionResponse)])) {
- const functionCallId = _optionalChain([lastEvent, 'access', _246 => _246.content, 'access', _247 => _247.parts, 'access', _248 => _248.find, 'call', _249 => _249(
+ if (_optionalChain([lastEvent, 'access', _253 => _253.content, 'optionalAccess', _254 => _254.parts, 'optionalAccess', _255 => _255.some, 'call', _256 => _256((part) => part.functionResponse)])) {
+ const functionCallId = _optionalChain([lastEvent, 'access', _257 => _257.content, 'access', _258 => _258.parts, 'access', _259 => _259.find, 'call', _260 => _260(
  (part) => part.functionResponse
- ), 'optionalAccess', _250 => _250.functionResponse, 'optionalAccess', _251 => _251.id]);
+ ), 'optionalAccess', _261 => _261.functionResponse, 'optionalAccess', _262 => _262.id]);
  if (!functionCallId) return null;
  for (let i = events.length - 2; i >= 0; i--) {
  const event = events[i];
- const functionCalls = _optionalChain([event, 'access', _252 => _252.getFunctionCalls, 'optionalCall', _253 => _253()]) || [];
+ const functionCalls = _optionalChain([event, 'access', _263 => _263.getFunctionCalls, 'optionalCall', _264 => _264()]) || [];
  for (const functionCall of functionCalls) {
  if (functionCall.id === functionCallId) {
  return event;
@@ -8811,7 +9429,7 @@ function _findFunctionCallEventIfLastEventIsFunctionResponse(session) {
  }
  return null;
  }
- var Runner = class {
+ var Runner = (_class30 = class {
  /**
  * The app name of the runner.
  */
@@ -8832,6 +9450,7 @@ var Runner = class {
  * The memory service for the runner.
  */
 
+ __init53() {this.logger = new Logger({ name: "Runner" })}
  /**
  * Initializes the Runner.
  */
@@ -8841,7 +9460,7 @@ var Runner = class {
  artifactService,
  sessionService,
  memoryService
- }) {
+ }) {;_class30.prototype.__init53.call(this);
  this.appName = appName;
  this.agent = agent;
  this.artifactService = artifactService;
@@ -8934,7 +9553,7 @@ var Runner = class {
  yield event;
  }
  } catch (error) {
- logger10.debug("Error running agent:", error);
+ this.logger.debug("Error running agent:", error);
  span.recordException(error);
  span.setStatus({
  code: _api.SpanStatusCode.ERROR,
@@ -8988,17 +9607,17 @@ var Runner = class {
  */
  _findAgentToRun(session, rootAgent) {
  const event = _findFunctionCallEventIfLastEventIsFunctionResponse(session);
- if (_optionalChain([event, 'optionalAccess', _254 => _254.author])) {
+ if (_optionalChain([event, 'optionalAccess', _265 => _265.author])) {
  return rootAgent.findAgent(event.author);
  }
- const nonUserEvents = _optionalChain([session, 'access', _255 => _255.events, 'optionalAccess', _256 => _256.filter, 'call', _257 => _257((e) => e.author !== "user"), 'access', _258 => _258.reverse, 'call', _259 => _259()]) || [];
+ const nonUserEvents = _optionalChain([session, 'access', _266 => _266.events, 'optionalAccess', _267 => _267.filter, 'call', _268 => _268((e) => e.author !== "user"), 'access', _269 => _269.reverse, 'call', _270 => _270()]) || [];
  for (const event2 of nonUserEvents) {
  if (event2.author === rootAgent.name) {
  return rootAgent;
  }
- const agent = _optionalChain([rootAgent, 'access', _260 => _260.findSubAgent, 'optionalCall', _261 => _261(event2.author)]);
+ const agent = _optionalChain([rootAgent, 'access', _271 => _271.findSubAgent, 'optionalCall', _272 => _272(event2.author)]);
  if (!agent) {
- logger10.debug(
+ this.logger.debug(
  `Event from an unknown agent: ${event2.author}, event id: ${event2.id}`
  );
  continue;
@@ -9045,7 +9664,7 @@ var Runner = class {
  runConfig
  });
  }
- };
+ }, _class30);
  var InMemoryRunner = class extends Runner {
  /**
  * Deprecated. Please don't use. The in-memory session service for the runner.
@@ -9068,14 +9687,14 @@ var InMemoryRunner = class extends Runner {
  };
 
  // src/agents/agent-builder.ts
- var AgentBuilder = (_class28 = class _AgentBuilder {
+ var AgentBuilder = (_class31 = class _AgentBuilder {
 
 
- __init50() {this.agentType = "llm"}
+ __init54() {this.agentType = "llm"}
  /**
  * Private constructor - use static create() method
  */
- constructor(name) {;_class28.prototype.__init50.call(this);
+ constructor(name) {;_class31.prototype.__init54.call(this);
  this.config = { name };
  }
  /**
@@ -9262,7 +9881,7 @@ var AgentBuilder = (_class28 = class _AgentBuilder {
  parts: [{ text: message }]
  }
  })) {
- if (_optionalChain([event, 'access', _262 => _262.content, 'optionalAccess', _263 => _263.parts])) {
+ if (_optionalChain([event, 'access', _273 => _273.content, 'optionalAccess', _274 => _274.parts])) {
  const content = event.content.parts.map((part) => part.text || "").join("");
  if (content) {
  response += content;
@@ -9277,6 +9896,20 @@ var AgentBuilder = (_class28 = class _AgentBuilder {
  */
  createAgent() {
  switch (this.agentType) {
+ case "llm": {
+ if (!this.config.model) {
+ throw new Error("Model is required for LLM agent");
+ }
+ const model = this.config.model;
+ return new LlmAgent({
+ name: this.config.name,
+ model,
+ description: this.config.description,
+ instruction: this.config.instruction,
+ tools: this.config.tools,
+ planner: this.config.planner
+ });
+ }
  case "sequential":
  if (!this.config.subAgents) {
  throw new Error("Sub-agents required for sequential agent");
@@ -9315,18 +9948,9 @@ var AgentBuilder = (_class28 = class _AgentBuilder {
  nodes: this.config.nodes,
  rootNode: this.config.rootNode
  });
- default:
- return new LlmAgent({
- name: this.config.name,
- model: this.config.model,
- description: this.config.description,
- instruction: this.config.instruction,
- tools: this.config.tools,
- planner: this.config.planner
- });
  }
  }
- }, _class28);
+ }, _class31);
 
  // src/memory/index.ts
  var memory_exports = {};
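Taken together, these two hunks replace the old catch-all default: branch in createAgent() with an explicit "llm" case that validates the model up front: building an LLM agent without a configured model now throws "Model is required for LLM agent" instead of constructing an LlmAgent with model undefined. A minimal TypeScript sketch of the new guard; it mirrors the branch above rather than the builder's public API (whose setter names are not visible in this diff), and it assumes the other LlmAgent options shown in the hunk are optional:

// Hedged sketch of the added "llm" branch; LlmAgent and its option names are copied from the hunk.
import { LlmAgent } from "@iqai/adk";

function buildLlmAgent(config: { name: string; model?: string }) {
  if (!config.model) {
    throw new Error("Model is required for LLM agent"); // new in 0.1.5
  }
  // 0.1.4 reached this point via `default:` even when model was undefined.
  return new LlmAgent({ name: config.name, model: config.model });
}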
@@ -9390,7 +10014,7 @@ var VertexAiSessionService = class extends BaseSessionService {
  path: `operations/${operationId}`,
  request_dict: {}
  });
- if (_optionalChain([lroResponse, 'optionalAccess', _264 => _264.done])) {
+ if (_optionalChain([lroResponse, 'optionalAccess', _275 => _275.done])) {
  break;
  }
  await new Promise((resolve) => setTimeout(resolve, 1e3));
@@ -9660,11 +10284,11 @@ var VertexAiSessionService = class extends BaseSessionService {
 
  // src/sessions/database-session-service.ts
  var _kysely = require('kysely');
- var DatabaseSessionService = (_class29 = class extends BaseSessionService {
+ var DatabaseSessionService = (_class32 = class extends BaseSessionService {
 
- __init51() {this.initialized = false}
+ __init55() {this.initialized = false}
  constructor(config) {
- super();_class29.prototype.__init51.call(this);;
+ super();_class32.prototype.__init55.call(this);;
  this.db = config.db;
  if (!config.skipTableCreation) {
  this.initializeDatabase().catch((error) => {
@@ -9739,7 +10363,7 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
  if (!jsonString) return defaultValue;
  try {
  return JSON.parse(jsonString);
- } catch (e3) {
+ } catch (e4) {
  return defaultValue;
  }
  }
@@ -9761,12 +10385,12 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
  }
  async createSession(appName, userId, state, sessionId) {
  await this.ensureInitialized();
- const id = _optionalChain([sessionId, 'optionalAccess', _265 => _265.trim, 'call', _266 => _266()]) || this.generateSessionId();
+ const id = _optionalChain([sessionId, 'optionalAccess', _276 => _276.trim, 'call', _277 => _277()]) || this.generateSessionId();
  return await this.db.transaction().execute(async (trx) => {
  const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", appName).executeTakeFirst();
  const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", appName).where("user_id", "=", userId).executeTakeFirst();
- let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _267 => _267.state]), {});
- let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _268 => _268.state]), {});
+ let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _278 => _278.state]), {});
+ let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _279 => _279.state]), {});
  if (!appState) {
  await trx.insertInto("app_states").values({
  app_name: appName,
@@ -9825,21 +10449,21 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
  return void 0;
  }
  let eventQuery = trx.selectFrom("events").selectAll().where("session_id", "=", sessionId).orderBy("timestamp", "desc");
- if (_optionalChain([config, 'optionalAccess', _269 => _269.afterTimestamp])) {
+ if (_optionalChain([config, 'optionalAccess', _280 => _280.afterTimestamp])) {
  eventQuery = eventQuery.where(
  "timestamp",
  ">=",
  new Date(config.afterTimestamp * 1e3)
  );
  }
- if (_optionalChain([config, 'optionalAccess', _270 => _270.numRecentEvents])) {
+ if (_optionalChain([config, 'optionalAccess', _281 => _281.numRecentEvents])) {
  eventQuery = eventQuery.limit(config.numRecentEvents);
  }
  const storageEvents = await eventQuery.execute();
  const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", appName).executeTakeFirst();
  const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", appName).where("user_id", "=", userId).executeTakeFirst();
- const currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _271 => _271.state]), {});
- const currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _272 => _272.state]), {});
+ const currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _282 => _282.state]), {});
+ const currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _283 => _283.state]), {});
  const sessionState = this.parseJsonSafely(storageSession.state, {});
  const mergedState = this.mergeState(
  currentAppState,
@@ -9897,13 +10521,13 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
  }
  const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", session.appName).executeTakeFirst();
  const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", session.appName).where("user_id", "=", session.userId).executeTakeFirst();
- let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _273 => _273.state]), {});
- let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _274 => _274.state]), {});
+ let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _284 => _284.state]), {});
+ let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _285 => _285.state]), {});
  let sessionState = this.parseJsonSafely(storageSession.state, {});
  let appStateDelta = {};
  let userStateDelta = {};
  let sessionStateDelta = {};
- if (_optionalChain([event, 'access', _275 => _275.actions, 'optionalAccess', _276 => _276.stateDelta])) {
+ if (_optionalChain([event, 'access', _286 => _286.actions, 'optionalAccess', _287 => _287.stateDelta])) {
  const deltas = this.extractStateDelta(event.actions.stateDelta);
  appStateDelta = deltas.appStateDelta;
  userStateDelta = deltas.userStateDelta;
@@ -10049,7 +10673,7 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
  * Overrides the base class method to work with plain object state.
  */
  updateSessionState(session, event) {
- if (!_optionalChain([event, 'access', _277 => _277.actions, 'optionalAccess', _278 => _278.stateDelta])) {
+ if (!_optionalChain([event, 'access', _288 => _288.actions, 'optionalAccess', _289 => _289.stateDelta])) {
  return;
  }
  for (const [key, value] of Object.entries(event.actions.stateDelta)) {
@@ -10058,10 +10682,10 @@ var DatabaseSessionService = (_class29 = class extends BaseSessionService {
  }
  }
  }
- }, _class29);
+ }, _class32);
 
  // src/sessions/database-factories.ts
-
+ var _dedent = require('dedent'); var _dedent2 = _interopRequireDefault(_dedent);
 
  function createDependencyError(packageName, dbType) {
  return new Error(
@@ -10138,11 +10762,9 @@ function createDatabaseSessionService(databaseUrl, options) {
  }
 
  // src/artifacts/gcs-artifact-service.ts
- init_logger();
 
 
  var _storage = require('@google-cloud/storage');
- var logger11 = new Logger({ name: "GcsArtifactService" });
  var GcsArtifactService = class {
 
 
@@ -10221,7 +10843,7 @@ var GcsArtifactService = class {
  };
  return part;
  } catch (error) {
- if (_optionalChain([error, 'optionalAccess', _279 => _279.code]) === 404) {
+ if (_optionalChain([error, 'optionalAccess', _290 => _290.code]) === 404) {
  return null;
  }
  throw error;
@@ -10297,20 +10919,20 @@ __export(flows_exports, {
  REQUEST_EUC_FUNCTION_CALL_NAME: () => REQUEST_EUC_FUNCTION_CALL_NAME,
  SingleFlow: () => SingleFlow,
  agentTransferRequestProcessor: () => requestProcessor8,
- basicRequestProcessor: () => requestProcessor,
- codeExecutionRequestProcessor: () => requestProcessor7,
- codeExecutionResponseProcessor: () => responseProcessor2,
- contentRequestProcessor: () => requestProcessor5,
+ basicRequestProcessor: () => requestProcessor2,
+ codeExecutionRequestProcessor: () => requestProcessor3,
+ codeExecutionResponseProcessor: () => responseProcessor,
+ contentRequestProcessor: () => requestProcessor4,
  generateAuthEvent: () => generateAuthEvent,
  generateClientFunctionCallId: () => generateClientFunctionCallId,
  getLongRunningFunctionCalls: () => getLongRunningFunctionCalls,
  handleFunctionCallsAsync: () => handleFunctionCallsAsync,
  handleFunctionCallsLive: () => handleFunctionCallsLive,
- identityRequestProcessor: () => requestProcessor3,
- instructionsRequestProcessor: () => requestProcessor4,
+ identityRequestProcessor: () => requestProcessor5,
+ instructionsRequestProcessor: () => requestProcessor6,
  mergeParallelFunctionResponseEvents: () => mergeParallelFunctionResponseEvents,
- nlPlanningRequestProcessor: () => requestProcessor6,
- nlPlanningResponseProcessor: () => responseProcessor,
+ nlPlanningRequestProcessor: () => requestProcessor7,
+ nlPlanningResponseProcessor: () => responseProcessor2,
  populateClientFunctionCallId: () => populateClientFunctionCallId,
  removeClientFunctionCallId: () => removeClientFunctionCallId
  });
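Despite the renumbering of the underlying bundler bindings (requestProcessor2, responseProcessor, and so on), the keys exposed on the Flows namespace and on the package root are unchanged, so existing imports should keep resolving. Illustrative TypeScript using only names present in this export map; the processors' call signatures are not shown in this diff:

// Hedged sketch of accessing the unchanged public names via the exported Flows namespace.
import { Flows } from "@iqai/adk";

const { basicRequestProcessor, nlPlanningResponseProcessor } = Flows;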
@@ -10456,4 +11078,9 @@ var VERSION = "0.1.0";
 
 
 
- exports.AF_FUNCTION_CALL_ID_PREFIX = AF_FUNCTION_CALL_ID_PREFIX; exports.Agent = LlmAgent; exports.AgentBuilder = AgentBuilder; exports.Agents = agents_exports; exports.AnthropicLlm = AnthropicLlm; exports.ApiKeyCredential = ApiKeyCredential; exports.ApiKeyScheme = ApiKeyScheme; exports.AuthConfig = AuthConfig; exports.AuthCredential = AuthCredential; exports.AuthCredentialType = AuthCredentialType; exports.AuthHandler = AuthHandler; exports.AuthScheme = AuthScheme; exports.AuthSchemeType = AuthSchemeType; exports.AuthTool = AuthTool; exports.AutoFlow = AutoFlow; exports.BaseAgent = BaseAgent; exports.BaseLLMConnection = BaseLLMConnection; exports.BaseLlm = BaseLlm; exports.BaseLlmFlow = BaseLlmFlow; exports.BaseLlmRequestProcessor = BaseLlmRequestProcessor; exports.BaseLlmResponseProcessor = BaseLlmResponseProcessor; exports.BasePlanner = BasePlanner; exports.BaseSessionService = BaseSessionService; exports.BaseTool = BaseTool; exports.BasicAuthCredential = BasicAuthCredential; exports.BearerTokenCredential = BearerTokenCredential; exports.BuiltInPlanner = BuiltInPlanner; exports.CallbackContext = CallbackContext; exports.DatabaseSessionService = DatabaseSessionService; exports.EnhancedAuthConfig = EnhancedAuthConfig; exports.Event = Event; exports.EventActions = EventActions; exports.Events = events_exports; exports.ExitLoopTool = ExitLoopTool; exports.FileOperationsTool = FileOperationsTool; exports.Flows = flows_exports; exports.FunctionTool = FunctionTool; exports.GcsArtifactService = GcsArtifactService; exports.GetUserChoiceTool = GetUserChoiceTool; exports.GoogleLlm = GoogleLlm; exports.GoogleSearch = GoogleSearch; exports.HttpRequestTool = HttpRequestTool; exports.HttpScheme = HttpScheme; exports.InMemoryArtifactService = InMemoryArtifactService; exports.InMemoryMemoryService = InMemoryMemoryService; exports.InMemoryRunner = InMemoryRunner; exports.InMemorySessionService = InMemorySessionService; exports.InvocationContext = InvocationContext; exports.LLMRegistry = LLMRegistry; exports.LangGraphAgent = LangGraphAgent; exports.LlmAgent = LlmAgent; exports.LlmCallsLimitExceededError = LlmCallsLimitExceededError; exports.LlmRequest = LlmRequest; exports.LlmResponse = LlmResponse; exports.LoadArtifactsTool = LoadArtifactsTool; exports.LoadMemoryTool = LoadMemoryTool; exports.LoopAgent = LoopAgent; exports.McpAbi = McpAbi; exports.McpAtp = McpAtp; exports.McpBamm = McpBamm; exports.McpError = McpError; exports.McpErrorType = McpErrorType; exports.McpFilesystem = McpFilesystem; exports.McpFraxlend = McpFraxlend; exports.McpGeneric = McpGeneric; exports.McpIqWiki = McpIqWiki; exports.McpMemory = McpMemory; exports.McpNearAgent = McpNearAgent; exports.McpNearIntentSwaps = McpNearIntentSwaps; exports.McpOdos = McpOdos; exports.McpSamplingHandler = McpSamplingHandler; exports.McpTelegram = McpTelegram; exports.McpToolset = McpToolset; exports.Memory = memory_exports; exports.Models = models_exports; exports.OAuth2Credential = OAuth2Credential; exports.OAuth2Scheme = OAuth2Scheme; exports.OpenAiLlm = OpenAiLlm; exports.OpenIdConnectScheme = OpenIdConnectScheme; exports.ParallelAgent = ParallelAgent; exports.PlanReActPlanner = PlanReActPlanner; exports.REQUEST_EUC_FUNCTION_CALL_NAME = REQUEST_EUC_FUNCTION_CALL_NAME; exports.ReadonlyContext = ReadonlyContext; exports.RunConfig = RunConfig; exports.Runner = Runner; exports.SequentialAgent = SequentialAgent; exports.Sessions = sessions_exports; exports.SingleFlow = SingleFlow; exports.State = State; exports.StreamingMode = StreamingMode; 
exports.TelemetryService = TelemetryService; exports.ToolContext = ToolContext; exports.Tools = tools_exports; exports.TransferToAgentTool = TransferToAgentTool; exports.UserInteractionTool = UserInteractionTool; exports.VERSION = VERSION; exports.VertexAiSessionService = VertexAiSessionService; exports.adkToMcpToolType = adkToMcpToolType; exports.agentTransferRequestProcessor = requestProcessor8; exports.basicRequestProcessor = requestProcessor; exports.buildFunctionDeclaration = buildFunctionDeclaration; exports.codeExecutionRequestProcessor = requestProcessor7; exports.codeExecutionResponseProcessor = responseProcessor2; exports.contentRequestProcessor = requestProcessor5; exports.createAuthToolArguments = createAuthToolArguments; exports.createDatabaseSessionService = createDatabaseSessionService; exports.createFunctionTool = createFunctionTool; exports.createMysqlSessionService = createMysqlSessionService; exports.createPostgresSessionService = createPostgresSessionService; exports.createSamplingHandler = createSamplingHandler; exports.createSqliteSessionService = createSqliteSessionService; exports.generateAuthEvent = generateAuthEvent; exports.generateClientFunctionCallId = generateClientFunctionCallId; exports.getLongRunningFunctionCalls = getLongRunningFunctionCalls; exports.getMcpTools = getMcpTools; exports.handleFunctionCallsAsync = handleFunctionCallsAsync; exports.handleFunctionCallsLive = handleFunctionCallsLive; exports.identityRequestProcessor = requestProcessor3; exports.initializeTelemetry = initializeTelemetry; exports.injectSessionState = injectSessionState; exports.instructionsRequestProcessor = requestProcessor4; exports.isEnhancedAuthConfig = isEnhancedAuthConfig; exports.jsonSchemaToDeclaration = jsonSchemaToDeclaration; exports.mcpSchemaToParameters = mcpSchemaToParameters; exports.mergeParallelFunctionResponseEvents = mergeParallelFunctionResponseEvents; exports.newInvocationContextId = newInvocationContextId; exports.nlPlanningRequestProcessor = requestProcessor6; exports.nlPlanningResponseProcessor = responseProcessor; exports.normalizeJsonSchema = normalizeJsonSchema; exports.populateClientFunctionCallId = populateClientFunctionCallId; exports.registerProviders = registerProviders; exports.removeClientFunctionCallId = removeClientFunctionCallId; exports.requestProcessor = requestProcessor2; exports.shutdownTelemetry = shutdownTelemetry; exports.telemetryService = telemetryService; exports.traceLlmCall = traceLlmCall; exports.traceToolCall = traceToolCall; exports.tracer = tracer;
+
+
+
+
+
+ exports.AF_FUNCTION_CALL_ID_PREFIX = AF_FUNCTION_CALL_ID_PREFIX; exports.Agent = LlmAgent; exports.AgentBuilder = AgentBuilder; exports.Agents = agents_exports; exports.AiSdkLlm = AiSdkLlm; exports.AnthropicLlm = AnthropicLlm; exports.ApiKeyCredential = ApiKeyCredential; exports.ApiKeyScheme = ApiKeyScheme; exports.AuthConfig = AuthConfig; exports.AuthCredential = AuthCredential; exports.AuthCredentialType = AuthCredentialType; exports.AuthHandler = AuthHandler; exports.AuthScheme = AuthScheme; exports.AuthSchemeType = AuthSchemeType; exports.AuthTool = AuthTool; exports.AutoFlow = AutoFlow; exports.BaseAgent = BaseAgent; exports.BaseCodeExecutor = BaseCodeExecutor; exports.BaseLLMConnection = BaseLLMConnection; exports.BaseLlm = BaseLlm; exports.BaseLlmFlow = BaseLlmFlow; exports.BaseLlmRequestProcessor = BaseLlmRequestProcessor; exports.BaseLlmResponseProcessor = BaseLlmResponseProcessor; exports.BasePlanner = BasePlanner; exports.BaseSessionService = BaseSessionService; exports.BaseTool = BaseTool; exports.BasicAuthCredential = BasicAuthCredential; exports.BearerTokenCredential = BearerTokenCredential; exports.BuiltInCodeExecutor = BuiltInCodeExecutor; exports.BuiltInPlanner = BuiltInPlanner; exports.CallbackContext = CallbackContext; exports.CodeExecutionUtils = CodeExecutionUtils; exports.CodeExecutorContext = CodeExecutorContext; exports.DatabaseSessionService = DatabaseSessionService; exports.EnhancedAuthConfig = EnhancedAuthConfig; exports.Event = Event; exports.EventActions = EventActions; exports.Events = events_exports; exports.ExitLoopTool = ExitLoopTool; exports.FileOperationsTool = FileOperationsTool; exports.Flows = flows_exports; exports.FunctionTool = FunctionTool; exports.GcsArtifactService = GcsArtifactService; exports.GetUserChoiceTool = GetUserChoiceTool; exports.GoogleLlm = GoogleLlm; exports.GoogleSearch = GoogleSearch; exports.HttpRequestTool = HttpRequestTool; exports.HttpScheme = HttpScheme; exports.InMemoryArtifactService = InMemoryArtifactService; exports.InMemoryMemoryService = InMemoryMemoryService; exports.InMemoryRunner = InMemoryRunner; exports.InMemorySessionService = InMemorySessionService; exports.InvocationContext = InvocationContext; exports.LLMRegistry = LLMRegistry; exports.LangGraphAgent = LangGraphAgent; exports.LlmAgent = LlmAgent; exports.LlmCallsLimitExceededError = LlmCallsLimitExceededError; exports.LlmRequest = LlmRequest; exports.LlmResponse = LlmResponse; exports.LoadArtifactsTool = LoadArtifactsTool; exports.LoadMemoryTool = LoadMemoryTool; exports.LoopAgent = LoopAgent; exports.McpAbi = McpAbi; exports.McpAtp = McpAtp; exports.McpBamm = McpBamm; exports.McpError = McpError; exports.McpErrorType = McpErrorType; exports.McpFilesystem = McpFilesystem; exports.McpFraxlend = McpFraxlend; exports.McpGeneric = McpGeneric; exports.McpIqWiki = McpIqWiki; exports.McpMemory = McpMemory; exports.McpNearAgent = McpNearAgent; exports.McpNearIntentSwaps = McpNearIntentSwaps; exports.McpOdos = McpOdos; exports.McpSamplingHandler = McpSamplingHandler; exports.McpTelegram = McpTelegram; exports.McpToolset = McpToolset; exports.Memory = memory_exports; exports.Models = models_exports; exports.OAuth2Credential = OAuth2Credential; exports.OAuth2Scheme = OAuth2Scheme; exports.OpenAiLlm = OpenAiLlm; exports.OpenIdConnectScheme = OpenIdConnectScheme; exports.ParallelAgent = ParallelAgent; exports.PlanReActPlanner = PlanReActPlanner; exports.REQUEST_EUC_FUNCTION_CALL_NAME = REQUEST_EUC_FUNCTION_CALL_NAME; exports.ReadonlyContext = ReadonlyContext; 
exports.RunConfig = RunConfig; exports.Runner = Runner; exports.SequentialAgent = SequentialAgent; exports.Sessions = sessions_exports; exports.SingleFlow = SingleFlow; exports.State = State; exports.StreamingMode = StreamingMode; exports.TelemetryService = TelemetryService; exports.ToolContext = ToolContext; exports.Tools = tools_exports; exports.TransferToAgentTool = TransferToAgentTool; exports.UserInteractionTool = UserInteractionTool; exports.VERSION = VERSION; exports.VertexAiSessionService = VertexAiSessionService; exports.adkToMcpToolType = adkToMcpToolType; exports.agentTransferRequestProcessor = requestProcessor8; exports.basicRequestProcessor = requestProcessor2; exports.buildFunctionDeclaration = buildFunctionDeclaration; exports.codeExecutionRequestProcessor = requestProcessor3; exports.codeExecutionResponseProcessor = responseProcessor; exports.contentRequestProcessor = requestProcessor4; exports.createAuthToolArguments = createAuthToolArguments; exports.createDatabaseSessionService = createDatabaseSessionService; exports.createFunctionTool = createFunctionTool; exports.createMysqlSessionService = createMysqlSessionService; exports.createPostgresSessionService = createPostgresSessionService; exports.createSamplingHandler = createSamplingHandler; exports.createSqliteSessionService = createSqliteSessionService; exports.generateAuthEvent = generateAuthEvent; exports.generateClientFunctionCallId = generateClientFunctionCallId; exports.getLongRunningFunctionCalls = getLongRunningFunctionCalls; exports.getMcpTools = getMcpTools; exports.handleFunctionCallsAsync = handleFunctionCallsAsync; exports.handleFunctionCallsLive = handleFunctionCallsLive; exports.identityRequestProcessor = requestProcessor5; exports.initializeTelemetry = initializeTelemetry; exports.injectSessionState = injectSessionState; exports.instructionsRequestProcessor = requestProcessor6; exports.isEnhancedAuthConfig = isEnhancedAuthConfig; exports.jsonSchemaToDeclaration = jsonSchemaToDeclaration; exports.mcpSchemaToParameters = mcpSchemaToParameters; exports.mergeParallelFunctionResponseEvents = mergeParallelFunctionResponseEvents; exports.newInvocationContextId = newInvocationContextId; exports.nlPlanningRequestProcessor = requestProcessor7; exports.nlPlanningResponseProcessor = responseProcessor2; exports.normalizeJsonSchema = normalizeJsonSchema; exports.populateClientFunctionCallId = populateClientFunctionCallId; exports.registerProviders = registerProviders; exports.removeClientFunctionCallId = removeClientFunctionCallId; exports.requestProcessor = requestProcessor; exports.shutdownTelemetry = shutdownTelemetry; exports.telemetryService = telemetryService; exports.traceLlmCall = traceLlmCall; exports.traceToolCall = traceToolCall; exports.tracer = tracer;
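The rebuilt export list adds several public names alongside the existing ones, most visibly AiSdkLlm, BaseCodeExecutor, BuiltInCodeExecutor, CodeExecutionUtils, and CodeExecutorContext. Illustrative TypeScript import, limited to names that appear in the new exports line; constructor signatures are not part of this diff:

// Hedged sketch; shows only that these symbols are exported by 0.1.5.
import {
  AiSdkLlm,
  BaseCodeExecutor,
  BuiltInCodeExecutor,
  CodeExecutionUtils,
  CodeExecutorContext,
} from "@iqai/adk";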