@iqai/adk 0.1.20 → 0.1.22

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
1
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15; var _class16; var _class17; var _class18; var _class19; var _class20; var _class21; var _class22; var _class23; var _class24; var _class25; var _class26; var _class27; var _class28; var _class29; var _class30; var _class31; var _class32; var _class33;var __defProp = Object.defineProperty;
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8; var _class9; var _class10; var _class11; var _class12; var _class13; var _class14; var _class15; var _class16; var _class17; var _class18; var _class19; var _class20; var _class21; var _class22; var _class23; var _class24; var _class25; var _class26; var _class27; var _class28; var _class29; var _class30; var _class31; var _class32; var _class33; var _class34;var __defProp = Object.defineProperty;
2
2
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
3
3
  var __getOwnPropNames = Object.getOwnPropertyNames;
4
4
  var __hasOwnProp = Object.prototype.hasOwnProperty;
@@ -53,7 +53,7 @@ var init_logger = __esm({
53
53
  }
54
54
  info(message, ...args) {
55
55
  const time = (/* @__PURE__ */ new Date()).toLocaleTimeString();
56
- console.info(
56
+ console.debug(
57
57
  this.colorize(`[${time}] \u2139\uFE0F [${this.name}] ${message}`),
58
58
  ...args
59
59
  );
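
In the hunk above, the logger's info() output is routed through console.debug rather than console.info. A minimal standalone sketch of that pattern follows; the Logger class here is illustrative only, not the package's actual implementation:

    // Illustrative logger: info-level messages are emitted via console.debug,
    // so they only show up when the runtime's debug output is enabled.
    class Logger {
      constructor(private readonly name: string) {}

      info(message: string, ...args: unknown[]): void {
        const time = new Date().toLocaleTimeString();
        console.debug(`[${time}] [${this.name}] ${message}`, ...args);
      }
    }

    new Logger("demo").info("agent started");
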
@@ -229,7 +229,7 @@ var init_base_tool = __esm({
229
229
  * @param context The context of the tool
230
230
  * @returns The result of running the tool
231
231
  */
232
- async runAsync(args, context) {
232
+ async runAsync(args, context4) {
233
233
  throw new Error(`${this.constructor.name} runAsync is not implemented`);
234
234
  }
235
235
  /**
@@ -253,6 +253,12 @@ var init_base_tool = __esm({
253
253
  if (!toolWithFunctionDeclarations.functionDeclarations) {
254
254
  toolWithFunctionDeclarations.functionDeclarations = [];
255
255
  }
256
+ const alreadyExists = toolWithFunctionDeclarations.functionDeclarations.some(
257
+ (fd) => _optionalChain([fd, 'optionalAccess', _2 => _2.name]) === functionDeclaration.name
258
+ );
259
+ if (alreadyExists) {
260
+ return;
261
+ }
256
262
  toolWithFunctionDeclarations.functionDeclarations.push(
257
263
  functionDeclaration
258
264
  );
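
The added guard makes declaration registration idempotent: a declaration is only pushed when no existing entry shares its name. A hedged TypeScript sketch of the same check, using simplified stand-in types rather than the package's real declaration types:

    // Simplified stand-in types for illustration.
    interface FunctionDeclaration { name?: string }
    interface ToolWithFunctionDeclarations { functionDeclarations?: FunctionDeclaration[] }

    function addDeclaration(
      tool: ToolWithFunctionDeclarations,
      declaration: FunctionDeclaration,
    ): void {
      tool.functionDeclarations ??= [];
      // Skip the push when a declaration with the same name is already registered,
      // so processing the same tool twice does not duplicate entries.
      const alreadyExists = tool.functionDeclarations.some(
        (fd) => fd?.name === declaration.name,
      );
      if (alreadyExists) return;
      tool.functionDeclarations.push(declaration);
    }
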
@@ -281,7 +287,7 @@ var init_base_tool = __esm({
281
287
  * @param context Tool execution context
282
288
  * @returns Result of the tool execution or error information
283
289
  */
284
- async safeExecute(args, context) {
290
+ async safeExecute(args, context4) {
285
291
  if (!this.validateArguments(args)) {
286
292
  return {
287
293
  error: "Invalid arguments",
@@ -302,7 +308,7 @@ var init_base_tool = __esm({
302
308
  );
303
309
  await new Promise((resolve) => setTimeout(resolve, delay));
304
310
  }
305
- const result = await this.runAsync(args, context);
311
+ const result = await this.runAsync(args, context4);
306
312
  return { result };
307
313
  } catch (error) {
308
314
  lastError = error instanceof Error ? error : new Error(String(error));
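
The surrounding safeExecute logic retries runAsync after a delay when a previous attempt failed and keeps the last error for reporting. A small self-contained sketch of that retry-with-delay shape; the helper name, attempt count, and fixed delay are illustrative assumptions, not the package's exact policy:

    // Illustrative retry helper: run an async operation up to maxAttempts times,
    // waiting delayMs between attempts and rethrowing the last error on exhaustion.
    async function runWithRetry<T>(
      op: () => Promise<T>,
      maxAttempts = 3,
      delayMs = 300,
    ): Promise<T> {
      let lastError: Error | undefined;
      for (let attempt = 0; attempt < maxAttempts; attempt++) {
        if (attempt > 0) {
          await new Promise((resolve) => setTimeout(resolve, delayMs));
        }
        try {
          return await op();
        } catch (error) {
          lastError = error instanceof Error ? error : new Error(String(error));
        }
      }
      throw lastError ?? new Error("Unknown error occurred");
    }
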
@@ -312,7 +318,7 @@ var init_base_tool = __esm({
312
318
  }
313
319
  return {
314
320
  error: "Execution failed",
315
- message: _optionalChain([lastError, 'optionalAccess', _2 => _2.message]) || "Unknown error occurred",
321
+ message: _optionalChain([lastError, 'optionalAccess', _3 => _3.message]) || "Unknown error occurred",
316
322
  tool: this.name
317
323
  };
318
324
  }
@@ -484,23 +490,23 @@ var init_function_tool = __esm({
484
490
  * @param options Optional configuration for the tool
485
491
  */
486
492
  constructor(func, options) {
487
- const name = _optionalChain([options, 'optionalAccess', _3 => _3.name]) || func.name;
488
- const description = _optionalChain([options, 'optionalAccess', _4 => _4.description]) || _optionalChain([(func.toString().match(/\/\*\*([\s\S]*?)\*\//) || []), 'access', _5 => _5[1], 'optionalAccess', _6 => _6.trim, 'call', _7 => _7()]) || "";
493
+ const name = _optionalChain([options, 'optionalAccess', _4 => _4.name]) || func.name;
494
+ const description = _optionalChain([options, 'optionalAccess', _5 => _5.description]) || _optionalChain([(func.toString().match(/\/\*\*([\s\S]*?)\*\//) || []), 'access', _6 => _6[1], 'optionalAccess', _7 => _7.trim, 'call', _8 => _8()]) || "";
489
495
  super({
490
496
  name,
491
497
  description,
492
- isLongRunning: _optionalChain([options, 'optionalAccess', _8 => _8.isLongRunning]) || false,
493
- shouldRetryOnFailure: _optionalChain([options, 'optionalAccess', _9 => _9.shouldRetryOnFailure]) || false,
494
- maxRetryAttempts: _optionalChain([options, 'optionalAccess', _10 => _10.maxRetryAttempts]) || 3
498
+ isLongRunning: _optionalChain([options, 'optionalAccess', _9 => _9.isLongRunning]) || false,
499
+ shouldRetryOnFailure: _optionalChain([options, 'optionalAccess', _10 => _10.shouldRetryOnFailure]) || false,
500
+ maxRetryAttempts: _optionalChain([options, 'optionalAccess', _11 => _11.maxRetryAttempts]) || 3
495
501
  });_class3.prototype.__init6.call(this);_class3.prototype.__init7.call(this);;
496
502
  this.func = func;
497
503
  this.mandatoryArgs = this.getMandatoryArgs(func);
498
- this.parameterTypes = _optionalChain([options, 'optionalAccess', _11 => _11.parameterTypes]) || {};
504
+ this.parameterTypes = _optionalChain([options, 'optionalAccess', _12 => _12.parameterTypes]) || {};
499
505
  }
500
506
  /**
501
507
  * Executes the wrapped function with the provided arguments.
502
508
  */
503
- async runAsync(args, context) {
509
+ async runAsync(args, context4) {
504
510
  try {
505
511
  const missingArgs = this.getMissingMandatoryArgs(args);
506
512
  if (missingArgs.length > 0) {
@@ -513,13 +519,13 @@ You could retry calling this tool, but it is IMPORTANT for you to provide all th
513
519
  }
514
520
  const argsToCall = { ...args };
515
521
  if (this.functionAcceptsToolContext()) {
516
- argsToCall.toolContext = context;
522
+ argsToCall.toolContext = context4;
517
523
  }
518
524
  const funcParams = this.getFunctionParameters();
519
525
  const argValues = [];
520
526
  for (const paramName of funcParams) {
521
527
  if (paramName === "toolContext" && this.functionAcceptsToolContext()) {
522
- argValues.push(context);
528
+ argValues.push(context4);
523
529
  } else if (paramName in argsToCall) {
524
530
  const convertedValue = this.convertArgumentType(
525
531
  argsToCall[paramName],
@@ -549,7 +555,7 @@ You could retry calling this tool, but it is IMPORTANT for you to provide all th
549
555
  description: this.description,
550
556
  ignoreParams: ["toolContext"]
551
557
  });
552
- if (Object.keys(this.parameterTypes).length > 0 && _optionalChain([declaration, 'access', _12 => _12.parameters, 'optionalAccess', _13 => _13.properties])) {
558
+ if (Object.keys(this.parameterTypes).length > 0 && _optionalChain([declaration, 'access', _13 => _13.parameters, 'optionalAccess', _14 => _14.properties])) {
553
559
  for (const [paramName, paramType] of Object.entries(
554
560
  this.parameterTypes
555
561
  )) {
@@ -647,9 +653,9 @@ You could retry calling this tool, but it is IMPORTANT for you to provide all th
647
653
  return this.parameterTypes[paramName].toLowerCase();
648
654
  }
649
655
  const declaration = this.getDeclaration();
650
- if (_optionalChain([declaration, 'optionalAccess', _14 => _14.parameters, 'optionalAccess', _15 => _15.properties])) {
656
+ if (_optionalChain([declaration, 'optionalAccess', _15 => _15.parameters, 'optionalAccess', _16 => _16.properties])) {
651
657
  const paramSchema = declaration.parameters.properties[paramName];
652
- if (_optionalChain([paramSchema, 'optionalAccess', _16 => _16.type])) {
658
+ if (_optionalChain([paramSchema, 'optionalAccess', _17 => _17.type])) {
653
659
  return paramSchema.type.toLowerCase();
654
660
  }
655
661
  }
@@ -735,11 +741,11 @@ var LlmRequest = class {
735
741
  */
736
742
 
737
743
  constructor(data) {
738
- this.model = _optionalChain([data, 'optionalAccess', _17 => _17.model]);
739
- this.contents = _nullishCoalesce(_optionalChain([data, 'optionalAccess', _18 => _18.contents]), () => ( []));
740
- this.config = _optionalChain([data, 'optionalAccess', _19 => _19.config]);
741
- this.liveConnectConfig = _nullishCoalesce(_optionalChain([data, 'optionalAccess', _20 => _20.liveConnectConfig]), () => ( {}));
742
- this.toolsDict = _nullishCoalesce(_optionalChain([data, 'optionalAccess', _21 => _21.toolsDict]), () => ( {}));
744
+ this.model = _optionalChain([data, 'optionalAccess', _18 => _18.model]);
745
+ this.contents = _nullishCoalesce(_optionalChain([data, 'optionalAccess', _19 => _19.contents]), () => ( []));
746
+ this.config = _optionalChain([data, 'optionalAccess', _20 => _20.config]);
747
+ this.liveConnectConfig = _nullishCoalesce(_optionalChain([data, 'optionalAccess', _21 => _21.liveConnectConfig]), () => ( {}));
748
+ this.toolsDict = _nullishCoalesce(_optionalChain([data, 'optionalAccess', _22 => _22.toolsDict]), () => ( {}));
743
749
  }
744
750
  /**
745
751
  * Appends instructions to the system instruction.
@@ -760,10 +766,10 @@ ${instructions.join("\n\n")}`;
760
766
  * @param tools The tools to append.
761
767
  */
762
768
  appendTools(tools) {
763
- if (!_optionalChain([tools, 'optionalAccess', _22 => _22.length])) return;
769
+ if (!_optionalChain([tools, 'optionalAccess', _23 => _23.length])) return;
764
770
  const declarations = [];
765
771
  for (const tool of tools) {
766
- const declaration = _optionalChain([tool, 'access', _23 => _23.getDeclaration, 'optionalCall', _24 => _24()]);
772
+ const declaration = _optionalChain([tool, 'access', _24 => _24.getDeclaration, 'optionalCall', _25 => _25()]);
767
773
  if (declaration) {
768
774
  declarations.push(declaration);
769
775
  this.toolsDict[tool.name] = tool;
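
appendTools does two pieces of bookkeeping: it collects each tool's declaration for the request and indexes the tool by name so function calls can be resolved later. A hedged sketch of that shape with simplified types (the real request also carries a genai config object):

    // Simplified shapes for illustration only.
    interface ToolDeclaration { name: string; description?: string }
    interface ToolLike { name: string; getDeclaration?(): ToolDeclaration | undefined }

    function appendTools(
      toolsDict: Record<string, ToolLike>,
      declarations: ToolDeclaration[],
      tools?: ToolLike[],
    ): void {
      if (!tools?.length) return;
      for (const tool of tools) {
        const declaration = tool.getDeclaration?.();
        if (declaration) {
          // Declarations go to the LLM request; the dictionary maps function-call
          // names back to the concrete tool instance at execution time.
          declarations.push(declaration);
          toolsDict[tool.name] = tool;
        }
      }
    }
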
@@ -790,7 +796,7 @@ ${instructions.join("\n\n")}`;
790
796
  * @returns The system instruction as a string, or undefined if not set.
791
797
  */
792
798
  getSystemInstructionText() {
793
- if (!_optionalChain([this, 'access', _25 => _25.config, 'optionalAccess', _26 => _26.systemInstruction])) {
799
+ if (!_optionalChain([this, 'access', _26 => _26.config, 'optionalAccess', _27 => _27.systemInstruction])) {
794
800
  return void 0;
795
801
  }
796
802
  const systemInstruction = this.config.systemInstruction;
@@ -818,7 +824,7 @@ ${instructions.join("\n\n")}`;
818
824
  if (Array.isArray(content)) {
819
825
  return content.map((part) => part.text || "").filter(Boolean).join("");
820
826
  }
821
- if (_optionalChain([content, 'optionalAccess', _27 => _27.parts])) {
827
+ if (_optionalChain([content, 'optionalAccess', _28 => _28.parts])) {
822
828
  return content.parts.map((part) => part.text || "").filter(Boolean).join("");
823
829
  }
824
830
  return String(content || "");
@@ -956,6 +962,7 @@ init_logger();
956
962
 
957
963
 
958
964
 
965
+
959
966
  var _api = require('@opentelemetry/api');
960
967
  var _autoinstrumentationsnode = require('@opentelemetry/auto-instrumentations-node');
961
968
  var _exportertraceotlphttp = require('@opentelemetry/exporter-trace-otlp-http');
@@ -994,13 +1001,24 @@ var TelemetryService = (_class4 = class {
994
1001
  this.sdk = new (0, _sdknode.NodeSDK)({
995
1002
  resource,
996
1003
  traceExporter,
997
- instrumentations: [_autoinstrumentationsnode.getNodeAutoInstrumentations.call(void 0, )]
1004
+ instrumentations: [
1005
+ _autoinstrumentationsnode.getNodeAutoInstrumentations.call(void 0, {
1006
+ // Follow Python ADK approach: let all HTTP instrumentation through.
1007
+ // This provides transparency and aligns with standard OpenTelemetry behavior.
1008
+ // High-level LLM tracing is provided through dedicated ADK spans.
1009
+ "@opentelemetry/instrumentation-http": {
1010
+ ignoreIncomingRequestHook: (req) => {
1011
+ return true;
1012
+ }
1013
+ }
1014
+ })
1015
+ ]
998
1016
  });
999
1017
  try {
1000
1018
  this.sdk.start();
1001
1019
  this.isInitialized = true;
1002
1020
  this.tracer = _api.trace.getTracer("iqai-adk", config.appVersion || "0.1.0");
1003
- _api.diag.info("OpenTelemetry SDK started successfully.");
1021
+ _api.diag.debug("OpenTelemetry SDK started successfully.");
1004
1022
  } catch (error) {
1005
1023
  _api.diag.error("Error starting OpenTelemetry SDK:", error);
1006
1024
  throw error;
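
The telemetry setup above now passes a per-instrumentation config into getNodeAutoInstrumentations, including an ignoreIncomingRequestHook for the HTTP instrumentation. A minimal sketch of that wiring with the standard OpenTelemetry Node packages; the exporter URL is a placeholder:

    import { NodeSDK } from "@opentelemetry/sdk-node";
    import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
    import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-http";

    const sdk = new NodeSDK({
      traceExporter: new OTLPTraceExporter({ url: "http://localhost:4318/v1/traces" }),
      instrumentations: [
        getNodeAutoInstrumentations({
          // Per-instrumentation overrides are keyed by the instrumentation package name.
          "@opentelemetry/instrumentation-http": {
            // Returning true tells the HTTP instrumentation to skip creating a
            // server span for that incoming request.
            ignoreIncomingRequestHook: () => true,
          },
        }),
      ],
    });

    sdk.start();
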
@@ -1043,7 +1061,7 @@ var TelemetryService = (_class4 = class {
1043
1061
  });
1044
1062
  await Promise.race([this.sdk.shutdown(), timeoutPromise]);
1045
1063
  this.isInitialized = false;
1046
- _api.diag.info("Telemetry terminated successfully.");
1064
+ _api.diag.debug("Telemetry terminated successfully.");
1047
1065
  } catch (error) {
1048
1066
  if (error instanceof Error && error.message.includes("timeout")) {
1049
1067
  _api.diag.warn("Telemetry shutdown timed out, some traces may be lost");
@@ -1063,7 +1081,7 @@ var TelemetryService = (_class4 = class {
1063
1081
  if (!span) return;
1064
1082
  let toolCallId = "<not specified>";
1065
1083
  let toolResponse = "<not specified>";
1066
- if (_optionalChain([functionResponseEvent, 'access', _28 => _28.content, 'optionalAccess', _29 => _29.parts]) && functionResponseEvent.content.parts.length > 0) {
1084
+ if (_optionalChain([functionResponseEvent, 'access', _29 => _29.content, 'optionalAccess', _30 => _30.parts]) && functionResponseEvent.content.parts.length > 0) {
1067
1085
  const functionResponse = functionResponseEvent.content.parts[0].functionResponse;
1068
1086
  if (functionResponse) {
1069
1087
  toolCallId = functionResponse.id || "<not specified>";
@@ -1071,7 +1089,7 @@ var TelemetryService = (_class4 = class {
1071
1089
  }
1072
1090
  }
1073
1091
  span.setAttributes({
1074
- "gen_ai.system.name": "iqai-adk",
1092
+ "gen_ai.system": "iqai-adk",
1075
1093
  "gen_ai.operation.name": "execute_tool",
1076
1094
  "gen_ai.tool.name": tool.name,
1077
1095
  "gen_ai.tool.description": tool.description,
@@ -1085,7 +1103,7 @@ var TelemetryService = (_class4 = class {
1085
1103
  ...process.env.NODE_ENV && {
1086
1104
  "deployment.environment.name": process.env.NODE_ENV
1087
1105
  },
1088
- // Tool-specific data
1106
+ // ADK-specific attributes (matching Python namespace pattern)
1089
1107
  "adk.tool_call_args": this._safeJsonStringify(args),
1090
1108
  "adk.event_id": functionResponseEvent.invocationId,
1091
1109
  "adk.tool_response": this._safeJsonStringify(toolResponse),
@@ -1101,9 +1119,8 @@ var TelemetryService = (_class4 = class {
1101
1119
  if (!span) return;
1102
1120
  const requestData = this._buildLlmRequestForTrace(llmRequest);
1103
1121
  span.setAttributes({
1104
- // Standard OpenTelemetry attributes
1105
- "gen_ai.system.name": "iqai-adk",
1106
- "gen_ai.operation.name": "generate",
1122
+ // Standard OpenTelemetry attributes (following Python pattern)
1123
+ "gen_ai.system": "iqai-adk",
1107
1124
  "gen_ai.request.model": llmRequest.model,
1108
1125
  // Session and user tracking (maps to Langfuse sessionId, userId)
1109
1126
  "session.id": invocationContext.session.id,
@@ -1116,15 +1133,21 @@ var TelemetryService = (_class4 = class {
1116
1133
  "gen_ai.request.max_tokens": llmRequest.config.maxOutputTokens || 0,
1117
1134
  "gen_ai.request.temperature": llmRequest.config.temperature || 0,
1118
1135
  "gen_ai.request.top_p": llmRequest.config.topP || 0,
1119
- // Legacy ADK attributes (keep for backward compatibility)
1120
1136
  "adk.system_name": "iqai-adk",
1121
1137
  "adk.request_model": llmRequest.model,
1122
- "adk.invocation_id": invocationContext.session.id,
1138
+ // ADK-specific attributes (matching Python namespace pattern)
1139
+ "adk.invocation_id": invocationContext.invocationId,
1123
1140
  "adk.session_id": invocationContext.session.id,
1124
1141
  "adk.event_id": eventId,
1125
1142
  "adk.llm_request": this._safeJsonStringify(requestData),
1126
1143
  "adk.llm_response": this._safeJsonStringify(llmResponse)
1127
1144
  });
1145
+ if (llmResponse.usageMetadata) {
1146
+ span.setAttributes({
1147
+ "gen_ai.usage.input_tokens": llmResponse.usageMetadata.promptTokenCount || 0,
1148
+ "gen_ai.usage.output_tokens": llmResponse.usageMetadata.candidatesTokenCount || 0
1149
+ });
1150
+ }
1128
1151
  span.addEvent("gen_ai.content.prompt", {
1129
1152
  "gen_ai.prompt": this._safeJsonStringify(requestData.messages)
1130
1153
  });
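
When the LLM response carries usage metadata, the trace now also records input and output token counts under the gen_ai.usage.* keys. A small sketch of that conditional attribute write; the UsageMetadata interface below mirrors only the fields read in the hunk above:

    import type { Span } from "@opentelemetry/api";

    interface UsageMetadata {
      promptTokenCount?: number;
      candidatesTokenCount?: number;
    }

    function recordTokenUsage(span: Span, usageMetadata?: UsageMetadata): void {
      if (!usageMetadata) return;
      span.setAttributes({
        "gen_ai.usage.input_tokens": usageMetadata.promptTokenCount ?? 0,
        "gen_ai.usage.output_tokens": usageMetadata.candidatesTokenCount ?? 0,
      });
    }
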
@@ -1137,9 +1160,14 @@ var TelemetryService = (_class4 = class {
1137
1160
  */
1138
1161
  async *traceAsyncGenerator(spanName, generator) {
1139
1162
  const span = this.tracer.startSpan(spanName);
1163
+ const spanContext = _api.trace.setSpan(_api.context.active(), span);
1140
1164
  try {
1141
- for await (const item of generator) {
1142
- yield item;
1165
+ while (true) {
1166
+ const result = await _api.context.with(spanContext, () => generator.next());
1167
+ if (result.done) {
1168
+ break;
1169
+ }
1170
+ yield result.value;
1143
1171
  }
1144
1172
  } catch (error) {
1145
1173
  span.recordException(error);
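
traceAsyncGenerator no longer iterates with a plain for-await loop; each generator.next() call now runs inside context.with, so spans created while producing the next value become children of the wrapping span. A hedged standalone sketch of that pattern (tracer name is a placeholder):

    import { context, trace } from "@opentelemetry/api";

    const tracer = trace.getTracer("example");

    // Drives the wrapped generator manually so every next() runs with the span
    // bound as the active context; errors are recorded before the span ends.
    async function* traceAsyncGenerator<T>(
      spanName: string,
      generator: AsyncGenerator<T>,
    ): AsyncGenerator<T> {
      const span = tracer.startSpan(spanName);
      const spanContext = trace.setSpan(context.active(), span);
      try {
        while (true) {
          const result = await context.with(spanContext, () => generator.next());
          if (result.done) break;
          yield result.value;
        }
      } catch (error) {
        span.recordException(error as Error);
        throw error;
      } finally {
        span.end();
      }
    }
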
@@ -1171,7 +1199,7 @@ var TelemetryService = (_class4 = class {
1171
1199
  contents: []
1172
1200
  };
1173
1201
  for (const content of llmRequest.contents || []) {
1174
- const parts = _optionalChain([content, 'access', _30 => _30.parts, 'optionalAccess', _31 => _31.filter, 'call', _32 => _32((part) => !part.inlineData)]) || [];
1202
+ const parts = _optionalChain([content, 'access', _31 => _31.parts, 'optionalAccess', _32 => _32.filter, 'call', _33 => _33((part) => !part.inlineData)]) || [];
1175
1203
  result.contents.push({
1176
1204
  role: content.role,
1177
1205
  parts
@@ -1226,7 +1254,7 @@ var traceLlmCall = (invocationContext, eventId, llmRequest, llmResponse) => tele
1226
1254
  // src/models/base-llm.ts
1227
1255
  var BaseLlm = (_class5 = class {
1228
1256
  /**
1229
- * The name of the LLM, e.g. gemini-1.5-flash or gemini-1.5-flash-001.
1257
+ * The name of the LLM, e.g. gemini-2.5-flash or gemini-2.5-flash-001.
1230
1258
  */
1231
1259
 
1232
1260
  __init11() {this.logger = new Logger({ name: "BaseLlm" })}
@@ -1265,14 +1293,14 @@ var BaseLlm = (_class5 = class {
1265
1293
  "gen_ai.system.name": "iqai-adk",
1266
1294
  "gen_ai.operation.name": "generate",
1267
1295
  "gen_ai.request.model": this.model,
1268
- "gen_ai.request.max_tokens": _optionalChain([llmRequest, 'access', _33 => _33.config, 'optionalAccess', _34 => _34.maxOutputTokens]) || 0,
1269
- "gen_ai.request.temperature": _optionalChain([llmRequest, 'access', _35 => _35.config, 'optionalAccess', _36 => _36.temperature]) || 0,
1270
- "gen_ai.request.top_p": _optionalChain([llmRequest, 'access', _37 => _37.config, 'optionalAccess', _38 => _38.topP]) || 0,
1296
+ "gen_ai.request.max_tokens": _optionalChain([llmRequest, 'access', _34 => _34.config, 'optionalAccess', _35 => _35.maxOutputTokens]) || 0,
1297
+ "gen_ai.request.temperature": _optionalChain([llmRequest, 'access', _36 => _36.config, 'optionalAccess', _37 => _37.temperature]) || 0,
1298
+ "gen_ai.request.top_p": _optionalChain([llmRequest, 'access', _38 => _38.config, 'optionalAccess', _39 => _39.topP]) || 0,
1271
1299
  "adk.llm_request": JSON.stringify({
1272
1300
  model: this.model,
1273
- contents: _optionalChain([llmRequest, 'access', _39 => _39.contents, 'optionalAccess', _40 => _40.map, 'call', _41 => _41((content) => ({
1301
+ contents: _optionalChain([llmRequest, 'access', _40 => _40.contents, 'optionalAccess', _41 => _41.map, 'call', _42 => _42((content) => ({
1274
1302
  role: content.role,
1275
- parts: _optionalChain([content, 'access', _42 => _42.parts, 'optionalAccess', _43 => _43.map, 'call', _44 => _44((part) => ({
1303
+ parts: _optionalChain([content, 'access', _43 => _43.parts, 'optionalAccess', _44 => _44.map, 'call', _45 => _45((part) => ({
1276
1304
  text: typeof part.text === "string" ? part.text.substring(0, 200) + (part.text.length > 200 ? "..." : "") : "[non_text_content]"
1277
1305
  }))])
1278
1306
  }))]),
@@ -1353,7 +1381,7 @@ var BaseLlm = (_class5 = class {
1353
1381
  * @param llmRequest LlmRequest, the request to send to the LLM.
1354
1382
  * @returns BaseLLMConnection, the connection to the LLM.
1355
1383
  */
1356
- connect(llmRequest) {
1384
+ connect(_llmRequest) {
1357
1385
  throw new Error(`Live connection is not supported for ${this.model}.`);
1358
1386
  }
1359
1387
  }, _class5);
@@ -1396,9 +1424,9 @@ var AiSdkLlm = (_class6 = class extends BaseLlm {
1396
1424
  messages,
1397
1425
  system: systemMessage,
1398
1426
  tools: Object.keys(tools).length > 0 ? tools : void 0,
1399
- maxTokens: _optionalChain([request, 'access', _45 => _45.config, 'optionalAccess', _46 => _46.maxOutputTokens]),
1400
- temperature: _optionalChain([request, 'access', _47 => _47.config, 'optionalAccess', _48 => _48.temperature]),
1401
- topP: _optionalChain([request, 'access', _49 => _49.config, 'optionalAccess', _50 => _50.topP])
1427
+ maxTokens: _optionalChain([request, 'access', _46 => _46.config, 'optionalAccess', _47 => _47.maxOutputTokens]),
1428
+ temperature: _optionalChain([request, 'access', _48 => _48.config, 'optionalAccess', _49 => _49.temperature]),
1429
+ topP: _optionalChain([request, 'access', _50 => _50.config, 'optionalAccess', _51 => _51.topP])
1402
1430
  };
1403
1431
  if (stream) {
1404
1432
  const result = _ai.streamText.call(void 0, requestParams);
@@ -1538,7 +1566,7 @@ var AiSdkLlm = (_class6 = class extends BaseLlm {
1538
1566
  */
1539
1567
  convertToAiSdkTools(llmRequest) {
1540
1568
  const tools = {};
1541
- if (_optionalChain([llmRequest, 'access', _51 => _51.config, 'optionalAccess', _52 => _52.tools])) {
1569
+ if (_optionalChain([llmRequest, 'access', _52 => _52.config, 'optionalAccess', _53 => _53.tools])) {
1542
1570
  for (const toolConfig of llmRequest.config.tools) {
1543
1571
  if ("functionDeclarations" in toolConfig) {
1544
1572
  for (const funcDecl of toolConfig.functionDeclarations) {
@@ -1572,7 +1600,7 @@ var AiSdkLlm = (_class6 = class extends BaseLlm {
1572
1600
  }
1573
1601
  return { role: "user", content: textContent };
1574
1602
  }
1575
- if (_optionalChain([content, 'access', _53 => _53.parts, 'optionalAccess', _54 => _54.some, 'call', _55 => _55((part) => part.functionCall)])) {
1603
+ if (_optionalChain([content, 'access', _54 => _54.parts, 'optionalAccess', _55 => _55.some, 'call', _56 => _56((part) => part.functionCall)])) {
1576
1604
  const textParts = content.parts.filter((part) => part.text);
1577
1605
  const functionCalls = content.parts.filter((part) => part.functionCall);
1578
1606
  const contentParts2 = [];
@@ -1599,7 +1627,7 @@ var AiSdkLlm = (_class6 = class extends BaseLlm {
1599
1627
  content: contentParts2
1600
1628
  };
1601
1629
  }
1602
- if (_optionalChain([content, 'access', _56 => _56.parts, 'optionalAccess', _57 => _57.some, 'call', _58 => _58((part) => part.functionResponse)])) {
1630
+ if (_optionalChain([content, 'access', _57 => _57.parts, 'optionalAccess', _58 => _58.some, 'call', _59 => _59((part) => part.functionResponse)])) {
1603
1631
  const functionResponses = content.parts.filter(
1604
1632
  (part) => part.functionResponse
1605
1633
  );
@@ -1704,7 +1732,7 @@ var AnthropicLlm = (_class7 = class extends BaseLlm {
1704
1732
  (content) => this.contentToAnthropicMessage(content)
1705
1733
  );
1706
1734
  let tools;
1707
- if (_optionalChain([llmRequest, 'access', _59 => _59.config, 'optionalAccess', _60 => _60.tools, 'optionalAccess', _61 => _61[0], 'optionalAccess', _62 => _62.functionDeclarations])) {
1735
+ if (_optionalChain([llmRequest, 'access', _60 => _60.config, 'optionalAccess', _61 => _61.tools, 'optionalAccess', _62 => _62[0], 'optionalAccess', _63 => _63.functionDeclarations])) {
1708
1736
  tools = llmRequest.config.tools[0].functionDeclarations.map(
1709
1737
  (decl) => this.functionDeclarationToAnthropicTool(decl)
1710
1738
  );
@@ -1726,9 +1754,9 @@ var AnthropicLlm = (_class7 = class extends BaseLlm {
1726
1754
  messages: anthropicMessages,
1727
1755
  tools,
1728
1756
  tool_choice: tools ? { type: "auto" } : void 0,
1729
- max_tokens: _optionalChain([llmRequest, 'access', _63 => _63.config, 'optionalAccess', _64 => _64.maxOutputTokens]) || MAX_TOKENS,
1730
- temperature: _optionalChain([llmRequest, 'access', _65 => _65.config, 'optionalAccess', _66 => _66.temperature]),
1731
- top_p: _optionalChain([llmRequest, 'access', _67 => _67.config, 'optionalAccess', _68 => _68.topP])
1757
+ max_tokens: _optionalChain([llmRequest, 'access', _64 => _64.config, 'optionalAccess', _65 => _65.maxOutputTokens]) || MAX_TOKENS,
1758
+ temperature: _optionalChain([llmRequest, 'access', _66 => _66.config, 'optionalAccess', _67 => _67.temperature]),
1759
+ top_p: _optionalChain([llmRequest, 'access', _68 => _68.config, 'optionalAccess', _69 => _69.topP])
1732
1760
  });
1733
1761
  yield this.anthropicMessageToLlmResponse(message);
1734
1762
  }
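
The Anthropic path maps the request config onto messages.create parameters (max_tokens, temperature, top_p, tools). A minimal hedged usage sketch with the official SDK; the model id and prompt are placeholders, not values taken from the package:

    import Anthropic from "@anthropic-ai/sdk";

    const client = new Anthropic(); // reads ANTHROPIC_API_KEY from the environment

    async function main(): Promise<void> {
      const message = await client.messages.create({
        model: "claude-3-5-sonnet-latest",
        max_tokens: 1024, // required by the Messages API
        temperature: 0.7,
        messages: [{ role: "user", content: "Say hello in one sentence." }],
      });
      console.log(message.content);
    }

    main();
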
@@ -1789,7 +1817,7 @@ var AnthropicLlm = (_class7 = class extends BaseLlm {
1789
1817
  }
1790
1818
  if (part.function_response) {
1791
1819
  let content = "";
1792
- if (_optionalChain([part, 'access', _69 => _69.function_response, 'access', _70 => _70.response, 'optionalAccess', _71 => _71.result])) {
1820
+ if (_optionalChain([part, 'access', _70 => _70.function_response, 'access', _71 => _71.response, 'optionalAccess', _72 => _72.result])) {
1793
1821
  content = String(part.function_response.response.result);
1794
1822
  }
1795
1823
  return {
@@ -1824,7 +1852,7 @@ var AnthropicLlm = (_class7 = class extends BaseLlm {
1824
1852
  */
1825
1853
  functionDeclarationToAnthropicTool(functionDeclaration) {
1826
1854
  const properties = {};
1827
- if (_optionalChain([functionDeclaration, 'access', _72 => _72.parameters, 'optionalAccess', _73 => _73.properties])) {
1855
+ if (_optionalChain([functionDeclaration, 'access', _73 => _73.parameters, 'optionalAccess', _74 => _74.properties])) {
1828
1856
  for (const [key, value] of Object.entries(
1829
1857
  functionDeclaration.parameters.properties
1830
1858
  )) {
@@ -1915,7 +1943,7 @@ var GoogleLlm = class extends BaseLlm {
1915
1943
  /**
1916
1944
  * Constructor for Gemini
1917
1945
  */
1918
- constructor(model = "gemini-1.5-flash") {
1946
+ constructor(model = "gemini-2.5-flash") {
1919
1947
  super(model);
1920
1948
  }
1921
1949
  /**
@@ -1952,7 +1980,7 @@ var GoogleLlm = class extends BaseLlm {
1952
1980
  response = resp;
1953
1981
  const llmResponse = LlmResponse.create(resp);
1954
1982
  usageMetadata = llmResponse.usageMetadata;
1955
- if (_optionalChain([llmResponse, 'access', _74 => _74.content, 'optionalAccess', _75 => _75.parts, 'optionalAccess', _76 => _76[0], 'optionalAccess', _77 => _77.text])) {
1983
+ if (_optionalChain([llmResponse, 'access', _75 => _75.content, 'optionalAccess', _76 => _76.parts, 'optionalAccess', _77 => _77[0], 'optionalAccess', _78 => _78.text])) {
1956
1984
  const part0 = llmResponse.content.parts[0];
1957
1985
  if (part0.thought) {
1958
1986
  thoughtText += part0.text;
@@ -1980,7 +2008,7 @@ var GoogleLlm = class extends BaseLlm {
1980
2008
  }
1981
2009
  yield llmResponse;
1982
2010
  }
1983
- if ((text || thoughtText) && response && response.candidates && _optionalChain([response, 'access', _78 => _78.candidates, 'access', _79 => _79[0], 'optionalAccess', _80 => _80.finishReason]) === _genai.FinishReason.STOP) {
2011
+ if ((text || thoughtText) && response && response.candidates && _optionalChain([response, 'access', _79 => _79.candidates, 'access', _80 => _80[0], 'optionalAccess', _81 => _81.finishReason]) === _genai.FinishReason.STOP) {
1984
2012
  const parts = [];
1985
2013
  if (thoughtText) {
1986
2014
  parts.push({ text: thoughtText, thought: true });
@@ -2004,7 +2032,7 @@ var GoogleLlm = class extends BaseLlm {
2004
2032
  });
2005
2033
  const llmResponse = LlmResponse.create(response);
2006
2034
  this.logger.debug(
2007
- `Google response: ${_optionalChain([llmResponse, 'access', _81 => _81.usageMetadata, 'optionalAccess', _82 => _82.candidatesTokenCount]) || 0} tokens`
2035
+ `Google response: ${_optionalChain([llmResponse, 'access', _82 => _82.usageMetadata, 'optionalAccess', _83 => _83.candidatesTokenCount]) || 0} tokens`
2008
2036
  );
2009
2037
  yield llmResponse;
2010
2038
  }
@@ -2019,8 +2047,8 @@ var GoogleLlm = class extends BaseLlm {
2019
2047
  * Check if response has inline data
2020
2048
  */
2021
2049
  hasInlineData(response) {
2022
- const parts = _optionalChain([response, 'access', _83 => _83.candidates, 'optionalAccess', _84 => _84[0], 'optionalAccess', _85 => _85.content, 'optionalAccess', _86 => _86.parts]);
2023
- return _optionalChain([parts, 'optionalAccess', _87 => _87.some, 'call', _88 => _88((part) => _optionalChain([part, 'optionalAccess', _89 => _89.inlineData]))]) || false;
2050
+ const parts = _optionalChain([response, 'access', _84 => _84.candidates, 'optionalAccess', _85 => _85[0], 'optionalAccess', _86 => _86.content, 'optionalAccess', _87 => _87.parts]);
2051
+ return _optionalChain([parts, 'optionalAccess', _88 => _88.some, 'call', _89 => _89((part) => _optionalChain([part, 'optionalAccess', _90 => _90.inlineData]))]) || false;
2024
2052
  }
2025
2053
  /**
2026
2054
  * Convert LlmRequest contents to GoogleGenAI format
@@ -2054,7 +2082,7 @@ var GoogleLlm = class extends BaseLlm {
2054
2082
  * Sets display_name to null for the Gemini API (non-Vertex) backend.
2055
2083
  */
2056
2084
  removeDisplayNameIfPresent(dataObj) {
2057
- if (_optionalChain([dataObj, 'optionalAccess', _90 => _90.displayName])) {
2085
+ if (_optionalChain([dataObj, 'optionalAccess', _91 => _91.displayName])) {
2058
2086
  dataObj.displayName = null;
2059
2087
  }
2060
2088
  }
@@ -2063,7 +2091,7 @@ var GoogleLlm = class extends BaseLlm {
2063
2091
  */
2064
2092
  buildFunctionDeclarationLog(funcDecl) {
2065
2093
  let paramStr = "{}";
2066
- if (_optionalChain([funcDecl, 'access', _91 => _91.parameters, 'optionalAccess', _92 => _92.properties])) {
2094
+ if (_optionalChain([funcDecl, 'access', _92 => _92.parameters, 'optionalAccess', _93 => _93.properties])) {
2067
2095
  paramStr = JSON.stringify(funcDecl.parameters.properties);
2068
2096
  }
2069
2097
  return `${funcDecl.name}: ${paramStr}`;
@@ -2184,7 +2212,7 @@ var OpenAiLlm = class extends BaseLlm {
2184
2212
  (content) => this.contentToOpenAiMessage(content)
2185
2213
  );
2186
2214
  let tools;
2187
- if (_optionalChain([llmRequest, 'access', _93 => _93.config, 'optionalAccess', _94 => _94.tools, 'optionalAccess', _95 => _95[0], 'optionalAccess', _96 => _96.functionDeclarations])) {
2215
+ if (_optionalChain([llmRequest, 'access', _94 => _94.config, 'optionalAccess', _95 => _95.tools, 'optionalAccess', _96 => _96[0], 'optionalAccess', _97 => _97.functionDeclarations])) {
2188
2216
  tools = llmRequest.config.tools[0].functionDeclarations.map(
2189
2217
  (funcDecl) => this.functionDeclarationToOpenAiTool(funcDecl)
2190
2218
  );
@@ -2202,9 +2230,9 @@ var OpenAiLlm = class extends BaseLlm {
2202
2230
  messages: openAiMessages,
2203
2231
  tools,
2204
2232
  tool_choice: tools ? "auto" : void 0,
2205
- max_tokens: _optionalChain([llmRequest, 'access', _97 => _97.config, 'optionalAccess', _98 => _98.maxOutputTokens]),
2206
- temperature: _optionalChain([llmRequest, 'access', _99 => _99.config, 'optionalAccess', _100 => _100.temperature]),
2207
- top_p: _optionalChain([llmRequest, 'access', _101 => _101.config, 'optionalAccess', _102 => _102.topP]),
2233
+ max_tokens: _optionalChain([llmRequest, 'access', _98 => _98.config, 'optionalAccess', _99 => _99.maxOutputTokens]),
2234
+ temperature: _optionalChain([llmRequest, 'access', _100 => _100.config, 'optionalAccess', _101 => _101.temperature]),
2235
+ top_p: _optionalChain([llmRequest, 'access', _102 => _102.config, 'optionalAccess', _103 => _103.topP]),
2208
2236
  stream
2209
2237
  };
2210
2238
  if (stream) {
@@ -2224,7 +2252,7 @@ var OpenAiLlm = class extends BaseLlm {
2224
2252
  if (chunk.usage) {
2225
2253
  usageMetadata = chunk.usage;
2226
2254
  }
2227
- if (_optionalChain([llmResponse, 'access', _103 => _103.content, 'optionalAccess', _104 => _104.parts, 'optionalAccess', _105 => _105[0], 'optionalAccess', _106 => _106.text])) {
2255
+ if (_optionalChain([llmResponse, 'access', _104 => _104.content, 'optionalAccess', _105 => _105.parts, 'optionalAccess', _106 => _106[0], 'optionalAccess', _107 => _107.text])) {
2228
2256
  const part0 = llmResponse.content.parts[0];
2229
2257
  if (part0.thought) {
2230
2258
  thoughtText += part0.text;
@@ -2265,10 +2293,10 @@ var OpenAiLlm = class extends BaseLlm {
2265
2293
  function: { name: "", arguments: "" }
2266
2294
  };
2267
2295
  }
2268
- if (_optionalChain([toolCall, 'access', _107 => _107.function, 'optionalAccess', _108 => _108.name])) {
2296
+ if (_optionalChain([toolCall, 'access', _108 => _108.function, 'optionalAccess', _109 => _109.name])) {
2269
2297
  accumulatedToolCalls[index].function.name += toolCall.function.name;
2270
2298
  }
2271
- if (_optionalChain([toolCall, 'access', _109 => _109.function, 'optionalAccess', _110 => _110.arguments])) {
2299
+ if (_optionalChain([toolCall, 'access', _110 => _110.function, 'optionalAccess', _111 => _111.arguments])) {
2272
2300
  accumulatedToolCalls[index].function.arguments += toolCall.function.arguments;
2273
2301
  }
2274
2302
  }
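
The OpenAI path accumulates streamed text and tool-call deltas chunk by chunk. A hedged sketch of consuming a streamed chat completion with the official SDK; the model id and prompt are placeholders, and tool-call deltas would be merged by index in the same loop:

    import OpenAI from "openai";

    const client = new OpenAI(); // reads OPENAI_API_KEY from the environment

    async function main(): Promise<void> {
      const stream = await client.chat.completions.create({
        model: "gpt-4o-mini",
        messages: [{ role: "user", content: "Stream a short greeting." }],
        stream: true,
      });

      let text = "";
      for await (const chunk of stream) {
        // Each chunk carries a delta; concatenate text as it arrives.
        text += chunk.choices[0]?.delta?.content ?? "";
      }
      console.log(text);
    }

    main();
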
@@ -2283,7 +2311,7 @@ var OpenAiLlm = class extends BaseLlm {
2283
2311
  }
2284
2312
  if (accumulatedToolCalls.length > 0) {
2285
2313
  for (const toolCall of accumulatedToolCalls) {
2286
- if (_optionalChain([toolCall, 'access', _111 => _111.function, 'optionalAccess', _112 => _112.name])) {
2314
+ if (_optionalChain([toolCall, 'access', _112 => _112.function, 'optionalAccess', _113 => _113.name])) {
2287
2315
  parts.push({
2288
2316
  functionCall: {
2289
2317
  id: toolCall.id,
@@ -2343,7 +2371,7 @@ var OpenAiLlm = class extends BaseLlm {
2343
2371
  response.usage
2344
2372
  );
2345
2373
  this.logger.debug(
2346
- `OpenAI response: ${_optionalChain([response, 'access', _113 => _113.usage, 'optionalAccess', _114 => _114.completion_tokens]) || 0} tokens`
2374
+ `OpenAI response: ${_optionalChain([response, 'access', _114 => _114.usage, 'optionalAccess', _115 => _115.completion_tokens]) || 0} tokens`
2347
2375
  );
2348
2376
  yield llmResponse;
2349
2377
  }
@@ -2370,7 +2398,7 @@ var OpenAiLlm = class extends BaseLlm {
2370
2398
  }
2371
2399
  if (delta.tool_calls) {
2372
2400
  for (const toolCall of delta.tool_calls) {
2373
- if (toolCall.type === "function" && _optionalChain([toolCall, 'access', _115 => _115.function, 'optionalAccess', _116 => _116.name])) {
2401
+ if (toolCall.type === "function" && _optionalChain([toolCall, 'access', _116 => _116.function, 'optionalAccess', _117 => _117.name])) {
2374
2402
  parts.push({
2375
2403
  functionCall: {
2376
2404
  id: toolCall.id || "",
@@ -2436,10 +2464,10 @@ var OpenAiLlm = class extends BaseLlm {
2436
2464
  if (role === "system") {
2437
2465
  return {
2438
2466
  role: "system",
2439
- content: _optionalChain([content, 'access', _117 => _117.parts, 'optionalAccess', _118 => _118[0], 'optionalAccess', _119 => _119.text]) || ""
2467
+ content: _optionalChain([content, 'access', _118 => _118.parts, 'optionalAccess', _119 => _119[0], 'optionalAccess', _120 => _120.text]) || ""
2440
2468
  };
2441
2469
  }
2442
- if (_optionalChain([content, 'access', _120 => _120.parts, 'optionalAccess', _121 => _121.some, 'call', _122 => _122((part) => part.functionCall)])) {
2470
+ if (_optionalChain([content, 'access', _121 => _121.parts, 'optionalAccess', _122 => _122.some, 'call', _123 => _123((part) => part.functionCall)])) {
2443
2471
  const functionCallPart = content.parts.find(
2444
2472
  (part) => part.functionCall
2445
2473
  );
@@ -2459,7 +2487,7 @@ var OpenAiLlm = class extends BaseLlm {
2459
2487
  ]
2460
2488
  };
2461
2489
  }
2462
- if (_optionalChain([content, 'access', _123 => _123.parts, 'optionalAccess', _124 => _124.some, 'call', _125 => _125((part) => part.functionResponse)])) {
2490
+ if (_optionalChain([content, 'access', _124 => _124.parts, 'optionalAccess', _125 => _125.some, 'call', _126 => _126((part) => part.functionResponse)])) {
2463
2491
  const functionResponsePart = content.parts.find(
2464
2492
  (part) => part.functionResponse
2465
2493
  );
@@ -2471,7 +2499,7 @@ var OpenAiLlm = class extends BaseLlm {
2471
2499
  )
2472
2500
  };
2473
2501
  }
2474
- if (_optionalChain([content, 'access', _126 => _126.parts, 'optionalAccess', _127 => _127.length]) === 1 && content.parts[0].text) {
2502
+ if (_optionalChain([content, 'access', _127 => _127.parts, 'optionalAccess', _128 => _128.length]) === 1 && content.parts[0].text) {
2475
2503
  return {
2476
2504
  role,
2477
2505
  content: content.parts[0].text
@@ -2494,7 +2522,7 @@ var OpenAiLlm = class extends BaseLlm {
2494
2522
  text: part.text
2495
2523
  };
2496
2524
  }
2497
- if (_optionalChain([part, 'access', _128 => _128.inline_data, 'optionalAccess', _129 => _129.mime_type]) && _optionalChain([part, 'access', _130 => _130.inline_data, 'optionalAccess', _131 => _131.data])) {
2525
+ if (_optionalChain([part, 'access', _129 => _129.inline_data, 'optionalAccess', _130 => _130.mime_type]) && _optionalChain([part, 'access', _131 => _131.inline_data, 'optionalAccess', _132 => _132.data])) {
2498
2526
  return {
2499
2527
  type: "image_url",
2500
2528
  image_url: {
@@ -2622,8 +2650,8 @@ var OpenAiLlm = class extends BaseLlm {
2622
2650
  * Check if response has inline data (similar to Google LLM)
2623
2651
  */
2624
2652
  hasInlineData(response) {
2625
- const parts = _optionalChain([response, 'access', _132 => _132.content, 'optionalAccess', _133 => _133.parts]);
2626
- return _optionalChain([parts, 'optionalAccess', _134 => _134.some, 'call', _135 => _135((part) => part.inlineData)]) || false;
2653
+ const parts = _optionalChain([response, 'access', _133 => _133.content, 'optionalAccess', _134 => _134.parts]);
2654
+ return _optionalChain([parts, 'optionalAccess', _135 => _135.some, 'call', _136 => _136((part) => part.inlineData)]) || false;
2627
2655
  }
2628
2656
  /**
2629
2657
  * Gets the OpenAI client
@@ -2925,7 +2953,7 @@ var OAuth2Credential = class extends AuthCredential {
2925
2953
  "Cannot refresh token: no refresh token or refresh function"
2926
2954
  );
2927
2955
  }
2928
- const result = await _optionalChain([this, 'access', _136 => _136.refreshFunction, 'optionalCall', _137 => _137(this.refreshToken)]);
2956
+ const result = await _optionalChain([this, 'access', _137 => _137.refreshFunction, 'optionalCall', _138 => _138(this.refreshToken)]);
2929
2957
  if (!result) {
2930
2958
  throw new Error("Failed to refresh token");
2931
2959
  }
@@ -2960,7 +2988,7 @@ var AuthHandler = class {
2960
2988
  * Gets the authentication token
2961
2989
  */
2962
2990
  getToken() {
2963
- return _optionalChain([this, 'access', _138 => _138.credential, 'optionalAccess', _139 => _139.getToken, 'call', _140 => _140()]);
2991
+ return _optionalChain([this, 'access', _139 => _139.credential, 'optionalAccess', _140 => _140.getToken, 'call', _141 => _141()]);
2964
2992
  }
2965
2993
  /**
2966
2994
  * Gets headers for HTTP requests
@@ -2975,7 +3003,7 @@ var AuthHandler = class {
2975
3003
  * Refreshes the token if necessary
2976
3004
  */
2977
3005
  async refreshToken() {
2978
- if (_optionalChain([this, 'access', _141 => _141.credential, 'optionalAccess', _142 => _142.canRefresh, 'call', _143 => _143()])) {
3006
+ if (_optionalChain([this, 'access', _142 => _142.credential, 'optionalAccess', _143 => _143.canRefresh, 'call', _144 => _144()])) {
2979
3007
  await this.credential.refresh();
2980
3008
  }
2981
3009
  }
@@ -3954,10 +3982,10 @@ var CreatedTool = class extends BaseTool {
3954
3982
  /**
3955
3983
  * Executes the tool function with validation
3956
3984
  */
3957
- async runAsync(args, context) {
3985
+ async runAsync(args, context4) {
3958
3986
  try {
3959
3987
  const validatedArgs = this.schema.parse(args);
3960
- const result = await Promise.resolve(this.func(validatedArgs, context));
3988
+ const result = await Promise.resolve(this.func(validatedArgs, context4));
3961
3989
  return _nullishCoalesce(result, () => ( {}));
3962
3990
  } catch (error) {
3963
3991
  if (error instanceof z.ZodError) {
@@ -4215,7 +4243,7 @@ var AgentTool = (_class15 = class extends BaseTool {
4215
4243
  /**
4216
4244
  * Execute the tool by running the agent with the provided input
4217
4245
  */
4218
- async runAsync(params, context) {
4246
+ async runAsync(params, context4) {
4219
4247
  try {
4220
4248
  const input = params.input || Object.values(params)[0];
4221
4249
  if (!isLlmAgent(this.agent)) {
@@ -4223,7 +4251,7 @@ var AgentTool = (_class15 = class extends BaseTool {
4223
4251
  `Agent ${this.name} does not support running as a tool`
4224
4252
  );
4225
4253
  }
4226
- const parentInvocation = context._invocationContext;
4254
+ const parentInvocation = context4._invocationContext;
4227
4255
  const childInvocationContext = new InvocationContext({
4228
4256
  invocationId: _uuid.v4.call(void 0, ),
4229
4257
  agent: this.agent,
@@ -4260,8 +4288,8 @@ var AgentTool = (_class15 = class extends BaseTool {
4260
4288
  } catch (e2) {
4261
4289
  toolResult = mergedText;
4262
4290
  }
4263
- if (this.outputKey && _optionalChain([context, 'optionalAccess', _144 => _144.state])) {
4264
- context.state[this.outputKey] = toolResult;
4291
+ if (this.outputKey && _optionalChain([context4, 'optionalAccess', _145 => _145.state])) {
4292
+ context4.state[this.outputKey] = toolResult;
4265
4293
  }
4266
4294
  return toolResult;
4267
4295
  } catch (error) {
@@ -4527,7 +4555,7 @@ var FileOperationsTool = class extends BaseTool {
4527
4555
  name: "file_operations",
4528
4556
  description: "Perform file system operations like reading, writing, and managing files"
4529
4557
  });
4530
- this.basePath = _optionalChain([options, 'optionalAccess', _145 => _145.basePath]) || process.cwd();
4558
+ this.basePath = _optionalChain([options, 'optionalAccess', _146 => _146.basePath]) || process.cwd();
4531
4559
  }
4532
4560
  /**
4533
4561
  * Get the function declaration for the tool
@@ -4809,9 +4837,9 @@ var UserInteractionTool = class extends BaseTool {
4809
4837
  /**
4810
4838
  * Execute the user interaction
4811
4839
  */
4812
- async runAsync(args, context) {
4840
+ async runAsync(args, context4) {
4813
4841
  try {
4814
- const actions = context.actions;
4842
+ const actions = context4.actions;
4815
4843
  if (!actions || !actions.promptUser) {
4816
4844
  return {
4817
4845
  success: false,
@@ -4859,9 +4887,9 @@ var ExitLoopTool = (_class17 = class extends BaseTool {
4859
4887
  /**
4860
4888
  * Execute the exit loop action
4861
4889
  */
4862
- async runAsync(_args, context) {
4890
+ async runAsync(_args, context4) {
4863
4891
  this.logger.debug("Executing exit loop tool");
4864
- context.actions.escalate = true;
4892
+ context4.actions.escalate = true;
4865
4893
  }
4866
4894
  }, _class17);
4867
4895
 
@@ -4912,14 +4940,14 @@ var GetUserChoiceTool = (_class18 = class extends BaseTool {
4912
4940
  * This is a long running operation that will return null initially
4913
4941
  * and the actual choice will be provided asynchronously
4914
4942
  */
4915
- async runAsync(args, context) {
4943
+ async runAsync(args, context4) {
4916
4944
  this.logger.debug(
4917
4945
  `Executing get_user_choice with options: ${args.options.join(", ")}`
4918
4946
  );
4919
4947
  if (args.question) {
4920
4948
  this.logger.debug(`Question: ${args.question}`);
4921
4949
  }
4922
- context.actions.skipSummarization = true;
4950
+ context4.actions.skipSummarization = true;
4923
4951
  return null;
4924
4952
  }
4925
4953
  }, _class18);
@@ -4961,9 +4989,9 @@ var TransferToAgentTool = (_class19 = class extends BaseTool {
4961
4989
  /**
4962
4990
  * Execute the transfer to agent action
4963
4991
  */
4964
- async runAsync(args, context) {
4992
+ async runAsync(args, context4) {
4965
4993
  this.logger.debug(`Executing transfer to agent: ${args.agent_name}`);
4966
- context.actions.transferToAgent = args.agent_name;
4994
+ context4.actions.transferToAgent = args.agent_name;
4967
4995
  }
4968
4996
  }, _class19);
4969
4997
 
@@ -5004,13 +5032,13 @@ var LoadMemoryTool = (_class20 = class extends BaseTool {
5004
5032
  /**
5005
5033
  * Execute the memory loading action
5006
5034
  */
5007
- async runAsync(args, context) {
5035
+ async runAsync(args, context4) {
5008
5036
  this.logger.debug(`Executing load_memory with query: ${args.query}`);
5009
5037
  try {
5010
- const searchResult = await context.searchMemory(args.query);
5038
+ const searchResult = await context4.searchMemory(args.query);
5011
5039
  return {
5012
5040
  memories: searchResult.memories || [],
5013
- count: _optionalChain([searchResult, 'access', _146 => _146.memories, 'optionalAccess', _147 => _147.length]) || 0
5041
+ count: _optionalChain([searchResult, 'access', _147 => _147.memories, 'optionalAccess', _148 => _148.length]) || 0
5014
5042
  };
5015
5043
  } catch (error) {
5016
5044
  console.error("Error searching memory:", error);
@@ -5057,7 +5085,7 @@ var LoadArtifactsTool = class extends BaseTool {
5057
5085
  /**
5058
5086
  * Execute the load artifacts operation
5059
5087
  */
5060
- async runAsync(args, context) {
5088
+ async runAsync(args, context4) {
5061
5089
  const artifactNames = args.artifact_names || [];
5062
5090
  return { artifact_names: artifactNames };
5063
5091
  }
@@ -5567,7 +5595,7 @@ var McpClientService = (_class22 = class {
5567
5595
  },
5568
5596
  this,
5569
5597
  async (instance) => await instance.reinitialize(),
5570
- _optionalChain([this, 'access', _148 => _148.config, 'access', _149 => _149.retryOptions, 'optionalAccess', _150 => _150.maxRetries]) || 2
5598
+ _optionalChain([this, 'access', _149 => _149.config, 'access', _150 => _150.retryOptions, 'optionalAccess', _151 => _151.maxRetries]) || 2
5571
5599
  );
5572
5600
  return await wrappedCall();
5573
5601
  } catch (error) {
@@ -5651,7 +5679,7 @@ var McpClientService = (_class22 = class {
5651
5679
  this.mcpSamplingHandler = null;
5652
5680
  if (this.client) {
5653
5681
  try {
5654
- _optionalChain([this, 'access', _151 => _151.client, 'access', _152 => _152.removeRequestHandler, 'optionalCall', _153 => _153("sampling/createMessage")]);
5682
+ _optionalChain([this, 'access', _152 => _152.client, 'access', _153 => _153.removeRequestHandler, 'optionalCall', _154 => _154("sampling/createMessage")]);
5655
5683
  } catch (error) {
5656
5684
  this.logger.error("Failed to remove sampling handler:", error);
5657
5685
  }
@@ -6088,12 +6116,12 @@ var McpToolset = (_class24 = class {
6088
6116
  * Checks if a tool should be included based on the tool filter.
6089
6117
  * Similar to Python's _is_selected method.
6090
6118
  */
6091
- isSelected(tool, context) {
6119
+ isSelected(tool, context4) {
6092
6120
  if (!this.toolFilter) {
6093
6121
  return true;
6094
6122
  }
6095
6123
  if (typeof this.toolFilter === "function") {
6096
- return this.toolFilter(tool, context);
6124
+ return this.toolFilter(tool, context4);
6097
6125
  }
6098
6126
  if (Array.isArray(this.toolFilter)) {
6099
6127
  return this.toolFilter.includes(tool.name);
@@ -6146,7 +6174,7 @@ var McpToolset = (_class24 = class {
6146
6174
  * Retrieves tools from the MCP server and converts them to BaseTool instances.
6147
6175
  * Similar to Python's get_tools method.
6148
6176
  */
6149
- async getTools(context) {
6177
+ async getTools(context4) {
6150
6178
  try {
6151
6179
  if (this.isClosing) {
6152
6180
  throw new McpError(
@@ -6154,7 +6182,7 @@ var McpToolset = (_class24 = class {
6154
6182
  "resource_closed_error" /* RESOURCE_CLOSED_ERROR */
6155
6183
  );
6156
6184
  }
6157
- if (this.tools.length > 0 && !_optionalChain([this, 'access', _154 => _154.config, 'access', _155 => _155.cacheConfig, 'optionalAccess', _156 => _156.enabled]) === false) {
6185
+ if (this.tools.length > 0 && !_optionalChain([this, 'access', _155 => _155.config, 'access', _156 => _156.cacheConfig, 'optionalAccess', _157 => _157.enabled]) === false) {
6158
6186
  return this.tools;
6159
6187
  }
6160
6188
  if (!this.clientService) {
@@ -6168,7 +6196,7 @@ var McpToolset = (_class24 = class {
6168
6196
  }
6169
6197
  const tools = [];
6170
6198
  for (const mcpTool of toolsResponse.tools) {
6171
- if (this.isSelected(mcpTool, context)) {
6199
+ if (this.isSelected(mcpTool, context4)) {
6172
6200
  try {
6173
6201
  const tool = await createTool2(mcpTool, client);
6174
6202
  tools.push(tool);
@@ -6180,7 +6208,7 @@ var McpToolset = (_class24 = class {
6180
6208
  }
6181
6209
  }
6182
6210
  }
6183
- if (_optionalChain([this, 'access', _157 => _157.config, 'access', _158 => _158.cacheConfig, 'optionalAccess', _159 => _159.enabled]) !== false) {
6211
+ if (_optionalChain([this, 'access', _158 => _158.config, 'access', _159 => _159.cacheConfig, 'optionalAccess', _160 => _160.enabled]) !== false) {
6184
6212
  this.tools = tools;
6185
6213
  }
6186
6214
  return tools;
@@ -6205,9 +6233,9 @@ var McpToolset = (_class24 = class {
6205
6233
  /**
6206
6234
  * Refreshes the tool cache by clearing it and fetching tools again
6207
6235
  */
6208
- async refreshTools(context) {
6236
+ async refreshTools(context4) {
6209
6237
  this.tools = [];
6210
- return this.getTools(context);
6238
+ return this.getTools(context4);
6211
6239
  }
6212
6240
  /**
6213
6241
  * Closes the connection to the MCP server.
@@ -6251,6 +6279,7 @@ async function getMcpTools(config, toolFilter) {
6251
6279
  }
6252
6280
 
6253
6281
  // src/flows/llm-flows/functions.ts
6282
+
6254
6283
  var AF_FUNCTION_CALL_ID_PREFIX = "adk-";
6255
6284
  var REQUEST_EUC_FUNCTION_CALL_NAME = "adk_request_credential";
6256
6285
  function generateClientFunctionCallId() {
@@ -6268,12 +6297,12 @@ function populateClientFunctionCallId(modelResponseEvent) {
6268
6297
  }
6269
6298
  }
6270
6299
  function removeClientFunctionCallId(content) {
6271
- if (_optionalChain([content, 'optionalAccess', _160 => _160.parts])) {
6300
+ if (_optionalChain([content, 'optionalAccess', _161 => _161.parts])) {
6272
6301
  for (const part of content.parts) {
6273
- if (_optionalChain([part, 'access', _161 => _161.functionCall, 'optionalAccess', _162 => _162.id, 'optionalAccess', _163 => _163.startsWith, 'call', _164 => _164(AF_FUNCTION_CALL_ID_PREFIX)])) {
6302
+ if (_optionalChain([part, 'access', _162 => _162.functionCall, 'optionalAccess', _163 => _163.id, 'optionalAccess', _164 => _164.startsWith, 'call', _165 => _165(AF_FUNCTION_CALL_ID_PREFIX)])) {
6274
6303
  part.functionCall.id = void 0;
6275
6304
  }
6276
- if (_optionalChain([part, 'access', _165 => _165.functionResponse, 'optionalAccess', _166 => _166.id, 'optionalAccess', _167 => _167.startsWith, 'call', _168 => _168(AF_FUNCTION_CALL_ID_PREFIX)])) {
6305
+ if (_optionalChain([part, 'access', _166 => _166.functionResponse, 'optionalAccess', _167 => _167.id, 'optionalAccess', _168 => _168.startsWith, 'call', _169 => _169(AF_FUNCTION_CALL_ID_PREFIX)])) {
6277
6306
  part.functionResponse.id = void 0;
6278
6307
  }
6279
6308
  }
@@ -6340,23 +6369,40 @@ async function handleFunctionCallsAsync(invocationContext, functionCallEvent, to
6340
6369
  toolsDict
6341
6370
  );
6342
6371
  const functionArgs = functionCall.args || {};
6343
- const functionResponse = await callToolAsync(
6344
- tool,
6345
- functionArgs,
6346
- toolContext
6347
- );
6348
- if (tool.isLongRunning) {
6372
+ const tracer2 = telemetryService.getTracer();
6373
+ const span = tracer2.startSpan(`execute_tool ${tool.name}`);
6374
+ const spanContext = _api.trace.setSpan(_api.context.active(), span);
6375
+ try {
6376
+ const functionResponse = await _api.context.with(spanContext, async () => {
6377
+ const result = await callToolAsync(tool, functionArgs, toolContext);
6378
+ if (tool.isLongRunning && !result) {
6379
+ return null;
6380
+ }
6381
+ const functionResponseEvent = buildResponseEvent(
6382
+ tool,
6383
+ result,
6384
+ toolContext,
6385
+ invocationContext
6386
+ );
6387
+ telemetryService.traceToolCall(
6388
+ tool,
6389
+ functionArgs,
6390
+ functionResponseEvent
6391
+ );
6392
+ return { result, event: functionResponseEvent };
6393
+ });
6349
6394
  if (!functionResponse) {
6350
6395
  continue;
6351
6396
  }
6397
+ functionResponseEvents.push(functionResponse.event);
6398
+ span.setStatus({ code: 1 });
6399
+ } catch (error) {
6400
+ span.recordException(error);
6401
+ span.setStatus({ code: 2, message: error.message });
6402
+ throw error;
6403
+ } finally {
6404
+ span.end();
6352
6405
  }
6353
- const functionResponseEvent = buildResponseEvent(
6354
- tool,
6355
- functionResponse,
6356
- toolContext,
6357
- invocationContext
6358
- );
6359
- functionResponseEvents.push(functionResponseEvent);
6360
6406
  }
6361
6407
  if (!functionResponseEvents.length) {
6362
6408
  return null;
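
Function-call handling now wraps each tool invocation in its own execute_tool span, runs the call under that span's context, and records success or failure before ending it. A hedged standalone sketch of the same wrapping, using SpanStatusCode constants in place of the numeric codes (1, 2) that appear in the transpiled output:

    import { context, trace, SpanStatusCode } from "@opentelemetry/api";

    const tracer = trace.getTracer("example");

    // Runs `call` inside a dedicated span so nested spans (HTTP, model calls)
    // become its children, and marks the span OK or ERROR accordingly.
    async function executeToolWithSpan<T>(
      toolName: string,
      call: () => Promise<T>,
    ): Promise<T> {
      const span = tracer.startSpan(`execute_tool ${toolName}`);
      const spanContext = trace.setSpan(context.active(), span);
      try {
        const result = await context.with(spanContext, call);
        span.setStatus({ code: SpanStatusCode.OK });
        return result;
      } catch (error) {
        span.recordException(error as Error);
        span.setStatus({ code: SpanStatusCode.ERROR, message: (error as Error).message });
        throw error;
      } finally {
        span.end();
      }
    }
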
@@ -6418,7 +6464,7 @@ function mergeParallelFunctionResponseEvents(functionResponseEvents) {
6418
6464
  }
6419
6465
  const mergedParts = [];
6420
6466
  for (const event of functionResponseEvents) {
6421
- if (_optionalChain([event, 'access', _169 => _169.content, 'optionalAccess', _170 => _170.parts])) {
6467
+ if (_optionalChain([event, 'access', _170 => _170.content, 'optionalAccess', _171 => _171.parts])) {
6422
6468
  for (const part of event.content.parts) {
6423
6469
  mergedParts.push(part);
6424
6470
  }
@@ -6456,7 +6502,7 @@ var BaseLlmFlow = (_class25 = class {constructor() { _class25.prototype.__init43
6456
6502
  __init44() {this.responseProcessors = []}
6457
6503
  __init45() {this.logger = new Logger({ name: "BaseLlmFlow" })}
6458
6504
  async *runAsync(invocationContext) {
6459
- this.logger.info(`Agent '${invocationContext.agent.name}' started.`);
6505
+ this.logger.debug(`Agent '${invocationContext.agent.name}' started.`);
6460
6506
  let stepCount = 0;
6461
6507
  while (true) {
6462
6508
  stepCount++;
@@ -6466,7 +6512,7 @@ var BaseLlmFlow = (_class25 = class {constructor() { _class25.prototype.__init43
6466
6512
  yield event;
6467
6513
  }
6468
6514
  if (!lastEvent || lastEvent.isFinalResponse()) {
6469
- this.logger.info(
6515
+ this.logger.debug(
6470
6516
  `Agent '${invocationContext.agent.name}' finished after ${stepCount} steps.`
6471
6517
  );
6472
6518
  break;
@@ -6496,7 +6542,7 @@ var BaseLlmFlow = (_class25 = class {constructor() { _class25.prototype.__init43
6496
6542
  yield event;
6497
6543
  }
6498
6544
  if (invocationContext.endInvocation) {
6499
- this.logger.info("Invocation ended during preprocessing.");
6545
+ this.logger.debug("Invocation ended during preprocessing.");
6500
6546
  return;
6501
6547
  }
6502
6548
  const modelResponseEvent = new Event({
@@ -6546,7 +6592,7 @@ var BaseLlmFlow = (_class25 = class {constructor() { _class25.prototype.__init43
6546
6592
  if (tools.length > 0) {
6547
6593
  const toolsData = tools.map((tool) => ({
6548
6594
  Name: tool.name,
6549
- Description: _optionalChain([tool, 'access', _171 => _171.description, 'optionalAccess', _172 => _172.substring, 'call', _173 => _173(0, 50)]) + (_optionalChain([tool, 'access', _174 => _174.description, 'optionalAccess', _175 => _175.length]) > 50 ? "..." : ""),
6595
+ Description: _optionalChain([tool, 'access', _172 => _172.description, 'optionalAccess', _173 => _173.substring, 'call', _174 => _174(0, 50)]) + (_optionalChain([tool, 'access', _175 => _175.description, 'optionalAccess', _176 => _176.length]) > 50 ? "..." : ""),
6550
6596
  "Long Running": tool.isLongRunning ? "Yes" : "No"
6551
6597
  }));
6552
6598
  this.logger.debugArray("\u{1F6E0}\uFE0F Available Tools", toolsData);
@@ -6609,14 +6655,14 @@ var BaseLlmFlow = (_class25 = class {constructor() { _class25.prototype.__init43
6609
6655
  );
6610
6656
  if (functionResponseEvent) {
6611
6657
  yield functionResponseEvent;
6612
- const transferToAgent = _optionalChain([functionResponseEvent, 'access', _176 => _176.actions, 'optionalAccess', _177 => _177.transferToAgent]);
6658
+ const transferToAgent = _optionalChain([functionResponseEvent, 'access', _177 => _177.actions, 'optionalAccess', _178 => _178.transferToAgent]);
6613
6659
  if (transferToAgent) {
6614
- this.logger.info(`\u{1F504} Live transfer to agent '${transferToAgent}'`);
6660
+ this.logger.debug(`\u{1F504} Live transfer to agent '${transferToAgent}'`);
6615
6661
  const agentToRun = this._getAgentToRun(
6616
6662
  invocationContext,
6617
6663
  transferToAgent
6618
6664
  );
6619
- for await (const event of _optionalChain([agentToRun, 'access', _178 => _178.runLive, 'optionalCall', _179 => _179(invocationContext)]) || agentToRun.runAsync(invocationContext)) {
6665
+ for await (const event of _optionalChain([agentToRun, 'access', _179 => _179.runLive, 'optionalCall', _180 => _180(invocationContext)]) || agentToRun.runAsync(invocationContext)) {
6620
6666
  yield event;
6621
6667
  }
6622
6668
  }
@@ -6648,9 +6694,9 @@ var BaseLlmFlow = (_class25 = class {constructor() { _class25.prototype.__init43
6648
6694
  yield authEvent;
6649
6695
  }
6650
6696
  yield functionResponseEvent;
6651
- const transferToAgent = _optionalChain([functionResponseEvent, 'access', _180 => _180.actions, 'optionalAccess', _181 => _181.transferToAgent]);
6697
+ const transferToAgent = _optionalChain([functionResponseEvent, 'access', _181 => _181.actions, 'optionalAccess', _182 => _182.transferToAgent]);
6652
6698
  if (transferToAgent) {
6653
- this.logger.info(`\u{1F504} Transferring to agent '${transferToAgent}'`);
6699
+ this.logger.debug(`\u{1F504} Transferring to agent '${transferToAgent}'`);
6654
6700
  const agentToRun = this._getAgentToRun(
6655
6701
  invocationContext,
6656
6702
  transferToAgent
@@ -6694,27 +6740,27 @@ var BaseLlmFlow = (_class25 = class {constructor() { _class25.prototype.__init43
6694
6740
  }
6695
6741
  invocationContext.incrementLlmCallCount();
6696
6742
  const isStreaming = invocationContext.runConfig.streamingMode === "sse" /* SSE */;
6697
- const tools = _optionalChain([llmRequest, 'access', _182 => _182.config, 'optionalAccess', _183 => _183.tools]) || [];
6743
+ const tools = _optionalChain([llmRequest, 'access', _183 => _183.config, 'optionalAccess', _184 => _184.tools]) || [];
6698
6744
  const toolNames = tools.map((tool) => {
6699
6745
  if (tool.functionDeclarations && Array.isArray(tool.functionDeclarations)) {
6700
6746
  return tool.functionDeclarations.map((fn) => fn.name).join(", ");
6701
6747
  }
6702
6748
  if (tool.name) return tool.name;
6703
- if (_optionalChain([tool, 'access', _184 => _184.function, 'optionalAccess', _185 => _185.name])) return tool.function.name;
6704
- if (_optionalChain([tool, 'access', _186 => _186.function, 'optionalAccess', _187 => _187.function, 'optionalAccess', _188 => _188.name])) return tool.function.function.name;
6749
+ if (_optionalChain([tool, 'access', _185 => _185.function, 'optionalAccess', _186 => _186.name])) return tool.function.name;
6750
+ if (_optionalChain([tool, 'access', _187 => _187.function, 'optionalAccess', _188 => _188.function, 'optionalAccess', _189 => _189.name])) return tool.function.function.name;
6705
6751
  return "unknown";
6706
6752
  }).join(", ");
6707
6753
  const systemInstruction = llmRequest.getSystemInstructionText() || "";
6708
6754
  const truncatedSystemInstruction = systemInstruction.length > 100 ? `${systemInstruction.substring(0, 100)}...` : systemInstruction;
6709
- const contentPreview = _optionalChain([llmRequest, 'access', _189 => _189.contents, 'optionalAccess', _190 => _190.length]) > 0 ? LogFormatter.formatContentPreview(llmRequest.contents[0]) : "none";
6755
+ const contentPreview = _optionalChain([llmRequest, 'access', _190 => _190.contents, 'optionalAccess', _191 => _191.length]) > 0 ? LogFormatter.formatContentPreview(llmRequest.contents[0]) : "none";
6710
6756
  this.logger.debugStructured("\u{1F4E4} LLM Request", {
6711
6757
  Model: llm.model,
6712
6758
  Agent: invocationContext.agent.name,
6713
- "Content Items": _optionalChain([llmRequest, 'access', _191 => _191.contents, 'optionalAccess', _192 => _192.length]) || 0,
6759
+ "Content Items": _optionalChain([llmRequest, 'access', _192 => _192.contents, 'optionalAccess', _193 => _193.length]) || 0,
6714
6760
  "Content Preview": contentPreview,
6715
6761
  "System Instruction": truncatedSystemInstruction || "none",
6716
6762
  "Available Tools": toolNames || "none",
6717
- "Tool Count": _optionalChain([llmRequest, 'access', _193 => _193.config, 'optionalAccess', _194 => _194.tools, 'optionalAccess', _195 => _195.length]) || 0,
6763
+ "Tool Count": _optionalChain([llmRequest, 'access', _194 => _194.config, 'optionalAccess', _195 => _195.tools, 'optionalAccess', _196 => _196.length]) || 0,
6718
6764
  Streaming: isStreaming ? "Yes" : "No"
6719
6765
  });
6720
6766
  let responseCount = 0;
@@ -6729,8 +6775,8 @@ var BaseLlmFlow = (_class25 = class {constructor() { _class25.prototype.__init43
6729
6775
  llmRequest,
6730
6776
  llmResponse
6731
6777
  );
6732
- const tokenCount = _optionalChain([llmResponse, 'access', _196 => _196.usageMetadata, 'optionalAccess', _197 => _197.totalTokenCount]) || "unknown";
6733
- const functionCalls = _optionalChain([llmResponse, 'access', _198 => _198.content, 'optionalAccess', _199 => _199.parts, 'optionalAccess', _200 => _200.filter, 'call', _201 => _201((part) => part.functionCall)]) || [];
6778
+ const tokenCount = _optionalChain([llmResponse, 'access', _197 => _197.usageMetadata, 'optionalAccess', _198 => _198.totalTokenCount]) || "unknown";
6779
+ const functionCalls = _optionalChain([llmResponse, 'access', _199 => _199.content, 'optionalAccess', _200 => _200.parts, 'optionalAccess', _201 => _201.filter, 'call', _202 => _202((part) => part.functionCall)]) || [];
6734
6780
  const functionCallsDisplay = LogFormatter.formatFunctionCalls(functionCalls);
6735
6781
  const responsePreview = LogFormatter.formatResponsePreview(llmResponse);
6736
6782
  this.logger.debugStructured("\u{1F4E5} LLM Response", {
@@ -6874,7 +6920,7 @@ var EnhancedAuthConfig = class {
6874
6920
  */
6875
6921
  generateCredentialKey() {
6876
6922
  const schemeKey = this.authScheme.type || "unknown";
6877
- const credentialKey = _optionalChain([this, 'access', _202 => _202.rawAuthCredential, 'optionalAccess', _203 => _203.type]) || "none";
6923
+ const credentialKey = _optionalChain([this, 'access', _203 => _203.rawAuthCredential, 'optionalAccess', _204 => _204.type]) || "none";
6878
6924
  const timestamp = Date.now();
6879
6925
  return `adk_${schemeKey}_${credentialKey}_${timestamp}`;
6880
6926
  }
@@ -7031,7 +7077,7 @@ var AuthLlmRequestProcessor = class extends BaseLlmRequestProcessor {
7031
7077
  */
7032
7078
  parseAndStoreAuthResponse(authHandler, invocationContext) {
7033
7079
  try {
7034
- const credentialKey = _optionalChain([authHandler, 'access', _204 => _204.authConfig, 'access', _205 => _205.context, 'optionalAccess', _206 => _206.credentialKey]) || `temp:${Date.now()}`;
7080
+ const credentialKey = _optionalChain([authHandler, 'access', _205 => _205.authConfig, 'access', _206 => _206.context, 'optionalAccess', _207 => _207.credentialKey]) || `temp:${Date.now()}`;
7035
7081
  const fullCredentialKey = credentialKey.startsWith("temp:") ? credentialKey : `temp:${credentialKey}`;
7036
7082
  invocationContext.session.state[fullCredentialKey] = authHandler.credential;
7037
7083
  if (authHandler.authConfig.authScheme.type === "oauth2" || authHandler.authConfig.authScheme.type === "openIdConnect") {
@@ -7074,8 +7120,6 @@ var BasicLlmRequestProcessor = class extends BaseLlmRequestProcessor {
7074
7120
  llmRequest.liveConnectConfig.realtimeInputConfig = runConfig.realtimeInputConfig;
7075
7121
  llmRequest.liveConnectConfig.enableAffectiveDialog = runConfig.enableAffectiveDialog;
7076
7122
  llmRequest.liveConnectConfig.proactivity = runConfig.proactivity;
7077
- const tools = await agent.canonicalTools();
7078
- llmRequest.appendTools(tools);
7079
7123
  for await (const _ of []) {
7080
7124
  yield _;
7081
7125
  }
@@ -7139,7 +7183,7 @@ var BuiltInCodeExecutor = class extends BaseCodeExecutor {
7139
7183
  * Pre-process the LLM request for Gemini 2.0+ models to use the code execution tool
7140
7184
  */
7141
7185
  processLlmRequest(llmRequest) {
7142
- if (!_optionalChain([llmRequest, 'access', _207 => _207.model, 'optionalAccess', _208 => _208.startsWith, 'call', _209 => _209("gemini-2")])) {
7186
+ if (!_optionalChain([llmRequest, 'access', _208 => _208.model, 'optionalAccess', _209 => _209.startsWith, 'call', _210 => _210("gemini-2")])) {
7143
7187
  throw new Error(
7144
7188
  `Gemini code execution tool is not supported for model ${llmRequest.model}`
7145
7189
  );
@@ -7184,7 +7228,7 @@ var CodeExecutionUtils = class _CodeExecutionUtils {
7184
7228
  * Extracts the first code block from the content and truncates everything after it
7185
7229
  */
7186
7230
  static extractCodeAndTruncateContent(content, codeBlockDelimiters) {
7187
- if (!_optionalChain([content, 'optionalAccess', _210 => _210.parts, 'optionalAccess', _211 => _211.length])) {
7231
+ if (!_optionalChain([content, 'optionalAccess', _211 => _211.parts, 'optionalAccess', _212 => _212.length])) {
7188
7232
  return null;
7189
7233
  }
7190
7234
  for (let idx = 0; idx < content.parts.length; idx++) {
@@ -7270,7 +7314,7 @@ ${fileNames}`);
7270
7314
  * Converts the code execution parts to text parts in a Content
7271
7315
  */
7272
7316
  static convertCodeExecutionParts(content, codeBlockDelimiter, executionResultDelimiters) {
7273
- if (!_optionalChain([content, 'access', _212 => _212.parts, 'optionalAccess', _213 => _213.length])) {
7317
+ if (!_optionalChain([content, 'access', _213 => _213.parts, 'optionalAccess', _214 => _214.length])) {
7274
7318
  return;
7275
7319
  }
7276
7320
  const lastPart = content.parts[content.parts.length - 1];
@@ -7663,7 +7707,7 @@ async function* runPostProcessor(invocationContext, llmResponse) {
7663
7707
  function extractAndReplaceInlineFiles(codeExecutorContext, llmRequest) {
7664
7708
  const allInputFiles = codeExecutorContext.getInputFiles();
7665
7709
  const savedFileNames = new Set(allInputFiles.map((f) => f.name));
7666
- for (let i = 0; i < (_optionalChain([llmRequest, 'access', _214 => _214.contents, 'optionalAccess', _215 => _215.length]) || 0); i++) {
7710
+ for (let i = 0; i < (_optionalChain([llmRequest, 'access', _215 => _215.contents, 'optionalAccess', _216 => _216.length]) || 0); i++) {
7667
7711
  const content = llmRequest.contents[i];
7668
7712
  if (content.role !== "user" || !content.parts) {
7669
7713
  continue;
@@ -7695,7 +7739,7 @@ Available file: \`${fileName}\`
7695
7739
  }
7696
7740
  function getOrSetExecutionId(invocationContext, codeExecutorContext) {
7697
7741
  const agent = invocationContext.agent;
7698
- if (!hasCodeExecutor(agent) || !_optionalChain([agent, 'access', _216 => _216.codeExecutor, 'optionalAccess', _217 => _217.stateful])) {
7742
+ if (!hasCodeExecutor(agent) || !_optionalChain([agent, 'access', _217 => _217.codeExecutor, 'optionalAccess', _218 => _218.stateful])) {
7699
7743
  return void 0;
7700
7744
  }
7701
7745
  let executionId = codeExecutorContext.getExecutionId();
@@ -7926,7 +7970,7 @@ function rearrangeEventsForLatestFunctionResponse(events) {
7926
7970
  continue;
7927
7971
  }
7928
7972
  const functionResponses2 = event.getFunctionResponses();
7929
- if (_optionalChain([functionResponses2, 'optionalAccess', _218 => _218.some, 'call', _219 => _219((fr) => fr.id && functionResponsesIds.has(fr.id))])) {
7973
+ if (_optionalChain([functionResponses2, 'optionalAccess', _219 => _219.some, 'call', _220 => _220((fr) => fr.id && functionResponsesIds.has(fr.id))])) {
7930
7974
  functionResponseEvents.push(event);
7931
7975
  }
7932
7976
  }
@@ -8025,7 +8069,7 @@ function mergeFunctionResponseEvents(functionResponseEvents) {
8025
8069
  const partIndicesInMergedEvent = {};
8026
8070
  for (let idx = 0; idx < partsInMergedEvent.length; idx++) {
8027
8071
  const part = partsInMergedEvent[idx];
8028
- if (_optionalChain([part, 'access', _220 => _220.functionResponse, 'optionalAccess', _221 => _221.id])) {
8072
+ if (_optionalChain([part, 'access', _221 => _221.functionResponse, 'optionalAccess', _222 => _222.id])) {
8029
8073
  partIndicesInMergedEvent[part.functionResponse.id] = idx;
8030
8074
  }
8031
8075
  }
@@ -8034,7 +8078,7 @@ function mergeFunctionResponseEvents(functionResponseEvents) {
8034
8078
  throw new Error("There should be at least one function_response part.");
8035
8079
  }
8036
8080
  for (const part of event.content.parts) {
8037
- if (_optionalChain([part, 'access', _222 => _222.functionResponse, 'optionalAccess', _223 => _223.id])) {
8081
+ if (_optionalChain([part, 'access', _223 => _223.functionResponse, 'optionalAccess', _224 => _224.id])) {
8038
8082
  const functionCallId = part.functionResponse.id;
8039
8083
  if (functionCallId in partIndicesInMergedEvent) {
8040
8084
  partsInMergedEvent[partIndicesInMergedEvent[functionCallId]] = part;
@@ -8088,6 +8132,9 @@ var IdentityLlmRequestProcessor = class extends BaseLlmRequestProcessor {
8088
8132
  };
8089
8133
  var requestProcessor5 = new IdentityLlmRequestProcessor();
8090
8134
 
8135
+ // src/flows/llm-flows/instructions.ts
8136
+
8137
+
8091
8138
  // src/utils/instructions-utils.ts
8092
8139
  async function injectSessionState(template, readonlyContext) {
8093
8140
  const invocationContext = readonlyContext._invocationContext;
@@ -8202,6 +8249,22 @@ var InstructionsLlmRequestProcessor = class extends BaseLlmRequestProcessor {
8202
8249
  }
8203
8250
  llmRequest.appendInstructions([instruction]);
8204
8251
  }
8252
+ if (agent.outputSchema) {
8253
+ try {
8254
+ const raw = _zodtojsonschema.zodToJsonSchema.call(void 0, agent.outputSchema, {
8255
+ target: "jsonSchema7",
8256
+ $refStrategy: "none"
8257
+ });
8258
+ const { $schema, ...json } = raw || {};
8259
+ llmRequest.appendInstructions([
8260
+ "You must respond with application/json that validates against this JSON Schema:",
8261
+ "```json",
8262
+ JSON.stringify(json, null, 2),
8263
+ "```"
8264
+ ]);
8265
+ } catch (e5) {
8266
+ }
8267
+ }
8205
8268
  for await (const _ of []) {
8206
8269
  yield _;
8207
8270
  }
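The new block above converts `agent.outputSchema` (a Zod schema) to JSON Schema with `zod-to-json-schema` and appends it to the system instructions so the model is told to reply with matching JSON. A hedged sketch of that conversion in isolation; the `WeatherReport` schema is made up for illustration:

```ts
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";

// Hypothetical output schema, purely for illustration.
const WeatherReport = z.object({
  city: z.string(),
  temperatureC: z.number(),
});

const raw = zodToJsonSchema(WeatherReport, {
  target: "jsonSchema7",
  $refStrategy: "none",
});

// Drop the $schema marker, as the processor above does, then embed the rest in
// the instruction text (the real processor additionally wraps it in a fenced json block).
const { $schema, ...json } = raw as Record<string, unknown>;
const instruction =
  "You must respond with application/json that validates against this JSON Schema:\n" +
  JSON.stringify(json, null, 2);
```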
@@ -8284,7 +8347,7 @@ var PlanReActPlanner = class extends BasePlanner {
8284
8347
  let firstFcPartIndex = -1;
8285
8348
  for (let i = 0; i < responseParts.length; i++) {
8286
8349
  if (responseParts[i].functionCall) {
8287
- if (!_optionalChain([responseParts, 'access', _224 => _224[i], 'access', _225 => _225.functionCall, 'optionalAccess', _226 => _226.name])) {
8350
+ if (!_optionalChain([responseParts, 'access', _225 => _225[i], 'access', _226 => _226.functionCall, 'optionalAccess', _227 => _227.name])) {
8288
8351
  continue;
8289
8352
  }
8290
8353
  preservedParts.push(responseParts[i]);
@@ -8323,7 +8386,7 @@ var PlanReActPlanner = class extends BasePlanner {
8323
8386
  * Handles non-function-call parts of the response
8324
8387
  */
8325
8388
  _handleNonFunctionCallParts(responsePart, preservedParts) {
8326
- if (_optionalChain([responsePart, 'access', _227 => _227.text, 'optionalAccess', _228 => _228.includes, 'call', _229 => _229(FINAL_ANSWER_TAG)])) {
8389
+ if (_optionalChain([responsePart, 'access', _228 => _228.text, 'optionalAccess', _229 => _229.includes, 'call', _230 => _230(FINAL_ANSWER_TAG)])) {
8327
8390
  const [reasoningText, finalAnswerText] = this._splitByLastPattern(
8328
8391
  responsePart.text,
8329
8392
  FINAL_ANSWER_TAG
@@ -8493,12 +8556,87 @@ function removeThoughtFromRequest(llmRequest) {
8493
8556
  var requestProcessor7 = new NlPlanningRequestProcessor();
8494
8557
  var responseProcessor2 = new NlPlanningResponseProcessor();
8495
8558
 
8559
+ // src/flows/llm-flows/output-schema.ts
8560
+ init_logger();
8561
+ var OutputSchemaResponseProcessor = (_class26 = class extends BaseLlmResponseProcessor {constructor(...args2) { super(...args2); _class26.prototype.__init46.call(this); }
8562
+ __init46() {this.logger = new Logger({ name: "OutputSchemaResponseProcessor" })}
8563
+ async *runAsync(invocationContext, llmResponse) {
8564
+ if (!llmResponse || !llmResponse.content || !llmResponse.content.parts || llmResponse.content.parts.length === 0) {
8565
+ return;
8566
+ }
8567
+ const agent = invocationContext.agent;
8568
+ if (!("outputSchema" in agent) || !agent.outputSchema) {
8569
+ return;
8570
+ }
8571
+ let textContent = llmResponse.content.parts.map((part) => {
8572
+ if (part && typeof part === "object" && "text" in part) {
8573
+ return part.text || "";
8574
+ }
8575
+ return "";
8576
+ }).join("");
8577
+ if (!textContent.trim()) {
8578
+ return;
8579
+ }
8580
+ try {
8581
+ const parsed = JSON.parse(textContent);
8582
+ const validated = agent.outputSchema.parse(parsed);
8583
+ textContent = JSON.stringify(validated, null, 2);
8584
+ llmResponse.content.parts = llmResponse.content.parts.map((part) => {
8585
+ if (part && typeof part === "object" && "text" in part) {
8586
+ return {
8587
+ ...part,
8588
+ text: textContent
8589
+ };
8590
+ }
8591
+ return part;
8592
+ });
8593
+ this.logger.debug("Output schema validation successful", {
8594
+ agent: agent.name,
8595
+ originalLength: textContent.length,
8596
+ validatedKeys: Object.keys(validated)
8597
+ });
8598
+ } catch (error) {
8599
+ const errorMessage = error instanceof Error ? error.message : String(error);
8600
+ const detailedError = `Output schema validation failed for agent '${agent.name}': ${errorMessage}`;
8601
+ this.logger.error(detailedError, {
8602
+ agent: agent.name,
8603
+ responseContent: textContent.substring(0, 200) + (textContent.length > 200 ? "..." : ""),
8604
+ error: errorMessage
8605
+ });
8606
+ llmResponse.errorCode = "OUTPUT_SCHEMA_VALIDATION_FAILED";
8607
+ llmResponse.errorMessage = detailedError;
8608
+ llmResponse.error = new Error(detailedError);
8609
+ const errorEvent = new Event({
8610
+ id: Event.newId(),
8611
+ invocationId: invocationContext.invocationId,
8612
+ author: agent.name,
8613
+ branch: invocationContext.branch,
8614
+ content: {
8615
+ role: "assistant",
8616
+ parts: [
8617
+ {
8618
+ text: `Error: ${detailedError}`
8619
+ }
8620
+ ]
8621
+ }
8622
+ });
8623
+ errorEvent.errorCode = "OUTPUT_SCHEMA_VALIDATION_FAILED";
8624
+ errorEvent.errorMessage = detailedError;
8625
+ errorEvent.error = new Error(detailedError);
8626
+ yield errorEvent;
8627
+ }
8628
+ }
8629
+ }, _class26);
8630
+ var responseProcessor3 = new OutputSchemaResponseProcessor();
8631
+
8496
8632
  // src/flows/llm-flows/shared-memory.ts
8497
8633
  var SharedMemoryRequestProcessor = class extends BaseLlmRequestProcessor {
8498
8634
  async *runAsync(invocationContext, llmRequest) {
8499
8635
  const memoryService = invocationContext.memoryService;
8500
8636
  if (!memoryService) return;
8501
- const lastUserEvent = invocationContext.session.events.findLast((e) => e.author === "user" && _optionalChain([e, 'access', _230 => _230.content, 'optionalAccess', _231 => _231.parts, 'optionalAccess', _232 => _232.length]));
8637
+ const lastUserEvent = invocationContext.session.events.findLast(
8638
+ (e) => e.author === "user" && _optionalChain([e, 'access', _231 => _231.content, 'optionalAccess', _232 => _232.parts, 'optionalAccess', _233 => _233.length])
8639
+ );
8502
8640
  if (!lastUserEvent) return;
8503
8641
  const query = (_nullishCoalesce(lastUserEvent.content.parts, () => ( []))).map((p) => p.text || "").join(" ");
8504
8642
  const results = await memoryService.searchMemory({
@@ -8508,7 +8646,7 @@ var SharedMemoryRequestProcessor = class extends BaseLlmRequestProcessor {
8508
8646
  });
8509
8647
  const sessionTexts = new Set(
8510
8648
  (llmRequest.contents || []).flatMap(
8511
- (c) => _optionalChain([c, 'access', _233 => _233.parts, 'optionalAccess', _234 => _234.map, 'call', _235 => _235((p) => p.text)]) || []
8649
+ (c) => _optionalChain([c, 'access', _234 => _234.parts, 'optionalAccess', _235 => _235.map, 'call', _236 => _236((p) => p.text)]) || []
8512
8650
  )
8513
8651
  );
8514
8652
  for (const memory of results.memories) {
@@ -8557,8 +8695,10 @@ var SingleFlow = class extends BaseLlmFlow {
8557
8695
  this.responseProcessors.push(
8558
8696
  responseProcessor2,
8559
8697
  // Phase 5: NL Planning
8698
+ responseProcessor3,
8699
+ // Phase 6: Output Schema validation and parsing - validates response against agent's output schema
8560
8700
  responseProcessor
8561
- // Phase 5: Code Execution (placeholder)
8701
+ // Phase 7: Code Execution (placeholder)
8562
8702
  );
8563
8703
  this.logger.debug("SingleFlow initialized with processors");
8564
8704
  }
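The new `OutputSchemaResponseProcessor` registered above ("Phase 6") joins the text parts of the LLM response, parses them as JSON, validates against the agent's Zod output schema, and on failure attaches an `OUTPUT_SCHEMA_VALIDATION_FAILED` error instead of silently passing the response through. A compact sketch of that core validation step, with an illustrative schema standing in for `agent.outputSchema`:

```ts
import { z } from "zod";

// Illustrative schema; the processor above reads it from agent.outputSchema.
const OutputSchema = z.object({ answer: z.string(), confidence: z.number() });

type ValidationResult =
  | { ok: true; text: string }
  | { ok: false; errorCode: string; errorMessage: string };

function validateLlmText(text: string): ValidationResult {
  try {
    const validated = OutputSchema.parse(JSON.parse(text));
    // Re-serialize so downstream consumers see normalized, validated JSON.
    return { ok: true, text: JSON.stringify(validated, null, 2) };
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    return {
      ok: false,
      errorCode: "OUTPUT_SCHEMA_VALIDATION_FAILED",
      errorMessage: `Output schema validation failed: ${message}`,
    };
  }
}
```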
@@ -8660,7 +8800,7 @@ var AutoFlow = class extends SingleFlow {
8660
8800
 
8661
8801
  // src/agents/llm-agent.ts
8662
8802
  init_function_tool();
8663
- var LlmAgent = (_class26 = class _LlmAgent extends BaseAgent {
8803
+ var LlmAgent = (_class27 = class _LlmAgent extends BaseAgent {
8664
8804
  /**
8665
8805
  * The model to use for the agent
8666
8806
  * When not set, the agent will inherit the model from its ancestor
@@ -8731,13 +8871,27 @@ var LlmAgent = (_class26 = class _LlmAgent extends BaseAgent {
8731
8871
  * The input schema when agent is used as a tool
8732
8872
  */
8733
8873
 
8734
- // Schema type - depends on specific implementation
8735
8874
  /**
8736
8875
  * The output schema when agent replies
8737
8876
  */
8738
8877
 
8739
- // Schema type - depends on specific implementation
8740
- __init46() {this.logger = new Logger({ name: "LlmAgent" })}
8878
+ /**
8879
+ * Callback or list of callbacks to be called before calling the LLM
8880
+ */
8881
+
8882
+ /**
8883
+ * Callback or list of callbacks to be called after calling the LLM
8884
+ */
8885
+
8886
+ /**
8887
+ * Callback or list of callbacks to be called before calling a tool
8888
+ */
8889
+
8890
+ /**
8891
+ * Callback or list of callbacks to be called after calling a tool
8892
+ */
8893
+
8894
+ __init47() {this.logger = new Logger({ name: "LlmAgent" })}
8741
8895
  /**
8742
8896
  * Constructor for LlmAgent
8743
8897
  */
@@ -8748,7 +8902,7 @@ var LlmAgent = (_class26 = class _LlmAgent extends BaseAgent {
8748
8902
  subAgents: config.subAgents,
8749
8903
  beforeAgentCallback: config.beforeAgentCallback,
8750
8904
  afterAgentCallback: config.afterAgentCallback
8751
- });_class26.prototype.__init46.call(this);;
8905
+ });_class27.prototype.__init47.call(this);;
8752
8906
  this.model = config.model || "";
8753
8907
  this.instruction = config.instruction || "";
8754
8908
  this.globalInstruction = config.globalInstruction || "";
@@ -8767,6 +8921,11 @@ var LlmAgent = (_class26 = class _LlmAgent extends BaseAgent {
8767
8921
  this.generateContentConfig = config.generateContentConfig;
8768
8922
  this.inputSchema = config.inputSchema;
8769
8923
  this.outputSchema = config.outputSchema;
8924
+ this.beforeModelCallback = config.beforeModelCallback;
8925
+ this.afterModelCallback = config.afterModelCallback;
8926
+ this.beforeToolCallback = config.beforeToolCallback;
8927
+ this.afterToolCallback = config.afterToolCallback;
8928
+ this.validateOutputSchemaConfig();
8770
8929
  }
8771
8930
  /**
8772
8931
  * The resolved model field as BaseLLM
@@ -8783,13 +8942,15 @@ var LlmAgent = (_class26 = class _LlmAgent extends BaseAgent {
8783
8942
  return new AiSdkLlm(this.model);
8784
8943
  }
8785
8944
  let ancestorAgent = this.parentAgent;
8786
- while (ancestorAgent !== null) {
8945
+ while (ancestorAgent !== null && ancestorAgent !== void 0) {
8787
8946
  if (ancestorAgent instanceof _LlmAgent) {
8788
8947
  return ancestorAgent.canonicalModel;
8789
8948
  }
8790
8949
  ancestorAgent = ancestorAgent.parentAgent;
8791
8950
  }
8792
- throw new Error(`No model found for ${this.name}.`);
8951
+ throw new Error(
8952
+ `No model found for agent "${this.name}". Please specify a model directly on this agent using the 'model' property`
8953
+ );
8793
8954
  }
8794
8955
  /**
8795
8956
  * The resolved instruction field to construct instruction for this agent
@@ -8829,12 +8990,86 @@ var LlmAgent = (_class26 = class _LlmAgent extends BaseAgent {
8829
8990
  }
8830
8991
  return resolvedTools;
8831
8992
  }
8993
+ /**
8994
+ * Gets the canonical before model callbacks as an array
8995
+ */
8996
+ get canonicalBeforeModelCallbacks() {
8997
+ if (!this.beforeModelCallback) {
8998
+ return [];
8999
+ }
9000
+ if (Array.isArray(this.beforeModelCallback)) {
9001
+ return this.beforeModelCallback;
9002
+ }
9003
+ return [this.beforeModelCallback];
9004
+ }
9005
+ /**
9006
+ * Gets the canonical after model callbacks as an array
9007
+ */
9008
+ get canonicalAfterModelCallbacks() {
9009
+ if (!this.afterModelCallback) {
9010
+ return [];
9011
+ }
9012
+ if (Array.isArray(this.afterModelCallback)) {
9013
+ return this.afterModelCallback;
9014
+ }
9015
+ return [this.afterModelCallback];
9016
+ }
9017
+ /**
9018
+ * Gets the canonical before tool callbacks as an array
9019
+ */
9020
+ get canonicalBeforeToolCallbacks() {
9021
+ if (!this.beforeToolCallback) {
9022
+ return [];
9023
+ }
9024
+ if (Array.isArray(this.beforeToolCallback)) {
9025
+ return this.beforeToolCallback;
9026
+ }
9027
+ return [this.beforeToolCallback];
9028
+ }
9029
+ /**
9030
+ * Gets the canonical after tool callbacks as an array
9031
+ */
9032
+ get canonicalAfterToolCallbacks() {
9033
+ if (!this.afterToolCallback) {
9034
+ return [];
9035
+ }
9036
+ if (Array.isArray(this.afterToolCallback)) {
9037
+ return this.afterToolCallback;
9038
+ }
9039
+ return [this.afterToolCallback];
9040
+ }
9041
+ /**
9042
+ * Validates output schema configuration
9043
+ * This matches the Python implementation's __check_output_schema
9044
+ */
9045
+ validateOutputSchemaConfig() {
9046
+ if (!this.outputSchema) {
9047
+ return;
9048
+ }
9049
+ if (!this.disallowTransferToParent || !this.disallowTransferToPeers) {
9050
+ this.logger.warn(
9051
+ `Invalid config for agent ${this.name}: output_schema cannot co-exist with agent transfer configurations. Setting disallow_transfer_to_parent=true, disallow_transfer_to_peers=true`
9052
+ );
9053
+ this.disallowTransferToParent = true;
9054
+ this.disallowTransferToPeers = true;
9055
+ }
9056
+ if (this.subAgents && this.subAgents.length > 0) {
9057
+ throw new Error(
9058
+ `Invalid config for agent ${this.name}: if output_schema is set, sub_agents must be empty to disable agent transfer.`
9059
+ );
9060
+ }
9061
+ if (this.tools && this.tools.length > 0) {
9062
+ throw new Error(
9063
+ `Invalid config for agent ${this.name}: if output_schema is set, tools must be empty`
9064
+ );
9065
+ }
9066
+ }
8832
9067
  /**
8833
9068
  * Gets the appropriate LLM flow for this agent
8834
9069
  * This matches the Python implementation's _llm_flow property
8835
9070
  */
8836
9071
  get llmFlow() {
8837
- if (this.disallowTransferToParent && this.disallowTransferToPeers && !_optionalChain([this, 'access', _236 => _236.subAgents, 'optionalAccess', _237 => _237.length])) {
9072
+ if (this.disallowTransferToParent && this.disallowTransferToPeers && !_optionalChain([this, 'access', _237 => _237.subAgents, 'optionalAccess', _238 => _238.length])) {
8838
9073
  return new SingleFlow();
8839
9074
  }
8840
9075
  return new AutoFlow();
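The `canonicalBeforeModelCallbacks` / `canonicalAfterModelCallbacks` / `canonicalBeforeToolCallbacks` / `canonicalAfterToolCallbacks` getters added above all apply the same normalization: a config field that may hold a single callback or a list is always exposed as an array. A small sketch of that pattern; the callback type is a placeholder, since the package's actual callback signatures are not shown in this hunk:

```ts
// Placeholder callback type; the package defines its own model/tool callback signatures.
type AnyCallback = (...args: unknown[]) => unknown;

function toCallbackList(value?: AnyCallback | AnyCallback[]): AnyCallback[] {
  if (!value) {
    return [];
  }
  return Array.isArray(value) ? value : [value];
}

// toCallbackList(undefined)   -> []
// toCallbackList(fn)          -> [fn]
// toCallbackList([fnA, fnB])  -> [fnA, fnB]
```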
@@ -8844,8 +9079,28 @@ var LlmAgent = (_class26 = class _LlmAgent extends BaseAgent {
8844
9079
  * This matches the Python implementation's __maybe_save_output_to_state
8845
9080
  */
8846
9081
  maybeSaveOutputToState(event) {
8847
- if (this.outputKey && event.isFinalResponse() && _optionalChain([event, 'access', _238 => _238.content, 'optionalAccess', _239 => _239.parts])) {
8848
- const result = event.content.parts.map((part) => part.text || "").join("");
9082
+ if (event.author !== this.name) {
9083
+ this.logger.debug(
9084
+ `Skipping output save for agent ${this.name}: event authored by ${event.author}`
9085
+ );
9086
+ return;
9087
+ }
9088
+ if (this.outputKey && event.isFinalResponse() && _optionalChain([event, 'access', _239 => _239.content, 'optionalAccess', _240 => _240.parts])) {
9089
+ let result = event.content.parts.map((part) => part.text || "").join("");
9090
+ if (this.outputSchema) {
9091
+ if (!result.trim()) {
9092
+ return;
9093
+ }
9094
+ try {
9095
+ const parsed = JSON.parse(result);
9096
+ result = this.outputSchema.parse(parsed);
9097
+ } catch (error) {
9098
+ this.logger.error("Failed to validate output with schema:", error);
9099
+ throw new Error(
9100
+ `Output validation failed: ${error instanceof Error ? error.message : String(error)}`
9101
+ );
9102
+ }
9103
+ }
8849
9104
  if (result) {
8850
9105
  if (!event.actions.stateDelta) {
8851
9106
  event.actions.stateDelta = {};
@@ -8858,19 +9113,19 @@ var LlmAgent = (_class26 = class _LlmAgent extends BaseAgent {
8858
9113
  * Core logic to run this agent via text-based conversation
8859
9114
  * This matches the Python implementation's _run_async_impl
8860
9115
  */
8861
- async *runAsyncImpl(context) {
9116
+ async *runAsyncImpl(context4) {
8862
9117
  this.logger.debug(`Starting LlmAgent execution for "${this.name}"`);
8863
9118
  try {
8864
- for await (const event of this.llmFlow.runAsync(context)) {
9119
+ for await (const event of this.llmFlow.runAsync(context4)) {
8865
9120
  this.maybeSaveOutputToState(event);
8866
9121
  yield event;
8867
9122
  }
8868
9123
  } catch (error) {
8869
9124
  this.logger.error("Error in LlmAgent execution:", error);
8870
9125
  const errorEvent = new Event({
8871
- invocationId: context.invocationId,
9126
+ invocationId: context4.invocationId,
8872
9127
  author: this.name,
8873
- branch: context.branch,
9128
+ branch: context4.branch,
8874
9129
  content: {
8875
9130
  parts: [
8876
9131
  {
@@ -8884,7 +9139,7 @@ var LlmAgent = (_class26 = class _LlmAgent extends BaseAgent {
8884
9139
  yield errorEvent;
8885
9140
  }
8886
9141
  }
8887
- }, _class26);
9142
+ }, _class27);
8888
9143
 
8889
9144
  // src/agents/sequential-agent.ts
8890
9145
  var SequentialAgent = class extends BaseAgent {
@@ -9058,7 +9313,7 @@ var LoopAgent = class extends BaseAgent {
9058
9313
  for (const subAgent of this.subAgents) {
9059
9314
  for await (const event of subAgent.runAsync(ctx)) {
9060
9315
  yield event;
9061
- if (_optionalChain([event, 'access', _240 => _240.actions, 'optionalAccess', _241 => _241.escalate])) {
9316
+ if (_optionalChain([event, 'access', _241 => _241.actions, 'optionalAccess', _242 => _242.escalate])) {
9062
9317
  return;
9063
9318
  }
9064
9319
  }
@@ -9076,7 +9331,7 @@ var LoopAgent = class extends BaseAgent {
9076
9331
 
9077
9332
  // src/agents/lang-graph-agent.ts
9078
9333
  init_logger();
9079
- var LangGraphAgent = (_class27 = class extends BaseAgent {
9334
+ var LangGraphAgent = (_class28 = class extends BaseAgent {
9080
9335
  /**
9081
9336
  * Graph nodes (agents and their connections)
9082
9337
  */
@@ -9092,8 +9347,8 @@ var LangGraphAgent = (_class27 = class extends BaseAgent {
9092
9347
  /**
9093
9348
  * Results from node executions
9094
9349
  */
9095
- __init47() {this.results = []}
9096
- __init48() {this.logger = new Logger({ name: "LangGraphAgent" })}
9350
+ __init48() {this.results = []}
9351
+ __init49() {this.logger = new Logger({ name: "LangGraphAgent" })}
9097
9352
  /**
9098
9353
  * Constructor for LangGraphAgent
9099
9354
  */
@@ -9101,7 +9356,7 @@ var LangGraphAgent = (_class27 = class extends BaseAgent {
9101
9356
  super({
9102
9357
  name: config.name,
9103
9358
  description: config.description
9104
- });_class27.prototype.__init47.call(this);_class27.prototype.__init48.call(this);;
9359
+ });_class28.prototype.__init48.call(this);_class28.prototype.__init49.call(this);;
9105
9360
  this.nodes = /* @__PURE__ */ new Map();
9106
9361
  for (const node of config.nodes) {
9107
9362
  if (this.nodes.has(node.name)) {
@@ -9138,7 +9393,7 @@ var LangGraphAgent = (_class27 = class extends BaseAgent {
9138
9393
  /**
9139
9394
  * Gets the next nodes to execute based on the current node and its result
9140
9395
  */
9141
- async getNextNodes(currentNode, lastEvent, context) {
9396
+ async getNextNodes(currentNode, lastEvent, context4) {
9142
9397
  if (!currentNode.targets || currentNode.targets.length === 0) {
9143
9398
  return [];
9144
9399
  }
@@ -9150,7 +9405,7 @@ var LangGraphAgent = (_class27 = class extends BaseAgent {
9150
9405
  continue;
9151
9406
  }
9152
9407
  if (targetNode.condition) {
9153
- const shouldExecute = await targetNode.condition(lastEvent, context);
9408
+ const shouldExecute = await targetNode.condition(lastEvent, context4);
9154
9409
  if (!shouldExecute) {
9155
9410
  this.logger.debug(`Skipping node "${targetName}" due to condition`);
9156
9411
  continue;
@@ -9163,7 +9418,7 @@ var LangGraphAgent = (_class27 = class extends BaseAgent {
9163
9418
  /**
9164
9419
  * Core logic to run this agent via text-based conversation.
9165
9420
  */
9166
- async *runAsyncImpl(context) {
9421
+ async *runAsyncImpl(context4) {
9167
9422
  this.logger.debug(
9168
9423
  `Starting graph execution from root node "${this.rootNode}"`
9169
9424
  );
@@ -9185,7 +9440,7 @@ var LangGraphAgent = (_class27 = class extends BaseAgent {
9185
9440
  return;
9186
9441
  }
9187
9442
  let stepCount = 0;
9188
- const nodesToExecute = [{ node: rootNode, context }];
9443
+ const nodesToExecute = [{ node: rootNode, context: context4 }];
9189
9444
  const executedNodes = [];
9190
9445
  let lastEvent = null;
9191
9446
  while (nodesToExecute.length > 0 && stepCount < this.maxSteps) {
@@ -9193,7 +9448,7 @@ var LangGraphAgent = (_class27 = class extends BaseAgent {
9193
9448
  const { node } = nodesToExecute.shift();
9194
9449
  this.logger.debug(`Step ${stepCount}: Executing node "${node.name}"`);
9195
9450
  executedNodes.push(node.name);
9196
- const childContext = context.createChildContext(node.agent);
9451
+ const childContext = context4.createChildContext(node.agent);
9197
9452
  try {
9198
9453
  const nodeEvents = [];
9199
9454
  for await (const event of node.agent.runAsync(childContext)) {
@@ -9206,7 +9461,7 @@ var LangGraphAgent = (_class27 = class extends BaseAgent {
9206
9461
  events: nodeEvents
9207
9462
  });
9208
9463
  if (lastEvent) {
9209
- const nextNodes = await this.getNextNodes(node, lastEvent, context);
9464
+ const nextNodes = await this.getNextNodes(node, lastEvent, context4);
9210
9465
  for (const nextNode of nextNodes) {
9211
9466
  nodesToExecute.push({
9212
9467
  node: nextNode,
@@ -9249,8 +9504,8 @@ var LangGraphAgent = (_class27 = class extends BaseAgent {
9249
9504
  * Core logic to run this agent via video/audio-based conversation.
9250
9505
  * For LangGraph, this follows the same execution pattern as text-based.
9251
9506
  */
9252
- async *runLiveImpl(context) {
9253
- yield* this.runAsyncImpl(context);
9507
+ async *runLiveImpl(context4) {
9508
+ yield* this.runAsyncImpl(context4);
9254
9509
  }
9255
9510
  /**
9256
9511
  * Gets the execution results from the last run
@@ -9297,7 +9552,7 @@ var LangGraphAgent = (_class27 = class extends BaseAgent {
9297
9552
  }
9298
9553
  this.maxSteps = maxSteps;
9299
9554
  }
9300
- }, _class27);
9555
+ }, _class28);
9301
9556
 
9302
9557
  // src/agents/agent-builder.ts
9303
9558
 
@@ -9369,17 +9624,17 @@ var RunConfig = class {
9369
9624
  */
9370
9625
 
9371
9626
  constructor(config) {
9372
- this.speechConfig = _optionalChain([config, 'optionalAccess', _242 => _242.speechConfig]);
9373
- this.responseModalities = _optionalChain([config, 'optionalAccess', _243 => _243.responseModalities]);
9374
- this.saveInputBlobsAsArtifacts = _optionalChain([config, 'optionalAccess', _244 => _244.saveInputBlobsAsArtifacts]) || false;
9375
- this.supportCFC = _optionalChain([config, 'optionalAccess', _245 => _245.supportCFC]) || false;
9376
- this.streamingMode = _optionalChain([config, 'optionalAccess', _246 => _246.streamingMode]) || "NONE" /* NONE */;
9377
- this.outputAudioTranscription = _optionalChain([config, 'optionalAccess', _247 => _247.outputAudioTranscription]);
9378
- this.inputAudioTranscription = _optionalChain([config, 'optionalAccess', _248 => _248.inputAudioTranscription]);
9379
- this.realtimeInputConfig = _optionalChain([config, 'optionalAccess', _249 => _249.realtimeInputConfig]);
9380
- this.enableAffectiveDialog = _optionalChain([config, 'optionalAccess', _250 => _250.enableAffectiveDialog]);
9381
- this.proactivity = _optionalChain([config, 'optionalAccess', _251 => _251.proactivity]);
9382
- this.maxLlmCalls = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _252 => _252.maxLlmCalls]), () => ( 500));
9627
+ this.speechConfig = _optionalChain([config, 'optionalAccess', _243 => _243.speechConfig]);
9628
+ this.responseModalities = _optionalChain([config, 'optionalAccess', _244 => _244.responseModalities]);
9629
+ this.saveInputBlobsAsArtifacts = _optionalChain([config, 'optionalAccess', _245 => _245.saveInputBlobsAsArtifacts]) || false;
9630
+ this.supportCFC = _optionalChain([config, 'optionalAccess', _246 => _246.supportCFC]) || false;
9631
+ this.streamingMode = _optionalChain([config, 'optionalAccess', _247 => _247.streamingMode]) || "NONE" /* NONE */;
9632
+ this.outputAudioTranscription = _optionalChain([config, 'optionalAccess', _248 => _248.outputAudioTranscription]);
9633
+ this.inputAudioTranscription = _optionalChain([config, 'optionalAccess', _249 => _249.inputAudioTranscription]);
9634
+ this.realtimeInputConfig = _optionalChain([config, 'optionalAccess', _250 => _250.realtimeInputConfig]);
9635
+ this.enableAffectiveDialog = _optionalChain([config, 'optionalAccess', _251 => _251.enableAffectiveDialog]);
9636
+ this.proactivity = _optionalChain([config, 'optionalAccess', _252 => _252.proactivity]);
9637
+ this.maxLlmCalls = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _253 => _253.maxLlmCalls]), () => ( 500));
9383
9638
  this.validateMaxLlmCalls();
9384
9639
  }
9385
9640
  /**
@@ -9400,8 +9655,8 @@ var RunConfig = class {
9400
9655
  };
9401
9656
 
9402
9657
  // src/artifacts/in-memory-artifact-service.ts
9403
- var InMemoryArtifactService = (_class28 = class {constructor() { _class28.prototype.__init49.call(this); }
9404
- __init49() {this.artifacts = /* @__PURE__ */ new Map()}
9658
+ var InMemoryArtifactService = (_class29 = class {constructor() { _class29.prototype.__init50.call(this); }
9659
+ __init50() {this.artifacts = /* @__PURE__ */ new Map()}
9405
9660
  fileHasUserNamespace(filename) {
9406
9661
  return filename.startsWith("user:");
9407
9662
  }
@@ -9474,7 +9729,7 @@ var InMemoryArtifactService = (_class28 = class {constructor() { _class28.protot
9474
9729
  }
9475
9730
  return Array.from({ length: versions.length }, (_, i) => i);
9476
9731
  }
9477
- }, _class28);
9732
+ }, _class29);
9478
9733
 
9479
9734
  // src/runners.ts
9480
9735
  init_logger();
@@ -9501,15 +9756,15 @@ function _extractWordsLower(text) {
9501
9756
  const words = text.match(/[A-Za-z]+/g) || [];
9502
9757
  return new Set(words.map((word) => word.toLowerCase()));
9503
9758
  }
9504
- var InMemoryMemoryService = (_class29 = class {
9759
+ var InMemoryMemoryService = (_class30 = class {
9505
9760
  /**
9506
9761
  * Keys are app_name/user_id, session_id. Values are session event lists.
9507
9762
  */
9508
- __init50() {this._sessionEvents = /* @__PURE__ */ new Map()}
9763
+ __init51() {this._sessionEvents = /* @__PURE__ */ new Map()}
9509
9764
  /**
9510
9765
  * Constructor for InMemoryMemoryService
9511
9766
  */
9512
- constructor() {;_class29.prototype.__init50.call(this);
9767
+ constructor() {;_class30.prototype.__init51.call(this);
9513
9768
  this._sessionEvents = /* @__PURE__ */ new Map();
9514
9769
  }
9515
9770
  /**
@@ -9523,7 +9778,7 @@ var InMemoryMemoryService = (_class29 = class {
9523
9778
  }
9524
9779
  const userSessions = this._sessionEvents.get(userKey);
9525
9780
  const filteredEvents = session.events.filter(
9526
- (event) => _optionalChain([event, 'access', _253 => _253.content, 'optionalAccess', _254 => _254.parts])
9781
+ (event) => _optionalChain([event, 'access', _254 => _254.content, 'optionalAccess', _255 => _255.parts])
9527
9782
  );
9528
9783
  userSessions.set(session.id, filteredEvents);
9529
9784
  }
@@ -9593,7 +9848,7 @@ var InMemoryMemoryService = (_class29 = class {
9593
9848
  clear() {
9594
9849
  this._sessionEvents.clear();
9595
9850
  }
9596
- }, _class29);
9851
+ }, _class30);
9597
9852
 
9598
9853
  // src/sessions/in-memory-session-service.ts
9599
9854
  var _crypto = require('crypto');
@@ -9635,19 +9890,19 @@ var BaseSessionService = class {
9635
9890
  };
9636
9891
 
9637
9892
  // src/sessions/in-memory-session-service.ts
9638
- var InMemorySessionService = (_class30 = class extends BaseSessionService {constructor(...args2) { super(...args2); _class30.prototype.__init51.call(this);_class30.prototype.__init52.call(this);_class30.prototype.__init53.call(this); }
9893
+ var InMemorySessionService = (_class31 = class extends BaseSessionService {constructor(...args3) { super(...args3); _class31.prototype.__init52.call(this);_class31.prototype.__init53.call(this);_class31.prototype.__init54.call(this); }
9639
9894
  /**
9640
9895
  * A map from app name to a map from user ID to a map from session ID to session.
9641
9896
  */
9642
- __init51() {this.sessions = /* @__PURE__ */ new Map()}
9897
+ __init52() {this.sessions = /* @__PURE__ */ new Map()}
9643
9898
  /**
9644
9899
  * A map from app name to a map from user ID to a map from key to the value.
9645
9900
  */
9646
- __init52() {this.userState = /* @__PURE__ */ new Map()}
9901
+ __init53() {this.userState = /* @__PURE__ */ new Map()}
9647
9902
  /**
9648
9903
  * A map from app name to a map from key to the value.
9649
9904
  */
9650
- __init53() {this.appState = /* @__PURE__ */ new Map()}
9905
+ __init54() {this.appState = /* @__PURE__ */ new Map()}
9651
9906
  /**
9652
9907
  * Creates a new session.
9653
9908
  */
@@ -9662,7 +9917,7 @@ var InMemorySessionService = (_class30 = class extends BaseSessionService {const
9662
9917
  return this.createSessionImpl(appName, userId, state, sessionId);
9663
9918
  }
9664
9919
  createSessionImpl(appName, userId, state, sessionId) {
9665
- const finalSessionId = _optionalChain([sessionId, 'optionalAccess', _255 => _255.trim, 'call', _256 => _256()]) || _crypto.randomUUID.call(void 0, );
9920
+ const finalSessionId = _optionalChain([sessionId, 'optionalAccess', _256 => _256.trim, 'call', _257 => _257()]) || _crypto.randomUUID.call(void 0, );
9666
9921
  const session = {
9667
9922
  appName,
9668
9923
  userId,
@@ -9819,7 +10074,7 @@ var InMemorySessionService = (_class30 = class extends BaseSessionService {const
9819
10074
  warning(`sessionId ${sessionId} not in sessions[appName][userId]`);
9820
10075
  return event;
9821
10076
  }
9822
- if (_optionalChain([event, 'access', _257 => _257.actions, 'optionalAccess', _258 => _258.stateDelta])) {
10077
+ if (_optionalChain([event, 'access', _258 => _258.actions, 'optionalAccess', _259 => _259.stateDelta])) {
9823
10078
  for (const key in event.actions.stateDelta) {
9824
10079
  const value = event.actions.stateDelta[key];
9825
10080
  if (key.startsWith(State.APP_PREFIX)) {
@@ -9844,7 +10099,7 @@ var InMemorySessionService = (_class30 = class extends BaseSessionService {const
9844
10099
  storageSession.lastUpdateTime = event.timestamp;
9845
10100
  return event;
9846
10101
  }
9847
- }, _class30);
10102
+ }, _class31);
9848
10103
 
9849
10104
  // src/runners.ts
9850
10105
  function _findFunctionCallEventIfLastEventIsFunctionResponse(session) {
@@ -9853,14 +10108,14 @@ function _findFunctionCallEventIfLastEventIsFunctionResponse(session) {
9853
10108
  return null;
9854
10109
  }
9855
10110
  const lastEvent = events[events.length - 1];
9856
- if (_optionalChain([lastEvent, 'access', _259 => _259.content, 'optionalAccess', _260 => _260.parts, 'optionalAccess', _261 => _261.some, 'call', _262 => _262((part) => part.functionResponse)])) {
9857
- const functionCallId = _optionalChain([lastEvent, 'access', _263 => _263.content, 'access', _264 => _264.parts, 'access', _265 => _265.find, 'call', _266 => _266(
10111
+ if (_optionalChain([lastEvent, 'access', _260 => _260.content, 'optionalAccess', _261 => _261.parts, 'optionalAccess', _262 => _262.some, 'call', _263 => _263((part) => part.functionResponse)])) {
10112
+ const functionCallId = _optionalChain([lastEvent, 'access', _264 => _264.content, 'access', _265 => _265.parts, 'access', _266 => _266.find, 'call', _267 => _267(
9858
10113
  (part) => part.functionResponse
9859
- ), 'optionalAccess', _267 => _267.functionResponse, 'optionalAccess', _268 => _268.id]);
10114
+ ), 'optionalAccess', _268 => _268.functionResponse, 'optionalAccess', _269 => _269.id]);
9860
10115
  if (!functionCallId) return null;
9861
10116
  for (let i = events.length - 2; i >= 0; i--) {
9862
10117
  const event = events[i];
9863
- const functionCalls = _optionalChain([event, 'access', _269 => _269.getFunctionCalls, 'optionalCall', _270 => _270()]) || [];
10118
+ const functionCalls = _optionalChain([event, 'access', _270 => _270.getFunctionCalls, 'optionalCall', _271 => _271()]) || [];
9864
10119
  for (const functionCall of functionCalls) {
9865
10120
  if (functionCall.id === functionCallId) {
9866
10121
  return event;
@@ -9870,7 +10125,7 @@ function _findFunctionCallEventIfLastEventIsFunctionResponse(session) {
9870
10125
  }
9871
10126
  return null;
9872
10127
  }
9873
- var Runner = (_class31 = class {
10128
+ var Runner = (_class32 = class {
9874
10129
  /**
9875
10130
  * The app name of the runner.
9876
10131
  */
@@ -9891,7 +10146,7 @@ var Runner = (_class31 = class {
9891
10146
  * The memory service for the runner.
9892
10147
  */
9893
10148
 
9894
- __init54() {this.logger = new Logger({ name: "Runner" })}
10149
+ __init55() {this.logger = new Logger({ name: "Runner" })}
9895
10150
  /**
9896
10151
  * Initializes the Runner.
9897
10152
  */
@@ -9901,7 +10156,7 @@ var Runner = (_class31 = class {
9901
10156
  artifactService,
9902
10157
  sessionService,
9903
10158
  memoryService
9904
- }) {;_class31.prototype.__init54.call(this);
10159
+ }) {;_class32.prototype.__init55.call(this);
9905
10160
  this.appName = appName;
9906
10161
  this.agent = agent;
9907
10162
  this.artifactService = artifactService;
@@ -9963,11 +10218,11 @@ var Runner = (_class31 = class {
9963
10218
  runConfig = new RunConfig()
9964
10219
  }) {
9965
10220
  const span = tracer.startSpan("invocation");
10221
+ const spanContext = _api.trace.setSpan(_api.context.active(), span);
9966
10222
  try {
9967
- const session = await this.sessionService.getSession(
9968
- this.appName,
9969
- userId,
9970
- sessionId
10223
+ const session = await _api.context.with(
10224
+ spanContext,
10225
+ () => this.sessionService.getSession(this.appName, userId, sessionId)
9971
10226
  );
9972
10227
  if (!session) {
9973
10228
  throw new Error(`Session not found: ${sessionId}`);
@@ -9977,22 +10232,34 @@ var Runner = (_class31 = class {
9977
10232
  runConfig
9978
10233
  });
9979
10234
  if (newMessage) {
9980
- await this._appendNewMessageToSession(
9981
- session,
9982
- newMessage,
9983
- invocationContext,
9984
- runConfig.saveInputBlobsAsArtifacts || false
10235
+ await _api.context.with(
10236
+ spanContext,
10237
+ () => this._appendNewMessageToSession(
10238
+ session,
10239
+ newMessage,
10240
+ invocationContext,
10241
+ runConfig.saveInputBlobsAsArtifacts || false
10242
+ )
9985
10243
  );
9986
10244
  }
9987
10245
  invocationContext.agent = this._findAgentToRun(session, this.agent);
9988
- for await (const event of invocationContext.agent.runAsync(
9989
- invocationContext
9990
- )) {
10246
+ const agentGenerator = invocationContext.agent.runAsync(invocationContext);
10247
+ while (true) {
10248
+ const result = await _api.context.with(
10249
+ spanContext,
10250
+ () => agentGenerator.next()
10251
+ );
10252
+ if (result.done) {
10253
+ break;
10254
+ }
10255
+ const event = result.value;
9991
10256
  if (!event.partial) {
9992
- await this.sessionService.appendEvent(session, event);
9993
- if (this.memoryService) {
9994
- await this.memoryService.addSessionToMemory(session);
9995
- }
10257
+ await _api.context.with(spanContext, async () => {
10258
+ await this.sessionService.appendEvent(session, event);
10259
+ if (this.memoryService) {
10260
+ await this.memoryService.addSessionToMemory(session);
10261
+ }
10262
+ });
9996
10263
  }
9997
10264
  yield event;
9998
10265
  }
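The Runner change above replaces a plain `for await` over the agent's event stream with a manual loop so that every `next()` call, plus session/memory persistence, runs inside the invocation span's context via `context.with`. A minimal sketch of that generator-driving pattern with `@opentelemetry/api`; the generator argument is a stand-in for `agent.runAsync(invocationContext)`:

```ts
import { type Context, context } from "@opentelemetry/api";

// Drains an async generator while keeping `spanContext` active for every
// next() call, mirroring the Runner change above.
async function* withActiveContext<T>(
  source: AsyncGenerator<T>,
  spanContext: Context,
): AsyncGenerator<T> {
  while (true) {
    const result = await context.with(spanContext, () => source.next());
    if (result.done) {
      break;
    }
    yield result.value;
  }
}
```

Wrapping each `next()` individually matters because a `for await` loop would resume the generator outside the span's active context once control returns to the caller between yields.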
@@ -10051,15 +10318,15 @@ var Runner = (_class31 = class {
10051
10318
  */
10052
10319
  _findAgentToRun(session, rootAgent) {
10053
10320
  const event = _findFunctionCallEventIfLastEventIsFunctionResponse(session);
10054
- if (_optionalChain([event, 'optionalAccess', _271 => _271.author])) {
10321
+ if (_optionalChain([event, 'optionalAccess', _272 => _272.author])) {
10055
10322
  return rootAgent.findAgent(event.author);
10056
10323
  }
10057
- const nonUserEvents = _optionalChain([session, 'access', _272 => _272.events, 'optionalAccess', _273 => _273.filter, 'call', _274 => _274((e) => e.author !== "user"), 'access', _275 => _275.reverse, 'call', _276 => _276()]) || [];
10324
+ const nonUserEvents = _optionalChain([session, 'access', _273 => _273.events, 'optionalAccess', _274 => _274.filter, 'call', _275 => _275((e) => e.author !== "user"), 'access', _276 => _276.reverse, 'call', _277 => _277()]) || [];
10058
10325
  for (const event2 of nonUserEvents) {
10059
10326
  if (event2.author === rootAgent.name) {
10060
10327
  return rootAgent;
10061
10328
  }
10062
- const agent = _optionalChain([rootAgent, 'access', _277 => _277.findSubAgent, 'optionalCall', _278 => _278(event2.author)]);
10329
+ const agent = _optionalChain([rootAgent, 'access', _278 => _278.findSubAgent, 'optionalCall', _279 => _279(event2.author)]);
10063
10330
  if (!agent) {
10064
10331
  this.logger.debug(
10065
10332
  `Event from an unknown agent: ${event2.author}, event id: ${event2.id}`
@@ -10108,7 +10375,7 @@ var Runner = (_class31 = class {
10108
10375
  runConfig
10109
10376
  });
10110
10377
  }
10111
- }, _class31);
10378
+ }, _class32);
10112
10379
  var InMemoryRunner = class extends Runner {
10113
10380
  /**
10114
10381
  * Deprecated. Please don't use. The in-memory session service for the runner.
@@ -10131,18 +10398,18 @@ var InMemoryRunner = class extends Runner {
10131
10398
  };
10132
10399
 
10133
10400
  // src/agents/agent-builder.ts
10134
- var AgentBuilder = (_class32 = class _AgentBuilder {
10401
+ var AgentBuilder = (_class33 = class _AgentBuilder {
10135
10402
 
10136
10403
 
10137
10404
 
10138
10405
 
10139
10406
 
10140
- __init55() {this.agentType = "llm"}
10407
+ __init56() {this.agentType = "llm"}
10141
10408
 
10142
10409
  /**
10143
10410
  * Private constructor - use static create() method
10144
10411
  */
10145
- constructor(name) {;_class32.prototype.__init55.call(this);
10412
+ constructor(name) {;_class33.prototype.__init56.call(this);
10146
10413
  this.config = { name };
10147
10414
  }
10148
10415
  /**
@@ -10188,6 +10455,14 @@ var AgentBuilder = (_class32 = class _AgentBuilder {
10188
10455
  this.config.instruction = instruction;
10189
10456
  return this;
10190
10457
  }
10458
+ withInputSchema(schema) {
10459
+ this.config.inputSchema = schema;
10460
+ return this;
10461
+ }
10462
+ withOutputSchema(schema) {
10463
+ this.config.outputSchema = schema;
10464
+ return this;
10465
+ }
10191
10466
  /**
10192
10467
  * Add tools to the agent
10193
10468
  * @param tools Tools to add to the agent
@@ -10394,6 +10669,14 @@ var AgentBuilder = (_class32 = class _AgentBuilder {
10394
10669
  }
10395
10670
  return { agent, runner, session };
10396
10671
  }
10672
+ /**
10673
+ * Type-safe build method for agents with output schemas
10674
+ * Provides better type inference for the ask method return type
10675
+ */
10676
+ async buildWithSchema() {
10677
+ const result = await this.build();
10678
+ return result;
10679
+ }
10397
10680
  /**
10398
10681
  * Quick execution helper - build and run a message
10399
10682
  * @param message Message to send to the agent (string or full message object)
@@ -10428,7 +10711,9 @@ var AgentBuilder = (_class32 = class _AgentBuilder {
10428
10711
  memoryService: this.memoryService,
10429
10712
  artifactService: this.artifactService,
10430
10713
  outputKey: this.config.outputKey,
10431
- sessionService: this.sessionService
10714
+ sessionService: this.sessionService,
10715
+ inputSchema: this.config.inputSchema,
10716
+ outputSchema: this.config.outputSchema
10432
10717
  });
10433
10718
  }
10434
10719
  case "sequential":
@@ -10487,18 +10772,20 @@ var AgentBuilder = (_class32 = class _AgentBuilder {
10487
10772
  return `app-${this.config.name}`;
10488
10773
  }
10489
10774
  /**
10490
- * Create enhanced runner with simplified API
10775
+ * Create enhanced runner with simplified API and proper typing
10491
10776
  * @param baseRunner The base runner instance
10492
10777
  * @param session The session instance
10493
10778
  * @returns Enhanced runner with simplified API
10494
10779
  */
10495
10780
  createEnhancedRunner(baseRunner, session) {
10496
10781
  const sessionOptions = this.sessionOptions;
10782
+ const outputSchema = this.config.outputSchema;
10497
10783
  return {
10784
+ __outputSchema: outputSchema,
10498
10785
  async ask(message) {
10499
10786
  const newMessage = typeof message === "string" ? { parts: [{ text: message }] } : typeof message === "object" && "contents" in message ? { parts: message.contents[message.contents.length - 1].parts } : message;
10500
10787
  let response = "";
10501
- if (!_optionalChain([sessionOptions, 'optionalAccess', _279 => _279.userId])) {
10788
+ if (!_optionalChain([sessionOptions, 'optionalAccess', _280 => _280.userId])) {
10502
10789
  throw new Error("Session configuration is required");
10503
10790
  }
10504
10791
  for await (const event of baseRunner.runAsync({
@@ -10506,7 +10793,7 @@ var AgentBuilder = (_class32 = class _AgentBuilder {
10506
10793
  sessionId: session.id,
10507
10794
  newMessage
10508
10795
  })) {
10509
- if (_optionalChain([event, 'access', _280 => _280.content, 'optionalAccess', _281 => _281.parts]) && Array.isArray(event.content.parts)) {
10796
+ if (_optionalChain([event, 'access', _281 => _281.content, 'optionalAccess', _282 => _282.parts]) && Array.isArray(event.content.parts)) {
10510
10797
  const content = event.content.parts.map(
10511
10798
  (part) => (part && typeof part === "object" && "text" in part ? part.text : "") || ""
10512
10799
  ).join("");
@@ -10515,6 +10802,18 @@ var AgentBuilder = (_class32 = class _AgentBuilder {
10515
10802
  }
10516
10803
  }
10517
10804
  }
10805
+ if (outputSchema) {
10806
+ try {
10807
+ const parsed = JSON.parse(response);
10808
+ return outputSchema.parse(parsed);
10809
+ } catch (parseError) {
10810
+ try {
10811
+ return outputSchema.parse(response);
10812
+ } catch (validationError) {
10813
+ return response.trim();
10814
+ }
10815
+ }
10816
+ }
10518
10817
  return response.trim();
10519
10818
  },
10520
10819
  runAsync(params) {
@@ -10522,7 +10821,7 @@ var AgentBuilder = (_class32 = class _AgentBuilder {
10522
10821
  }
10523
10822
  };
10524
10823
  }
10525
- }, _class32);
10824
+ }, _class33);
10526
10825
 
10527
10826
  // src/memory/index.ts
10528
10827
  var memory_exports = {};
@@ -10575,7 +10874,7 @@ var VertexAiSessionService = class extends BaseSessionService {
10575
10874
  path: `reasoningEngines/${reasoningEngineId}/sessions`,
10576
10875
  request_dict: sessionJsonDict
10577
10876
  });
10578
- console.info("Create Session response", apiResponse);
10877
+ console.debug("Create Session response", apiResponse);
10579
10878
  const createdSessionId = apiResponse.name.split("/").slice(-3, -2)[0];
10580
10879
  const operationId = apiResponse.name.split("/").pop();
10581
10880
  let maxRetryAttempt = 5;
@@ -10586,7 +10885,7 @@ var VertexAiSessionService = class extends BaseSessionService {
10586
10885
  path: `operations/${operationId}`,
10587
10886
  request_dict: {}
10588
10887
  });
10589
- if (_optionalChain([lroResponse, 'optionalAccess', _282 => _282.done])) {
10888
+ if (_optionalChain([lroResponse, 'optionalAccess', _283 => _283.done])) {
10590
10889
  break;
10591
10890
  }
10592
10891
  await new Promise((resolve) => setTimeout(resolve, 1e3));
@@ -10856,11 +11155,11 @@ var VertexAiSessionService = class extends BaseSessionService {
10856
11155
 
10857
11156
  // src/sessions/database-session-service.ts
10858
11157
  var _kysely = require('kysely');
10859
- var DatabaseSessionService = (_class33 = class extends BaseSessionService {
11158
+ var DatabaseSessionService = (_class34 = class extends BaseSessionService {
10860
11159
 
10861
- __init56() {this.initialized = false}
11160
+ __init57() {this.initialized = false}
10862
11161
  constructor(config) {
10863
- super();_class33.prototype.__init56.call(this);;
11162
+ super();_class34.prototype.__init57.call(this);;
10864
11163
  this.db = config.db;
10865
11164
  if (!config.skipTableCreation) {
10866
11165
  this.initializeDatabase().catch((error) => {
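
For context on the constructor shown above (a config carrying a Kysely db handle plus an optional skipTableCreation flag, with lazy setup via initializeDatabase()), wiring the service to a Kysely instance might look like the sketch below. The SQLite dialect, the untyped schema parameter and the import path of DatabaseSessionService are assumptions for illustration; only the { db, skipTableCreation } config shape is visible in this diff.

// Hypothetical wiring sketch. The dialect choice, schema typing and import paths
// are assumptions; the diff only shows that the service takes { db, skipTableCreation? }.
import Database from "better-sqlite3";
import { Kysely, SqliteDialect } from "kysely";
import { DatabaseSessionService } from "@iqai/adk";

const db = new Kysely<any>({
  dialect: new SqliteDialect({ database: new Database("sessions.db") }),
});

// Session, event and app/user state tables are created lazily on first use
// unless table creation is skipped, e.g. when migrations are managed externally.
const sessionService = new DatabaseSessionService({ db, skipTableCreation: false });
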
@@ -10935,7 +11234,7 @@ var DatabaseSessionService = (_class33 = class extends BaseSessionService {
10935
11234
  if (!jsonString) return defaultValue;
10936
11235
  try {
10937
11236
  return JSON.parse(jsonString);
10938
- } catch (e5) {
11237
+ } catch (e6) {
10939
11238
  return defaultValue;
10940
11239
  }
10941
11240
  }
@@ -10957,12 +11256,12 @@ var DatabaseSessionService = (_class33 = class extends BaseSessionService {
10957
11256
  }
10958
11257
  async createSession(appName, userId, state, sessionId) {
10959
11258
  await this.ensureInitialized();
10960
- const id = _optionalChain([sessionId, 'optionalAccess', _283 => _283.trim, 'call', _284 => _284()]) || this.generateSessionId();
11259
+ const id = _optionalChain([sessionId, 'optionalAccess', _284 => _284.trim, 'call', _285 => _285()]) || this.generateSessionId();
10961
11260
  return await this.db.transaction().execute(async (trx) => {
10962
11261
  const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", appName).executeTakeFirst();
10963
11262
  const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", appName).where("user_id", "=", userId).executeTakeFirst();
10964
- let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _285 => _285.state]), {});
10965
- let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _286 => _286.state]), {});
11263
+ let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _286 => _286.state]), {});
11264
+ let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _287 => _287.state]), {});
10966
11265
  if (!appState) {
10967
11266
  await trx.insertInto("app_states").values({
10968
11267
  app_name: appName,
@@ -11021,21 +11320,21 @@ var DatabaseSessionService = (_class33 = class extends BaseSessionService {
11021
11320
  return void 0;
11022
11321
  }
11023
11322
  let eventQuery = trx.selectFrom("events").selectAll().where("session_id", "=", sessionId).orderBy("timestamp", "desc");
11024
- if (_optionalChain([config, 'optionalAccess', _287 => _287.afterTimestamp])) {
11323
+ if (_optionalChain([config, 'optionalAccess', _288 => _288.afterTimestamp])) {
11025
11324
  eventQuery = eventQuery.where(
11026
11325
  "timestamp",
11027
11326
  ">=",
11028
11327
  new Date(config.afterTimestamp * 1e3)
11029
11328
  );
11030
11329
  }
11031
- if (_optionalChain([config, 'optionalAccess', _288 => _288.numRecentEvents])) {
11330
+ if (_optionalChain([config, 'optionalAccess', _289 => _289.numRecentEvents])) {
11032
11331
  eventQuery = eventQuery.limit(config.numRecentEvents);
11033
11332
  }
11034
11333
  const storageEvents = await eventQuery.execute();
11035
11334
  const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", appName).executeTakeFirst();
11036
11335
  const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", appName).where("user_id", "=", userId).executeTakeFirst();
11037
- const currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _289 => _289.state]), {});
11038
- const currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _290 => _290.state]), {});
11336
+ const currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _290 => _290.state]), {});
11337
+ const currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _291 => _291.state]), {});
11039
11338
  const sessionState = this.parseJsonSafely(storageSession.state, {});
11040
11339
  const mergedState = this.mergeState(
11041
11340
  currentAppState,
@@ -11093,13 +11392,13 @@ var DatabaseSessionService = (_class33 = class extends BaseSessionService {
11093
11392
  }
11094
11393
  const appState = await trx.selectFrom("app_states").selectAll().where("app_name", "=", session.appName).executeTakeFirst();
11095
11394
  const userState = await trx.selectFrom("user_states").selectAll().where("app_name", "=", session.appName).where("user_id", "=", session.userId).executeTakeFirst();
11096
- let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _291 => _291.state]), {});
11097
- let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _292 => _292.state]), {});
11395
+ let currentAppState = this.parseJsonSafely(_optionalChain([appState, 'optionalAccess', _292 => _292.state]), {});
11396
+ let currentUserState = this.parseJsonSafely(_optionalChain([userState, 'optionalAccess', _293 => _293.state]), {});
11098
11397
  let sessionState = this.parseJsonSafely(storageSession.state, {});
11099
11398
  let appStateDelta = {};
11100
11399
  let userStateDelta = {};
11101
11400
  let sessionStateDelta = {};
11102
- if (_optionalChain([event, 'access', _293 => _293.actions, 'optionalAccess', _294 => _294.stateDelta])) {
11401
+ if (_optionalChain([event, 'access', _294 => _294.actions, 'optionalAccess', _295 => _295.stateDelta])) {
11103
11402
  const deltas = this.extractStateDelta(event.actions.stateDelta);
11104
11403
  appStateDelta = deltas.appStateDelta;
11105
11404
  userStateDelta = deltas.userStateDelta;
@@ -11245,7 +11544,7 @@ var DatabaseSessionService = (_class33 = class extends BaseSessionService {
11245
11544
  * Overrides the base class method to work with plain object state.
11246
11545
  */
11247
11546
  updateSessionState(session, event) {
11248
- if (!_optionalChain([event, 'access', _295 => _295.actions, 'optionalAccess', _296 => _296.stateDelta])) {
11547
+ if (!_optionalChain([event, 'access', _296 => _296.actions, 'optionalAccess', _297 => _297.stateDelta])) {
11249
11548
  return;
11250
11549
  }
11251
11550
  for (const [key, value] of Object.entries(event.actions.stateDelta)) {
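
The overridden updateSessionState above works directly on a plain-object session.state (rather than a wrapper class) and is a no-op when the event carries no actions.stateDelta; the loop body that applies each delta entry falls outside this hunk. A simplified sketch of that shape, with the caveat that any key filtering performed by the real loop body is not visible here:

// Simplified sketch of the plain-object override; the real loop body (and any
// key filtering it performs) is not shown in this hunk.
type StateDelta = Record<string, unknown>;

interface SessionLike {
  state: Record<string, unknown>;
}

interface EventLike {
  actions?: { stateDelta?: StateDelta };
}

function updateSessionState(session: SessionLike, event: EventLike): void {
  if (!event.actions?.stateDelta) {
    return; // nothing to merge
  }
  for (const [key, value] of Object.entries(event.actions.stateDelta)) {
    session.state[key] = value; // shallow merge onto the plain state object
  }
}
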
@@ -11254,7 +11553,7 @@ var DatabaseSessionService = (_class33 = class extends BaseSessionService {
11254
11553
  }
11255
11554
  }
11256
11555
  }
11257
- }, _class33);
11556
+ }, _class34);
11258
11557
 
11259
11558
  // src/sessions/database-factories.ts
11260
11559
 
@@ -11415,7 +11714,7 @@ var GcsArtifactService = class {
11415
11714
  };
11416
11715
  return part;
11417
11716
  } catch (error) {
11418
- if (_optionalChain([error, 'optionalAccess', _297 => _297.code]) === 404) {
11717
+ if (_optionalChain([error, 'optionalAccess', _298 => _298.code]) === 404) {
11419
11718
  return null;
11420
11719
  }
11421
11720
  throw error;