@inkeep/agents-run-api 0.2.0 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/dist/index.cjs +538 -458
  2. package/dist/index.js +538 -458
  3. package/package.json +3 -2
package/dist/index.js CHANGED
@@ -8,7 +8,7 @@ import { resourceFromAttributes } from '@opentelemetry/resources';
8
8
  import { NodeSDK } from '@opentelemetry/sdk-node';
9
9
  import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
10
10
  import { ATTR_SERVICE_NAME } from '@opentelemetry/semantic-conventions';
11
- import { getLogger, getTracer, HeadersScopeSchema, getRequestExecutionContext, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, getAgentGraph, createTask, updateTask, updateConversation, handleApiError, setSpanWithError, TaskState, setActiveAgentForThread, getConversation, getRelatedAgentsForGraph, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, validateAndGetApiKey, getProject, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getContextConfigById, getFullGraphDefinition, TemplateEngine, graphHasArtifactComponents, MCPTransportType, getExternalAgent } from '@inkeep/agents-core';
11
+ import { getLogger, getTracer, HeadersScopeSchema, getRequestExecutionContext, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, getAgentGraph, createTask, updateTask, setSpanWithError, updateConversation, handleApiError, TaskState, setActiveAgentForThread, getConversation, getRelatedAgentsForGraph, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, validateAndGetApiKey, getProject, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getContextConfigById, getFullGraphDefinition, TemplateEngine, graphHasArtifactComponents, MCPTransportType, getExternalAgent } from '@inkeep/agents-core';
12
12
  import { OpenAPIHono, createRoute, z as z$1 } from '@hono/zod-openapi';
13
13
  import { trace, propagation, context, SpanStatusCode } from '@opentelemetry/api';
14
14
  import { Hono } from 'hono';
@@ -24,6 +24,7 @@ import destr from 'destr';
24
24
  import traverse from 'traverse';
25
25
  import { createUIMessageStream, JsonToSseTransformStream, parsePartialJson, generateText, generateObject, tool, streamText } from 'ai';
26
26
  import { createAnthropic, anthropic } from '@ai-sdk/anthropic';
27
+ import { createGoogleGenerativeAI, google } from '@ai-sdk/google';
27
28
  import { createOpenAI, openai } from '@ai-sdk/openai';
28
29
  import jmespath from 'jmespath';
29
30
  import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
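Note on this hunk: 0.2.1 pulls in '@ai-sdk/google' alongside the existing '@ai-sdk/anthropic' and '@ai-sdk/openai' imports, which is what enables the Gemini support added to ModelFactory further down. A minimal sketch of how the two imported entry points are typically used; the model id and gateway URL are illustrative assumptions, not taken from this package:

    import { createGoogleGenerativeAI, google } from '@ai-sdk/google';
    import { generateText } from 'ai';

    // Default provider instance; reads GOOGLE_GENERATIVE_AI_API_KEY from the environment.
    const model = google('gemini-1.5-flash'); // model id chosen only for illustration

    // Custom-configured instance, the shape ModelFactory.createProvider uses below.
    const viaGateway = createGoogleGenerativeAI({ baseURL: 'https://gateway.example.com/v1beta' });

    const { text } = await generateText({ model, prompt: 'ping' });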
@@ -166,6 +167,7 @@ var apiKeyAuth = () => createMiddleware(async (c, next) => {
166
167
  return;
167
168
  } else if (apiKey) {
168
169
  const executionContext = await extractContextFromApiKey(apiKey);
170
+ executionContext.agentId = agentId;
169
171
  c.set("executionContext", executionContext);
170
172
  logger.info({}, "API key authenticated successfully");
171
173
  await next();
@@ -183,12 +185,14 @@ var apiKeyAuth = () => createMiddleware(async (c, next) => {
183
185
  }
184
186
  try {
185
187
  const executionContext = await extractContextFromApiKey(apiKey);
188
+ executionContext.agentId = agentId;
186
189
  c.set("executionContext", executionContext);
187
190
  logger.debug(
188
191
  {
189
192
  tenantId: executionContext.tenantId,
190
193
  projectId: executionContext.projectId,
191
- graphId: executionContext.graphId
194
+ graphId: executionContext.graphId,
195
+ agentId: executionContext.agentId
192
196
  },
193
197
  "API key authenticated successfully"
194
198
  );
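Both API-key branches above now stamp the agent id onto the execution context before storing it on the Hono context, so downstream handlers can resolve the target agent directly. A rough sketch of the read side, assuming agentId is supplied by the surrounding middleware (its source is outside this hunk):

    // Inside a handler running after apiKeyAuth():
    const executionContext = c.get('executionContext');
    const { tenantId, projectId, graphId, agentId } = executionContext; // agentId is new in 0.2.1
    // agentId is what the getRegisteredAgent() helper added later in this file requires.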
@@ -855,6 +859,127 @@ async function handleTasksResubscribe(c, agent, request) {
855
859
  });
856
860
  }
857
861
  }
862
+ getLogger("agents");
863
+ function createAgentCard({
864
+ dbAgent,
865
+ baseUrl
866
+ }) {
867
+ const description = dbAgent.description || "AI Agent";
868
+ return {
869
+ name: dbAgent.name,
870
+ description,
871
+ url: baseUrl ? `${baseUrl}/a2a` : "",
872
+ version: "1.0.0",
873
+ capabilities: {
874
+ streaming: true,
875
+ // Enable streaming for A2A compliance
876
+ pushNotifications: false,
877
+ stateTransitionHistory: false
878
+ },
879
+ defaultInputModes: ["text", "text/plain"],
880
+ defaultOutputModes: ["text", "text/plain"],
881
+ skills: [],
882
+ // Add provider info if available
883
+ ...baseUrl && {
884
+ provider: {
885
+ organization: "Inkeep",
886
+ url: baseUrl
887
+ }
888
+ }
889
+ };
890
+ }
891
+ function generateDescriptionWithTransfers(baseDescription, internalRelations, externalRelations) {
892
+ const transfers = [
893
+ ...internalRelations.filter((rel) => rel.relationType === "transfer"),
894
+ ...externalRelations.filter((rel) => rel.relationType === "transfer")
895
+ ];
896
+ const delegates = [
897
+ ...internalRelations.filter((rel) => rel.relationType === "delegate"),
898
+ ...externalRelations.filter((rel) => rel.relationType === "delegate")
899
+ ];
900
+ if (transfers.length === 0 && delegates.length === 0) {
901
+ return baseDescription;
902
+ }
903
+ let enhancedDescription = baseDescription;
904
+ if (transfers.length > 0) {
905
+ const transferList = transfers.map((rel) => {
906
+ const name = rel.externalAgent?.name || rel.name;
907
+ const desc = rel.externalAgent?.description || rel.description || "";
908
+ return `- ${name}: ${desc}`;
909
+ }).join("\n");
910
+ enhancedDescription += `
911
+
912
+ Can transfer to:
913
+ ${transferList}`;
914
+ }
915
+ if (delegates.length > 0) {
916
+ const delegateList = delegates.map((rel) => {
917
+ const name = rel.externalAgent?.name || rel.name;
918
+ const desc = rel.externalAgent?.description || rel.description || "";
919
+ return `- ${name}: ${desc}`;
920
+ }).join("\n");
921
+ enhancedDescription += `
922
+
923
+ Can delegate to:
924
+ ${delegateList}`;
925
+ }
926
+ return enhancedDescription;
927
+ }
928
+ async function hydrateAgent({
929
+ dbAgent,
930
+ graphId,
931
+ baseUrl,
932
+ apiKey,
933
+ credentialStoreRegistry
934
+ }) {
935
+ try {
936
+ const taskHandlerConfig = await createTaskHandlerConfig({
937
+ tenantId: dbAgent.tenantId,
938
+ projectId: dbAgent.projectId,
939
+ graphId,
940
+ agentId: dbAgent.id,
941
+ baseUrl,
942
+ apiKey
943
+ });
944
+ const taskHandler = createTaskHandler(taskHandlerConfig, credentialStoreRegistry);
945
+ const agentCard = createAgentCard({
946
+ dbAgent,
947
+ baseUrl
948
+ });
949
+ return {
950
+ agentId: dbAgent.id,
951
+ tenantId: dbAgent.tenantId,
952
+ projectId: dbAgent.projectId,
953
+ graphId,
954
+ agentCard,
955
+ taskHandler
956
+ };
957
+ } catch (error) {
958
+ console.error(`\u274C Failed to hydrate agent ${dbAgent.id}:`, error);
959
+ throw error;
960
+ }
961
+ }
962
+ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
963
+ const { tenantId, projectId, graphId, agentId, baseUrl, apiKey } = executionContext;
964
+ if (!agentId) {
965
+ throw new Error("Agent ID is required");
966
+ }
967
+ const dbAgent = await getAgentById(dbClient_default)({
968
+ scopes: { tenantId, projectId },
969
+ agentId
970
+ });
971
+ if (!dbAgent) {
972
+ return null;
973
+ }
974
+ const agentFrameworkBaseUrl = `${baseUrl}/agents`;
975
+ return hydrateAgent({
976
+ dbAgent,
977
+ graphId,
978
+ baseUrl: agentFrameworkBaseUrl,
979
+ credentialStoreRegistry,
980
+ apiKey
981
+ });
982
+ }
858
983
  function agentInitializingOp(sessionId, graphId) {
859
984
  return {
860
985
  type: "agent_initializing",
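The block added in this hunk (createAgentCard, generateDescriptionWithTransfers, hydrateAgent, getRegisteredAgent) backs per-agent A2A endpoints: it looks a single agent up by id, builds its public agent card, and pairs it with a task handler. A hedged usage sketch, assuming an execution context like the one the API-key middleware now populates; the field values are placeholders:

    const registered = await getRegisteredAgent(
      {
        tenantId: 'tenant-1',        // placeholder values
        projectId: 'project-1',
        graphId: 'graph-1',
        agentId: 'agent-1',          // required; throws "Agent ID is required" if missing
        baseUrl: 'https://run.example.com',
        apiKey: 'sk-...',
      },
      credentialStoreRegistry        // assumed to be built from createDefaultCredentialStores()
    );
    // registered === null when the agent id does not exist for that tenant/project;
    // otherwise registered.agentCard.url points at `${baseUrl}/agents/a2a`.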
@@ -891,10 +1016,10 @@ function statusUpdateOp(ctx) {
891
1016
  ctx
892
1017
  };
893
1018
  }
894
- var logger3 = getLogger("DataComponentSchema");
1019
+ var logger4 = getLogger("DataComponentSchema");
895
1020
  function jsonSchemaToZod(jsonSchema) {
896
1021
  if (!jsonSchema || typeof jsonSchema !== "object") {
897
- logger3.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
1022
+ logger4.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
898
1023
  return z.string();
899
1024
  }
900
1025
  switch (jsonSchema.type) {
@@ -921,7 +1046,7 @@ function jsonSchemaToZod(jsonSchema) {
921
1046
  case "null":
922
1047
  return z.null();
923
1048
  default:
924
- logger3.warn(
1049
+ logger4.warn(
925
1050
  {
926
1051
  unsupportedType: jsonSchema.type,
927
1052
  schema: jsonSchema
@@ -975,8 +1100,40 @@ __publicField(_ArtifactReferenceSchema, "ARTIFACT_PROPS_SCHEMA", {
975
1100
  required: ["artifact_id", "task_id"]
976
1101
  });
977
1102
  var ArtifactReferenceSchema = _ArtifactReferenceSchema;
978
- var logger4 = getLogger("ModelFactory");
1103
+ var logger5 = getLogger("ModelFactory");
979
1104
  var _ModelFactory = class _ModelFactory {
1105
+ /**
1106
+ * Create a provider instance with custom configuration
1107
+ */
1108
+ static createProvider(provider, config) {
1109
+ switch (provider) {
1110
+ case "anthropic":
1111
+ return createAnthropic(config);
1112
+ case "openai":
1113
+ return createOpenAI(config);
1114
+ case "google":
1115
+ return createGoogleGenerativeAI(config);
1116
+ default:
1117
+ throw new Error(`Unsupported provider: ${provider}`);
1118
+ }
1119
+ }
1120
+ /**
1121
+ * Extract provider configuration from providerOptions
1122
+ * Only includes settings that go to the provider constructor (baseURL, apiKey, etc.)
1123
+ */
1124
+ static extractProviderConfig(providerOptions) {
1125
+ if (!providerOptions) {
1126
+ return {};
1127
+ }
1128
+ const providerConfig = {};
1129
+ if (providerOptions.baseUrl || providerOptions.baseURL) {
1130
+ providerConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
1131
+ }
1132
+ if (providerOptions.gateway) {
1133
+ Object.assign(providerConfig, providerOptions.gateway);
1134
+ }
1135
+ return providerConfig;
1136
+ }
980
1137
  /**
981
1138
  * Create a language model instance from configuration
982
1139
  * Throws error if no config provided - models must be configured at project level
@@ -990,7 +1147,7 @@ var _ModelFactory = class _ModelFactory {
990
1147
  const modelSettings = config;
991
1148
  const modelString = modelSettings.model.trim();
992
1149
  const { provider, modelName } = _ModelFactory.parseModelString(modelString);
993
- logger4.debug(
1150
+ logger5.debug(
994
1151
  {
995
1152
  provider,
996
1153
  model: modelName,
@@ -999,49 +1156,40 @@ var _ModelFactory = class _ModelFactory {
999
1156
  },
1000
1157
  "Creating language model from config"
1001
1158
  );
1002
- try {
1003
- switch (provider) {
1004
- case "anthropic":
1005
- return _ModelFactory.createAnthropicModel(modelName, modelSettings.providerOptions);
1006
- case "openai":
1007
- return _ModelFactory.createOpenAIModel(modelName, modelSettings.providerOptions);
1008
- default:
1009
- throw new Error(
1010
- `Unsupported provider: ${provider}. Supported providers are: ${_ModelFactory.SUPPORTED_PROVIDERS.join(", ")}`
1011
- );
1012
- }
1013
- } catch (error) {
1014
- logger4.error(
1015
- {
1016
- provider,
1017
- model: modelName,
1018
- error: error instanceof Error ? error.message : "Unknown error"
1019
- },
1020
- "Failed to create model"
1021
- );
1022
- throw new Error(
1023
- `Failed to create model ${modelString}: ${error instanceof Error ? error.message : "Unknown error"}`
1024
- );
1159
+ const providerConfig = _ModelFactory.extractProviderConfig(modelSettings.providerOptions);
1160
+ if (Object.keys(providerConfig).length > 0) {
1161
+ logger5.info({ config: providerConfig }, `Applying custom ${provider} provider configuration`);
1162
+ const customProvider = _ModelFactory.createProvider(provider, providerConfig);
1163
+ return customProvider.languageModel(modelName);
1164
+ }
1165
+ switch (provider) {
1166
+ case "anthropic":
1167
+ return anthropic(modelName);
1168
+ case "openai":
1169
+ return openai(modelName);
1170
+ case "google":
1171
+ return google(modelName);
1172
+ default:
1173
+ throw new Error(`Unsupported provider: ${provider}`);
1025
1174
  }
1026
1175
  }
1027
1176
  /**
1028
1177
  * Parse model string to extract provider and model name
1029
- * Examples: "anthropic/claude-4-sonnet" -> { provider: "anthropic", modelName: "claude-4-sonnet" }
1030
- * "claude-4-sonnet" -> { provider: "anthropic", modelName: "claude-4-sonnet" } (default to anthropic)
1178
+ * Examples: "anthropic/claude-sonnet-4" -> { provider: "anthropic", modelName: "claude-sonnet-4" }
1179
+ * "claude-sonnet-4" -> { provider: "anthropic", modelName: "claude-sonnet-4" } (default to anthropic)
1031
1180
  */
1032
1181
  static parseModelString(modelString) {
1033
1182
  if (modelString.includes("/")) {
1034
1183
  const [provider, ...modelParts] = modelString.split("/");
1035
1184
  const normalizedProvider = provider.toLowerCase();
1036
1185
  if (!_ModelFactory.SUPPORTED_PROVIDERS.includes(normalizedProvider)) {
1037
- logger4.warn(
1186
+ logger5.error(
1038
1187
  { provider: normalizedProvider, modelName: modelParts.join("/") },
1039
1188
  "Unsupported provider detected, falling back to anthropic"
1040
1189
  );
1041
- return {
1042
- provider: "anthropic",
1043
- modelName: modelParts.join("/")
1044
- };
1190
+ throw new Error(
1191
+ `Unsupported provider: ${normalizedProvider}. Please provide a model in the format of provider/model-name.`
1192
+ );
1045
1193
  }
1046
1194
  return {
1047
1195
  provider: normalizedProvider,
@@ -1049,51 +1197,9 @@ var _ModelFactory = class _ModelFactory {
1049
1197
  // In case model name has slashes
1050
1198
  };
1051
1199
  }
1052
- return {
1053
- provider: "anthropic",
1054
- modelName: modelString
1055
- };
1056
- }
1057
- /**
1058
- * Create an Anthropic model instance
1059
- */
1060
- static createAnthropicModel(modelName, providerOptions) {
1061
- const anthropicConfig = {};
1062
- if (providerOptions?.baseUrl || providerOptions?.baseURL) {
1063
- anthropicConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
1064
- }
1065
- if (providerOptions?.gateway) {
1066
- logger4.info(
1067
- { gateway: providerOptions.gateway },
1068
- "Setting up AI Gateway for Anthropic model"
1069
- );
1070
- Object.assign(anthropicConfig, providerOptions.gateway);
1071
- }
1072
- if (Object.keys(anthropicConfig).length > 0) {
1073
- logger4.info({ config: anthropicConfig }, "Applying custom Anthropic provider configuration");
1074
- const provider = createAnthropic(anthropicConfig);
1075
- return provider(modelName);
1076
- }
1077
- return anthropic(modelName);
1078
- }
1079
- /**
1080
- * Create an OpenAI model instance
1081
- */
1082
- static createOpenAIModel(modelName, providerOptions) {
1083
- const openaiConfig = {};
1084
- if (providerOptions?.baseUrl || providerOptions?.baseURL) {
1085
- openaiConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
1086
- }
1087
- if (providerOptions?.gateway) {
1088
- logger4.info({ gateway: providerOptions.gateway }, "Setting up AI Gateway for OpenAI model");
1089
- Object.assign(openaiConfig, providerOptions.gateway);
1090
- }
1091
- if (Object.keys(openaiConfig).length > 0) {
1092
- logger4.info({ config: openaiConfig }, "Applying custom OpenAI provider configuration");
1093
- const provider = createOpenAI(openaiConfig);
1094
- return provider(modelName);
1095
- }
1096
- return openai(modelName);
1200
+ throw new Error(
1201
+ `Invalid model provided: ${modelString}. Please provide a model in the format of provider/model-name.`
1202
+ );
1097
1203
  }
1098
1204
  /**
1099
1205
  * Get generation parameters from provider options
@@ -1118,7 +1224,7 @@ var _ModelFactory = class _ModelFactory {
1118
1224
  * Includes maxDuration if specified in provider options (in seconds, following Vercel standard)
1119
1225
  */
1120
1226
  static prepareGenerationConfig(modelSettings) {
1121
- const modelString = modelSettings?.model?.trim() || "anthropic/claude-4-sonnet-20250514";
1227
+ const modelString = modelSettings?.model?.trim();
1122
1228
  const model = _ModelFactory.createModel({
1123
1229
  model: modelString,
1124
1230
  providerOptions: modelSettings?.providerOptions
@@ -1159,7 +1265,7 @@ var _ModelFactory = class _ModelFactory {
1159
1265
  /**
1160
1266
  * Supported providers for security validation
1161
1267
  */
1162
- __publicField(_ModelFactory, "SUPPORTED_PROVIDERS", ["anthropic", "openai"]);
1268
+ __publicField(_ModelFactory, "SUPPORTED_PROVIDERS", ["anthropic", "openai", "google"]);
1163
1269
  var ModelFactory = _ModelFactory;
1164
1270
  var tracer = getTracer("agents-run-api");
1165
1271
 
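Taken together, the ModelFactory hunks above replace the per-provider createAnthropicModel/createOpenAIModel helpers with a single createProvider/extractProviderConfig path, add "google" to SUPPORTED_PROVIDERS, and make model parsing strict: a bare model name or an unknown provider now throws instead of silently falling back to Anthropic. A rough sketch of the resulting behavior; the method names come from the diff, the model ids are illustrative:

    // Strict "provider/model-name" format is now required.
    ModelFactory.parseModelString('google/gemini-1.5-pro');
    //   -> { provider: 'google', modelName: 'gemini-1.5-pro' }
    ModelFactory.parseModelString('claude-sonnet-4');
    //   -> throws: Invalid model provided ... provider/model-name

    // baseURL / gateway settings are routed through the shared provider factory.
    const model = ModelFactory.createModel({
      model: 'openai/gpt-4o-mini',
      providerOptions: { baseUrl: 'https://gateway.example.com/v1' }, // gateway URL is an assumption
    });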
@@ -1179,7 +1285,7 @@ function unregisterStreamHelper(requestId2) {
1179
1285
  }
1180
1286
 
1181
1287
  // src/utils/graph-session.ts
1182
- var logger5 = getLogger("GraphSession");
1288
+ var logger6 = getLogger("GraphSession");
1183
1289
  var GraphSession = class {
1184
1290
  // Track scheduled timeouts for cleanup
1185
1291
  constructor(sessionId, messageId, graphId, tenantId, projectId) {
@@ -1203,7 +1309,7 @@ var GraphSession = class {
1203
1309
  __publicField(this, "MAX_PENDING_ARTIFACTS", 100);
1204
1310
  // Prevent unbounded growth
1205
1311
  __publicField(this, "scheduledTimeouts");
1206
- logger5.debug({ sessionId, messageId, graphId }, "GraphSession created");
1312
+ logger6.debug({ sessionId, messageId, graphId }, "GraphSession created");
1207
1313
  }
1208
1314
  /**
1209
1315
  * Initialize status updates for this session
@@ -1217,15 +1323,15 @@ var GraphSession = class {
1217
1323
  summarizerModel,
1218
1324
  baseModel,
1219
1325
  config: {
1220
- numEvents: config.numEvents || 10,
1221
- timeInSeconds: config.timeInSeconds || 30,
1326
+ numEvents: config.numEvents || 1,
1327
+ timeInSeconds: config.timeInSeconds || 2,
1222
1328
  ...config
1223
1329
  }
1224
1330
  };
1225
1331
  if (this.statusUpdateState.config.timeInSeconds) {
1226
1332
  this.statusUpdateTimer = setInterval(async () => {
1227
1333
  if (!this.statusUpdateState || this.isEnded) {
1228
- logger5.debug(
1334
+ logger6.debug(
1229
1335
  { sessionId: this.sessionId },
1230
1336
  "Timer triggered but session already cleaned up or ended"
1231
1337
  );
@@ -1237,7 +1343,7 @@ var GraphSession = class {
1237
1343
  }
1238
1344
  await this.checkAndSendTimeBasedUpdate();
1239
1345
  }, this.statusUpdateState.config.timeInSeconds * 1e3);
1240
- logger5.info(
1346
+ logger6.info(
1241
1347
  {
1242
1348
  sessionId: this.sessionId,
1243
1349
  intervalMs: this.statusUpdateState.config.timeInSeconds * 1e3
@@ -1251,7 +1357,7 @@ var GraphSession = class {
1251
1357
  */
1252
1358
  recordEvent(eventType, agentId, data) {
1253
1359
  if (this.isEnded) {
1254
- logger5.debug(
1360
+ logger6.debug(
1255
1361
  {
1256
1362
  sessionId: this.sessionId,
1257
1363
  eventType,
@@ -1271,7 +1377,7 @@ var GraphSession = class {
1271
1377
  if (eventType === "artifact_saved" && data.pendingGeneration) {
1272
1378
  const artifactId = data.artifactId;
1273
1379
  if (this.pendingArtifacts.size >= this.MAX_PENDING_ARTIFACTS) {
1274
- logger5.warn(
1380
+ logger6.warn(
1275
1381
  {
1276
1382
  sessionId: this.sessionId,
1277
1383
  artifactId,
@@ -1292,7 +1398,7 @@ var GraphSession = class {
1292
1398
  this.artifactProcessingErrors.set(artifactId, errorCount);
1293
1399
  if (errorCount >= this.MAX_ARTIFACT_RETRIES) {
1294
1400
  this.pendingArtifacts.delete(artifactId);
1295
- logger5.error(
1401
+ logger6.error(
1296
1402
  {
1297
1403
  sessionId: this.sessionId,
1298
1404
  artifactId,
@@ -1304,7 +1410,7 @@ var GraphSession = class {
1304
1410
  "Artifact processing failed after max retries, giving up"
1305
1411
  );
1306
1412
  } else {
1307
- logger5.warn(
1413
+ logger6.warn(
1308
1414
  {
1309
1415
  sessionId: this.sessionId,
1310
1416
  artifactId,
@@ -1326,14 +1432,14 @@ var GraphSession = class {
1326
1432
  */
1327
1433
  checkStatusUpdates() {
1328
1434
  if (this.isEnded) {
1329
- logger5.debug(
1435
+ logger6.debug(
1330
1436
  { sessionId: this.sessionId },
1331
1437
  "Session has ended - skipping status update check"
1332
1438
  );
1333
1439
  return;
1334
1440
  }
1335
1441
  if (!this.statusUpdateState) {
1336
- logger5.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
1442
+ logger6.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
1337
1443
  return;
1338
1444
  }
1339
1445
  const statusUpdateState = this.statusUpdateState;
@@ -1344,11 +1450,11 @@ var GraphSession = class {
1344
1450
  */
1345
1451
  async checkAndSendTimeBasedUpdate() {
1346
1452
  if (this.isEnded) {
1347
- logger5.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
1453
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
1348
1454
  return;
1349
1455
  }
1350
1456
  if (!this.statusUpdateState) {
1351
- logger5.debug(
1457
+ logger6.debug(
1352
1458
  { sessionId: this.sessionId },
1353
1459
  "No status updates configured for time-based check"
1354
1460
  );
@@ -1361,7 +1467,7 @@ var GraphSession = class {
1361
1467
  try {
1362
1468
  await this.generateAndSendUpdate();
1363
1469
  } catch (error) {
1364
- logger5.error(
1470
+ logger6.error(
1365
1471
  {
1366
1472
  sessionId: this.sessionId,
1367
1473
  error: error instanceof Error ? error.message : "Unknown error"
@@ -1454,29 +1560,29 @@ var GraphSession = class {
1454
1560
  */
1455
1561
  async generateAndSendUpdate() {
1456
1562
  if (this.isEnded) {
1457
- logger5.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
1563
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
1458
1564
  return;
1459
1565
  }
1460
1566
  if (this.isTextStreaming) {
1461
- logger5.debug(
1567
+ logger6.debug(
1462
1568
  { sessionId: this.sessionId },
1463
1569
  "Text is currently streaming - skipping status update"
1464
1570
  );
1465
1571
  return;
1466
1572
  }
1467
1573
  if (this.isGeneratingUpdate) {
1468
- logger5.debug(
1574
+ logger6.debug(
1469
1575
  { sessionId: this.sessionId },
1470
1576
  "Update already in progress - skipping duplicate generation"
1471
1577
  );
1472
1578
  return;
1473
1579
  }
1474
1580
  if (!this.statusUpdateState) {
1475
- logger5.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
1581
+ logger6.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
1476
1582
  return;
1477
1583
  }
1478
1584
  if (!this.graphId) {
1479
- logger5.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
1585
+ logger6.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
1480
1586
  return;
1481
1587
  }
1482
1588
  const newEventCount = this.events.length - this.statusUpdateState.lastEventCount;
@@ -1489,7 +1595,7 @@ var GraphSession = class {
1489
1595
  try {
1490
1596
  const streamHelper = getStreamHelper(this.sessionId);
1491
1597
  if (!streamHelper) {
1492
- logger5.warn(
1598
+ logger6.warn(
1493
1599
  { sessionId: this.sessionId },
1494
1600
  "No stream helper found - cannot send status update"
1495
1601
  );
@@ -1510,7 +1616,7 @@ var GraphSession = class {
1510
1616
  if (result.operations && result.operations.length > 0) {
1511
1617
  for (const op of result.operations) {
1512
1618
  if (!op || !op.type || !op.data || Object.keys(op.data).length === 0) {
1513
- logger5.warn(
1619
+ logger6.warn(
1514
1620
  {
1515
1621
  sessionId: this.sessionId,
1516
1622
  operation: op
@@ -1563,7 +1669,7 @@ var GraphSession = class {
1563
1669
  this.previousSummaries.shift();
1564
1670
  }
1565
1671
  if (!operation || !operation.type || !operation.ctx) {
1566
- logger5.warn(
1672
+ logger6.warn(
1567
1673
  {
1568
1674
  sessionId: this.sessionId,
1569
1675
  operation
@@ -1578,7 +1684,7 @@ var GraphSession = class {
1578
1684
  this.statusUpdateState.lastEventCount = this.events.length;
1579
1685
  }
1580
1686
  } catch (error) {
1581
- logger5.error(
1687
+ logger6.error(
1582
1688
  {
1583
1689
  sessionId: this.sessionId,
1584
1690
  error: error instanceof Error ? error.message : "Unknown error",
@@ -1616,7 +1722,7 @@ var GraphSession = class {
1616
1722
  this.releaseUpdateLock();
1617
1723
  }
1618
1724
  } catch (error) {
1619
- logger5.error(
1725
+ logger6.error(
1620
1726
  {
1621
1727
  sessionId: this.sessionId,
1622
1728
  error: error instanceof Error ? error.message : "Unknown error"
@@ -1693,7 +1799,7 @@ User's Question/Context:
1693
1799
  ${conversationHistory}
1694
1800
  ` : "";
1695
1801
  } catch (error) {
1696
- logger5.warn(
1802
+ logger6.warn(
1697
1803
  { sessionId: this.sessionId, error },
1698
1804
  "Failed to fetch conversation history for status update"
1699
1805
  );
@@ -1745,7 +1851,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
1745
1851
  return text.trim();
1746
1852
  } catch (error) {
1747
1853
  setSpanWithError(span, error);
1748
- logger5.error({ error }, "Failed to generate summary, using fallback");
1854
+ logger6.error({ error }, "Failed to generate summary, using fallback");
1749
1855
  return this.generateFallbackSummary(newEvents, elapsedTime);
1750
1856
  } finally {
1751
1857
  span.end();
@@ -1791,7 +1897,7 @@ User's Question/Context:
1791
1897
  ${conversationHistory}
1792
1898
  ` : "";
1793
1899
  } catch (error) {
1794
- logger5.warn(
1900
+ logger6.warn(
1795
1901
  { sessionId: this.sessionId, error },
1796
1902
  "Failed to fetch conversation history for structured status update"
1797
1903
  );
@@ -1834,9 +1940,11 @@ Rules:
1834
1940
  - Labels MUST contain the ACTUAL information discovered ("Found X", "Learned Y", "Discovered Z requires A")
1835
1941
  - DO NOT use action words like "Searching", "Processing", "Analyzing" - state what was FOUND
1836
1942
  - Include specific details, numbers, requirements, or insights discovered
1837
- - You are ONE AI (no agents/delegations)
1838
- - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE INTERNAL OPERATIONS!
1839
- - Bad examples: "Searching docs", "Processing request", "Status update", or not using the no_relevant_updates: e.g. "No New Updates", "No new info to report"
1943
+ - You are ONE unified AI system - NEVER mention agents, transfers, delegations, or routing
1944
+ - CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", or any internal system terminology in labels
1945
+ - Present all operations as seamless actions by a single system
1946
+ - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE ALL INTERNAL OPERATIONS!
1947
+ - Bad examples: "Transferring to search agent", "Delegating task", "Routing request", "Processing request", or not using the no_relevant_updates
1840
1948
  - Good examples: "Slack bot needs admin privileges", "Found 3-step OAuth flow required", "Channel limit is 500 per workspace", or use the no_relevant_updates component if nothing new to report.
1841
1949
 
1842
1950
  REMEMBER YOU CAN ONLY USE 'no_relevant_updates' ALONE! IT CANNOT BE CONCATENATED WITH OTHER STATUS UPDATES!
@@ -1890,7 +1998,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
1890
1998
  return { operations };
1891
1999
  } catch (error) {
1892
2000
  setSpanWithError(span, error);
1893
- logger5.error({ error }, "Failed to generate structured update, using fallback");
2001
+ logger6.error({ error }, "Failed to generate structured update, using fallback");
1894
2002
  return { operations: [] };
1895
2003
  } finally {
1896
2004
  span.end();
@@ -1997,8 +2105,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
1997
2105
  case "transfer": {
1998
2106
  const data = event.data;
1999
2107
  activities.push(
2000
- `\u{1F504} **Transfer**: ${data.fromAgent} \u2192 ${data.targetAgent}
2001
- ${data.reason ? `Reason: ${data.reason}` : "Control transfer"}
2108
+ `\u{1F504} **Continuing**: ${data.reason || "Processing request"}
2002
2109
  ${data.context ? `Context: ${JSON.stringify(data.context, null, 2)}` : ""}`
2003
2110
  );
2004
2111
  break;
@@ -2006,8 +2113,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2006
2113
  case "delegation_sent": {
2007
2114
  const data = event.data;
2008
2115
  activities.push(
2009
- `\u{1F4E4} **Delegation Sent** [${data.delegationId}]: ${data.fromAgent} \u2192 ${data.targetAgent}
2010
- Task: ${data.taskDescription}
2116
+ `\u{1F4E4} **Processing**: ${data.taskDescription}
2011
2117
  ${data.context ? `Context: ${JSON.stringify(data.context, null, 2)}` : ""}`
2012
2118
  );
2013
2119
  break;
@@ -2015,7 +2121,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2015
2121
  case "delegation_returned": {
2016
2122
  const data = event.data;
2017
2123
  activities.push(
2018
- `\u{1F4E5} **Delegation Returned** [${data.delegationId}]: ${data.fromAgent} \u2190 ${data.targetAgent}
2124
+ `\u{1F4E5} **Completed subtask**
2019
2125
  Result: ${JSON.stringify(data.result, null, 2)}`
2020
2126
  );
2021
2127
  break;
@@ -2034,16 +2140,16 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2034
2140
  case "agent_reasoning": {
2035
2141
  const data = event.data;
2036
2142
  activities.push(
2037
- `\u2699\uFE0F **Reasoning**: reasoning
2038
- Full Details: ${JSON.stringify(data.parts, null, 2)}`
2143
+ `\u2699\uFE0F **Analyzing request**
2144
+ Details: ${JSON.stringify(data.parts, null, 2)}`
2039
2145
  );
2040
2146
  break;
2041
2147
  }
2042
2148
  case "agent_generate": {
2043
2149
  const data = event.data;
2044
2150
  activities.push(
2045
- `\u2699\uFE0F **Generation**: ${data.generationType}
2046
- Full Details: ${JSON.stringify(data.parts, null, 2)}`
2151
+ `\u2699\uFE0F **Preparing response**
2152
+ Details: ${JSON.stringify(data.parts, null, 2)}`
2047
2153
  );
2048
2154
  break;
2049
2155
  }
@@ -2217,7 +2323,7 @@ Make it specific and relevant.`;
2217
2323
  taskId: artifactData.taskId,
2218
2324
  artifacts: [artifactToSave]
2219
2325
  });
2220
- logger5.info(
2326
+ logger6.info(
2221
2327
  {
2222
2328
  sessionId: this.sessionId,
2223
2329
  artifactId: artifactData.artifactId,
@@ -2234,7 +2340,7 @@ Make it specific and relevant.`;
2234
2340
  span.setStatus({ code: SpanStatusCode.OK });
2235
2341
  } catch (error) {
2236
2342
  setSpanWithError(span, error);
2237
- logger5.error(
2343
+ logger6.error(
2238
2344
  {
2239
2345
  sessionId: this.sessionId,
2240
2346
  artifactId: artifactData.artifactId,
@@ -2270,7 +2376,7 @@ Make it specific and relevant.`;
2270
2376
  taskId: artifactData.taskId,
2271
2377
  artifacts: [fallbackArtifact]
2272
2378
  });
2273
- logger5.info(
2379
+ logger6.info(
2274
2380
  {
2275
2381
  sessionId: this.sessionId,
2276
2382
  artifactId: artifactData.artifactId
@@ -2279,7 +2385,7 @@ Make it specific and relevant.`;
2279
2385
  );
2280
2386
  }
2281
2387
  } catch (fallbackError) {
2282
- logger5.error(
2388
+ logger6.error(
2283
2389
  {
2284
2390
  sessionId: this.sessionId,
2285
2391
  artifactId: artifactData.artifactId,
@@ -2306,7 +2412,7 @@ var GraphSessionManager = class {
2306
2412
  const sessionId = messageId;
2307
2413
  const session = new GraphSession(sessionId, messageId, graphId, tenantId, projectId);
2308
2414
  this.sessions.set(sessionId, session);
2309
- logger5.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
2415
+ logger6.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
2310
2416
  return sessionId;
2311
2417
  }
2312
2418
  /**
@@ -2317,7 +2423,7 @@ var GraphSessionManager = class {
2317
2423
  if (session) {
2318
2424
  session.initializeStatusUpdates(config, summarizerModel);
2319
2425
  } else {
2320
- logger5.error(
2426
+ logger6.error(
2321
2427
  {
2322
2428
  sessionId,
2323
2429
  availableSessions: Array.from(this.sessions.keys())
@@ -2338,7 +2444,7 @@ var GraphSessionManager = class {
2338
2444
  recordEvent(sessionId, eventType, agentId, data) {
2339
2445
  const session = this.sessions.get(sessionId);
2340
2446
  if (!session) {
2341
- logger5.warn({ sessionId }, "Attempted to record event in non-existent session");
2447
+ logger6.warn({ sessionId }, "Attempted to record event in non-existent session");
2342
2448
  return;
2343
2449
  }
2344
2450
  session.recordEvent(eventType, agentId, data);
@@ -2349,12 +2455,12 @@ var GraphSessionManager = class {
2349
2455
  endSession(sessionId) {
2350
2456
  const session = this.sessions.get(sessionId);
2351
2457
  if (!session) {
2352
- logger5.warn({ sessionId }, "Attempted to end non-existent session");
2458
+ logger6.warn({ sessionId }, "Attempted to end non-existent session");
2353
2459
  return [];
2354
2460
  }
2355
2461
  const events = session.getEvents();
2356
2462
  const summary = session.getSummary();
2357
- logger5.info({ sessionId, summary }, "GraphSession ended");
2463
+ logger6.info({ sessionId, summary }, "GraphSession ended");
2358
2464
  session.cleanup();
2359
2465
  this.sessions.delete(sessionId);
2360
2466
  return events;
@@ -2380,7 +2486,7 @@ var GraphSessionManager = class {
2380
2486
  }
2381
2487
  };
2382
2488
  var graphSessionManager = new GraphSessionManager();
2383
- var logger6 = getLogger("ArtifactParser");
2489
+ var logger7 = getLogger("ArtifactParser");
2384
2490
  var _ArtifactParser = class _ArtifactParser {
2385
2491
  constructor(tenantId) {
2386
2492
  this.tenantId = tenantId;
@@ -2396,9 +2502,7 @@ var _ArtifactParser = class _ArtifactParser {
2396
2502
  * More robust detection that handles streaming fragments
2397
2503
  */
2398
2504
  hasIncompleteArtifact(text) {
2399
- return /^.*<(?:artifact(?::ref)?|a(?:r(?:t(?:i(?:f(?:a(?:c(?:t(?::(?:r(?:e(?:f)?)?)?)?)?)?)?)?)?)?)?)?$/.test(
2400
- text
2401
- ) || /^.*<artifact:ref(?:[^>]*)$/.test(text) || // Incomplete artifact:ref at end
2505
+ return /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/.test(text) || /<artifact:ref[^>]+$/.test(text) || // Incomplete artifact ref at end
2402
2506
  this.findSafeTextBoundary(text) < text.length;
2403
2507
  }
2404
2508
  /**
@@ -2407,10 +2511,10 @@ var _ArtifactParser = class _ArtifactParser {
2407
2511
  */
2408
2512
  findSafeTextBoundary(text) {
2409
2513
  const endPatterns = [
2410
- /^.*<artifact:ref(?:[^/>]+(?:[^>]*[^/])?)?$/,
2514
+ /<artifact:ref(?![^>]*\/>).*$/,
2411
2515
  // artifact:ref that doesn't end with />
2412
- /^.*<(?:artifact(?::ref)?|a(?:r(?:t(?:i(?:f(?:a(?:c(?:t(?::(?:r(?:e(?:f)?)?)?)?)?)?)?)?)?)?)?)?$/
2413
- // Safe partial artifact pattern
2516
+ /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/
2517
+ // Any partial artifact pattern at end
2414
2518
  ];
2415
2519
  for (const pattern of endPatterns) {
2416
2520
  const match = text.match(pattern);
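This hunk, together with the hasIncompleteArtifact change above it, drops the "^.*" prefixes so the partial-tag checks only have to match at the end of the buffer instead of re-scanning the whole string on every streamed chunk. The intent is unchanged: hold back text that might be the start of an <artifact:ref id="..." task="..."/> tag. A quick illustration of what the new patterns treat as incomplete; the sample strings are made up:

    const partial = /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/;  // copied from the diff
    const openRef = /<artifact:ref[^>]+$/;                                  // copied from the diff

    partial.test('Results so far <artifa');                       // true  -> keep buffering
    openRef.test('See <artifact:ref id="doc-1" task="task-9"');   // true  -> tag not closed yet
    partial.test('Complete sentence with no tag.');                // false -> safe to flush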
@@ -2446,7 +2550,7 @@ var _ArtifactParser = class _ArtifactParser {
2446
2550
  id: taskId
2447
2551
  });
2448
2552
  if (!task) {
2449
- logger6.warn({ taskId }, "Task not found when fetching artifacts");
2553
+ logger7.warn({ taskId }, "Task not found when fetching artifacts");
2450
2554
  continue;
2451
2555
  }
2452
2556
  const taskArtifacts = await getLedgerArtifacts(dbClient_default)({
@@ -2458,9 +2562,9 @@ var _ArtifactParser = class _ArtifactParser {
2458
2562
  artifacts.set(key, artifact);
2459
2563
  }
2460
2564
  }
2461
- logger6.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
2565
+ logger7.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
2462
2566
  } catch (error) {
2463
- logger6.error({ error, contextId }, "Error loading context artifacts");
2567
+ logger7.error({ error, contextId }, "Error loading context artifacts");
2464
2568
  }
2465
2569
  return artifacts;
2466
2570
  }
@@ -2563,7 +2667,7 @@ var _ArtifactParser = class _ArtifactParser {
2563
2667
  id: taskId
2564
2668
  });
2565
2669
  if (!task) {
2566
- logger6.warn({ taskId }, "Task not found when fetching artifact");
2670
+ logger7.warn({ taskId }, "Task not found when fetching artifact");
2567
2671
  return null;
2568
2672
  }
2569
2673
  const artifacts = await getLedgerArtifacts(dbClient_default)({
@@ -2575,7 +2679,7 @@ var _ArtifactParser = class _ArtifactParser {
2575
2679
  return this.formatArtifactData(artifacts[0], artifactId, taskId);
2576
2680
  }
2577
2681
  } catch (error) {
2578
- logger6.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
2682
+ logger7.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
2579
2683
  }
2580
2684
  return null;
2581
2685
  }
@@ -2611,11 +2715,11 @@ var _ArtifactParser = class _ArtifactParser {
2611
2715
  __publicField(_ArtifactParser, "ARTIFACT_REGEX", /<artifact:ref\s+id="([^"]*?)"\s+task="([^"]*?)"\s*\/>/gs);
2612
2716
  __publicField(_ArtifactParser, "ARTIFACT_CHECK_REGEX", /<artifact:ref\s+(?=.*id="[^"]+")(?=.*task="[^"]+")[^>]*\/>/);
2613
2717
  // Regex for catching any partial artifact pattern (< + any prefix of "artifact:ref")
2614
- __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:(r(e(f?)?)?)?)?)?)?)?)?)?)?)?)?$/g);
2718
+ __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/g);
2615
2719
  var ArtifactParser = _ArtifactParser;
2616
2720
 
2617
2721
  // src/utils/incremental-stream-parser.ts
2618
- var logger7 = getLogger("IncrementalStreamParser");
2722
+ var logger8 = getLogger("IncrementalStreamParser");
2619
2723
  var IncrementalStreamParser = class {
2620
2724
  constructor(streamHelper, tenantId, contextId) {
2621
2725
  __publicField(this, "buffer", "");
@@ -2675,13 +2779,19 @@ var IncrementalStreamParser = class {
2675
2779
  if (part.type === "tool-call-delta" && part.toolName === targetToolName) {
2676
2780
  const delta = part.argsTextDelta || "";
2677
2781
  if (jsonBuffer.length + delta.length > MAX_BUFFER_SIZE) {
2678
- logger7.warn({ bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE }, "JSON buffer exceeded maximum size, truncating");
2782
+ logger8.warn(
2783
+ { bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE },
2784
+ "JSON buffer exceeded maximum size, truncating"
2785
+ );
2679
2786
  jsonBuffer = jsonBuffer.slice(-MAX_BUFFER_SIZE / 2);
2680
2787
  }
2681
2788
  jsonBuffer += delta;
2682
2789
  for (const char of delta) {
2683
2790
  if (componentBuffer.length > MAX_BUFFER_SIZE) {
2684
- logger7.warn({ bufferSize: componentBuffer.length, maxSize: MAX_BUFFER_SIZE }, "Component buffer exceeded maximum size, resetting");
2791
+ logger8.warn(
2792
+ { bufferSize: componentBuffer.length, maxSize: MAX_BUFFER_SIZE },
2793
+ "Component buffer exceeded maximum size, resetting"
2794
+ );
2685
2795
  componentBuffer = "";
2686
2796
  depth = 0;
2687
2797
  continue;
@@ -2696,7 +2806,7 @@ var IncrementalStreamParser = class {
2696
2806
  if (componentMatch) {
2697
2807
  const MAX_COMPONENT_SIZE = 1024 * 1024;
2698
2808
  if (componentMatch[0].length > MAX_COMPONENT_SIZE) {
2699
- logger7.warn(
2809
+ logger8.warn(
2700
2810
  {
2701
2811
  size: componentMatch[0].length,
2702
2812
  maxSize: MAX_COMPONENT_SIZE
@@ -2709,7 +2819,7 @@ var IncrementalStreamParser = class {
2709
2819
  try {
2710
2820
  const component = JSON.parse(componentMatch[0]);
2711
2821
  if (typeof component !== "object" || !component.id) {
2712
- logger7.warn({ component }, "Invalid component structure, skipping");
2822
+ logger8.warn({ component }, "Invalid component structure, skipping");
2713
2823
  componentBuffer = "";
2714
2824
  continue;
2715
2825
  }
@@ -2722,7 +2832,7 @@ var IncrementalStreamParser = class {
2722
2832
  componentsStreamed++;
2723
2833
  componentBuffer = "";
2724
2834
  } catch (e) {
2725
- logger7.debug({ error: e }, "Failed to parse component, continuing to accumulate");
2835
+ logger8.debug({ error: e }, "Failed to parse component, continuing to accumulate");
2726
2836
  }
2727
2837
  }
2728
2838
  }
@@ -2739,7 +2849,7 @@ var IncrementalStreamParser = class {
2739
2849
  break;
2740
2850
  }
2741
2851
  }
2742
- logger7.debug({ componentsStreamed }, "Finished streaming components");
2852
+ logger8.debug({ componentsStreamed }, "Finished streaming components");
2743
2853
  }
2744
2854
  /**
2745
2855
  * Legacy method for backward compatibility - defaults to text processing
@@ -2883,7 +2993,7 @@ var IncrementalStreamParser = class {
2883
2993
  };
2884
2994
 
2885
2995
  // src/utils/response-formatter.ts
2886
- var logger8 = getLogger("ResponseFormatter");
2996
+ var logger9 = getLogger("ResponseFormatter");
2887
2997
  var ResponseFormatter = class {
2888
2998
  constructor(tenantId) {
2889
2999
  __publicField(this, "artifactParser");
@@ -2914,7 +3024,7 @@ var ResponseFormatter = class {
2914
3024
  return { parts };
2915
3025
  } catch (error) {
2916
3026
  setSpanWithError(span, error);
2917
- logger8.error({ error, responseObject }, "Error formatting object response");
3027
+ logger9.error({ error, responseObject }, "Error formatting object response");
2918
3028
  return {
2919
3029
  parts: [{ kind: "data", data: responseObject }]
2920
3030
  };
@@ -2965,7 +3075,7 @@ var ResponseFormatter = class {
2965
3075
  return { parts };
2966
3076
  } catch (error) {
2967
3077
  setSpanWithError(span, error);
2968
- logger8.error({ error, responseText }, "Error formatting response");
3078
+ logger9.error({ error, responseText }, "Error formatting response");
2969
3079
  return { text: responseText };
2970
3080
  } finally {
2971
3081
  span.end();
@@ -3010,7 +3120,7 @@ var ResponseFormatter = class {
3010
3120
  }
3011
3121
  }
3012
3122
  };
3013
- var logger9 = getLogger("ToolSessionManager");
3123
+ var logger10 = getLogger("ToolSessionManager");
3014
3124
  var _ToolSessionManager = class _ToolSessionManager {
3015
3125
  // 5 minutes
3016
3126
  constructor() {
@@ -3039,7 +3149,7 @@ var _ToolSessionManager = class _ToolSessionManager {
3039
3149
  createdAt: Date.now()
3040
3150
  };
3041
3151
  this.sessions.set(sessionId, session);
3042
- logger9.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
3152
+ logger10.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
3043
3153
  return sessionId;
3044
3154
  }
3045
3155
  /**
@@ -3048,7 +3158,7 @@ var _ToolSessionManager = class _ToolSessionManager {
3048
3158
  recordToolResult(sessionId, toolResult) {
3049
3159
  const session = this.sessions.get(sessionId);
3050
3160
  if (!session) {
3051
- logger9.warn(
3161
+ logger10.warn(
3052
3162
  { sessionId, toolCallId: toolResult.toolCallId },
3053
3163
  "Tool result recorded for unknown session"
3054
3164
  );
@@ -3062,12 +3172,12 @@ var _ToolSessionManager = class _ToolSessionManager {
3062
3172
  getToolResult(sessionId, toolCallId) {
3063
3173
  const session = this.sessions.get(sessionId);
3064
3174
  if (!session) {
3065
- logger9.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
3175
+ logger10.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
3066
3176
  return void 0;
3067
3177
  }
3068
3178
  const result = session.toolResults.get(toolCallId);
3069
3179
  if (!result) {
3070
- logger9.warn(
3180
+ logger10.warn(
3071
3181
  {
3072
3182
  sessionId,
3073
3183
  toolCallId,
@@ -3106,10 +3216,10 @@ var _ToolSessionManager = class _ToolSessionManager {
3106
3216
  }
3107
3217
  for (const sessionId of expiredSessions) {
3108
3218
  this.sessions.delete(sessionId);
3109
- logger9.debug({ sessionId }, "Cleaned up expired tool session");
3219
+ logger10.debug({ sessionId }, "Cleaned up expired tool session");
3110
3220
  }
3111
3221
  if (expiredSessions.length > 0) {
3112
- logger9.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
3222
+ logger10.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
3113
3223
  }
3114
3224
  }
3115
3225
  };
@@ -3118,7 +3228,7 @@ var ToolSessionManager = _ToolSessionManager;
3118
3228
  var toolSessionManager = ToolSessionManager.getInstance();
3119
3229
 
3120
3230
  // src/agents/artifactTools.ts
3121
- var logger10 = getLogger("artifactTools");
3231
+ var logger11 = getLogger("artifactTools");
3122
3232
  function buildKeyNestingMap(data, prefix = "", map = /* @__PURE__ */ new Map()) {
3123
3233
  if (typeof data === "object" && data !== null) {
3124
3234
  if (Array.isArray(data)) {
@@ -3221,7 +3331,7 @@ function createPropSelectorsSchema(artifactComponents) {
3221
3331
  Object.entries(summaryProps.properties).forEach(([propName, propDef]) => {
3222
3332
  const propDescription = propDef?.description || propDef?.title || `${propName} property`;
3223
3333
  propSchema[propName] = z4.string().describe(
3224
- `JMESPath selector for ${propName} (${propDescription}) - summary version, relative to base selector`
3334
+ `JMESPath selector for ${propName} (${propDescription}) - summary version, MUST be relative to your baseSelector target level. Access fields WITHIN the items your baseSelector returns.`
3225
3335
  );
3226
3336
  });
3227
3337
  }
@@ -3233,7 +3343,7 @@ function createPropSelectorsSchema(artifactComponents) {
3233
3343
  if (!propSchema[propName]) {
3234
3344
  const propDescription = propDef?.description || propDef?.title || `${propName} property`;
3235
3345
  propSchema[propName] = z4.string().describe(
3236
- `JMESPath selector for ${propName} (${propDescription}) - full version, relative to base selector`
3346
+ `JMESPath selector for ${propName} (${propDescription}) - MUST be relative to your baseSelector target level. If baseSelector stops at a document, this accesses fields WITHIN that document. Examples: "title", "content.body", "metadata.author"`
3237
3347
  );
3238
3348
  }
3239
3349
  });
@@ -3247,7 +3357,26 @@ function createPropSelectorsSchema(artifactComponents) {
3247
3357
  return z4.union(propSelectorSchemas);
3248
3358
  }
3249
3359
  return z4.record(z4.string(), z4.string()).describe(
3250
- "Prop selectors mapping schema properties to JMESPath expressions relative to base selector"
3360
+ `Prop selectors mapping schema properties to JMESPath expressions relative to base selector. Each path is relative to the item(s) your baseSelector returns.
3361
+
3362
+ \u{1F3AF} CRITICAL: PropSelectors work ONLY on the data your baseSelector returns!
3363
+ If baseSelector = "result.docs[0]" \u2192 propSelectors access fields INSIDE that doc
3364
+ If baseSelector = "result.docs[0].content[0]" \u2192 propSelectors access fields INSIDE that content item
3365
+
3366
+ \u2705 CORRECT EXAMPLES (paths relative to baseSelector target):
3367
+ \u2022 baseSelector: "result.documents[?type=='article']" \u2192 propSelectors: {"title": "title", "url": "url"}
3368
+ \u2022 baseSelector: "result.content[0].text" \u2192 propSelectors: {"content": "content[0].text", "source": "content[0].source"}
3369
+ \u2022 baseSelector: "result.items" \u2192 propSelectors: {"name": "profile.name", "email": "contact.email"}
3370
+
3371
+ \u274C WRONG EXAMPLES (accessing data not at baseSelector level):
3372
+ \u2022 baseSelector: "result.docs[0].content[0]" \u2192 propSelectors: {"title": "title"} \u2190 title is at doc level, not content level!
3373
+ \u2022 baseSelector: "result.source.content" \u2192 propSelectors: {"title": "content[4].text"} \u2190 baseSelector ends at array, can't index into it!
3374
+ \u2022 baseSelector: "result.items" \u2192 propSelectors: {"title": "documents[0].title"} \u2190 going deeper when baseSelector should handle depth
3375
+
3376
+ \u274C NEVER USE LITERAL VALUES:
3377
+ {"title": "Robert Tran", "url": "https://linkedin.com/..."}
3378
+
3379
+ \u{1F4A1} TIP: Match your baseSelector depth to where the properties you need actually exist!`
3251
3380
  );
3252
3381
  }
3253
3382
  function createInputSchema(artifactComponents) {
@@ -3256,7 +3385,18 @@ function createInputSchema(artifactComponents) {
3256
3385
  "EXACT toolCallId from a previous tool execution - copy it exactly from the tool call result. NEVER invent or make up tool call IDs."
3257
3386
  ),
3258
3387
  baseSelector: z4.string().describe(
3259
- `JMESPath selector to get to the main data array/object. ALWAYS start with "result." Example: "result.content[?type=='text']"`
3388
+ `JMESPath selector to get to the main data array/object. ALWAYS start with "result." That is a mandatory prefix.
3389
+
3390
+ Data structures are COMPLEX and NESTED. Examples:
3391
+ \u2022 "result.content[0].text.content[2]" - parsed JSON in text field
3392
+ \u2022 "result.structuredContent.content[1]" - direct structured data
3393
+ \u2022 "result.data.items[?type=='doc']" - filtered array
3394
+
3395
+ \u{1F6A8} CRITICAL: If you need data from array[4], your baseSelector must END at array[4], NOT at the array itself!
3396
+ \u2705 CORRECT: "result.source.content[4]" \u2192 propSelectors can access fields in that item
3397
+ \u274C WRONG: "result.source.content" \u2192 propSelectors can't use content[4] because baseSelector already selected the array
3398
+
3399
+ \u{1F525} IF YOUR PATH FAILS: READ THE ERROR MESSAGE! It tells you the correct path! \u{1F525}`
3260
3400
  ),
3261
3401
  propSelectors: createPropSelectorsSchema(artifactComponents)
3262
3402
  });
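The expanded descriptions above all drive home one rule: propSelectors are evaluated relative to whatever baseSelector returns, so the two must agree on depth. Since the tool ultimately runs these through jmespath.search (imported at the top of this file), the contract can be shown directly; the tool-result shape below is invented for illustration:

    import jmespath from 'jmespath';

    const toolResult = {                      // hypothetical parsed tool result
      result: { docs: [{ title: 'OAuth setup', content: { body: '3-step flow' } }] },
    };

    const baseSelector = 'result.docs[0]';    // ends at ONE document
    const item = jmespath.search(toolResult, baseSelector);

    // propSelectors are then resolved against `item`, not against the whole result:
    jmespath.search(item, 'title');           // 'OAuth setup'
    jmespath.search(item, 'content.body');    // '3-step flow'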
@@ -3275,6 +3415,9 @@ function createSaveToolResultTool(sessionId, streamRequestId, agentId, artifactC
3275
3415
  return tool({
3276
3416
  description: `Save tool results as structured artifacts. Each artifact should represent ONE SPECIFIC, IMPORTANT, and UNIQUE document or data item.
3277
3417
 
3418
+ \u26A1 CRITICAL: JSON-like text content in tool results is AUTOMATICALLY PARSED into proper JSON objects - treat all data as structured, not text strings.
3419
+ \u{1F6A8} CRITICAL: Data structures are deeply nested. When your path fails, READ THE ERROR MESSAGE - it shows the correct path!
3420
+
3278
3421
  AVAILABLE ARTIFACT TYPES:
3279
3422
  ${availableTypesWithDescriptions}
3280
3423
 
@@ -3286,26 +3429,6 @@ Each artifact you save becomes a SEPARATE DATA COMPONENT in the structured respo
3286
3429
  \u2705 UNIQUE with distinct value from other artifacts
3287
3430
  \u2705 RENDERED AS INDIVIDUAL DATA COMPONENT in the UI
3288
3431
 
3289
- \u274C DO NOT save multiple different items in one artifact unless they are EXTREMELY SIMILAR
3290
- \u274C DO NOT batch unrelated items together - each item becomes its own data component
3291
- \u274C DO NOT save generic collections - break them into individual data components
3292
-
3293
- \u{1F3AF} STRUCTURED DATA COMPONENT PRINCIPLE:
3294
- Each artifact save creates ONE data component that will be rendered separately in the UI. If you have 5 important items, save them as 5 separate artifacts to create 5 separate data components for better user experience.
3295
-
3296
- THINK: "What is the ONE most important piece of information here that deserves its own data component?"
3297
-
3298
- EXAMPLES OF GOOD INDIVIDUAL ARTIFACTS (SEPARATE DATA COMPONENTS):
3299
- - Nick Gomez's founder profile (specific person) \u2192 Individual data component
3300
- - The /users/create API endpoint documentation (specific endpoint) \u2192 Individual data component
3301
- - Error message for authentication failure (specific error type) \u2192 Individual data component
3302
- - Configuration for Redis caching (specific config topic) \u2192 Individual data component
3303
-
3304
- EXAMPLES OF BAD BATCHING:
3305
- \u274C "All team members" \u2192 Should be separate artifacts for each important member (separate data components)
3306
- \u274C "All API endpoints" \u2192 Should be separate artifacts for each distinct endpoint (separate data components)
3307
- \u274C "All error types" \u2192 Should be separate artifacts for each error category (separate data components)
3308
-
3309
3432
  USAGE PATTERN:
3310
3433
  1. baseSelector: Navigate through nested structures to target ONE SPECIFIC item
3311
3434
  - Navigate through all necessary levels: "result.data.items.nested[?condition]"
@@ -3315,9 +3438,11 @@ USAGE PATTERN:
3315
3438
  - NOT: "result.items[*]" (too broad, gets everything)
3316
3439
 
3317
3440
  2. propSelectors: Extract properties relative to your selected item
3318
- - Always relative to the single item that baseSelector returns
3319
- - Simple paths from that item: { prop1: "field_x", prop2: "nested.field_y", prop3: "deep.nested.field_z" }
3320
- - The tool handles array iteration - your selectors work on individual items
3441
+ - \u{1F3AF} CRITICAL: Always relative to the single item that baseSelector returns
3442
+ - If baseSelector ends at a document \u2192 propSelectors access document fields
3443
+ - If baseSelector ends at content[0] \u2192 propSelectors access content[0] fields
3444
+ - Simple paths from that exact level: { prop1: "field_x", prop2: "nested.field_y" }
3445
+ - \u274C DON'T try to go back up or deeper - adjust your baseSelector instead!
3321
3446
 
3322
3447
  3. Result: ONE artifact representing ONE important, unique item \u2192 ONE data component
3323
3448
 
@@ -3326,20 +3451,12 @@ USAGE PATTERN:
3326
3451
  - Focus on getting to the right level with baseSelector, then keep propSelectors simple
3327
3452
  - Test your baseSelector: Does it return exactly the items you want?
3328
3453
 
3329
- \u26A0\uFE0F STRICT SELECTIVITY RULES FOR DATA COMPONENTS:
3330
- - ALWAYS ask: "Is this ONE specific, important thing that deserves its own data component?"
3331
- - If the answer is no, don't save it or find a more specific selector
3332
- - Multiple similar items = Multiple separate artifact saves (use the tool multiple times) \u2192 Multiple data components
3333
- - Each artifact should be independently valuable and uniquely identifiable \u2192 Each data component stands alone
3334
- - BETTER to save 3 individual, specific artifacts (3 data components) than 1 generic collection (1 data component)
3335
-
3336
- \u{1F504} MULTIPLE ARTIFACTS = MULTIPLE DATA COMPONENTS:
3337
- Remember: Each time you call this tool, you create a separate data component. Call it multiple times for multiple items to create a rich, structured response with individual data components for each important piece of information.`,
3454
+ Please use Error Messages to Debug when there is an error in the tool call.`,
3338
3455
  inputSchema,
3339
3456
  execute: async ({ toolCallId, baseSelector, propSelectors, ...rest }, _context) => {
3340
3457
  const artifactType = "artifactType" in rest ? rest.artifactType : void 0;
3341
3458
  if (!sessionId) {
3342
- logger10.warn({ toolCallId }, "No session ID provided to save_tool_result");
3459
+ logger11.warn({ toolCallId }, "No session ID provided to save_tool_result");
3343
3460
  return {
3344
3461
  saved: false,
3345
3462
  error: `[toolCallId: ${toolCallId}] No session context available`,
@@ -3349,7 +3466,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3349
3466
  }
3350
3467
  const toolResult = toolSessionManager.getToolResult(sessionId, toolCallId);
3351
3468
  if (!toolResult) {
3352
- logger10.warn({ toolCallId, sessionId }, "Tool result not found in session");
3469
+ logger11.warn({ toolCallId, sessionId }, "Tool result not found in session");
3353
3470
  return {
3354
3471
  saved: false,
3355
3472
  error: `[toolCallId: ${toolCallId}] Tool result not found`,
@@ -3362,7 +3479,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3362
3479
  const baseData = jmespath.search(parsedResult, baseSelector);
3363
3480
  if (!baseData || Array.isArray(baseData) && baseData.length === 0) {
3364
3481
  const debugInfo = analyzeSelectorFailure(parsedResult, baseSelector);
3365
- logger10.warn(
3482
+ logger11.warn(
3366
3483
  {
3367
3484
  baseSelector,
3368
3485
  toolCallId,
@@ -3405,7 +3522,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3405
3522
  const fallbackValue = item[propName];
3406
3523
  if (fallbackValue !== null && fallbackValue !== void 0) {
3407
3524
  extractedItem[propName] = fallbackValue;
3408
- logger10.info(
3525
+ logger11.info(
3409
3526
  { propName, propSelector, context },
3410
3527
  `PropSelector failed, used fallback direct property access`
3411
3528
  );
@@ -3417,7 +3534,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3417
3534
  const fallbackValue = item[propName];
3418
3535
  if (fallbackValue !== null && fallbackValue !== void 0) {
3419
3536
  extractedItem[propName] = fallbackValue;
3420
- logger10.warn(
3537
+ logger11.warn(
3421
3538
  { propName, propSelector, context, error: error.message },
3422
3539
  `PropSelector syntax error, used fallback direct property access`
3423
3540
  );
@@ -3530,7 +3647,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3530
3647
  warnings
3531
3648
  };
3532
3649
  } catch (error) {
3533
- logger10.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3650
+ logger11.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3534
3651
  return {
3535
3652
  saved: false,
3536
3653
  error: `[toolCallId: ${toolCallId}] ${error instanceof Error ? error.message : "Unknown error"}`,
@@ -3542,7 +3659,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3542
3659
  }
3543
3660
 
3544
3661
  // src/a2a/client.ts
3545
- var logger11 = getLogger("a2aClient");
3662
+ var logger12 = getLogger("a2aClient");
3546
3663
  var DEFAULT_BACKOFF = {
3547
3664
  initialInterval: 500,
3548
3665
  maxInterval: 6e4,
@@ -3748,7 +3865,7 @@ var A2AClient = class {
3748
3865
  try {
3749
3866
  const res = await fn();
3750
3867
  if (attempt > 0) {
3751
- logger11.info(
3868
+ logger12.info(
3752
3869
  {
3753
3870
  attempts: attempt + 1,
3754
3871
  elapsedTime: Date.now() - start
@@ -3763,7 +3880,7 @@ var A2AClient = class {
3763
3880
  }
3764
3881
  const elapsed = Date.now() - start;
3765
3882
  if (elapsed > maxElapsedTime) {
3766
- logger11.warn(
3883
+ logger12.warn(
3767
3884
  {
3768
3885
  attempts: attempt + 1,
3769
3886
  elapsedTime: elapsed,
@@ -3784,7 +3901,7 @@ var A2AClient = class {
3784
3901
  retryInterval = initialInterval * attempt ** exponent + Math.random() * 1e3;
3785
3902
  }
3786
3903
  const delayMs = Math.min(retryInterval, maxInterval);
3787
- logger11.info(
3904
+ logger12.info(
3788
3905
  {
3789
3906
  attempt: attempt + 1,
3790
3907
  delayMs,
@@ -3869,7 +3986,7 @@ var A2AClient = class {
3869
3986
  }
3870
3987
  const rpcResponse = await httpResponse.json();
3871
3988
  if (rpcResponse.id !== requestId2) {
3872
- logger11.warn(
3989
+ logger12.warn(
3873
3990
  {
3874
3991
  method,
3875
3992
  expectedId: requestId2,
@@ -4068,7 +4185,7 @@ var A2AClient = class {
4068
4185
  try {
4069
4186
  while (true) {
4070
4187
  const { done, value } = await reader.read();
4071
- logger11.info({ done, value }, "parseA2ASseStream");
4188
+ logger12.info({ done, value }, "parseA2ASseStream");
4072
4189
  if (done) {
4073
4190
  if (eventDataBuffer.trim()) {
4074
4191
  const result = this._processSseEventData(
@@ -4155,7 +4272,7 @@ var A2AClient = class {
4155
4272
  };
4156
4273
 
4157
4274
  // src/agents/relationTools.ts
4158
- var logger12 = getLogger("relationships Tools");
4275
+ var logger13 = getLogger("relationships Tools");
4159
4276
  var generateTransferToolDescription = (config) => {
4160
4277
  return `Hand off the conversation to agent ${config.id}.
4161
4278
 
@@ -4193,7 +4310,7 @@ var createTransferToAgentTool = ({
4193
4310
  "transfer.to_agent_id": transferConfig.id ?? "unknown"
4194
4311
  });
4195
4312
  }
4196
- logger12.info(
4313
+ logger13.info(
4197
4314
  {
4198
4315
  transferTo: transferConfig.id ?? "unknown",
4199
4316
  fromAgent: callingAgentId
@@ -4341,7 +4458,7 @@ function createDelegateToAgentTool({
4341
4458
  ...isInternal ? { fromAgentId: callingAgentId } : { fromExternalAgentId: callingAgentId }
4342
4459
  }
4343
4460
  };
4344
- logger12.info({ messageToSend }, "messageToSend");
4461
+ logger13.info({ messageToSend }, "messageToSend");
4345
4462
  await createMessage(dbClient_default)({
4346
4463
  id: nanoid(),
4347
4464
  tenantId,
@@ -4403,7 +4520,7 @@ function createDelegateToAgentTool({
4403
4520
  }
4404
4521
 
4405
4522
  // src/agents/SystemPromptBuilder.ts
4406
- var logger13 = getLogger("SystemPromptBuilder");
4523
+ var logger14 = getLogger("SystemPromptBuilder");
4407
4524
  var SystemPromptBuilder = class {
4408
4525
  constructor(version, versionConfig) {
4409
4526
  this.version = version;
@@ -4419,9 +4536,12 @@ var SystemPromptBuilder = class {
4419
4536
  this.templates.set(name, content);
4420
4537
  }
4421
4538
  this.loaded = true;
4422
- logger13.debug({ templateCount: this.templates.size, version: this.version }, `Loaded ${this.templates.size} templates for version ${this.version}`);
4539
+ logger14.debug(
4540
+ { templateCount: this.templates.size, version: this.version },
4541
+ `Loaded ${this.templates.size} templates for version ${this.version}`
4542
+ );
4423
4543
  } catch (error) {
4424
- logger13.error({ error }, `Failed to load templates for version ${this.version}`);
4544
+ logger14.error({ error }, `Failed to load templates for version ${this.version}`);
4425
4545
  throw new Error(`Template loading failed: ${error}`);
4426
4546
  }
4427
4547
  }
@@ -4823,7 +4943,7 @@ function hasToolCallWithPrefix(prefix) {
4823
4943
  return false;
4824
4944
  };
4825
4945
  }
4826
- var logger14 = getLogger("Agent");
4946
+ var logger15 = getLogger("Agent");
4827
4947
  var CONSTANTS = {
4828
4948
  MAX_GENERATION_STEPS: 12,
4829
4949
  PHASE_1_TIMEOUT_MS: 27e4,
@@ -5076,14 +5196,14 @@ var Agent = class {
5076
5196
  for (const toolSet of tools) {
5077
5197
  for (const [toolName, originalTool] of Object.entries(toolSet)) {
5078
5198
  if (!isValidTool(originalTool)) {
5079
- logger14.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5199
+ logger15.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5080
5200
  continue;
5081
5201
  }
5082
5202
  const sessionWrappedTool = tool({
5083
5203
  description: originalTool.description,
5084
5204
  inputSchema: originalTool.inputSchema,
5085
5205
  execute: async (args, { toolCallId }) => {
5086
- logger14.debug({ toolName, toolCallId }, "MCP Tool Called");
5206
+ logger15.debug({ toolName, toolCallId }, "MCP Tool Called");
5087
5207
  try {
5088
5208
  const result = await originalTool.execute(args, { toolCallId });
5089
5209
  toolSessionManager.recordToolResult(sessionId, {
@@ -5095,7 +5215,7 @@ var Agent = class {
5095
5215
  });
5096
5216
  return { result, toolCallId };
5097
5217
  } catch (error) {
5098
- logger14.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5218
+ logger15.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5099
5219
  throw error;
5100
5220
  }
5101
5221
  }
@@ -5180,7 +5300,7 @@ var Agent = class {
5180
5300
  selectedTools
5181
5301
  };
5182
5302
  }
5183
- logger14.info(
5303
+ logger15.info(
5184
5304
  {
5185
5305
  toolName: tool4.name,
5186
5306
  credentialReferenceId,
@@ -5220,7 +5340,7 @@ var Agent = class {
5220
5340
  async getResolvedContext(conversationId, requestContext) {
5221
5341
  try {
5222
5342
  if (!this.config.contextConfigId) {
5223
- logger14.debug({ graphId: this.config.graphId }, "No context config found for graph");
5343
+ logger15.debug({ graphId: this.config.graphId }, "No context config found for graph");
5224
5344
  return null;
5225
5345
  }
5226
5346
  const contextConfig = await getContextConfigById(dbClient_default)({
@@ -5228,7 +5348,7 @@ var Agent = class {
5228
5348
  id: this.config.contextConfigId
5229
5349
  });
5230
5350
  if (!contextConfig) {
5231
- logger14.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5351
+ logger15.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5232
5352
  return null;
5233
5353
  }
5234
5354
  if (!this.contextResolver) {
@@ -5245,7 +5365,7 @@ var Agent = class {
5245
5365
  $now: (/* @__PURE__ */ new Date()).toISOString(),
5246
5366
  $env: process.env
5247
5367
  };
5248
- logger14.debug(
5368
+ logger15.debug(
5249
5369
  {
5250
5370
  conversationId,
5251
5371
  contextConfigId: contextConfig.id,
@@ -5259,7 +5379,7 @@ var Agent = class {
5259
5379
  );
5260
5380
  return contextWithBuiltins;
5261
5381
  } catch (error) {
5262
- logger14.error(
5382
+ logger15.error(
5263
5383
  {
5264
5384
  conversationId,
5265
5385
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5283,7 +5403,7 @@ var Agent = class {
5283
5403
  });
5284
5404
  return graphDefinition?.graphPrompt || void 0;
5285
5405
  } catch (error) {
5286
- logger14.warn(
5406
+ logger15.warn(
5287
5407
  {
5288
5408
  graphId: this.config.graphId,
5289
5409
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5310,7 +5430,7 @@ var Agent = class {
5310
5430
  }
5311
5431
  return !!(graphDefinition.artifactComponents && Object.keys(graphDefinition.artifactComponents).length > 0);
5312
5432
  } catch (error) {
5313
- logger14.warn(
5433
+ logger15.warn(
5314
5434
  {
5315
5435
  graphId: this.config.graphId,
5316
5436
  tenantId: this.config.tenantId,
@@ -5370,7 +5490,7 @@ Key requirements:
5370
5490
  preserveUnresolved: false
5371
5491
  });
5372
5492
  } catch (error) {
5373
- logger14.error(
5493
+ logger15.error(
5374
5494
  {
5375
5495
  conversationId,
5376
5496
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5415,7 +5535,7 @@ Key requirements:
5415
5535
  preserveUnresolved: false
5416
5536
  });
5417
5537
  } catch (error) {
5418
- logger14.error(
5538
+ logger15.error(
5419
5539
  {
5420
5540
  conversationId,
5421
5541
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5443,7 +5563,7 @@ Key requirements:
5443
5563
  artifactId: z.string().describe("The unique identifier of the artifact to get.")
5444
5564
  }),
5445
5565
  execute: async ({ artifactId }) => {
5446
- logger14.info({ artifactId }, "get_artifact executed");
5566
+ logger15.info({ artifactId }, "get_artifact executed");
5447
5567
  const artifact = await getLedgerArtifacts(dbClient_default)({
5448
5568
  scopes: {
5449
5569
  tenantId: this.config.tenantId,
@@ -5510,7 +5630,7 @@ Key requirements:
5510
5630
  graphId: this.config.graphId
5511
5631
  });
5512
5632
  } catch (error) {
5513
- logger14.error(
5633
+ logger15.error(
5514
5634
  { error, graphId: this.config.graphId },
5515
5635
  "Failed to check graph artifact components"
5516
5636
  );
@@ -5614,7 +5734,7 @@ Key requirements:
5614
5734
  const configuredTimeout = modelSettings.maxDuration ? Math.min(modelSettings.maxDuration * 1e3, MAX_ALLOWED_TIMEOUT_MS) : shouldStreamPhase1 ? CONSTANTS.PHASE_1_TIMEOUT_MS : CONSTANTS.NON_STREAMING_PHASE_1_TIMEOUT_MS;
5615
5735
  const timeoutMs = Math.min(configuredTimeout, MAX_ALLOWED_TIMEOUT_MS);
5616
5736
  if (modelSettings.maxDuration && modelSettings.maxDuration * 1e3 > MAX_ALLOWED_TIMEOUT_MS) {
5617
- logger14.warn(
5737
+ logger15.warn(
5618
5738
  {
5619
5739
  requestedTimeout: modelSettings.maxDuration * 1e3,
5620
5740
  appliedTimeout: timeoutMs,
@@ -5656,7 +5776,7 @@ Key requirements:
5656
5776
  }
5657
5777
  );
5658
5778
  } catch (error) {
5659
- logger14.debug({ error }, "Failed to track agent reasoning");
5779
+ logger15.debug({ error }, "Failed to track agent reasoning");
5660
5780
  }
5661
5781
  }
5662
5782
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -5739,7 +5859,7 @@ Key requirements:
5739
5859
  }
5740
5860
  );
5741
5861
  } catch (error) {
5742
- logger14.debug({ error }, "Failed to track agent reasoning");
5862
+ logger15.debug({ error }, "Failed to track agent reasoning");
5743
5863
  }
5744
5864
  }
5745
5865
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -5784,7 +5904,7 @@ Key requirements:
5784
5904
  return;
5785
5905
  }
5786
5906
  if (toolName === "save_artifact_tool" || toolName === "save_tool_result") {
5787
- logger14.info({ result }, "save_artifact_tool or save_tool_result");
5907
+ logger15.info({ result }, "save_artifact_tool or save_tool_result");
5788
5908
  if (result.output.artifacts) {
5789
5909
  for (const artifact of result.output.artifacts) {
5790
5910
  const artifactId = artifact?.artifactId || "N/A";
@@ -5859,7 +5979,7 @@ ${output}`;
5859
5979
  { role: "user", content: userMessage },
5860
5980
  ...reasoningFlow,
5861
5981
  {
5862
- role: "system",
5982
+ role: "user",
5863
5983
  content: await this.buildPhase2SystemPrompt()
5864
5984
  }
5865
5985
  ],
@@ -5955,7 +6075,9 @@ async function resolveModelConfig(graphId, agent) {
5955
6075
  summarizer: agent.models?.summarizer || project.models.summarizer || project.models.base
5956
6076
  };
5957
6077
  }
5958
- throw new Error("Base model configuration is required. Please configure models at the project level.");
6078
+ throw new Error(
6079
+ "Base model configuration is required. Please configure models at the project level."
6080
+ );
5959
6081
  }
5960
6082
 
5961
6083
  // src/agents/generateTaskHandler.ts
@@ -5969,7 +6091,7 @@ function parseEmbeddedJson(data) {
5969
6091
  }
5970
6092
  });
5971
6093
  }
5972
- var logger15 = getLogger("generateTaskHandler");
6094
+ var logger16 = getLogger("generateTaskHandler");
5973
6095
  var createTaskHandler = (config, credentialStoreRegistry) => {
5974
6096
  return async (task) => {
5975
6097
  try {
@@ -6019,7 +6141,33 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6019
6141
  agentId: config.agentId
6020
6142
  })
6021
6143
  ]);
6022
- logger15.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
6144
+ logger16.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
6145
+ const enhancedInternalRelations = await Promise.all(
6146
+ internalRelations.map(async (relation) => {
6147
+ try {
6148
+ const relatedAgent = await getAgentById(dbClient_default)({
6149
+ scopes: { tenantId: config.tenantId, projectId: config.projectId },
6150
+ agentId: relation.id
6151
+ });
6152
+ if (relatedAgent) {
6153
+ const relatedAgentRelations = await getRelatedAgentsForGraph(dbClient_default)({
6154
+ scopes: { tenantId: config.tenantId, projectId: config.projectId },
6155
+ graphId: config.graphId,
6156
+ agentId: relation.id
6157
+ });
6158
+ const enhancedDescription = generateDescriptionWithTransfers(
6159
+ relation.description || "",
6160
+ relatedAgentRelations.internalRelations,
6161
+ relatedAgentRelations.externalRelations
6162
+ );
6163
+ return { ...relation, description: enhancedDescription };
6164
+ }
6165
+ } catch (error) {
6166
+ logger16.warn({ agentId: relation.id, error }, "Failed to enhance agent description");
6167
+ }
6168
+ return relation;
6169
+ })
6170
+ );
6023
6171
  const agentPrompt = "prompt" in config.agentSchema ? config.agentSchema.prompt : "";
6024
6172
  const models = "models" in config.agentSchema ? config.agentSchema.models : void 0;
6025
6173
  const stopWhen = "stopWhen" in config.agentSchema ? config.agentSchema.stopWhen : void 0;
@@ -6036,7 +6184,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6036
6184
  agentPrompt,
6037
6185
  models: models || void 0,
6038
6186
  stopWhen: stopWhen || void 0,
6039
- agentRelations: internalRelations.map((relation) => ({
6187
+ agentRelations: enhancedInternalRelations.map((relation) => ({
6040
6188
  id: relation.id,
6041
6189
  tenantId: config.tenantId,
6042
6190
  projectId: config.projectId,
@@ -6050,7 +6198,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6050
6198
  agentRelations: [],
6051
6199
  transferRelations: []
6052
6200
  })),
6053
- transferRelations: internalRelations.filter((relation) => relation.relationType === "transfer").map((relation) => ({
6201
+ transferRelations: enhancedInternalRelations.filter((relation) => relation.relationType === "transfer").map((relation) => ({
6054
6202
  baseUrl: config.baseUrl,
6055
6203
  apiKey: config.apiKey,
6056
6204
  id: relation.id,
@@ -6066,7 +6214,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6066
6214
  })),
6067
6215
  delegateRelations: [
6068
6216
  // Internal delegate relations
6069
- ...internalRelations.filter((relation) => relation.relationType === "delegate").map((relation) => ({
6217
+ ...enhancedInternalRelations.filter((relation) => relation.relationType === "delegate").map((relation) => ({
6070
6218
  type: "internal",
6071
6219
  config: {
6072
6220
  id: relation.id,
@@ -6119,7 +6267,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6119
6267
  const taskIdMatch = task.id.match(/^task_([^-]+-[^-]+-\d+)-/);
6120
6268
  if (taskIdMatch) {
6121
6269
  contextId = taskIdMatch[1];
6122
- logger15.info(
6270
+ logger16.info(
6123
6271
  {
6124
6272
  taskId: task.id,
6125
6273
  extractedContextId: contextId,
@@ -6135,7 +6283,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6135
6283
  const isDelegation = task.context?.metadata?.isDelegation === true;
6136
6284
  agent.setDelegationStatus(isDelegation);
6137
6285
  if (isDelegation) {
6138
- logger15.info(
6286
+ logger16.info(
6139
6287
  { agentId: config.agentId, taskId: task.id },
6140
6288
  "Delegated agent - streaming disabled"
6141
6289
  );
@@ -6340,84 +6488,10 @@ async function getRegisteredGraph(executionContext) {
6340
6488
  const agentFrameworkBaseUrl = `${baseUrl}/agents`;
6341
6489
  return hydrateGraph({ dbGraph, baseUrl: agentFrameworkBaseUrl, apiKey });
6342
6490
  }
6343
- getLogger("agents");
6344
- async function hydrateAgent({
6345
- dbAgent,
6346
- graphId,
6347
- baseUrl,
6348
- apiKey,
6349
- credentialStoreRegistry
6350
- }) {
6351
- try {
6352
- const taskHandlerConfig = await createTaskHandlerConfig({
6353
- tenantId: dbAgent.tenantId,
6354
- projectId: dbAgent.projectId,
6355
- graphId,
6356
- agentId: dbAgent.id,
6357
- baseUrl,
6358
- apiKey
6359
- });
6360
- const taskHandler = createTaskHandler(taskHandlerConfig, credentialStoreRegistry);
6361
- const agentCard = {
6362
- name: dbAgent.name,
6363
- description: dbAgent.description || "AI Agent",
6364
- url: baseUrl ? `${baseUrl}/a2a` : "",
6365
- version: "1.0.0",
6366
- capabilities: {
6367
- streaming: true,
6368
- // Enable streaming for A2A compliance
6369
- pushNotifications: false,
6370
- stateTransitionHistory: false
6371
- },
6372
- defaultInputModes: ["text", "text/plain"],
6373
- defaultOutputModes: ["text", "text/plain"],
6374
- skills: [],
6375
- // Add provider info if available
6376
- ...baseUrl && {
6377
- provider: {
6378
- organization: "Inkeep",
6379
- url: baseUrl
6380
- }
6381
- }
6382
- };
6383
- return {
6384
- agentId: dbAgent.id,
6385
- tenantId: dbAgent.tenantId,
6386
- projectId: dbAgent.projectId,
6387
- graphId,
6388
- agentCard,
6389
- taskHandler
6390
- };
6391
- } catch (error) {
6392
- console.error(`\u274C Failed to hydrate agent ${dbAgent.id}:`, error);
6393
- throw error;
6394
- }
6395
- }
6396
- async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
6397
- const { tenantId, projectId, graphId, agentId, baseUrl, apiKey } = executionContext;
6398
- if (!agentId) {
6399
- throw new Error("Agent ID is required");
6400
- }
6401
- const dbAgent = await getAgentById(dbClient_default)({
6402
- scopes: { tenantId, projectId },
6403
- agentId
6404
- });
6405
- if (!dbAgent) {
6406
- return null;
6407
- }
6408
- const agentFrameworkBaseUrl = `${baseUrl}/agents`;
6409
- return hydrateAgent({
6410
- dbAgent,
6411
- graphId,
6412
- baseUrl: agentFrameworkBaseUrl,
6413
- credentialStoreRegistry,
6414
- apiKey
6415
- });
6416
- }
6417
6491
 
6418
6492
  // src/routes/agents.ts
6419
6493
  var app = new OpenAPIHono();
6420
- var logger16 = getLogger("agents");
6494
+ var logger17 = getLogger("agents");
6421
6495
  app.openapi(
6422
6496
  createRoute({
6423
6497
  method: "get",
@@ -6455,7 +6529,7 @@ app.openapi(
6455
6529
  tracestate: c.req.header("tracestate"),
6456
6530
  baggage: c.req.header("baggage")
6457
6531
  };
6458
- logger16.info(
6532
+ logger17.info(
6459
6533
  {
6460
6534
  otelHeaders,
6461
6535
  path: c.req.path,
@@ -6466,7 +6540,7 @@ app.openapi(
6466
6540
  const executionContext = getRequestExecutionContext(c);
6467
6541
  const { tenantId, projectId, graphId, agentId } = executionContext;
6468
6542
  if (agentId) {
6469
- logger16.info(
6543
+ logger17.info(
6470
6544
  {
6471
6545
  message: "getRegisteredAgent (agent-level)",
6472
6546
  tenantId,
@@ -6478,13 +6552,13 @@ app.openapi(
6478
6552
  );
6479
6553
  const credentialStores = c.get("credentialStores");
6480
6554
  const agent = await getRegisteredAgent(executionContext, credentialStores);
6481
- logger16.info({ agent }, "agent registered: well-known agent.json");
6555
+ logger17.info({ agent }, "agent registered: well-known agent.json");
6482
6556
  if (!agent) {
6483
6557
  return c.json({ error: "Agent not found" }, 404);
6484
6558
  }
6485
6559
  return c.json(agent.agentCard);
6486
6560
  } else {
6487
- logger16.info(
6561
+ logger17.info(
6488
6562
  {
6489
6563
  message: "getRegisteredGraph (graph-level)",
6490
6564
  tenantId,
@@ -6507,7 +6581,7 @@ app.post("/a2a", async (c) => {
6507
6581
  tracestate: c.req.header("tracestate"),
6508
6582
  baggage: c.req.header("baggage")
6509
6583
  };
6510
- logger16.info(
6584
+ logger17.info(
6511
6585
  {
6512
6586
  otelHeaders,
6513
6587
  path: c.req.path,
@@ -6518,7 +6592,7 @@ app.post("/a2a", async (c) => {
6518
6592
  const executionContext = getRequestExecutionContext(c);
6519
6593
  const { tenantId, projectId, graphId, agentId } = executionContext;
6520
6594
  if (agentId) {
6521
- logger16.info(
6595
+ logger17.info(
6522
6596
  {
6523
6597
  message: "a2a (agent-level)",
6524
6598
  tenantId,
@@ -6542,7 +6616,7 @@ app.post("/a2a", async (c) => {
6542
6616
  }
6543
6617
  return a2aHandler(c, agent);
6544
6618
  } else {
6545
- logger16.info(
6619
+ logger17.info(
6546
6620
  {
6547
6621
  message: "a2a (graph-level)",
6548
6622
  tenantId,
@@ -6582,14 +6656,14 @@ app.post("/a2a", async (c) => {
6582
6656
  }
6583
6657
  });
6584
6658
  var agents_default = app;
6585
- var logger17 = getLogger("Transfer");
6659
+ var logger18 = getLogger("Transfer");
6586
6660
  async function executeTransfer({
6587
6661
  tenantId,
6588
6662
  threadId,
6589
6663
  projectId,
6590
6664
  targetAgentId
6591
6665
  }) {
6592
- logger17.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
6666
+ logger18.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
6593
6667
  await setActiveAgentForThread(dbClient_default)({
6594
6668
  scopes: { tenantId, projectId },
6595
6669
  threadId,
@@ -6784,7 +6858,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
6784
6858
  __publicField(this, "queuedOperations", []);
6785
6859
  // Timing tracking for text sequences (text-end to text-start gap)
6786
6860
  __publicField(this, "lastTextEndTimestamp", 0);
6787
- __publicField(this, "TEXT_GAP_THRESHOLD", 1e3);
6861
+ __publicField(this, "TEXT_GAP_THRESHOLD", 50);
6788
6862
  // milliseconds - if gap between text sequences is less than this, queue operations
6789
6863
  // Connection management and forced cleanup
6790
6864
  __publicField(this, "connectionDropTimer");
@@ -7130,7 +7204,7 @@ var MCPStreamHelper = class {
7130
7204
  function createMCPStreamHelper() {
7131
7205
  return new MCPStreamHelper();
7132
7206
  }
7133
- var logger18 = getLogger("ExecutionHandler");
7207
+ var logger19 = getLogger("ExecutionHandler");
7134
7208
  var ExecutionHandler = class {
7135
7209
  constructor() {
7136
7210
  // Hardcoded error limit - separate from configurable stopWhen
@@ -7155,7 +7229,7 @@ var ExecutionHandler = class {
7155
7229
  const { tenantId, projectId, graphId, apiKey, baseUrl } = executionContext;
7156
7230
  registerStreamHelper(requestId2, sseHelper);
7157
7231
  graphSessionManager.createSession(requestId2, graphId, tenantId, projectId);
7158
- logger18.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
7232
+ logger19.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
7159
7233
  let graphConfig = null;
7160
7234
  try {
7161
7235
  graphConfig = await getFullGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
@@ -7167,7 +7241,7 @@ var ExecutionHandler = class {
7167
7241
  );
7168
7242
  }
7169
7243
  } catch (error) {
7170
- logger18.error(
7244
+ logger19.error(
7171
7245
  {
7172
7246
  error: error instanceof Error ? error.message : "Unknown error",
7173
7247
  stack: error instanceof Error ? error.stack : void 0
@@ -7183,7 +7257,7 @@ var ExecutionHandler = class {
7183
7257
  try {
7184
7258
  await sseHelper.writeOperation(agentInitializingOp(requestId2, graphId));
7185
7259
  const taskId = `task_${conversationId}-${requestId2}`;
7186
- logger18.info(
7260
+ logger19.info(
7187
7261
  { taskId, currentAgentId, conversationId, requestId: requestId2 },
7188
7262
  "Attempting to create or reuse existing task"
7189
7263
  );
@@ -7206,7 +7280,7 @@ var ExecutionHandler = class {
7206
7280
  agent_id: currentAgentId
7207
7281
  }
7208
7282
  });
7209
- logger18.info(
7283
+ logger19.info(
7210
7284
  {
7211
7285
  taskId,
7212
7286
  createdTaskMetadata: Array.isArray(task) ? task[0]?.metadata : task?.metadata
@@ -7215,27 +7289,27 @@ var ExecutionHandler = class {
7215
7289
  );
7216
7290
  } catch (error) {
7217
7291
  if (error?.message?.includes("UNIQUE constraint failed") || error?.message?.includes("PRIMARY KEY constraint failed") || error?.code === "SQLITE_CONSTRAINT_PRIMARYKEY") {
7218
- logger18.info(
7292
+ logger19.info(
7219
7293
  { taskId, error: error.message },
7220
7294
  "Task already exists, fetching existing task"
7221
7295
  );
7222
7296
  const existingTask = await getTask(dbClient_default)({ id: taskId });
7223
7297
  if (existingTask) {
7224
7298
  task = existingTask;
7225
- logger18.info(
7299
+ logger19.info(
7226
7300
  { taskId, existingTask },
7227
7301
  "Successfully reused existing task from race condition"
7228
7302
  );
7229
7303
  } else {
7230
- logger18.error({ taskId, error }, "Task constraint failed but task not found");
7304
+ logger19.error({ taskId, error }, "Task constraint failed but task not found");
7231
7305
  throw error;
7232
7306
  }
7233
7307
  } else {
7234
- logger18.error({ taskId, error }, "Failed to create task due to non-constraint error");
7308
+ logger19.error({ taskId, error }, "Failed to create task due to non-constraint error");
7235
7309
  throw error;
7236
7310
  }
7237
7311
  }
7238
- logger18.debug(
7312
+ logger19.debug(
7239
7313
  {
7240
7314
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7241
7315
  executionType: "create_initial_task",
@@ -7253,7 +7327,7 @@ var ExecutionHandler = class {
7253
7327
  const maxTransfers = graphConfig?.stopWhen?.transferCountIs ?? 10;
7254
7328
  while (iterations < maxTransfers) {
7255
7329
  iterations++;
7256
- logger18.info(
7330
+ logger19.info(
7257
7331
  { iterations, currentAgentId, graphId, conversationId, fromAgentId },
7258
7332
  `Execution loop iteration ${iterations} with agent ${currentAgentId}, transfer from: ${fromAgentId || "none"}`
7259
7333
  );
@@ -7261,10 +7335,10 @@ var ExecutionHandler = class {
7261
7335
  scopes: { tenantId, projectId },
7262
7336
  conversationId
7263
7337
  });
7264
- logger18.info({ activeAgent }, "activeAgent");
7338
+ logger19.info({ activeAgent }, "activeAgent");
7265
7339
  if (activeAgent && activeAgent.activeAgentId !== currentAgentId) {
7266
7340
  currentAgentId = activeAgent.activeAgentId;
7267
- logger18.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
7341
+ logger19.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
7268
7342
  }
7269
7343
  const agentBaseUrl = `${baseUrl}/agents`;
7270
7344
  const a2aClient = new A2AClient(agentBaseUrl, {
@@ -7305,13 +7379,13 @@ var ExecutionHandler = class {
7305
7379
  });
7306
7380
  if (!messageResponse?.result) {
7307
7381
  errorCount++;
7308
- logger18.error(
7382
+ logger19.error(
7309
7383
  { currentAgentId, iterations, errorCount },
7310
7384
  `No response from agent ${currentAgentId} on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
7311
7385
  );
7312
7386
  if (errorCount >= this.MAX_ERRORS) {
7313
7387
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7314
- logger18.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7388
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7315
7389
  await sseHelper.writeError(errorMessage2);
7316
7390
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7317
7391
  if (task) {
@@ -7337,7 +7411,7 @@ var ExecutionHandler = class {
7337
7411
  const transferResponse = messageResponse.result;
7338
7412
  const targetAgentId = transferResponse.artifacts?.[0]?.parts?.[0]?.data?.targetAgentId;
7339
7413
  const transferReason = transferResponse.artifacts?.[0]?.parts?.[1]?.text;
7340
- logger18.info({ targetAgentId, transferReason }, "transfer response");
7414
+ logger19.info({ targetAgentId, transferReason }, "transfer response");
7341
7415
  currentMessage = `<transfer_context> ${transferReason} </transfer_context>`;
7342
7416
  const { success, targetAgentId: newAgentId } = await executeTransfer({
7343
7417
  projectId,
@@ -7348,7 +7422,7 @@ var ExecutionHandler = class {
7348
7422
  if (success) {
7349
7423
  fromAgentId = currentAgentId;
7350
7424
  currentAgentId = newAgentId;
7351
- logger18.info(
7425
+ logger19.info(
7352
7426
  {
7353
7427
  transferFrom: fromAgentId,
7354
7428
  transferTo: currentAgentId,
@@ -7366,7 +7440,7 @@ var ExecutionHandler = class {
7366
7440
  const graphSessionData = graphSessionManager.getSession(requestId2);
7367
7441
  if (graphSessionData) {
7368
7442
  const sessionSummary = graphSessionData.getSummary();
7369
- logger18.info(sessionSummary, "GraphSession data after completion");
7443
+ logger19.info(sessionSummary, "GraphSession data after completion");
7370
7444
  }
7371
7445
  let textContent = "";
7372
7446
  for (const part of responseParts) {
@@ -7375,78 +7449,84 @@ var ExecutionHandler = class {
7375
7449
  textContent += part.text;
7376
7450
  }
7377
7451
  }
7378
- const activeSpan = trace.getActiveSpan();
7379
- if (activeSpan) {
7380
- activeSpan.setAttributes({
7381
- "ai.response.content": textContent || "No response content",
7382
- "ai.response.timestamp": (/* @__PURE__ */ new Date()).toISOString(),
7383
- "ai.agent.name": currentAgentId
7384
- });
7385
- }
7386
- await createMessage(dbClient_default)({
7387
- id: nanoid(),
7388
- tenantId,
7389
- projectId,
7390
- conversationId,
7391
- role: "agent",
7392
- content: {
7393
- text: textContent || void 0,
7394
- parts: responseParts.map((part) => ({
7395
- type: part.kind === "text" ? "text" : "data",
7396
- text: part.kind === "text" ? part.text : void 0,
7397
- data: part.kind === "data" ? JSON.stringify(part.data) : void 0
7398
- }))
7399
- },
7400
- visibility: "user-facing",
7401
- messageType: "chat",
7402
- agentId: currentAgentId,
7403
- fromAgentId: currentAgentId,
7404
- taskId: task.id
7405
- });
7406
- const updateTaskStart = Date.now();
7407
- await updateTask(dbClient_default)({
7408
- taskId: task.id,
7409
- data: {
7410
- status: "completed",
7411
- metadata: {
7412
- ...task.metadata,
7413
- completed_at: (/* @__PURE__ */ new Date()).toISOString(),
7414
- response: {
7415
- text: textContent,
7416
- parts: responseParts,
7417
- hasText: !!textContent,
7418
- hasData: responseParts.some((p) => p.kind === "data")
7452
+ return tracer.startActiveSpan("execution_handler.execute", {}, async (span) => {
7453
+ try {
7454
+ span.setAttributes({
7455
+ "ai.response.content": textContent || "No response content",
7456
+ "ai.response.timestamp": (/* @__PURE__ */ new Date()).toISOString(),
7457
+ "ai.agent.name": currentAgentId
7458
+ });
7459
+ await createMessage(dbClient_default)({
7460
+ id: nanoid(),
7461
+ tenantId,
7462
+ projectId,
7463
+ conversationId,
7464
+ role: "agent",
7465
+ content: {
7466
+ text: textContent || void 0,
7467
+ parts: responseParts.map((part) => ({
7468
+ type: part.kind === "text" ? "text" : "data",
7469
+ text: part.kind === "text" ? part.text : void 0,
7470
+ data: part.kind === "data" ? JSON.stringify(part.data) : void 0
7471
+ }))
7472
+ },
7473
+ visibility: "user-facing",
7474
+ messageType: "chat",
7475
+ agentId: currentAgentId,
7476
+ fromAgentId: currentAgentId,
7477
+ taskId: task.id
7478
+ });
7479
+ const updateTaskStart = Date.now();
7480
+ await updateTask(dbClient_default)({
7481
+ taskId: task.id,
7482
+ data: {
7483
+ status: "completed",
7484
+ metadata: {
7485
+ ...task.metadata,
7486
+ completed_at: (/* @__PURE__ */ new Date()).toISOString(),
7487
+ response: {
7488
+ text: textContent,
7489
+ parts: responseParts,
7490
+ hasText: !!textContent,
7491
+ hasData: responseParts.some((p) => p.kind === "data")
7492
+ }
7493
+ }
7419
7494
  }
7495
+ });
7496
+ const updateTaskEnd = Date.now();
7497
+ logger19.info(
7498
+ { duration: updateTaskEnd - updateTaskStart },
7499
+ "Completed updateTask operation"
7500
+ );
7501
+ await sseHelper.writeOperation(completionOp(currentAgentId, iterations));
7502
+ await sseHelper.complete();
7503
+ logger19.info({}, "Ending GraphSession and cleaning up");
7504
+ graphSessionManager.endSession(requestId2);
7505
+ logger19.info({}, "Cleaning up streamHelper");
7506
+ unregisterStreamHelper(requestId2);
7507
+ let response;
7508
+ if (sseHelper instanceof MCPStreamHelper) {
7509
+ const captured = sseHelper.getCapturedResponse();
7510
+ response = captured.text || "No response content";
7420
7511
  }
7512
+ logger19.info({}, "ExecutionHandler returning success");
7513
+ return { success: true, iterations, response };
7514
+ } catch (error) {
7515
+ setSpanWithError(span, error);
7516
+ throw error;
7517
+ } finally {
7518
+ span.end();
7421
7519
  }
7422
7520
  });
7423
- const updateTaskEnd = Date.now();
7424
- logger18.info(
7425
- { duration: updateTaskEnd - updateTaskStart },
7426
- "Completed updateTask operation"
7427
- );
7428
- await sseHelper.writeOperation(completionOp(currentAgentId, iterations));
7429
- await sseHelper.complete();
7430
- logger18.info({}, "Ending GraphSession and cleaning up");
7431
- graphSessionManager.endSession(requestId2);
7432
- logger18.info({}, "Cleaning up streamHelper");
7433
- unregisterStreamHelper(requestId2);
7434
- let response;
7435
- if (sseHelper instanceof MCPStreamHelper) {
7436
- const captured = sseHelper.getCapturedResponse();
7437
- response = captured.text || "No response content";
7438
- }
7439
- logger18.info({}, "ExecutionHandler returning success");
7440
- return { success: true, iterations, response };
7441
7521
  }
7442
7522
  errorCount++;
7443
- logger18.warn(
7523
+ logger19.warn(
7444
7524
  { iterations, errorCount },
7445
7525
  `No valid response or transfer on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
7446
7526
  );
7447
7527
  if (errorCount >= this.MAX_ERRORS) {
7448
7528
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7449
- logger18.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7529
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7450
7530
  await sseHelper.writeError(errorMessage2);
7451
7531
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7452
7532
  if (task) {
@@ -7468,7 +7548,7 @@ var ExecutionHandler = class {
7468
7548
  }
7469
7549
  }
7470
7550
  const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
7471
- logger18.error({ maxTransfers, iterations }, errorMessage);
7551
+ logger19.error({ maxTransfers, iterations }, errorMessage);
7472
7552
  await sseHelper.writeError(errorMessage);
7473
7553
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
7474
7554
  if (task) {
@@ -7488,7 +7568,7 @@ var ExecutionHandler = class {
7488
7568
  unregisterStreamHelper(requestId2);
7489
7569
  return { success: false, error: errorMessage, iterations };
7490
7570
  } catch (error) {
7491
- logger18.error({ error }, "Error in execution handler");
7571
+ logger19.error({ error }, "Error in execution handler");
7492
7572
  const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
7493
7573
  await sseHelper.writeError(`Execution error: ${errorMessage}`);
7494
7574
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
@@ -7514,7 +7594,7 @@ var ExecutionHandler = class {
7514
7594
 
7515
7595
  // src/routes/chat.ts
7516
7596
  var app2 = new OpenAPIHono();
7517
- var logger19 = getLogger("completionsHandler");
7597
+ var logger20 = getLogger("completionsHandler");
7518
7598
  var chatCompletionsRoute = createRoute({
7519
7599
  method: "post",
7520
7600
  path: "/completions",
@@ -7632,7 +7712,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7632
7712
  tracestate: c.req.header("tracestate"),
7633
7713
  baggage: c.req.header("baggage")
7634
7714
  };
7635
- logger19.info(
7715
+ logger20.info(
7636
7716
  {
7637
7717
  otelHeaders,
7638
7718
  path: c.req.path,
@@ -7718,7 +7798,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7718
7798
  dbClient_default,
7719
7799
  credentialStores
7720
7800
  );
7721
- logger19.info(
7801
+ logger20.info(
7722
7802
  {
7723
7803
  tenantId,
7724
7804
  graphId,
@@ -7764,7 +7844,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7764
7844
  return streamSSE(c, async (stream2) => {
7765
7845
  const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
7766
7846
  await sseHelper.writeRole();
7767
- logger19.info({ agentId }, "Starting execution");
7847
+ logger20.info({ agentId }, "Starting execution");
7768
7848
  const executionHandler = new ExecutionHandler();
7769
7849
  const result = await executionHandler.execute({
7770
7850
  executionContext,
@@ -7774,7 +7854,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7774
7854
  requestId: requestId2,
7775
7855
  sseHelper
7776
7856
  });
7777
- logger19.info(
7857
+ logger20.info(
7778
7858
  { result },
7779
7859
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
7780
7860
  );
@@ -7807,7 +7887,7 @@ var getMessageText = (content) => {
7807
7887
  };
7808
7888
  var chat_default = app2;
7809
7889
  var app3 = new OpenAPIHono();
7810
- var logger20 = getLogger("chatDataStream");
7890
+ var logger21 = getLogger("chatDataStream");
7811
7891
  var chatDataStreamRoute = createRoute({
7812
7892
  method: "post",
7813
7893
  path: "/chat",
@@ -7912,7 +7992,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
7912
7992
  );
7913
7993
  const lastUserMessage = body.messages.filter((m) => m.role === "user").slice(-1)[0];
7914
7994
  const userText = typeof lastUserMessage?.content === "string" ? lastUserMessage.content : lastUserMessage?.parts?.map((p) => p.text).join("") || "";
7915
- logger20.info({ userText, lastUserMessage }, "userText");
7995
+ logger21.info({ userText, lastUserMessage }, "userText");
7916
7996
  const messageSpan = trace.getActiveSpan();
7917
7997
  if (messageSpan) {
7918
7998
  messageSpan.setAttributes({
@@ -7954,7 +8034,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
7954
8034
  await streamHelper.writeError("Unable to process request");
7955
8035
  }
7956
8036
  } catch (err) {
7957
- logger20.error({ err }, "Streaming error");
8037
+ logger21.error({ err }, "Streaming error");
7958
8038
  await streamHelper.writeError("Internal server error");
7959
8039
  } finally {
7960
8040
  if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
@@ -7975,7 +8055,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
7975
8055
  )
7976
8056
  );
7977
8057
  } catch (error) {
7978
- logger20.error({ error }, "chatDataStream error");
8058
+ logger21.error({ error }, "chatDataStream error");
7979
8059
  return c.json({ error: "Failed to process chat completion" }, 500);
7980
8060
  }
7981
8061
  });
@@ -7983,7 +8063,7 @@ var chatDataStream_default = app3;
7983
8063
  function createMCPSchema(schema) {
7984
8064
  return schema;
7985
8065
  }
7986
- var logger21 = getLogger("mcp");
8066
+ var logger22 = getLogger("mcp");
7987
8067
  var _MockResponseSingleton = class _MockResponseSingleton {
7988
8068
  constructor() {
7989
8069
  __publicField(this, "mockRes");
@@ -8038,21 +8118,21 @@ var createSpoofInitMessage = (mcpProtocolVersion) => ({
8038
8118
  id: 0
8039
8119
  });
8040
8120
  var spoofTransportInitialization = async (transport, req, sessionId, mcpProtocolVersion) => {
8041
- logger21.info({ sessionId }, "Spoofing initialization message to set transport state");
8121
+ logger22.info({ sessionId }, "Spoofing initialization message to set transport state");
8042
8122
  const spoofInitMessage = createSpoofInitMessage(mcpProtocolVersion);
8043
8123
  const mockRes = MockResponseSingleton.getInstance().getMockResponse();
8044
8124
  try {
8045
8125
  await transport.handleRequest(req, mockRes, spoofInitMessage);
8046
- logger21.info({ sessionId }, "Successfully spoofed initialization");
8126
+ logger22.info({ sessionId }, "Successfully spoofed initialization");
8047
8127
  } catch (spoofError) {
8048
- logger21.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
8128
+ logger22.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
8049
8129
  }
8050
8130
  };
8051
8131
  var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8052
8132
  const sessionId = req.headers["mcp-session-id"];
8053
- logger21.info({ sessionId }, "Received MCP session ID");
8133
+ logger22.info({ sessionId }, "Received MCP session ID");
8054
8134
  if (!sessionId) {
8055
- logger21.info({ body }, "Missing session ID");
8135
+ logger22.info({ body }, "Missing session ID");
8056
8136
  res.writeHead(400).end(
8057
8137
  JSON.stringify({
8058
8138
  jsonrpc: "2.0",
@@ -8078,7 +8158,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8078
8158
  scopes: { tenantId, projectId },
8079
8159
  conversationId: sessionId
8080
8160
  });
8081
- logger21.info(
8161
+ logger22.info(
8082
8162
  {
8083
8163
  sessionId,
8084
8164
  conversationFound: !!conversation,
@@ -8089,7 +8169,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8089
8169
  "Conversation lookup result"
8090
8170
  );
8091
8171
  if (!conversation || conversation.metadata?.sessionData?.sessionType !== "mcp" || conversation.metadata?.sessionData?.graphId !== graphId) {
8092
- logger21.info(
8172
+ logger22.info(
8093
8173
  { sessionId, conversationId: conversation?.id },
8094
8174
  "MCP session not found or invalid"
8095
8175
  );
@@ -8150,7 +8230,7 @@ var executeAgentQuery = async (executionContext, conversationId, query, defaultA
8150
8230
  requestId: requestId2,
8151
8231
  sseHelper: mcpStreamHelper
8152
8232
  });
8153
- logger21.info(
8233
+ logger22.info(
8154
8234
  { result },
8155
8235
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
8156
8236
  );
@@ -8224,7 +8304,7 @@ var getServer = async (requestContext, executionContext, conversationId, credent
8224
8304
  dbClient_default,
8225
8305
  credentialStores
8226
8306
  );
8227
- logger21.info(
8307
+ logger22.info(
8228
8308
  {
8229
8309
  tenantId,
8230
8310
  graphId,
@@ -8285,7 +8365,7 @@ var validateRequestParameters = (c) => {
8285
8365
  };
8286
8366
  var handleInitializationRequest = async (body, executionContext, validatedContext, req, res, c, credentialStores) => {
8287
8367
  const { tenantId, projectId, graphId } = executionContext;
8288
- logger21.info({ body }, "Received initialization request");
8368
+ logger22.info({ body }, "Received initialization request");
8289
8369
  const sessionId = nanoid();
8290
8370
  const agentGraph = await getAgentGraphWithDefaultAgent(dbClient_default)({
8291
8371
  scopes: { tenantId, projectId },
@@ -8316,7 +8396,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8316
8396
  }
8317
8397
  }
8318
8398
  });
8319
- logger21.info(
8399
+ logger22.info(
8320
8400
  { sessionId, conversationId: conversation.id },
8321
8401
  "Created MCP session as conversation"
8322
8402
  );
@@ -8325,9 +8405,9 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8325
8405
  });
8326
8406
  const server = await getServer(validatedContext, executionContext, sessionId, credentialStores);
8327
8407
  await server.connect(transport);
8328
- logger21.info({ sessionId }, "Server connected for initialization");
8408
+ logger22.info({ sessionId }, "Server connected for initialization");
8329
8409
  res.setHeader("Mcp-Session-Id", sessionId);
8330
- logger21.info(
8410
+ logger22.info(
8331
8411
  {
8332
8412
  sessionId,
8333
8413
  bodyMethod: body?.method,
@@ -8336,7 +8416,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8336
8416
  "About to handle initialization request"
8337
8417
  );
8338
8418
  await transport.handleRequest(req, res, body);
8339
- logger21.info({ sessionId }, "Successfully handled initialization request");
8419
+ logger22.info({ sessionId }, "Successfully handled initialization request");
8340
8420
  return toFetchResponse(res);
8341
8421
  };
8342
8422
  var handleExistingSessionRequest = async (body, executionContext, validatedContext, req, res, credentialStores) => {
@@ -8364,8 +8444,8 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
8364
8444
  sessionId,
8365
8445
  conversation.metadata?.session_data?.mcpProtocolVersion
8366
8446
  );
8367
- logger21.info({ sessionId }, "Server connected and transport initialized");
8368
- logger21.info(
8447
+ logger22.info({ sessionId }, "Server connected and transport initialized");
8448
+ logger22.info(
8369
8449
  {
8370
8450
  sessionId,
8371
8451
  bodyKeys: Object.keys(body || {}),
@@ -8379,9 +8459,9 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
8379
8459
  );
8380
8460
  try {
8381
8461
  await transport.handleRequest(req, res, body);
8382
- logger21.info({ sessionId }, "Successfully handled MCP request");
8462
+ logger22.info({ sessionId }, "Successfully handled MCP request");
8383
8463
  } catch (transportError) {
8384
- logger21.error(
8464
+ logger22.error(
8385
8465
  {
8386
8466
  sessionId,
8387
8467
  error: transportError,
@@ -8432,13 +8512,13 @@ app4.openapi(
8432
8512
  }
8433
8513
  const { executionContext } = paramValidation;
8434
8514
  const body = c.get("requestBody") || {};
8435
- logger21.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
8515
+ logger22.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
8436
8516
  const isInitRequest = body.method === "initialize";
8437
8517
  const { req, res } = toReqRes(c.req.raw);
8438
8518
  const validatedContext = c.get("validatedContext") || {};
8439
8519
  const credentialStores = c.get("credentialStores");
8440
- logger21.info({ validatedContext }, "Validated context");
8441
- logger21.info({ req }, "request");
8520
+ logger22.info({ validatedContext }, "Validated context");
8521
+ logger22.info({ req }, "request");
8442
8522
  if (isInitRequest) {
8443
8523
  return await handleInitializationRequest(
8444
8524
  body,
@@ -8460,7 +8540,7 @@ app4.openapi(
8460
8540
  );
8461
8541
  }
8462
8542
  } catch (e) {
8463
- logger21.error(
8543
+ logger22.error(
8464
8544
  {
8465
8545
  error: e instanceof Error ? e.message : e,
8466
8546
  stack: e instanceof Error ? e.stack : void 0
@@ -8472,7 +8552,7 @@ app4.openapi(
8472
8552
  }
8473
8553
  );
8474
8554
  app4.get("/", async (c) => {
8475
- logger21.info({}, "Received GET MCP request");
8555
+ logger22.info({}, "Received GET MCP request");
8476
8556
  return c.json(
8477
8557
  {
8478
8558
  jsonrpc: "2.0",
@@ -8486,7 +8566,7 @@ app4.get("/", async (c) => {
8486
8566
  );
8487
8567
  });
8488
8568
  app4.delete("/", async (c) => {
8489
- logger21.info({}, "Received DELETE MCP request");
8569
+ logger22.info({}, "Received DELETE MCP request");
8490
8570
  return c.json(
8491
8571
  {
8492
8572
  jsonrpc: "2.0",
@@ -8497,7 +8577,7 @@ app4.delete("/", async (c) => {
8497
8577
  );
8498
8578
  });
8499
8579
  var mcp_default = app4;
8500
- var logger22 = getLogger("agents-run-api");
8580
+ var logger23 = getLogger("agents-run-api");
8501
8581
  function createExecutionHono(serverConfig, credentialStores) {
8502
8582
  const app6 = new OpenAPIHono();
8503
8583
  app6.use("*", otel());
@@ -8513,7 +8593,7 @@ function createExecutionHono(serverConfig, credentialStores) {
8513
8593
  const body = await c.req.json();
8514
8594
  c.set("requestBody", body);
8515
8595
  } catch (error) {
8516
- logger22.debug({ error }, "Failed to parse JSON body, continuing without parsed body");
8596
+ logger23.debug({ error }, "Failed to parse JSON body, continuing without parsed body");
8517
8597
  }
8518
8598
  }
8519
8599
  return next();
@@ -8564,8 +8644,8 @@ function createExecutionHono(serverConfig, credentialStores) {
8564
8644
  if (!isExpectedError) {
8565
8645
  const errorMessage = err instanceof Error ? err.message : String(err);
8566
8646
  const errorStack = err instanceof Error ? err.stack : void 0;
8567
- if (logger22) {
8568
- logger22.error(
8647
+ if (logger23) {
8648
+ logger23.error(
8569
8649
  {
8570
8650
  error: err,
8571
8651
  message: errorMessage,
@@ -8577,8 +8657,8 @@ function createExecutionHono(serverConfig, credentialStores) {
8577
8657
  );
8578
8658
  }
8579
8659
  } else {
8580
- if (logger22) {
8581
- logger22.error(
8660
+ if (logger23) {
8661
+ logger23.error(
8582
8662
  {
8583
8663
  error: err,
8584
8664
  path: c.req.path,
@@ -8595,8 +8675,8 @@ function createExecutionHono(serverConfig, credentialStores) {
8595
8675
  const response = err.getResponse();
8596
8676
  return response;
8597
8677
  } catch (responseError) {
8598
- if (logger22) {
8599
- logger22.error({ error: responseError }, "Error while handling HTTPException response");
8678
+ if (logger23) {
8679
+ logger23.error({ error: responseError }, "Error while handling HTTPException response");
8600
8680
  }
8601
8681
  }
8602
8682
  }
@@ -8630,7 +8710,7 @@ function createExecutionHono(serverConfig, credentialStores) {
8630
8710
  app6.use("*", async (c, next) => {
8631
8711
  const executionContext = c.get("executionContext");
8632
8712
  if (!executionContext) {
8633
- logger22.debug({}, "Empty execution context");
8713
+ logger23.debug({}, "Empty execution context");
8634
8714
  return next();
8635
8715
  }
8636
8716
  const { tenantId, projectId, graphId } = executionContext;
@@ -8639,7 +8719,7 @@ function createExecutionHono(serverConfig, credentialStores) {
8639
8719
  if (requestBody) {
8640
8720
  conversationId = requestBody.conversationId;
8641
8721
  if (!conversationId) {
8642
- logger22.debug({ requestBody }, "No conversation ID found in request body");
8722
+ logger23.debug({ requestBody }, "No conversation ID found in request body");
8643
8723
  }
8644
8724
  }
8645
8725
  const entries = Object.fromEntries(
@@ -8654,7 +8734,7 @@ function createExecutionHono(serverConfig, credentialStores) {
8654
8734
  })
8655
8735
  );
8656
8736
  if (!Object.keys(entries).length) {
8657
- logger22.debug({}, "Empty entries for baggage");
8737
+ logger23.debug({}, "Empty entries for baggage");
8658
8738
  return next();
8659
8739
  }
8660
8740
  const bag = Object.entries(entries).reduce(