@inkeep/agents-run-api 0.0.0-dev-20250915163022 → 0.0.0-dev-20250915202938

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Files changed (3)
  1. package/dist/index.cjs +425 -348
  2. package/dist/index.js +425 -348
  3. package/package.json +2 -2
package/dist/index.js CHANGED
@@ -8,7 +8,7 @@ import { resourceFromAttributes } from '@opentelemetry/resources';
8
8
  import { NodeSDK } from '@opentelemetry/sdk-node';
9
9
  import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
10
10
  import { ATTR_SERVICE_NAME } from '@opentelemetry/semantic-conventions';
11
- import { getLogger, getTracer, HeadersScopeSchema, getRequestExecutionContext, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, getAgentGraph, createTask, updateTask, updateConversation, handleApiError, setSpanWithError, TaskState, setActiveAgentForThread, getConversation, getRelatedAgentsForGraph, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, validateAndGetApiKey, getProject, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getContextConfigById, getFullGraphDefinition, TemplateEngine, graphHasArtifactComponents, MCPTransportType, getExternalAgent } from '@inkeep/agents-core';
11
+ import { getLogger, getTracer, HeadersScopeSchema, getRequestExecutionContext, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, getAgentGraph, createTask, updateTask, setSpanWithError, updateConversation, handleApiError, TaskState, setActiveAgentForThread, getConversation, getRelatedAgentsForGraph, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, validateAndGetApiKey, getProject, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getContextConfigById, getFullGraphDefinition, TemplateEngine, graphHasArtifactComponents, MCPTransportType, getExternalAgent } from '@inkeep/agents-core';
12
12
  import { OpenAPIHono, createRoute, z as z$1 } from '@hono/zod-openapi';
13
13
  import { trace, propagation, context, SpanStatusCode } from '@opentelemetry/api';
14
14
  import { Hono } from 'hono';
@@ -858,6 +858,127 @@ async function handleTasksResubscribe(c, agent, request) {
858
858
  });
859
859
  }
860
860
  }
861
+ getLogger("agents");
862
+ function createAgentCard({
863
+ dbAgent,
864
+ baseUrl
865
+ }) {
866
+ const description = dbAgent.description || "AI Agent";
867
+ return {
868
+ name: dbAgent.name,
869
+ description,
870
+ url: baseUrl ? `${baseUrl}/a2a` : "",
871
+ version: "1.0.0",
872
+ capabilities: {
873
+ streaming: true,
874
+ // Enable streaming for A2A compliance
875
+ pushNotifications: false,
876
+ stateTransitionHistory: false
877
+ },
878
+ defaultInputModes: ["text", "text/plain"],
879
+ defaultOutputModes: ["text", "text/plain"],
880
+ skills: [],
881
+ // Add provider info if available
882
+ ...baseUrl && {
883
+ provider: {
884
+ organization: "Inkeep",
885
+ url: baseUrl
886
+ }
887
+ }
888
+ };
889
+ }
890
+ function generateDescriptionWithTransfers(baseDescription, internalRelations, externalRelations) {
891
+ const transfers = [
892
+ ...internalRelations.filter((rel) => rel.relationType === "transfer"),
893
+ ...externalRelations.filter((rel) => rel.relationType === "transfer")
894
+ ];
895
+ const delegates = [
896
+ ...internalRelations.filter((rel) => rel.relationType === "delegate"),
897
+ ...externalRelations.filter((rel) => rel.relationType === "delegate")
898
+ ];
899
+ if (transfers.length === 0 && delegates.length === 0) {
900
+ return baseDescription;
901
+ }
902
+ let enhancedDescription = baseDescription;
903
+ if (transfers.length > 0) {
904
+ const transferList = transfers.map((rel) => {
905
+ const name = rel.externalAgent?.name || rel.name;
906
+ const desc = rel.externalAgent?.description || rel.description || "";
907
+ return `- ${name}: ${desc}`;
908
+ }).join("\n");
909
+ enhancedDescription += `
910
+
911
+ Can transfer to:
912
+ ${transferList}`;
913
+ }
914
+ if (delegates.length > 0) {
915
+ const delegateList = delegates.map((rel) => {
916
+ const name = rel.externalAgent?.name || rel.name;
917
+ const desc = rel.externalAgent?.description || rel.description || "";
918
+ return `- ${name}: ${desc}`;
919
+ }).join("\n");
920
+ enhancedDescription += `
921
+
922
+ Can delegate to:
923
+ ${delegateList}`;
924
+ }
925
+ return enhancedDescription;
926
+ }
927
+ async function hydrateAgent({
928
+ dbAgent,
929
+ graphId,
930
+ baseUrl,
931
+ apiKey,
932
+ credentialStoreRegistry
933
+ }) {
934
+ try {
935
+ const taskHandlerConfig = await createTaskHandlerConfig({
936
+ tenantId: dbAgent.tenantId,
937
+ projectId: dbAgent.projectId,
938
+ graphId,
939
+ agentId: dbAgent.id,
940
+ baseUrl,
941
+ apiKey
942
+ });
943
+ const taskHandler = createTaskHandler(taskHandlerConfig, credentialStoreRegistry);
944
+ const agentCard = createAgentCard({
945
+ dbAgent,
946
+ baseUrl
947
+ });
948
+ return {
949
+ agentId: dbAgent.id,
950
+ tenantId: dbAgent.tenantId,
951
+ projectId: dbAgent.projectId,
952
+ graphId,
953
+ agentCard,
954
+ taskHandler
955
+ };
956
+ } catch (error) {
957
+ console.error(`\u274C Failed to hydrate agent ${dbAgent.id}:`, error);
958
+ throw error;
959
+ }
960
+ }
961
+ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
962
+ const { tenantId, projectId, graphId, agentId, baseUrl, apiKey } = executionContext;
963
+ if (!agentId) {
964
+ throw new Error("Agent ID is required");
965
+ }
966
+ const dbAgent = await getAgentById(dbClient_default)({
967
+ scopes: { tenantId, projectId },
968
+ agentId
969
+ });
970
+ if (!dbAgent) {
971
+ return null;
972
+ }
973
+ const agentFrameworkBaseUrl = `${baseUrl}/agents`;
974
+ return hydrateAgent({
975
+ dbAgent,
976
+ graphId,
977
+ baseUrl: agentFrameworkBaseUrl,
978
+ credentialStoreRegistry,
979
+ apiKey
980
+ });
981
+ }
861
982
  function agentInitializingOp(sessionId, graphId) {
862
983
  return {
863
984
  type: "agent_initializing",
@@ -894,10 +1015,10 @@ function statusUpdateOp(ctx) {
894
1015
  ctx
895
1016
  };
896
1017
  }
897
- var logger3 = getLogger("DataComponentSchema");
1018
+ var logger4 = getLogger("DataComponentSchema");
898
1019
  function jsonSchemaToZod(jsonSchema) {
899
1020
  if (!jsonSchema || typeof jsonSchema !== "object") {
900
- logger3.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
1021
+ logger4.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
901
1022
  return z.string();
902
1023
  }
903
1024
  switch (jsonSchema.type) {
@@ -924,7 +1045,7 @@ function jsonSchemaToZod(jsonSchema) {
924
1045
  case "null":
925
1046
  return z.null();
926
1047
  default:
927
- logger3.warn(
1048
+ logger4.warn(
928
1049
  {
929
1050
  unsupportedType: jsonSchema.type,
930
1051
  schema: jsonSchema
@@ -978,7 +1099,7 @@ __publicField(_ArtifactReferenceSchema, "ARTIFACT_PROPS_SCHEMA", {
978
1099
  required: ["artifact_id", "task_id"]
979
1100
  });
980
1101
  var ArtifactReferenceSchema = _ArtifactReferenceSchema;
981
- var logger4 = getLogger("ModelFactory");
1102
+ var logger5 = getLogger("ModelFactory");
982
1103
  var _ModelFactory = class _ModelFactory {
983
1104
  /**
984
1105
  * Create a language model instance from configuration
@@ -993,7 +1114,7 @@ var _ModelFactory = class _ModelFactory {
993
1114
  const modelSettings = config;
994
1115
  const modelString = modelSettings.model.trim();
995
1116
  const { provider, modelName } = _ModelFactory.parseModelString(modelString);
996
- logger4.debug(
1117
+ logger5.debug(
997
1118
  {
998
1119
  provider,
999
1120
  model: modelName,
@@ -1014,7 +1135,7 @@ var _ModelFactory = class _ModelFactory {
1014
1135
  );
1015
1136
  }
1016
1137
  } catch (error) {
1017
- logger4.error(
1138
+ logger5.error(
1018
1139
  {
1019
1140
  provider,
1020
1141
  model: modelName,
@@ -1037,7 +1158,7 @@ var _ModelFactory = class _ModelFactory {
1037
1158
  const [provider, ...modelParts] = modelString.split("/");
1038
1159
  const normalizedProvider = provider.toLowerCase();
1039
1160
  if (!_ModelFactory.SUPPORTED_PROVIDERS.includes(normalizedProvider)) {
1040
- logger4.warn(
1161
+ logger5.warn(
1041
1162
  { provider: normalizedProvider, modelName: modelParts.join("/") },
1042
1163
  "Unsupported provider detected, falling back to anthropic"
1043
1164
  );
@@ -1066,14 +1187,14 @@ var _ModelFactory = class _ModelFactory {
1066
1187
  anthropicConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
1067
1188
  }
1068
1189
  if (providerOptions?.gateway) {
1069
- logger4.info(
1190
+ logger5.info(
1070
1191
  { gateway: providerOptions.gateway },
1071
1192
  "Setting up AI Gateway for Anthropic model"
1072
1193
  );
1073
1194
  Object.assign(anthropicConfig, providerOptions.gateway);
1074
1195
  }
1075
1196
  if (Object.keys(anthropicConfig).length > 0) {
1076
- logger4.info({ config: anthropicConfig }, "Applying custom Anthropic provider configuration");
1197
+ logger5.info({ config: anthropicConfig }, "Applying custom Anthropic provider configuration");
1077
1198
  const provider = createAnthropic(anthropicConfig);
1078
1199
  return provider(modelName);
1079
1200
  }
@@ -1088,11 +1209,11 @@ var _ModelFactory = class _ModelFactory {
1088
1209
  openaiConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
1089
1210
  }
1090
1211
  if (providerOptions?.gateway) {
1091
- logger4.info({ gateway: providerOptions.gateway }, "Setting up AI Gateway for OpenAI model");
1212
+ logger5.info({ gateway: providerOptions.gateway }, "Setting up AI Gateway for OpenAI model");
1092
1213
  Object.assign(openaiConfig, providerOptions.gateway);
1093
1214
  }
1094
1215
  if (Object.keys(openaiConfig).length > 0) {
1095
- logger4.info({ config: openaiConfig }, "Applying custom OpenAI provider configuration");
1216
+ logger5.info({ config: openaiConfig }, "Applying custom OpenAI provider configuration");
1096
1217
  const provider = createOpenAI(openaiConfig);
1097
1218
  return provider(modelName);
1098
1219
  }
@@ -1182,7 +1303,7 @@ function unregisterStreamHelper(requestId2) {
1182
1303
  }
1183
1304
 
1184
1305
  // src/utils/graph-session.ts
1185
- var logger5 = getLogger("GraphSession");
1306
+ var logger6 = getLogger("GraphSession");
1186
1307
  var GraphSession = class {
1187
1308
  // Track scheduled timeouts for cleanup
1188
1309
  constructor(sessionId, messageId, graphId, tenantId, projectId) {
@@ -1206,7 +1327,7 @@ var GraphSession = class {
1206
1327
  __publicField(this, "MAX_PENDING_ARTIFACTS", 100);
1207
1328
  // Prevent unbounded growth
1208
1329
  __publicField(this, "scheduledTimeouts");
1209
- logger5.debug({ sessionId, messageId, graphId }, "GraphSession created");
1330
+ logger6.debug({ sessionId, messageId, graphId }, "GraphSession created");
1210
1331
  }
1211
1332
  /**
1212
1333
  * Initialize status updates for this session
@@ -1220,15 +1341,15 @@ var GraphSession = class {
1220
1341
  summarizerModel,
1221
1342
  baseModel,
1222
1343
  config: {
1223
- numEvents: config.numEvents || 10,
1224
- timeInSeconds: config.timeInSeconds || 30,
1344
+ numEvents: config.numEvents || 1,
1345
+ timeInSeconds: config.timeInSeconds || 2,
1225
1346
  ...config
1226
1347
  }
1227
1348
  };
1228
1349
  if (this.statusUpdateState.config.timeInSeconds) {
1229
1350
  this.statusUpdateTimer = setInterval(async () => {
1230
1351
  if (!this.statusUpdateState || this.isEnded) {
1231
- logger5.debug(
1352
+ logger6.debug(
1232
1353
  { sessionId: this.sessionId },
1233
1354
  "Timer triggered but session already cleaned up or ended"
1234
1355
  );
@@ -1240,7 +1361,7 @@ var GraphSession = class {
1240
1361
  }
1241
1362
  await this.checkAndSendTimeBasedUpdate();
1242
1363
  }, this.statusUpdateState.config.timeInSeconds * 1e3);
1243
- logger5.info(
1364
+ logger6.info(
1244
1365
  {
1245
1366
  sessionId: this.sessionId,
1246
1367
  intervalMs: this.statusUpdateState.config.timeInSeconds * 1e3
@@ -1254,7 +1375,7 @@ var GraphSession = class {
1254
1375
  */
1255
1376
  recordEvent(eventType, agentId, data) {
1256
1377
  if (this.isEnded) {
1257
- logger5.debug(
1378
+ logger6.debug(
1258
1379
  {
1259
1380
  sessionId: this.sessionId,
1260
1381
  eventType,
@@ -1274,7 +1395,7 @@ var GraphSession = class {
1274
1395
  if (eventType === "artifact_saved" && data.pendingGeneration) {
1275
1396
  const artifactId = data.artifactId;
1276
1397
  if (this.pendingArtifacts.size >= this.MAX_PENDING_ARTIFACTS) {
1277
- logger5.warn(
1398
+ logger6.warn(
1278
1399
  {
1279
1400
  sessionId: this.sessionId,
1280
1401
  artifactId,
@@ -1295,7 +1416,7 @@ var GraphSession = class {
1295
1416
  this.artifactProcessingErrors.set(artifactId, errorCount);
1296
1417
  if (errorCount >= this.MAX_ARTIFACT_RETRIES) {
1297
1418
  this.pendingArtifacts.delete(artifactId);
1298
- logger5.error(
1419
+ logger6.error(
1299
1420
  {
1300
1421
  sessionId: this.sessionId,
1301
1422
  artifactId,
@@ -1307,7 +1428,7 @@ var GraphSession = class {
1307
1428
  "Artifact processing failed after max retries, giving up"
1308
1429
  );
1309
1430
  } else {
1310
- logger5.warn(
1431
+ logger6.warn(
1311
1432
  {
1312
1433
  sessionId: this.sessionId,
1313
1434
  artifactId,
@@ -1329,14 +1450,14 @@ var GraphSession = class {
1329
1450
  */
1330
1451
  checkStatusUpdates() {
1331
1452
  if (this.isEnded) {
1332
- logger5.debug(
1453
+ logger6.debug(
1333
1454
  { sessionId: this.sessionId },
1334
1455
  "Session has ended - skipping status update check"
1335
1456
  );
1336
1457
  return;
1337
1458
  }
1338
1459
  if (!this.statusUpdateState) {
1339
- logger5.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
1460
+ logger6.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
1340
1461
  return;
1341
1462
  }
1342
1463
  const statusUpdateState = this.statusUpdateState;
@@ -1347,11 +1468,11 @@ var GraphSession = class {
1347
1468
  */
1348
1469
  async checkAndSendTimeBasedUpdate() {
1349
1470
  if (this.isEnded) {
1350
- logger5.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
1471
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
1351
1472
  return;
1352
1473
  }
1353
1474
  if (!this.statusUpdateState) {
1354
- logger5.debug(
1475
+ logger6.debug(
1355
1476
  { sessionId: this.sessionId },
1356
1477
  "No status updates configured for time-based check"
1357
1478
  );
@@ -1364,7 +1485,7 @@ var GraphSession = class {
1364
1485
  try {
1365
1486
  await this.generateAndSendUpdate();
1366
1487
  } catch (error) {
1367
- logger5.error(
1488
+ logger6.error(
1368
1489
  {
1369
1490
  sessionId: this.sessionId,
1370
1491
  error: error instanceof Error ? error.message : "Unknown error"
@@ -1457,29 +1578,29 @@ var GraphSession = class {
1457
1578
  */
1458
1579
  async generateAndSendUpdate() {
1459
1580
  if (this.isEnded) {
1460
- logger5.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
1581
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
1461
1582
  return;
1462
1583
  }
1463
1584
  if (this.isTextStreaming) {
1464
- logger5.debug(
1585
+ logger6.debug(
1465
1586
  { sessionId: this.sessionId },
1466
1587
  "Text is currently streaming - skipping status update"
1467
1588
  );
1468
1589
  return;
1469
1590
  }
1470
1591
  if (this.isGeneratingUpdate) {
1471
- logger5.debug(
1592
+ logger6.debug(
1472
1593
  { sessionId: this.sessionId },
1473
1594
  "Update already in progress - skipping duplicate generation"
1474
1595
  );
1475
1596
  return;
1476
1597
  }
1477
1598
  if (!this.statusUpdateState) {
1478
- logger5.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
1599
+ logger6.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
1479
1600
  return;
1480
1601
  }
1481
1602
  if (!this.graphId) {
1482
- logger5.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
1603
+ logger6.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
1483
1604
  return;
1484
1605
  }
1485
1606
  const newEventCount = this.events.length - this.statusUpdateState.lastEventCount;
@@ -1492,7 +1613,7 @@ var GraphSession = class {
1492
1613
  try {
1493
1614
  const streamHelper = getStreamHelper(this.sessionId);
1494
1615
  if (!streamHelper) {
1495
- logger5.warn(
1616
+ logger6.warn(
1496
1617
  { sessionId: this.sessionId },
1497
1618
  "No stream helper found - cannot send status update"
1498
1619
  );
@@ -1513,7 +1634,7 @@ var GraphSession = class {
1513
1634
  if (result.operations && result.operations.length > 0) {
1514
1635
  for (const op of result.operations) {
1515
1636
  if (!op || !op.type || !op.data || Object.keys(op.data).length === 0) {
1516
- logger5.warn(
1637
+ logger6.warn(
1517
1638
  {
1518
1639
  sessionId: this.sessionId,
1519
1640
  operation: op
@@ -1566,7 +1687,7 @@ var GraphSession = class {
1566
1687
  this.previousSummaries.shift();
1567
1688
  }
1568
1689
  if (!operation || !operation.type || !operation.ctx) {
1569
- logger5.warn(
1690
+ logger6.warn(
1570
1691
  {
1571
1692
  sessionId: this.sessionId,
1572
1693
  operation
@@ -1581,7 +1702,7 @@ var GraphSession = class {
1581
1702
  this.statusUpdateState.lastEventCount = this.events.length;
1582
1703
  }
1583
1704
  } catch (error) {
1584
- logger5.error(
1705
+ logger6.error(
1585
1706
  {
1586
1707
  sessionId: this.sessionId,
1587
1708
  error: error instanceof Error ? error.message : "Unknown error",
@@ -1619,7 +1740,7 @@ var GraphSession = class {
1619
1740
  this.releaseUpdateLock();
1620
1741
  }
1621
1742
  } catch (error) {
1622
- logger5.error(
1743
+ logger6.error(
1623
1744
  {
1624
1745
  sessionId: this.sessionId,
1625
1746
  error: error instanceof Error ? error.message : "Unknown error"
@@ -1696,7 +1817,7 @@ User's Question/Context:
1696
1817
  ${conversationHistory}
1697
1818
  ` : "";
1698
1819
  } catch (error) {
1699
- logger5.warn(
1820
+ logger6.warn(
1700
1821
  { sessionId: this.sessionId, error },
1701
1822
  "Failed to fetch conversation history for status update"
1702
1823
  );
@@ -1748,7 +1869,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
1748
1869
  return text.trim();
1749
1870
  } catch (error) {
1750
1871
  setSpanWithError(span, error);
1751
- logger5.error({ error }, "Failed to generate summary, using fallback");
1872
+ logger6.error({ error }, "Failed to generate summary, using fallback");
1752
1873
  return this.generateFallbackSummary(newEvents, elapsedTime);
1753
1874
  } finally {
1754
1875
  span.end();
@@ -1794,7 +1915,7 @@ User's Question/Context:
1794
1915
  ${conversationHistory}
1795
1916
  ` : "";
1796
1917
  } catch (error) {
1797
- logger5.warn(
1918
+ logger6.warn(
1798
1919
  { sessionId: this.sessionId, error },
1799
1920
  "Failed to fetch conversation history for structured status update"
1800
1921
  );
@@ -1837,9 +1958,11 @@ Rules:
1837
1958
  - Labels MUST contain the ACTUAL information discovered ("Found X", "Learned Y", "Discovered Z requires A")
1838
1959
  - DO NOT use action words like "Searching", "Processing", "Analyzing" - state what was FOUND
1839
1960
  - Include specific details, numbers, requirements, or insights discovered
1840
- - You are ONE AI (no agents/delegations)
1841
- - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE INTERNAL OPERATIONS!
1842
- - Bad examples: "Searching docs", "Processing request", "Status update", or not using the no_relevant_updates: e.g. "No New Updates", "No new info to report"
1961
+ - You are ONE unified AI system - NEVER mention agents, transfers, delegations, or routing
1962
+ - CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", or any internal system terminology in labels
1963
+ - Present all operations as seamless actions by a single system
1964
+ - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE ALL INTERNAL OPERATIONS!
1965
+ - Bad examples: "Transferring to search agent", "Delegating task", "Routing request", "Processing request", or not using the no_relevant_updates
1843
1966
  - Good examples: "Slack bot needs admin privileges", "Found 3-step OAuth flow required", "Channel limit is 500 per workspace", or use the no_relevant_updates component if nothing new to report.
1844
1967
 
1845
1968
  REMEMBER YOU CAN ONLY USE 'no_relevant_updates' ALONE! IT CANNOT BE CONCATENATED WITH OTHER STATUS UPDATES!
@@ -1893,7 +2016,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
1893
2016
  return { operations };
1894
2017
  } catch (error) {
1895
2018
  setSpanWithError(span, error);
1896
- logger5.error({ error }, "Failed to generate structured update, using fallback");
2019
+ logger6.error({ error }, "Failed to generate structured update, using fallback");
1897
2020
  return { operations: [] };
1898
2021
  } finally {
1899
2022
  span.end();
@@ -2000,8 +2123,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2000
2123
  case "transfer": {
2001
2124
  const data = event.data;
2002
2125
  activities.push(
2003
- `\u{1F504} **Transfer**: ${data.fromAgent} \u2192 ${data.targetAgent}
2004
- ${data.reason ? `Reason: ${data.reason}` : "Control transfer"}
2126
+ `\u{1F504} **Continuing**: ${data.reason || "Processing request"}
2005
2127
  ${data.context ? `Context: ${JSON.stringify(data.context, null, 2)}` : ""}`
2006
2128
  );
2007
2129
  break;
@@ -2009,8 +2131,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2009
2131
  case "delegation_sent": {
2010
2132
  const data = event.data;
2011
2133
  activities.push(
2012
- `\u{1F4E4} **Delegation Sent** [${data.delegationId}]: ${data.fromAgent} \u2192 ${data.targetAgent}
2013
- Task: ${data.taskDescription}
2134
+ `\u{1F4E4} **Processing**: ${data.taskDescription}
2014
2135
  ${data.context ? `Context: ${JSON.stringify(data.context, null, 2)}` : ""}`
2015
2136
  );
2016
2137
  break;
@@ -2018,7 +2139,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2018
2139
  case "delegation_returned": {
2019
2140
  const data = event.data;
2020
2141
  activities.push(
2021
- `\u{1F4E5} **Delegation Returned** [${data.delegationId}]: ${data.fromAgent} \u2190 ${data.targetAgent}
2142
+ `\u{1F4E5} **Completed subtask**
2022
2143
  Result: ${JSON.stringify(data.result, null, 2)}`
2023
2144
  );
2024
2145
  break;
@@ -2037,16 +2158,16 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2037
2158
  case "agent_reasoning": {
2038
2159
  const data = event.data;
2039
2160
  activities.push(
2040
- `\u2699\uFE0F **Reasoning**: reasoning
2041
- Full Details: ${JSON.stringify(data.parts, null, 2)}`
2161
+ `\u2699\uFE0F **Analyzing request**
2162
+ Details: ${JSON.stringify(data.parts, null, 2)}`
2042
2163
  );
2043
2164
  break;
2044
2165
  }
2045
2166
  case "agent_generate": {
2046
2167
  const data = event.data;
2047
2168
  activities.push(
2048
- `\u2699\uFE0F **Generation**: ${data.generationType}
2049
- Full Details: ${JSON.stringify(data.parts, null, 2)}`
2169
+ `\u2699\uFE0F **Preparing response**
2170
+ Details: ${JSON.stringify(data.parts, null, 2)}`
2050
2171
  );
2051
2172
  break;
2052
2173
  }
@@ -2220,7 +2341,7 @@ Make it specific and relevant.`;
2220
2341
  taskId: artifactData.taskId,
2221
2342
  artifacts: [artifactToSave]
2222
2343
  });
2223
- logger5.info(
2344
+ logger6.info(
2224
2345
  {
2225
2346
  sessionId: this.sessionId,
2226
2347
  artifactId: artifactData.artifactId,
@@ -2237,7 +2358,7 @@ Make it specific and relevant.`;
2237
2358
  span.setStatus({ code: SpanStatusCode.OK });
2238
2359
  } catch (error) {
2239
2360
  setSpanWithError(span, error);
2240
- logger5.error(
2361
+ logger6.error(
2241
2362
  {
2242
2363
  sessionId: this.sessionId,
2243
2364
  artifactId: artifactData.artifactId,
@@ -2273,7 +2394,7 @@ Make it specific and relevant.`;
2273
2394
  taskId: artifactData.taskId,
2274
2395
  artifacts: [fallbackArtifact]
2275
2396
  });
2276
- logger5.info(
2397
+ logger6.info(
2277
2398
  {
2278
2399
  sessionId: this.sessionId,
2279
2400
  artifactId: artifactData.artifactId
@@ -2282,7 +2403,7 @@ Make it specific and relevant.`;
2282
2403
  );
2283
2404
  }
2284
2405
  } catch (fallbackError) {
2285
- logger5.error(
2406
+ logger6.error(
2286
2407
  {
2287
2408
  sessionId: this.sessionId,
2288
2409
  artifactId: artifactData.artifactId,
@@ -2309,7 +2430,7 @@ var GraphSessionManager = class {
2309
2430
  const sessionId = messageId;
2310
2431
  const session = new GraphSession(sessionId, messageId, graphId, tenantId, projectId);
2311
2432
  this.sessions.set(sessionId, session);
2312
- logger5.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
2433
+ logger6.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
2313
2434
  return sessionId;
2314
2435
  }
2315
2436
  /**
@@ -2320,7 +2441,7 @@ var GraphSessionManager = class {
2320
2441
  if (session) {
2321
2442
  session.initializeStatusUpdates(config, summarizerModel);
2322
2443
  } else {
2323
- logger5.error(
2444
+ logger6.error(
2324
2445
  {
2325
2446
  sessionId,
2326
2447
  availableSessions: Array.from(this.sessions.keys())
@@ -2341,7 +2462,7 @@ var GraphSessionManager = class {
2341
2462
  recordEvent(sessionId, eventType, agentId, data) {
2342
2463
  const session = this.sessions.get(sessionId);
2343
2464
  if (!session) {
2344
- logger5.warn({ sessionId }, "Attempted to record event in non-existent session");
2465
+ logger6.warn({ sessionId }, "Attempted to record event in non-existent session");
2345
2466
  return;
2346
2467
  }
2347
2468
  session.recordEvent(eventType, agentId, data);
@@ -2352,12 +2473,12 @@ var GraphSessionManager = class {
2352
2473
  endSession(sessionId) {
2353
2474
  const session = this.sessions.get(sessionId);
2354
2475
  if (!session) {
2355
- logger5.warn({ sessionId }, "Attempted to end non-existent session");
2476
+ logger6.warn({ sessionId }, "Attempted to end non-existent session");
2356
2477
  return [];
2357
2478
  }
2358
2479
  const events = session.getEvents();
2359
2480
  const summary = session.getSummary();
2360
- logger5.info({ sessionId, summary }, "GraphSession ended");
2481
+ logger6.info({ sessionId, summary }, "GraphSession ended");
2361
2482
  session.cleanup();
2362
2483
  this.sessions.delete(sessionId);
2363
2484
  return events;
@@ -2383,7 +2504,7 @@ var GraphSessionManager = class {
2383
2504
  }
2384
2505
  };
2385
2506
  var graphSessionManager = new GraphSessionManager();
2386
- var logger6 = getLogger("ArtifactParser");
2507
+ var logger7 = getLogger("ArtifactParser");
2387
2508
  var _ArtifactParser = class _ArtifactParser {
2388
2509
  constructor(tenantId) {
2389
2510
  this.tenantId = tenantId;
@@ -2399,9 +2520,7 @@ var _ArtifactParser = class _ArtifactParser {
2399
2520
  * More robust detection that handles streaming fragments
2400
2521
  */
2401
2522
  hasIncompleteArtifact(text) {
2402
- return /^.*<(?:artifact(?::ref)?|a(?:r(?:t(?:i(?:f(?:a(?:c(?:t(?::(?:r(?:e(?:f)?)?)?)?)?)?)?)?)?)?)?)?$/.test(
2403
- text
2404
- ) || /^.*<artifact:ref(?:[^>]*)$/.test(text) || // Incomplete artifact:ref at end
2523
+ return /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/.test(text) || /<artifact:ref[^>]+$/.test(text) || // Incomplete artifact ref at end
2405
2524
  this.findSafeTextBoundary(text) < text.length;
2406
2525
  }
2407
2526
  /**
@@ -2410,10 +2529,10 @@ var _ArtifactParser = class _ArtifactParser {
2410
2529
  */
2411
2530
  findSafeTextBoundary(text) {
2412
2531
  const endPatterns = [
2413
- /^.*<artifact:ref(?:[^/>]+(?:[^>]*[^/])?)?$/,
2532
+ /<artifact:ref(?![^>]*\/>).*$/,
2414
2533
  // artifact:ref that doesn't end with />
2415
- /^.*<(?:artifact(?::ref)?|a(?:r(?:t(?:i(?:f(?:a(?:c(?:t(?::(?:r(?:e(?:f)?)?)?)?)?)?)?)?)?)?)?)?$/
2416
- // Safe partial artifact pattern
2534
+ /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/
2535
+ // Any partial artifact pattern at end
2417
2536
  ];
2418
2537
  for (const pattern of endPatterns) {
2419
2538
  const match = text.match(pattern);
@@ -2449,7 +2568,7 @@ var _ArtifactParser = class _ArtifactParser {
2449
2568
  id: taskId
2450
2569
  });
2451
2570
  if (!task) {
2452
- logger6.warn({ taskId }, "Task not found when fetching artifacts");
2571
+ logger7.warn({ taskId }, "Task not found when fetching artifacts");
2453
2572
  continue;
2454
2573
  }
2455
2574
  const taskArtifacts = await getLedgerArtifacts(dbClient_default)({
@@ -2461,9 +2580,9 @@ var _ArtifactParser = class _ArtifactParser {
2461
2580
  artifacts.set(key, artifact);
2462
2581
  }
2463
2582
  }
2464
- logger6.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
2583
+ logger7.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
2465
2584
  } catch (error) {
2466
- logger6.error({ error, contextId }, "Error loading context artifacts");
2585
+ logger7.error({ error, contextId }, "Error loading context artifacts");
2467
2586
  }
2468
2587
  return artifacts;
2469
2588
  }
@@ -2566,7 +2685,7 @@ var _ArtifactParser = class _ArtifactParser {
2566
2685
  id: taskId
2567
2686
  });
2568
2687
  if (!task) {
2569
- logger6.warn({ taskId }, "Task not found when fetching artifact");
2688
+ logger7.warn({ taskId }, "Task not found when fetching artifact");
2570
2689
  return null;
2571
2690
  }
2572
2691
  const artifacts = await getLedgerArtifacts(dbClient_default)({
@@ -2578,7 +2697,7 @@ var _ArtifactParser = class _ArtifactParser {
2578
2697
  return this.formatArtifactData(artifacts[0], artifactId, taskId);
2579
2698
  }
2580
2699
  } catch (error) {
2581
- logger6.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
2700
+ logger7.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
2582
2701
  }
2583
2702
  return null;
2584
2703
  }
@@ -2614,11 +2733,11 @@ var _ArtifactParser = class _ArtifactParser {
2614
2733
  __publicField(_ArtifactParser, "ARTIFACT_REGEX", /<artifact:ref\s+id="([^"]*?)"\s+task="([^"]*?)"\s*\/>/gs);
2615
2734
  __publicField(_ArtifactParser, "ARTIFACT_CHECK_REGEX", /<artifact:ref\s+(?=.*id="[^"]+")(?=.*task="[^"]+")[^>]*\/>/);
2616
2735
  // Regex for catching any partial artifact pattern (< + any prefix of "artifact:ref")
2617
- __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:(r(e(f?)?)?)?)?)?)?)?)?)?)?)?)?$/g);
2736
+ __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/g);
2618
2737
  var ArtifactParser = _ArtifactParser;
2619
2738
 
2620
2739
  // src/utils/incremental-stream-parser.ts
2621
- var logger7 = getLogger("IncrementalStreamParser");
2740
+ var logger8 = getLogger("IncrementalStreamParser");
2622
2741
  var IncrementalStreamParser = class {
2623
2742
  constructor(streamHelper, tenantId, contextId) {
2624
2743
  __publicField(this, "buffer", "");
@@ -2678,13 +2797,13 @@ var IncrementalStreamParser = class {
2678
2797
  if (part.type === "tool-call-delta" && part.toolName === targetToolName) {
2679
2798
  const delta = part.argsTextDelta || "";
2680
2799
  if (jsonBuffer.length + delta.length > MAX_BUFFER_SIZE) {
2681
- logger7.warn({ bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE }, "JSON buffer exceeded maximum size, truncating");
2800
+ logger8.warn({ bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE }, "JSON buffer exceeded maximum size, truncating");
2682
2801
  jsonBuffer = jsonBuffer.slice(-MAX_BUFFER_SIZE / 2);
2683
2802
  }
2684
2803
  jsonBuffer += delta;
2685
2804
  for (const char of delta) {
2686
2805
  if (componentBuffer.length > MAX_BUFFER_SIZE) {
2687
- logger7.warn({ bufferSize: componentBuffer.length, maxSize: MAX_BUFFER_SIZE }, "Component buffer exceeded maximum size, resetting");
2806
+ logger8.warn({ bufferSize: componentBuffer.length, maxSize: MAX_BUFFER_SIZE }, "Component buffer exceeded maximum size, resetting");
2688
2807
  componentBuffer = "";
2689
2808
  depth = 0;
2690
2809
  continue;
@@ -2699,7 +2818,7 @@ var IncrementalStreamParser = class {
2699
2818
  if (componentMatch) {
2700
2819
  const MAX_COMPONENT_SIZE = 1024 * 1024;
2701
2820
  if (componentMatch[0].length > MAX_COMPONENT_SIZE) {
2702
- logger7.warn(
2821
+ logger8.warn(
2703
2822
  {
2704
2823
  size: componentMatch[0].length,
2705
2824
  maxSize: MAX_COMPONENT_SIZE
@@ -2712,7 +2831,7 @@ var IncrementalStreamParser = class {
2712
2831
  try {
2713
2832
  const component = JSON.parse(componentMatch[0]);
2714
2833
  if (typeof component !== "object" || !component.id) {
2715
- logger7.warn({ component }, "Invalid component structure, skipping");
2834
+ logger8.warn({ component }, "Invalid component structure, skipping");
2716
2835
  componentBuffer = "";
2717
2836
  continue;
2718
2837
  }
@@ -2725,7 +2844,7 @@ var IncrementalStreamParser = class {
2725
2844
  componentsStreamed++;
2726
2845
  componentBuffer = "";
2727
2846
  } catch (e) {
2728
- logger7.debug({ error: e }, "Failed to parse component, continuing to accumulate");
2847
+ logger8.debug({ error: e }, "Failed to parse component, continuing to accumulate");
2729
2848
  }
2730
2849
  }
2731
2850
  }
@@ -2742,7 +2861,7 @@ var IncrementalStreamParser = class {
2742
2861
  break;
2743
2862
  }
2744
2863
  }
2745
- logger7.debug({ componentsStreamed }, "Finished streaming components");
2864
+ logger8.debug({ componentsStreamed }, "Finished streaming components");
2746
2865
  }
2747
2866
  /**
2748
2867
  * Legacy method for backward compatibility - defaults to text processing
@@ -2886,7 +3005,7 @@ var IncrementalStreamParser = class {
2886
3005
  };
2887
3006
 
2888
3007
  // src/utils/response-formatter.ts
2889
- var logger8 = getLogger("ResponseFormatter");
3008
+ var logger9 = getLogger("ResponseFormatter");
2890
3009
  var ResponseFormatter = class {
2891
3010
  constructor(tenantId) {
2892
3011
  __publicField(this, "artifactParser");
@@ -2917,7 +3036,7 @@ var ResponseFormatter = class {
2917
3036
  return { parts };
2918
3037
  } catch (error) {
2919
3038
  setSpanWithError(span, error);
2920
- logger8.error({ error, responseObject }, "Error formatting object response");
3039
+ logger9.error({ error, responseObject }, "Error formatting object response");
2921
3040
  return {
2922
3041
  parts: [{ kind: "data", data: responseObject }]
2923
3042
  };
@@ -2968,7 +3087,7 @@ var ResponseFormatter = class {
2968
3087
  return { parts };
2969
3088
  } catch (error) {
2970
3089
  setSpanWithError(span, error);
2971
- logger8.error({ error, responseText }, "Error formatting response");
3090
+ logger9.error({ error, responseText }, "Error formatting response");
2972
3091
  return { text: responseText };
2973
3092
  } finally {
2974
3093
  span.end();
@@ -3013,7 +3132,7 @@ var ResponseFormatter = class {
3013
3132
  }
3014
3133
  }
3015
3134
  };
3016
- var logger9 = getLogger("ToolSessionManager");
3135
+ var logger10 = getLogger("ToolSessionManager");
3017
3136
  var _ToolSessionManager = class _ToolSessionManager {
3018
3137
  // 5 minutes
3019
3138
  constructor() {
@@ -3042,7 +3161,7 @@ var _ToolSessionManager = class _ToolSessionManager {
3042
3161
  createdAt: Date.now()
3043
3162
  };
3044
3163
  this.sessions.set(sessionId, session);
3045
- logger9.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
3164
+ logger10.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
3046
3165
  return sessionId;
3047
3166
  }
3048
3167
  /**
@@ -3051,7 +3170,7 @@ var _ToolSessionManager = class _ToolSessionManager {
3051
3170
  recordToolResult(sessionId, toolResult) {
3052
3171
  const session = this.sessions.get(sessionId);
3053
3172
  if (!session) {
3054
- logger9.warn(
3173
+ logger10.warn(
3055
3174
  { sessionId, toolCallId: toolResult.toolCallId },
3056
3175
  "Tool result recorded for unknown session"
3057
3176
  );
@@ -3065,12 +3184,12 @@ var _ToolSessionManager = class _ToolSessionManager {
3065
3184
  getToolResult(sessionId, toolCallId) {
3066
3185
  const session = this.sessions.get(sessionId);
3067
3186
  if (!session) {
3068
- logger9.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
3187
+ logger10.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
3069
3188
  return void 0;
3070
3189
  }
3071
3190
  const result = session.toolResults.get(toolCallId);
3072
3191
  if (!result) {
3073
- logger9.warn(
3192
+ logger10.warn(
3074
3193
  {
3075
3194
  sessionId,
3076
3195
  toolCallId,
@@ -3109,10 +3228,10 @@ var _ToolSessionManager = class _ToolSessionManager {
3109
3228
  }
3110
3229
  for (const sessionId of expiredSessions) {
3111
3230
  this.sessions.delete(sessionId);
3112
- logger9.debug({ sessionId }, "Cleaned up expired tool session");
3231
+ logger10.debug({ sessionId }, "Cleaned up expired tool session");
3113
3232
  }
3114
3233
  if (expiredSessions.length > 0) {
3115
- logger9.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
3234
+ logger10.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
3116
3235
  }
3117
3236
  }
3118
3237
  };
@@ -3121,7 +3240,7 @@ var ToolSessionManager = _ToolSessionManager;
3121
3240
  var toolSessionManager = ToolSessionManager.getInstance();
3122
3241
 
3123
3242
  // src/agents/artifactTools.ts
3124
- var logger10 = getLogger("artifactTools");
3243
+ var logger11 = getLogger("artifactTools");
3125
3244
  function buildKeyNestingMap(data, prefix = "", map = /* @__PURE__ */ new Map()) {
3126
3245
  if (typeof data === "object" && data !== null) {
3127
3246
  if (Array.isArray(data)) {
@@ -3342,7 +3461,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3342
3461
  execute: async ({ toolCallId, baseSelector, propSelectors, ...rest }, _context) => {
3343
3462
  const artifactType = "artifactType" in rest ? rest.artifactType : void 0;
3344
3463
  if (!sessionId) {
3345
- logger10.warn({ toolCallId }, "No session ID provided to save_tool_result");
3464
+ logger11.warn({ toolCallId }, "No session ID provided to save_tool_result");
3346
3465
  return {
3347
3466
  saved: false,
3348
3467
  error: `[toolCallId: ${toolCallId}] No session context available`,
@@ -3352,7 +3471,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3352
3471
  }
3353
3472
  const toolResult = toolSessionManager.getToolResult(sessionId, toolCallId);
3354
3473
  if (!toolResult) {
3355
- logger10.warn({ toolCallId, sessionId }, "Tool result not found in session");
3474
+ logger11.warn({ toolCallId, sessionId }, "Tool result not found in session");
3356
3475
  return {
3357
3476
  saved: false,
3358
3477
  error: `[toolCallId: ${toolCallId}] Tool result not found`,
@@ -3365,7 +3484,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3365
3484
  const baseData = jmespath.search(parsedResult, baseSelector);
3366
3485
  if (!baseData || Array.isArray(baseData) && baseData.length === 0) {
3367
3486
  const debugInfo = analyzeSelectorFailure(parsedResult, baseSelector);
3368
- logger10.warn(
3487
+ logger11.warn(
3369
3488
  {
3370
3489
  baseSelector,
3371
3490
  toolCallId,
@@ -3408,7 +3527,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3408
3527
  const fallbackValue = item[propName];
3409
3528
  if (fallbackValue !== null && fallbackValue !== void 0) {
3410
3529
  extractedItem[propName] = fallbackValue;
3411
- logger10.info(
3530
+ logger11.info(
3412
3531
  { propName, propSelector, context },
3413
3532
  `PropSelector failed, used fallback direct property access`
3414
3533
  );
@@ -3420,7 +3539,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3420
3539
  const fallbackValue = item[propName];
3421
3540
  if (fallbackValue !== null && fallbackValue !== void 0) {
3422
3541
  extractedItem[propName] = fallbackValue;
3423
- logger10.warn(
3542
+ logger11.warn(
3424
3543
  { propName, propSelector, context, error: error.message },
3425
3544
  `PropSelector syntax error, used fallback direct property access`
3426
3545
  );
@@ -3533,7 +3652,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3533
3652
  warnings
3534
3653
  };
3535
3654
  } catch (error) {
3536
- logger10.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3655
+ logger11.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3537
3656
  return {
3538
3657
  saved: false,
3539
3658
  error: `[toolCallId: ${toolCallId}] ${error instanceof Error ? error.message : "Unknown error"}`,
@@ -3545,7 +3664,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3545
3664
  }
3546
3665
 
3547
3666
  // src/a2a/client.ts
3548
- var logger11 = getLogger("a2aClient");
3667
+ var logger12 = getLogger("a2aClient");
3549
3668
  var DEFAULT_BACKOFF = {
3550
3669
  initialInterval: 500,
3551
3670
  maxInterval: 6e4,
@@ -3751,7 +3870,7 @@ var A2AClient = class {
3751
3870
  try {
3752
3871
  const res = await fn();
3753
3872
  if (attempt > 0) {
3754
- logger11.info(
3873
+ logger12.info(
3755
3874
  {
3756
3875
  attempts: attempt + 1,
3757
3876
  elapsedTime: Date.now() - start
@@ -3766,7 +3885,7 @@ var A2AClient = class {
3766
3885
  }
3767
3886
  const elapsed = Date.now() - start;
3768
3887
  if (elapsed > maxElapsedTime) {
3769
- logger11.warn(
3888
+ logger12.warn(
3770
3889
  {
3771
3890
  attempts: attempt + 1,
3772
3891
  elapsedTime: elapsed,
@@ -3787,7 +3906,7 @@ var A2AClient = class {
3787
3906
  retryInterval = initialInterval * attempt ** exponent + Math.random() * 1e3;
3788
3907
  }
3789
3908
  const delayMs = Math.min(retryInterval, maxInterval);
3790
- logger11.info(
3909
+ logger12.info(
3791
3910
  {
3792
3911
  attempt: attempt + 1,
3793
3912
  delayMs,
@@ -3872,7 +3991,7 @@ var A2AClient = class {
3872
3991
  }
3873
3992
  const rpcResponse = await httpResponse.json();
3874
3993
  if (rpcResponse.id !== requestId2) {
3875
- logger11.warn(
3994
+ logger12.warn(
3876
3995
  {
3877
3996
  method,
3878
3997
  expectedId: requestId2,
@@ -4071,7 +4190,7 @@ var A2AClient = class {
4071
4190
  try {
4072
4191
  while (true) {
4073
4192
  const { done, value } = await reader.read();
4074
- logger11.info({ done, value }, "parseA2ASseStream");
4193
+ logger12.info({ done, value }, "parseA2ASseStream");
4075
4194
  if (done) {
4076
4195
  if (eventDataBuffer.trim()) {
4077
4196
  const result = this._processSseEventData(
@@ -4158,7 +4277,7 @@ var A2AClient = class {
4158
4277
  };
4159
4278
 
4160
4279
  // src/agents/relationTools.ts
4161
- var logger12 = getLogger("relationships Tools");
4280
+ var logger13 = getLogger("relationships Tools");
4162
4281
  var generateTransferToolDescription = (config) => {
4163
4282
  return `Hand off the conversation to agent ${config.id}.
4164
4283
 
@@ -4196,7 +4315,7 @@ var createTransferToAgentTool = ({
4196
4315
  "transfer.to_agent_id": transferConfig.id ?? "unknown"
4197
4316
  });
4198
4317
  }
4199
- logger12.info(
4318
+ logger13.info(
4200
4319
  {
4201
4320
  transferTo: transferConfig.id ?? "unknown",
4202
4321
  fromAgent: callingAgentId
@@ -4344,7 +4463,7 @@ function createDelegateToAgentTool({
4344
4463
  ...isInternal ? { fromAgentId: callingAgentId } : { fromExternalAgentId: callingAgentId }
4345
4464
  }
4346
4465
  };
4347
- logger12.info({ messageToSend }, "messageToSend");
4466
+ logger13.info({ messageToSend }, "messageToSend");
4348
4467
  await createMessage(dbClient_default)({
4349
4468
  id: nanoid(),
4350
4469
  tenantId,
@@ -4406,7 +4525,7 @@ function createDelegateToAgentTool({
4406
4525
  }
4407
4526
 
4408
4527
  // src/agents/SystemPromptBuilder.ts
4409
- var logger13 = getLogger("SystemPromptBuilder");
4528
+ var logger14 = getLogger("SystemPromptBuilder");
4410
4529
  var SystemPromptBuilder = class {
4411
4530
  constructor(version, versionConfig) {
4412
4531
  this.version = version;
@@ -4422,9 +4541,9 @@ var SystemPromptBuilder = class {
4422
4541
  this.templates.set(name, content);
4423
4542
  }
4424
4543
  this.loaded = true;
4425
- logger13.debug({ templateCount: this.templates.size, version: this.version }, `Loaded ${this.templates.size} templates for version ${this.version}`);
4544
+ logger14.debug({ templateCount: this.templates.size, version: this.version }, `Loaded ${this.templates.size} templates for version ${this.version}`);
4426
4545
  } catch (error) {
4427
- logger13.error({ error }, `Failed to load templates for version ${this.version}`);
4546
+ logger14.error({ error }, `Failed to load templates for version ${this.version}`);
4428
4547
  throw new Error(`Template loading failed: ${error}`);
4429
4548
  }
4430
4549
  }
@@ -4826,7 +4945,7 @@ function hasToolCallWithPrefix(prefix) {
4826
4945
  return false;
4827
4946
  };
4828
4947
  }
4829
- var logger14 = getLogger("Agent");
4948
+ var logger15 = getLogger("Agent");
4830
4949
  var CONSTANTS = {
4831
4950
  MAX_GENERATION_STEPS: 12,
4832
4951
  PHASE_1_TIMEOUT_MS: 27e4,
@@ -5079,14 +5198,14 @@ var Agent = class {
5079
5198
  for (const toolSet of tools) {
5080
5199
  for (const [toolName, originalTool] of Object.entries(toolSet)) {
5081
5200
  if (!isValidTool(originalTool)) {
5082
- logger14.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5201
+ logger15.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5083
5202
  continue;
5084
5203
  }
5085
5204
  const sessionWrappedTool = tool({
5086
5205
  description: originalTool.description,
5087
5206
  inputSchema: originalTool.inputSchema,
5088
5207
  execute: async (args, { toolCallId }) => {
5089
- logger14.debug({ toolName, toolCallId }, "MCP Tool Called");
5208
+ logger15.debug({ toolName, toolCallId }, "MCP Tool Called");
5090
5209
  try {
5091
5210
  const result = await originalTool.execute(args, { toolCallId });
5092
5211
  toolSessionManager.recordToolResult(sessionId, {
@@ -5098,7 +5217,7 @@ var Agent = class {
5098
5217
  });
5099
5218
  return { result, toolCallId };
5100
5219
  } catch (error) {
5101
- logger14.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5220
+ logger15.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5102
5221
  throw error;
5103
5222
  }
5104
5223
  }
@@ -5183,7 +5302,7 @@ var Agent = class {
5183
5302
  selectedTools
5184
5303
  };
5185
5304
  }
5186
- logger14.info(
5305
+ logger15.info(
5187
5306
  {
5188
5307
  toolName: tool4.name,
5189
5308
  credentialReferenceId,
@@ -5223,7 +5342,7 @@ var Agent = class {
5223
5342
  async getResolvedContext(conversationId, requestContext) {
5224
5343
  try {
5225
5344
  if (!this.config.contextConfigId) {
5226
- logger14.debug({ graphId: this.config.graphId }, "No context config found for graph");
5345
+ logger15.debug({ graphId: this.config.graphId }, "No context config found for graph");
5227
5346
  return null;
5228
5347
  }
5229
5348
  const contextConfig = await getContextConfigById(dbClient_default)({
@@ -5231,7 +5350,7 @@ var Agent = class {
5231
5350
  id: this.config.contextConfigId
5232
5351
  });
5233
5352
  if (!contextConfig) {
5234
- logger14.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5353
+ logger15.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5235
5354
  return null;
5236
5355
  }
5237
5356
  if (!this.contextResolver) {
@@ -5248,7 +5367,7 @@ var Agent = class {
5248
5367
  $now: (/* @__PURE__ */ new Date()).toISOString(),
5249
5368
  $env: process.env
5250
5369
  };
5251
- logger14.debug(
5370
+ logger15.debug(
5252
5371
  {
5253
5372
  conversationId,
5254
5373
  contextConfigId: contextConfig.id,
@@ -5262,7 +5381,7 @@ var Agent = class {
5262
5381
  );
5263
5382
  return contextWithBuiltins;
5264
5383
  } catch (error) {
5265
- logger14.error(
5384
+ logger15.error(
5266
5385
  {
5267
5386
  conversationId,
5268
5387
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5286,7 +5405,7 @@ var Agent = class {
5286
5405
  });
5287
5406
  return graphDefinition?.graphPrompt || void 0;
5288
5407
  } catch (error) {
5289
- logger14.warn(
5408
+ logger15.warn(
5290
5409
  {
5291
5410
  graphId: this.config.graphId,
5292
5411
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5313,7 +5432,7 @@ var Agent = class {
5313
5432
  }
5314
5433
  return !!(graphDefinition.artifactComponents && Object.keys(graphDefinition.artifactComponents).length > 0);
5315
5434
  } catch (error) {
5316
- logger14.warn(
5435
+ logger15.warn(
5317
5436
  {
5318
5437
  graphId: this.config.graphId,
5319
5438
  tenantId: this.config.tenantId,
@@ -5373,7 +5492,7 @@ Key requirements:
5373
5492
  preserveUnresolved: false
5374
5493
  });
5375
5494
  } catch (error) {
5376
- logger14.error(
5495
+ logger15.error(
5377
5496
  {
5378
5497
  conversationId,
5379
5498
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5418,7 +5537,7 @@ Key requirements:
5418
5537
  preserveUnresolved: false
5419
5538
  });
5420
5539
  } catch (error) {
5421
- logger14.error(
5540
+ logger15.error(
5422
5541
  {
5423
5542
  conversationId,
5424
5543
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5446,7 +5565,7 @@ Key requirements:
5446
5565
  artifactId: z.string().describe("The unique identifier of the artifact to get.")
5447
5566
  }),
5448
5567
  execute: async ({ artifactId }) => {
5449
- logger14.info({ artifactId }, "get_artifact executed");
5568
+ logger15.info({ artifactId }, "get_artifact executed");
5450
5569
  const artifact = await getLedgerArtifacts(dbClient_default)({
5451
5570
  scopes: {
5452
5571
  tenantId: this.config.tenantId,
@@ -5513,7 +5632,7 @@ Key requirements:
5513
5632
  graphId: this.config.graphId
5514
5633
  });
5515
5634
  } catch (error) {
5516
- logger14.error(
5635
+ logger15.error(
5517
5636
  { error, graphId: this.config.graphId },
5518
5637
  "Failed to check graph artifact components"
5519
5638
  );
@@ -5617,7 +5736,7 @@ Key requirements:
5617
5736
  const configuredTimeout = modelSettings.maxDuration ? Math.min(modelSettings.maxDuration * 1e3, MAX_ALLOWED_TIMEOUT_MS) : shouldStreamPhase1 ? CONSTANTS.PHASE_1_TIMEOUT_MS : CONSTANTS.NON_STREAMING_PHASE_1_TIMEOUT_MS;
5618
5737
  const timeoutMs = Math.min(configuredTimeout, MAX_ALLOWED_TIMEOUT_MS);
5619
5738
  if (modelSettings.maxDuration && modelSettings.maxDuration * 1e3 > MAX_ALLOWED_TIMEOUT_MS) {
5620
- logger14.warn(
5739
+ logger15.warn(
5621
5740
  {
5622
5741
  requestedTimeout: modelSettings.maxDuration * 1e3,
5623
5742
  appliedTimeout: timeoutMs,
@@ -5659,7 +5778,7 @@ Key requirements:
5659
5778
  }
5660
5779
  );
5661
5780
  } catch (error) {
5662
- logger14.debug({ error }, "Failed to track agent reasoning");
5781
+ logger15.debug({ error }, "Failed to track agent reasoning");
5663
5782
  }
5664
5783
  }
5665
5784
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -5742,7 +5861,7 @@ Key requirements:
5742
5861
  }
5743
5862
  );
5744
5863
  } catch (error) {
5745
- logger14.debug({ error }, "Failed to track agent reasoning");
5864
+ logger15.debug({ error }, "Failed to track agent reasoning");
5746
5865
  }
5747
5866
  }
5748
5867
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -5787,7 +5906,7 @@ Key requirements:
5787
5906
  return;
5788
5907
  }
5789
5908
  if (toolName === "save_artifact_tool" || toolName === "save_tool_result") {
5790
- logger14.info({ result }, "save_artifact_tool or save_tool_result");
5909
+ logger15.info({ result }, "save_artifact_tool or save_tool_result");
5791
5910
  if (result.output.artifacts) {
5792
5911
  for (const artifact of result.output.artifacts) {
5793
5912
  const artifactId = artifact?.artifactId || "N/A";
@@ -5972,7 +6091,7 @@ function parseEmbeddedJson(data) {
5972
6091
  }
5973
6092
  });
5974
6093
  }
5975
- var logger15 = getLogger("generateTaskHandler");
6094
+ var logger16 = getLogger("generateTaskHandler");
5976
6095
  var createTaskHandler = (config, credentialStoreRegistry) => {
5977
6096
  return async (task) => {
5978
6097
  try {
@@ -6022,7 +6141,33 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6022
6141
  agentId: config.agentId
6023
6142
  })
6024
6143
  ]);
6025
- logger15.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
6144
+ logger16.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
6145
+ const enhancedInternalRelations = await Promise.all(
6146
+ internalRelations.map(async (relation) => {
6147
+ try {
6148
+ const relatedAgent = await getAgentById(dbClient_default)({
6149
+ scopes: { tenantId: config.tenantId, projectId: config.projectId },
6150
+ agentId: relation.id
6151
+ });
6152
+ if (relatedAgent) {
6153
+ const relatedAgentRelations = await getRelatedAgentsForGraph(dbClient_default)({
6154
+ scopes: { tenantId: config.tenantId, projectId: config.projectId },
6155
+ graphId: config.graphId,
6156
+ agentId: relation.id
6157
+ });
6158
+ const enhancedDescription = generateDescriptionWithTransfers(
6159
+ relation.description || "",
6160
+ relatedAgentRelations.internalRelations,
6161
+ relatedAgentRelations.externalRelations
6162
+ );
6163
+ return { ...relation, description: enhancedDescription };
6164
+ }
6165
+ } catch (error) {
6166
+ logger16.warn({ agentId: relation.id, error }, "Failed to enhance agent description");
6167
+ }
6168
+ return relation;
6169
+ })
6170
+ );
6026
6171
  const agentPrompt = "prompt" in config.agentSchema ? config.agentSchema.prompt : "";
6027
6172
  const models = "models" in config.agentSchema ? config.agentSchema.models : void 0;
6028
6173
  const stopWhen = "stopWhen" in config.agentSchema ? config.agentSchema.stopWhen : void 0;
@@ -6039,7 +6184,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
  agentPrompt,
  models: models || void 0,
  stopWhen: stopWhen || void 0,
- agentRelations: internalRelations.map((relation) => ({
+ agentRelations: enhancedInternalRelations.map((relation) => ({
  id: relation.id,
  tenantId: config.tenantId,
  projectId: config.projectId,
@@ -6053,7 +6198,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
  agentRelations: [],
  transferRelations: []
  })),
- transferRelations: internalRelations.filter((relation) => relation.relationType === "transfer").map((relation) => ({
+ transferRelations: enhancedInternalRelations.filter((relation) => relation.relationType === "transfer").map((relation) => ({
  baseUrl: config.baseUrl,
  apiKey: config.apiKey,
  id: relation.id,
@@ -6069,7 +6214,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
  })),
  delegateRelations: [
  // Internal delegate relations
- ...internalRelations.filter((relation) => relation.relationType === "delegate").map((relation) => ({
+ ...enhancedInternalRelations.filter((relation) => relation.relationType === "delegate").map((relation) => ({
  type: "internal",
  config: {
  id: relation.id,
@@ -6122,7 +6267,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
  const taskIdMatch = task.id.match(/^task_([^-]+-[^-]+-\d+)-/);
  if (taskIdMatch) {
  contextId = taskIdMatch[1];
- logger15.info(
+ logger16.info(
  {
  taskId: task.id,
  extractedContextId: contextId,
@@ -6138,7 +6283,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
  const isDelegation = task.context?.metadata?.isDelegation === true;
  agent.setDelegationStatus(isDelegation);
  if (isDelegation) {
- logger15.info(
+ logger16.info(
  { agentId: config.agentId, taskId: task.id },
  "Delegated agent - streaming disabled"
  );
@@ -6343,84 +6488,10 @@ async function getRegisteredGraph(executionContext) {
  const agentFrameworkBaseUrl = `${baseUrl}/agents`;
  return hydrateGraph({ dbGraph, baseUrl: agentFrameworkBaseUrl, apiKey });
  }
- getLogger("agents");
- async function hydrateAgent({
- dbAgent,
- graphId,
- baseUrl,
- apiKey,
- credentialStoreRegistry
- }) {
- try {
- const taskHandlerConfig = await createTaskHandlerConfig({
- tenantId: dbAgent.tenantId,
- projectId: dbAgent.projectId,
- graphId,
- agentId: dbAgent.id,
- baseUrl,
- apiKey
- });
- const taskHandler = createTaskHandler(taskHandlerConfig, credentialStoreRegistry);
- const agentCard = {
- name: dbAgent.name,
- description: dbAgent.description || "AI Agent",
- url: baseUrl ? `${baseUrl}/a2a` : "",
- version: "1.0.0",
- capabilities: {
- streaming: true,
- // Enable streaming for A2A compliance
- pushNotifications: false,
- stateTransitionHistory: false
- },
- defaultInputModes: ["text", "text/plain"],
- defaultOutputModes: ["text", "text/plain"],
- skills: [],
- // Add provider info if available
- ...baseUrl && {
- provider: {
- organization: "Inkeep",
- url: baseUrl
- }
- }
- };
- return {
- agentId: dbAgent.id,
- tenantId: dbAgent.tenantId,
- projectId: dbAgent.projectId,
- graphId,
- agentCard,
- taskHandler
- };
- } catch (error) {
- console.error(`\u274C Failed to hydrate agent ${dbAgent.id}:`, error);
- throw error;
- }
- }
- async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
- const { tenantId, projectId, graphId, agentId, baseUrl, apiKey } = executionContext;
- if (!agentId) {
- throw new Error("Agent ID is required");
- }
- const dbAgent = await getAgentById(dbClient_default)({
- scopes: { tenantId, projectId },
- agentId
- });
- if (!dbAgent) {
- return null;
- }
- const agentFrameworkBaseUrl = `${baseUrl}/agents`;
- return hydrateAgent({
- dbAgent,
- graphId,
- baseUrl: agentFrameworkBaseUrl,
- credentialStoreRegistry,
- apiKey
- });
- }

  // src/routes/agents.ts
  var app = new OpenAPIHono();
- var logger16 = getLogger("agents");
+ var logger17 = getLogger("agents");
  app.openapi(
  createRoute({
  method: "get",
@@ -6458,7 +6529,7 @@ app.openapi(
  tracestate: c.req.header("tracestate"),
  baggage: c.req.header("baggage")
  };
- logger16.info(
+ logger17.info(
  {
  otelHeaders,
  path: c.req.path,
@@ -6469,7 +6540,7 @@ app.openapi(
  const executionContext = getRequestExecutionContext(c);
  const { tenantId, projectId, graphId, agentId } = executionContext;
  if (agentId) {
- logger16.info(
+ logger17.info(
  {
  message: "getRegisteredAgent (agent-level)",
  tenantId,
@@ -6481,13 +6552,13 @@
  );
  const credentialStores = c.get("credentialStores");
  const agent = await getRegisteredAgent(executionContext, credentialStores);
- logger16.info({ agent }, "agent registered: well-known agent.json");
+ logger17.info({ agent }, "agent registered: well-known agent.json");
  if (!agent) {
  return c.json({ error: "Agent not found" }, 404);
  }
  return c.json(agent.agentCard);
  } else {
- logger16.info(
+ logger17.info(
  {
  message: "getRegisteredGraph (graph-level)",
  tenantId,
@@ -6510,7 +6581,7 @@ app.post("/a2a", async (c) => {
  tracestate: c.req.header("tracestate"),
  baggage: c.req.header("baggage")
  };
- logger16.info(
+ logger17.info(
  {
  otelHeaders,
  path: c.req.path,
@@ -6521,7 +6592,7 @@ app.post("/a2a", async (c) => {
  const executionContext = getRequestExecutionContext(c);
  const { tenantId, projectId, graphId, agentId } = executionContext;
  if (agentId) {
- logger16.info(
+ logger17.info(
  {
  message: "a2a (agent-level)",
  tenantId,
@@ -6545,7 +6616,7 @@ app.post("/a2a", async (c) => {
  }
  return a2aHandler(c, agent);
  } else {
- logger16.info(
+ logger17.info(
  {
  message: "a2a (graph-level)",
  tenantId,
@@ -6585,14 +6656,14 @@ app.post("/a2a", async (c) => {
  }
  });
  var agents_default = app;
- var logger17 = getLogger("Transfer");
+ var logger18 = getLogger("Transfer");
  async function executeTransfer({
  tenantId,
  threadId,
  projectId,
  targetAgentId
  }) {
- logger17.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
+ logger18.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
  await setActiveAgentForThread(dbClient_default)({
  scopes: { tenantId, projectId },
  threadId,
@@ -6787,7 +6858,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  __publicField(this, "queuedOperations", []);
  // Timing tracking for text sequences (text-end to text-start gap)
  __publicField(this, "lastTextEndTimestamp", 0);
- __publicField(this, "TEXT_GAP_THRESHOLD", 1e3);
+ __publicField(this, "TEXT_GAP_THRESHOLD", 50);
  // milliseconds - if gap between text sequences is less than this, queue operations
  // Connection management and forced cleanup
  __publicField(this, "connectionDropTimer");
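This hunk lowers the stream helper's TEXT_GAP_THRESHOLD from 1e3 (1000 ms) to 50 ms, so non-text operations are queued only when the gap between a text-end and the next text-start is very short. A minimal, hypothetical sketch of that gating idea (the class and method names below are illustrative, not the package's API):

// Hypothetical sketch: queue operations only when text sequences are nearly back-to-back.
class GapAwareOperationQueue {
  constructor(thresholdMs = 50) {
    this.thresholdMs = thresholdMs; // a gap below this suggests more text is imminent
    this.lastTextEndTimestamp = 0;
    this.queued = [];
  }
  markTextEnd() {
    this.lastTextEndTimestamp = Date.now();
  }
  submit(operation, flush) {
    const gap = Date.now() - this.lastTextEndTimestamp;
    if (gap < this.thresholdMs) this.queued.push(operation); // hold until the text settles
    else flush(operation);
  }
  drain(flush) {
    for (const op of this.queued) flush(op);
    this.queued = [];
  }
}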
@@ -7133,7 +7204,7 @@ var MCPStreamHelper = class {
  function createMCPStreamHelper() {
  return new MCPStreamHelper();
  }
- var logger18 = getLogger("ExecutionHandler");
+ var logger19 = getLogger("ExecutionHandler");
  var ExecutionHandler = class {
  constructor() {
  // Hardcoded error limit - separate from configurable stopWhen
@@ -7158,7 +7229,7 @@ var ExecutionHandler = class {
  const { tenantId, projectId, graphId, apiKey, baseUrl } = executionContext;
  registerStreamHelper(requestId2, sseHelper);
  graphSessionManager.createSession(requestId2, graphId, tenantId, projectId);
- logger18.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
+ logger19.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
  let graphConfig = null;
  try {
  graphConfig = await getFullGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
@@ -7170,7 +7241,7 @@ var ExecutionHandler = class {
  );
  }
  } catch (error) {
- logger18.error(
+ logger19.error(
  {
  error: error instanceof Error ? error.message : "Unknown error",
  stack: error instanceof Error ? error.stack : void 0
@@ -7186,7 +7257,7 @@ var ExecutionHandler = class {
  try {
  await sseHelper.writeOperation(agentInitializingOp(requestId2, graphId));
  const taskId = `task_${conversationId}-${requestId2}`;
- logger18.info(
+ logger19.info(
  { taskId, currentAgentId, conversationId, requestId: requestId2 },
  "Attempting to create or reuse existing task"
  );
@@ -7209,7 +7280,7 @@ var ExecutionHandler = class {
  agent_id: currentAgentId
  }
  });
- logger18.info(
+ logger19.info(
  {
  taskId,
  createdTaskMetadata: Array.isArray(task) ? task[0]?.metadata : task?.metadata
@@ -7218,27 +7289,27 @@ var ExecutionHandler = class {
  );
  } catch (error) {
  if (error?.message?.includes("UNIQUE constraint failed") || error?.message?.includes("PRIMARY KEY constraint failed") || error?.code === "SQLITE_CONSTRAINT_PRIMARYKEY") {
- logger18.info(
+ logger19.info(
  { taskId, error: error.message },
  "Task already exists, fetching existing task"
  );
  const existingTask = await getTask(dbClient_default)({ id: taskId });
  if (existingTask) {
  task = existingTask;
- logger18.info(
+ logger19.info(
  { taskId, existingTask },
  "Successfully reused existing task from race condition"
  );
  } else {
- logger18.error({ taskId, error }, "Task constraint failed but task not found");
+ logger19.error({ taskId, error }, "Task constraint failed but task not found");
  throw error;
  }
  } else {
- logger18.error({ taskId, error }, "Failed to create task due to non-constraint error");
+ logger19.error({ taskId, error }, "Failed to create task due to non-constraint error");
  throw error;
  }
  }
- logger18.debug(
+ logger19.debug(
  {
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
  executionType: "create_initial_task",
@@ -7256,7 +7327,7 @@ var ExecutionHandler = class {
  const maxTransfers = graphConfig?.stopWhen?.transferCountIs ?? 10;
  while (iterations < maxTransfers) {
  iterations++;
- logger18.info(
+ logger19.info(
  { iterations, currentAgentId, graphId, conversationId, fromAgentId },
  `Execution loop iteration ${iterations} with agent ${currentAgentId}, transfer from: ${fromAgentId || "none"}`
  );
@@ -7264,10 +7335,10 @@ var ExecutionHandler = class {
  scopes: { tenantId, projectId },
  conversationId
  });
- logger18.info({ activeAgent }, "activeAgent");
+ logger19.info({ activeAgent }, "activeAgent");
  if (activeAgent && activeAgent.activeAgentId !== currentAgentId) {
  currentAgentId = activeAgent.activeAgentId;
- logger18.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
+ logger19.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
  }
  const agentBaseUrl = `${baseUrl}/agents`;
  const a2aClient = new A2AClient(agentBaseUrl, {
@@ -7308,13 +7379,13 @@ var ExecutionHandler = class {
  });
  if (!messageResponse?.result) {
  errorCount++;
- logger18.error(
+ logger19.error(
  { currentAgentId, iterations, errorCount },
  `No response from agent ${currentAgentId} on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
  );
  if (errorCount >= this.MAX_ERRORS) {
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
- logger18.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
  await sseHelper.writeError(errorMessage2);
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
  if (task) {
@@ -7340,7 +7411,7 @@ var ExecutionHandler = class {
  const transferResponse = messageResponse.result;
  const targetAgentId = transferResponse.artifacts?.[0]?.parts?.[0]?.data?.targetAgentId;
  const transferReason = transferResponse.artifacts?.[0]?.parts?.[1]?.text;
- logger18.info({ targetAgentId, transferReason }, "transfer response");
+ logger19.info({ targetAgentId, transferReason }, "transfer response");
  currentMessage = `<transfer_context> ${transferReason} </transfer_context>`;
  const { success, targetAgentId: newAgentId } = await executeTransfer({
  projectId,
@@ -7351,7 +7422,7 @@ var ExecutionHandler = class {
  if (success) {
  fromAgentId = currentAgentId;
  currentAgentId = newAgentId;
- logger18.info(
+ logger19.info(
  {
  transferFrom: fromAgentId,
  transferTo: currentAgentId,
@@ -7369,7 +7440,7 @@ var ExecutionHandler = class {
  const graphSessionData = graphSessionManager.getSession(requestId2);
  if (graphSessionData) {
  const sessionSummary = graphSessionData.getSummary();
- logger18.info(sessionSummary, "GraphSession data after completion");
+ logger19.info(sessionSummary, "GraphSession data after completion");
  }
  let textContent = "";
  for (const part of responseParts) {
@@ -7378,78 +7449,84 @@ var ExecutionHandler = class {
  textContent += part.text;
  }
  }
- const activeSpan = trace.getActiveSpan();
- if (activeSpan) {
- activeSpan.setAttributes({
- "ai.response.content": textContent || "No response content",
- "ai.response.timestamp": (/* @__PURE__ */ new Date()).toISOString(),
- "ai.agent.name": currentAgentId
- });
- }
- await createMessage(dbClient_default)({
- id: nanoid(),
- tenantId,
- projectId,
- conversationId,
- role: "agent",
- content: {
- text: textContent || void 0,
- parts: responseParts.map((part) => ({
- type: part.kind === "text" ? "text" : "data",
- text: part.kind === "text" ? part.text : void 0,
- data: part.kind === "data" ? JSON.stringify(part.data) : void 0
- }))
- },
- visibility: "user-facing",
- messageType: "chat",
- agentId: currentAgentId,
- fromAgentId: currentAgentId,
- taskId: task.id
- });
- const updateTaskStart = Date.now();
- await updateTask(dbClient_default)({
- taskId: task.id,
- data: {
- status: "completed",
- metadata: {
- ...task.metadata,
- completed_at: (/* @__PURE__ */ new Date()).toISOString(),
- response: {
- text: textContent,
- parts: responseParts,
- hasText: !!textContent,
- hasData: responseParts.some((p) => p.kind === "data")
+ return tracer.startActiveSpan("execution_handler.execute", {}, async (span) => {
+ try {
+ span.setAttributes({
+ "ai.response.content": textContent || "No response content",
+ "ai.response.timestamp": (/* @__PURE__ */ new Date()).toISOString(),
+ "ai.agent.name": currentAgentId
+ });
+ await createMessage(dbClient_default)({
+ id: nanoid(),
+ tenantId,
+ projectId,
+ conversationId,
+ role: "agent",
+ content: {
+ text: textContent || void 0,
+ parts: responseParts.map((part) => ({
+ type: part.kind === "text" ? "text" : "data",
+ text: part.kind === "text" ? part.text : void 0,
+ data: part.kind === "data" ? JSON.stringify(part.data) : void 0
+ }))
+ },
+ visibility: "user-facing",
+ messageType: "chat",
+ agentId: currentAgentId,
+ fromAgentId: currentAgentId,
+ taskId: task.id
+ });
+ const updateTaskStart = Date.now();
+ await updateTask(dbClient_default)({
+ taskId: task.id,
+ data: {
+ status: "completed",
+ metadata: {
+ ...task.metadata,
+ completed_at: (/* @__PURE__ */ new Date()).toISOString(),
+ response: {
+ text: textContent,
+ parts: responseParts,
+ hasText: !!textContent,
+ hasData: responseParts.some((p) => p.kind === "data")
+ }
+ }
  }
+ });
+ const updateTaskEnd = Date.now();
+ logger19.info(
+ { duration: updateTaskEnd - updateTaskStart },
+ "Completed updateTask operation"
+ );
+ await sseHelper.writeOperation(completionOp(currentAgentId, iterations));
+ await sseHelper.complete();
+ logger19.info({}, "Ending GraphSession and cleaning up");
+ graphSessionManager.endSession(requestId2);
+ logger19.info({}, "Cleaning up streamHelper");
+ unregisterStreamHelper(requestId2);
+ let response;
+ if (sseHelper instanceof MCPStreamHelper) {
+ const captured = sseHelper.getCapturedResponse();
+ response = captured.text || "No response content";
  }
+ logger19.info({}, "ExecutionHandler returning success");
+ return { success: true, iterations, response };
+ } catch (error) {
+ setSpanWithError(span, error);
+ throw error;
+ } finally {
+ span.end();
  }
  });
- const updateTaskEnd = Date.now();
- logger18.info(
- { duration: updateTaskEnd - updateTaskStart },
- "Completed updateTask operation"
- );
- await sseHelper.writeOperation(completionOp(currentAgentId, iterations));
- await sseHelper.complete();
- logger18.info({}, "Ending GraphSession and cleaning up");
- graphSessionManager.endSession(requestId2);
- logger18.info({}, "Cleaning up streamHelper");
- unregisterStreamHelper(requestId2);
- let response;
- if (sseHelper instanceof MCPStreamHelper) {
- const captured = sseHelper.getCapturedResponse();
- response = captured.text || "No response content";
- }
- logger18.info({}, "ExecutionHandler returning success");
- return { success: true, iterations, response };
  }
  errorCount++;
- logger18.warn(
+ logger19.warn(
  { iterations, errorCount },
  `No valid response or transfer on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
  );
  if (errorCount >= this.MAX_ERRORS) {
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
- logger18.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
  await sseHelper.writeError(errorMessage2);
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
  if (task) {
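The substantive change in the hunk above is that the completion path (message persistence, task update, stream cleanup) is now wrapped in tracer.startActiveSpan("execution_handler.execute", ...), with failures recorded on the span via setSpanWithError and the span always ended in a finally block, instead of only annotating whatever span happened to be active. A minimal sketch of the same span-wrapping pattern with @opentelemetry/api; markSpanFailed below is a stand-in for setSpanWithError, whose real implementation lives in @inkeep/agents-core and is not shown in this diff:

import { trace, SpanStatusCode } from '@opentelemetry/api';

const tracer = trace.getTracer('agents-run-api');

// Stand-in for setSpanWithError: record the exception and flag the span as errored.
function markSpanFailed(span, error) {
  span.recordException(error instanceof Error ? error : new Error(String(error)));
  span.setStatus({ code: SpanStatusCode.ERROR, message: String(error) });
}

async function withExecutionSpan(work) {
  return tracer.startActiveSpan('execution_handler.execute', {}, async (span) => {
    try {
      return await work(span); // success path: caller sets attributes and returns its result
    } catch (error) {
      markSpanFailed(span, error);
      throw error;
    } finally {
      span.end(); // the span is always closed, success or failure
    }
  });
}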
@@ -7471,7 +7548,7 @@ var ExecutionHandler = class {
  }
  }
  const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
- logger18.error({ maxTransfers, iterations }, errorMessage);
+ logger19.error({ maxTransfers, iterations }, errorMessage);
  await sseHelper.writeError(errorMessage);
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
  if (task) {
@@ -7491,7 +7568,7 @@ var ExecutionHandler = class {
  unregisterStreamHelper(requestId2);
  return { success: false, error: errorMessage, iterations };
  } catch (error) {
- logger18.error({ error }, "Error in execution handler");
+ logger19.error({ error }, "Error in execution handler");
  const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
  await sseHelper.writeError(`Execution error: ${errorMessage}`);
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
@@ -7517,7 +7594,7 @@ var ExecutionHandler = class {

  // src/routes/chat.ts
  var app2 = new OpenAPIHono();
- var logger19 = getLogger("completionsHandler");
+ var logger20 = getLogger("completionsHandler");
  var chatCompletionsRoute = createRoute({
  method: "post",
  path: "/completions",
@@ -7635,7 +7712,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  tracestate: c.req.header("tracestate"),
  baggage: c.req.header("baggage")
  };
- logger19.info(
+ logger20.info(
  {
  otelHeaders,
  path: c.req.path,
@@ -7721,7 +7798,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  dbClient_default,
  credentialStores
  );
- logger19.info(
+ logger20.info(
  {
  tenantId,
  graphId,
@@ -7767,7 +7844,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  return streamSSE(c, async (stream2) => {
  const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
  await sseHelper.writeRole();
- logger19.info({ agentId }, "Starting execution");
+ logger20.info({ agentId }, "Starting execution");
  const executionHandler = new ExecutionHandler();
  const result = await executionHandler.execute({
  executionContext,
@@ -7777,7 +7854,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  requestId: requestId2,
  sseHelper
  });
- logger19.info(
+ logger20.info(
  { result },
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
  );
@@ -7810,7 +7887,7 @@ var getMessageText = (content) => {
  };
  var chat_default = app2;
  var app3 = new OpenAPIHono();
- var logger20 = getLogger("chatDataStream");
+ var logger21 = getLogger("chatDataStream");
  var chatDataStreamRoute = createRoute({
  method: "post",
  path: "/chat",
@@ -7915,7 +7992,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  );
  const lastUserMessage = body.messages.filter((m) => m.role === "user").slice(-1)[0];
  const userText = typeof lastUserMessage?.content === "string" ? lastUserMessage.content : lastUserMessage?.parts?.map((p) => p.text).join("") || "";
- logger20.info({ userText, lastUserMessage }, "userText");
+ logger21.info({ userText, lastUserMessage }, "userText");
  const messageSpan = trace.getActiveSpan();
  if (messageSpan) {
  messageSpan.setAttributes({
@@ -7957,7 +8034,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  await streamHelper.writeError("Unable to process request");
  }
  } catch (err) {
- logger20.error({ err }, "Streaming error");
+ logger21.error({ err }, "Streaming error");
  await streamHelper.writeError("Internal server error");
  } finally {
  if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
@@ -7978,7 +8055,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  )
  );
  } catch (error) {
- logger20.error({ error }, "chatDataStream error");
+ logger21.error({ error }, "chatDataStream error");
  return c.json({ error: "Failed to process chat completion" }, 500);
  }
  });
@@ -7986,7 +8063,7 @@ var chatDataStream_default = app3;
  function createMCPSchema(schema) {
  return schema;
  }
- var logger21 = getLogger("mcp");
+ var logger22 = getLogger("mcp");
  var _MockResponseSingleton = class _MockResponseSingleton {
  constructor() {
  __publicField(this, "mockRes");
@@ -8041,21 +8118,21 @@ var createSpoofInitMessage = (mcpProtocolVersion) => ({
  id: 0
  });
  var spoofTransportInitialization = async (transport, req, sessionId, mcpProtocolVersion) => {
- logger21.info({ sessionId }, "Spoofing initialization message to set transport state");
+ logger22.info({ sessionId }, "Spoofing initialization message to set transport state");
  const spoofInitMessage = createSpoofInitMessage(mcpProtocolVersion);
  const mockRes = MockResponseSingleton.getInstance().getMockResponse();
  try {
  await transport.handleRequest(req, mockRes, spoofInitMessage);
- logger21.info({ sessionId }, "Successfully spoofed initialization");
+ logger22.info({ sessionId }, "Successfully spoofed initialization");
  } catch (spoofError) {
- logger21.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
+ logger22.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
  }
  };
  var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
  const sessionId = req.headers["mcp-session-id"];
- logger21.info({ sessionId }, "Received MCP session ID");
+ logger22.info({ sessionId }, "Received MCP session ID");
  if (!sessionId) {
- logger21.info({ body }, "Missing session ID");
+ logger22.info({ body }, "Missing session ID");
  res.writeHead(400).end(
  JSON.stringify({
  jsonrpc: "2.0",
@@ -8081,7 +8158,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
  scopes: { tenantId, projectId },
  conversationId: sessionId
  });
- logger21.info(
+ logger22.info(
  {
  sessionId,
  conversationFound: !!conversation,
@@ -8092,7 +8169,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
  "Conversation lookup result"
  );
  if (!conversation || conversation.metadata?.sessionData?.sessionType !== "mcp" || conversation.metadata?.sessionData?.graphId !== graphId) {
- logger21.info(
+ logger22.info(
  { sessionId, conversationId: conversation?.id },
  "MCP session not found or invalid"
  );
@@ -8153,7 +8230,7 @@ var executeAgentQuery = async (executionContext, conversationId, query, defaultA
  requestId: requestId2,
  sseHelper: mcpStreamHelper
  });
- logger21.info(
+ logger22.info(
  { result },
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
  );
@@ -8227,7 +8304,7 @@ var getServer = async (requestContext, executionContext, conversationId, credent
  dbClient_default,
  credentialStores
  );
- logger21.info(
+ logger22.info(
  {
  tenantId,
  graphId,
@@ -8288,7 +8365,7 @@ var validateRequestParameters = (c) => {
  };
  var handleInitializationRequest = async (body, executionContext, validatedContext, req, res, c, credentialStores) => {
  const { tenantId, projectId, graphId } = executionContext;
- logger21.info({ body }, "Received initialization request");
+ logger22.info({ body }, "Received initialization request");
  const sessionId = nanoid();
  const agentGraph = await getAgentGraphWithDefaultAgent(dbClient_default)({
  scopes: { tenantId, projectId },
@@ -8319,7 +8396,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
  }
  }
  });
- logger21.info(
+ logger22.info(
  { sessionId, conversationId: conversation.id },
  "Created MCP session as conversation"
  );
@@ -8328,9 +8405,9 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
  });
  const server = await getServer(validatedContext, executionContext, sessionId, credentialStores);
  await server.connect(transport);
- logger21.info({ sessionId }, "Server connected for initialization");
+ logger22.info({ sessionId }, "Server connected for initialization");
  res.setHeader("Mcp-Session-Id", sessionId);
- logger21.info(
+ logger22.info(
  {
  sessionId,
  bodyMethod: body?.method,
@@ -8339,7 +8416,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
  "About to handle initialization request"
  );
  await transport.handleRequest(req, res, body);
- logger21.info({ sessionId }, "Successfully handled initialization request");
+ logger22.info({ sessionId }, "Successfully handled initialization request");
  return toFetchResponse(res);
  };
  var handleExistingSessionRequest = async (body, executionContext, validatedContext, req, res, credentialStores) => {
@@ -8367,8 +8444,8 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
  sessionId,
  conversation.metadata?.session_data?.mcpProtocolVersion
  );
- logger21.info({ sessionId }, "Server connected and transport initialized");
- logger21.info(
+ logger22.info({ sessionId }, "Server connected and transport initialized");
+ logger22.info(
  {
  sessionId,
  bodyKeys: Object.keys(body || {}),
@@ -8382,9 +8459,9 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
  );
  try {
  await transport.handleRequest(req, res, body);
- logger21.info({ sessionId }, "Successfully handled MCP request");
+ logger22.info({ sessionId }, "Successfully handled MCP request");
  } catch (transportError) {
- logger21.error(
+ logger22.error(
  {
  sessionId,
  error: transportError,
@@ -8435,13 +8512,13 @@ app4.openapi(
  }
  const { executionContext } = paramValidation;
  const body = c.get("requestBody") || {};
- logger21.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
+ logger22.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
  const isInitRequest = body.method === "initialize";
  const { req, res } = toReqRes(c.req.raw);
  const validatedContext = c.get("validatedContext") || {};
  const credentialStores = c.get("credentialStores");
- logger21.info({ validatedContext }, "Validated context");
- logger21.info({ req }, "request");
+ logger22.info({ validatedContext }, "Validated context");
+ logger22.info({ req }, "request");
  if (isInitRequest) {
  return await handleInitializationRequest(
  body,
@@ -8463,7 +8540,7 @@ app4.openapi(
  );
  }
  } catch (e) {
- logger21.error(
+ logger22.error(
  {
  error: e instanceof Error ? e.message : e,
  stack: e instanceof Error ? e.stack : void 0
@@ -8475,7 +8552,7 @@ app4.openapi(
  }
  );
  app4.get("/", async (c) => {
- logger21.info({}, "Received GET MCP request");
+ logger22.info({}, "Received GET MCP request");
  return c.json(
  {
  jsonrpc: "2.0",
@@ -8489,7 +8566,7 @@ app4.get("/", async (c) => {
  );
  });
  app4.delete("/", async (c) => {
- logger21.info({}, "Received DELETE MCP request");
+ logger22.info({}, "Received DELETE MCP request");
  return c.json(
  {
  jsonrpc: "2.0",
@@ -8500,7 +8577,7 @@ app4.delete("/", async (c) => {
  );
  });
  var mcp_default = app4;
- var logger22 = getLogger("agents-run-api");
+ var logger23 = getLogger("agents-run-api");
  function createExecutionHono(serverConfig, credentialStores) {
  const app6 = new OpenAPIHono();
  app6.use("*", otel());
@@ -8516,7 +8593,7 @@ function createExecutionHono(serverConfig, credentialStores) {
  const body = await c.req.json();
  c.set("requestBody", body);
  } catch (error) {
- logger22.debug({ error }, "Failed to parse JSON body, continuing without parsed body");
+ logger23.debug({ error }, "Failed to parse JSON body, continuing without parsed body");
  }
  }
  return next();
@@ -8567,8 +8644,8 @@ function createExecutionHono(serverConfig, credentialStores) {
  if (!isExpectedError) {
  const errorMessage = err instanceof Error ? err.message : String(err);
  const errorStack = err instanceof Error ? err.stack : void 0;
- if (logger22) {
- logger22.error(
+ if (logger23) {
+ logger23.error(
  {
  error: err,
  message: errorMessage,
@@ -8580,8 +8657,8 @@ function createExecutionHono(serverConfig, credentialStores) {
  );
  }
  } else {
- if (logger22) {
- logger22.error(
+ if (logger23) {
+ logger23.error(
  {
  error: err,
  path: c.req.path,
@@ -8598,8 +8675,8 @@ function createExecutionHono(serverConfig, credentialStores) {
  const response = err.getResponse();
  return response;
  } catch (responseError) {
- if (logger22) {
- logger22.error({ error: responseError }, "Error while handling HTTPException response");
+ if (logger23) {
+ logger23.error({ error: responseError }, "Error while handling HTTPException response");
  }
  }
  }
@@ -8633,7 +8710,7 @@ function createExecutionHono(serverConfig, credentialStores) {
  app6.use("*", async (c, next) => {
  const executionContext = c.get("executionContext");
  if (!executionContext) {
- logger22.debug({}, "Empty execution context");
+ logger23.debug({}, "Empty execution context");
  return next();
  }
  const { tenantId, projectId, graphId } = executionContext;
@@ -8642,7 +8719,7 @@ function createExecutionHono(serverConfig, credentialStores) {
  if (requestBody) {
  conversationId = requestBody.conversationId;
  if (!conversationId) {
- logger22.debug({ requestBody }, "No conversation ID found in request body");
+ logger23.debug({ requestBody }, "No conversation ID found in request body");
  }
  }
  const entries = Object.fromEntries(
@@ -8657,7 +8734,7 @@ function createExecutionHono(serverConfig, credentialStores) {
  })
  );
  if (!Object.keys(entries).length) {
- logger22.debug({}, "Empty entries for baggage");
+ logger23.debug({}, "Empty entries for baggage");
  return next();
  }
  const bag = Object.entries(entries).reduce(