@inkeep/agents-run-api 0.0.0-dev-20250915190940 → 0.0.0-dev-20250916015245

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these package versions as they appear in their respective public registries.
Files changed (3)
  1. package/dist/index.cjs +376 -294
  2. package/dist/index.js +375 -293
  3. package/package.json +2 -2
package/dist/index.cjs CHANGED
@@ -1159,6 +1159,128 @@ async function handleTasksResubscribe(c, agent, request) {
1159
1159
  });
1160
1160
  }
1161
1161
  }
1162
+ init_dbClient();
1163
+ agentsCore.getLogger("agents");
1164
+ function createAgentCard({
1165
+ dbAgent,
1166
+ baseUrl
1167
+ }) {
1168
+ const description = dbAgent.description || "AI Agent";
1169
+ return {
1170
+ name: dbAgent.name,
1171
+ description,
1172
+ url: baseUrl ? `${baseUrl}/a2a` : "",
1173
+ version: "1.0.0",
1174
+ capabilities: {
1175
+ streaming: true,
1176
+ // Enable streaming for A2A compliance
1177
+ pushNotifications: false,
1178
+ stateTransitionHistory: false
1179
+ },
1180
+ defaultInputModes: ["text", "text/plain"],
1181
+ defaultOutputModes: ["text", "text/plain"],
1182
+ skills: [],
1183
+ // Add provider info if available
1184
+ ...baseUrl && {
1185
+ provider: {
1186
+ organization: "Inkeep",
1187
+ url: baseUrl
1188
+ }
1189
+ }
1190
+ };
1191
+ }
1192
+ function generateDescriptionWithTransfers(baseDescription, internalRelations, externalRelations) {
1193
+ const transfers = [
1194
+ ...internalRelations.filter((rel) => rel.relationType === "transfer"),
1195
+ ...externalRelations.filter((rel) => rel.relationType === "transfer")
1196
+ ];
1197
+ const delegates = [
1198
+ ...internalRelations.filter((rel) => rel.relationType === "delegate"),
1199
+ ...externalRelations.filter((rel) => rel.relationType === "delegate")
1200
+ ];
1201
+ if (transfers.length === 0 && delegates.length === 0) {
1202
+ return baseDescription;
1203
+ }
1204
+ let enhancedDescription = baseDescription;
1205
+ if (transfers.length > 0) {
1206
+ const transferList = transfers.map((rel) => {
1207
+ const name = rel.externalAgent?.name || rel.name;
1208
+ const desc = rel.externalAgent?.description || rel.description || "";
1209
+ return `- ${name}: ${desc}`;
1210
+ }).join("\n");
1211
+ enhancedDescription += `
1212
+
1213
+ Can transfer to:
1214
+ ${transferList}`;
1215
+ }
1216
+ if (delegates.length > 0) {
1217
+ const delegateList = delegates.map((rel) => {
1218
+ const name = rel.externalAgent?.name || rel.name;
1219
+ const desc = rel.externalAgent?.description || rel.description || "";
1220
+ return `- ${name}: ${desc}`;
1221
+ }).join("\n");
1222
+ enhancedDescription += `
1223
+
1224
+ Can delegate to:
1225
+ ${delegateList}`;
1226
+ }
1227
+ return enhancedDescription;
1228
+ }
1229
+ async function hydrateAgent({
1230
+ dbAgent,
1231
+ graphId,
1232
+ baseUrl,
1233
+ apiKey,
1234
+ credentialStoreRegistry
1235
+ }) {
1236
+ try {
1237
+ const taskHandlerConfig = await createTaskHandlerConfig({
1238
+ tenantId: dbAgent.tenantId,
1239
+ projectId: dbAgent.projectId,
1240
+ graphId,
1241
+ agentId: dbAgent.id,
1242
+ baseUrl,
1243
+ apiKey
1244
+ });
1245
+ const taskHandler = createTaskHandler(taskHandlerConfig, credentialStoreRegistry);
1246
+ const agentCard = createAgentCard({
1247
+ dbAgent,
1248
+ baseUrl
1249
+ });
1250
+ return {
1251
+ agentId: dbAgent.id,
1252
+ tenantId: dbAgent.tenantId,
1253
+ projectId: dbAgent.projectId,
1254
+ graphId,
1255
+ agentCard,
1256
+ taskHandler
1257
+ };
1258
+ } catch (error) {
1259
+ console.error(`\u274C Failed to hydrate agent ${dbAgent.id}:`, error);
1260
+ throw error;
1261
+ }
1262
+ }
1263
+ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
1264
+ const { tenantId, projectId, graphId, agentId, baseUrl, apiKey } = executionContext;
1265
+ if (!agentId) {
1266
+ throw new Error("Agent ID is required");
1267
+ }
1268
+ const dbAgent = await agentsCore.getAgentById(dbClient_default)({
1269
+ scopes: { tenantId, projectId },
1270
+ agentId
1271
+ });
1272
+ if (!dbAgent) {
1273
+ return null;
1274
+ }
1275
+ const agentFrameworkBaseUrl = `${baseUrl}/agents`;
1276
+ return hydrateAgent({
1277
+ dbAgent,
1278
+ graphId,
1279
+ baseUrl: agentFrameworkBaseUrl,
1280
+ credentialStoreRegistry,
1281
+ apiKey
1282
+ });
1283
+ }
1162
1284
 
1163
1285
  // src/agents/generateTaskHandler.ts
1164
1286
  init_dbClient();
@@ -1202,10 +1324,10 @@ function statusUpdateOp(ctx) {
1202
1324
  ctx
1203
1325
  };
1204
1326
  }
1205
- var logger3 = agentsCore.getLogger("DataComponentSchema");
1327
+ var logger4 = agentsCore.getLogger("DataComponentSchema");
1206
1328
  function jsonSchemaToZod(jsonSchema) {
1207
1329
  if (!jsonSchema || typeof jsonSchema !== "object") {
1208
- logger3.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
1330
+ logger4.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
1209
1331
  return z5.z.string();
1210
1332
  }
1211
1333
  switch (jsonSchema.type) {
@@ -1232,7 +1354,7 @@ function jsonSchemaToZod(jsonSchema) {
1232
1354
  case "null":
1233
1355
  return z5.z.null();
1234
1356
  default:
1235
- logger3.warn(
1357
+ logger4.warn(
1236
1358
  {
1237
1359
  unsupportedType: jsonSchema.type,
1238
1360
  schema: jsonSchema
@@ -1286,7 +1408,7 @@ __publicField(_ArtifactReferenceSchema, "ARTIFACT_PROPS_SCHEMA", {
1286
1408
  required: ["artifact_id", "task_id"]
1287
1409
  });
1288
1410
  var ArtifactReferenceSchema = _ArtifactReferenceSchema;
1289
- var logger4 = agentsCore.getLogger("ModelFactory");
1411
+ var logger5 = agentsCore.getLogger("ModelFactory");
1290
1412
  var _ModelFactory = class _ModelFactory {
1291
1413
  /**
1292
1414
  * Create a language model instance from configuration
@@ -1301,7 +1423,7 @@ var _ModelFactory = class _ModelFactory {
1301
1423
  const modelSettings = config2;
1302
1424
  const modelString = modelSettings.model.trim();
1303
1425
  const { provider, modelName } = _ModelFactory.parseModelString(modelString);
1304
- logger4.debug(
1426
+ logger5.debug(
1305
1427
  {
1306
1428
  provider,
1307
1429
  model: modelName,
@@ -1322,7 +1444,7 @@ var _ModelFactory = class _ModelFactory {
1322
1444
  );
1323
1445
  }
1324
1446
  } catch (error) {
1325
- logger4.error(
1447
+ logger5.error(
1326
1448
  {
1327
1449
  provider,
1328
1450
  model: modelName,
@@ -1345,7 +1467,7 @@ var _ModelFactory = class _ModelFactory {
1345
1467
  const [provider, ...modelParts] = modelString.split("/");
1346
1468
  const normalizedProvider = provider.toLowerCase();
1347
1469
  if (!_ModelFactory.SUPPORTED_PROVIDERS.includes(normalizedProvider)) {
1348
- logger4.warn(
1470
+ logger5.warn(
1349
1471
  { provider: normalizedProvider, modelName: modelParts.join("/") },
1350
1472
  "Unsupported provider detected, falling back to anthropic"
1351
1473
  );
@@ -1374,14 +1496,14 @@ var _ModelFactory = class _ModelFactory {
1374
1496
  anthropicConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
1375
1497
  }
1376
1498
  if (providerOptions?.gateway) {
1377
- logger4.info(
1499
+ logger5.info(
1378
1500
  { gateway: providerOptions.gateway },
1379
1501
  "Setting up AI Gateway for Anthropic model"
1380
1502
  );
1381
1503
  Object.assign(anthropicConfig, providerOptions.gateway);
1382
1504
  }
1383
1505
  if (Object.keys(anthropicConfig).length > 0) {
1384
- logger4.info({ config: anthropicConfig }, "Applying custom Anthropic provider configuration");
1506
+ logger5.info({ config: anthropicConfig }, "Applying custom Anthropic provider configuration");
1385
1507
  const provider = anthropic.createAnthropic(anthropicConfig);
1386
1508
  return provider(modelName);
1387
1509
  }
@@ -1396,11 +1518,11 @@ var _ModelFactory = class _ModelFactory {
1396
1518
  openaiConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
1397
1519
  }
1398
1520
  if (providerOptions?.gateway) {
1399
- logger4.info({ gateway: providerOptions.gateway }, "Setting up AI Gateway for OpenAI model");
1521
+ logger5.info({ gateway: providerOptions.gateway }, "Setting up AI Gateway for OpenAI model");
1400
1522
  Object.assign(openaiConfig, providerOptions.gateway);
1401
1523
  }
1402
1524
  if (Object.keys(openaiConfig).length > 0) {
1403
- logger4.info({ config: openaiConfig }, "Applying custom OpenAI provider configuration");
1525
+ logger5.info({ config: openaiConfig }, "Applying custom OpenAI provider configuration");
1404
1526
  const provider = openai.createOpenAI(openaiConfig);
1405
1527
  return provider(modelName);
1406
1528
  }
@@ -1494,7 +1616,7 @@ function unregisterStreamHelper(requestId2) {
1494
1616
  }
1495
1617
 
1496
1618
  // src/utils/graph-session.ts
1497
- var logger5 = agentsCore.getLogger("GraphSession");
1619
+ var logger6 = agentsCore.getLogger("GraphSession");
1498
1620
  var GraphSession = class {
1499
1621
  // Track scheduled timeouts for cleanup
1500
1622
  constructor(sessionId, messageId, graphId, tenantId, projectId) {
@@ -1518,7 +1640,7 @@ var GraphSession = class {
1518
1640
  __publicField(this, "MAX_PENDING_ARTIFACTS", 100);
1519
1641
  // Prevent unbounded growth
1520
1642
  __publicField(this, "scheduledTimeouts");
1521
- logger5.debug({ sessionId, messageId, graphId }, "GraphSession created");
1643
+ logger6.debug({ sessionId, messageId, graphId }, "GraphSession created");
1522
1644
  }
1523
1645
  /**
1524
1646
  * Initialize status updates for this session
@@ -1532,15 +1654,15 @@ var GraphSession = class {
1532
1654
  summarizerModel,
1533
1655
  baseModel,
1534
1656
  config: {
1535
- numEvents: config2.numEvents || 10,
1536
- timeInSeconds: config2.timeInSeconds || 30,
1657
+ numEvents: config2.numEvents || 1,
1658
+ timeInSeconds: config2.timeInSeconds || 2,
1537
1659
  ...config2
1538
1660
  }
1539
1661
  };
1540
1662
  if (this.statusUpdateState.config.timeInSeconds) {
1541
1663
  this.statusUpdateTimer = setInterval(async () => {
1542
1664
  if (!this.statusUpdateState || this.isEnded) {
1543
- logger5.debug(
1665
+ logger6.debug(
1544
1666
  { sessionId: this.sessionId },
1545
1667
  "Timer triggered but session already cleaned up or ended"
1546
1668
  );
@@ -1552,7 +1674,7 @@ var GraphSession = class {
1552
1674
  }
1553
1675
  await this.checkAndSendTimeBasedUpdate();
1554
1676
  }, this.statusUpdateState.config.timeInSeconds * 1e3);
1555
- logger5.info(
1677
+ logger6.info(
1556
1678
  {
1557
1679
  sessionId: this.sessionId,
1558
1680
  intervalMs: this.statusUpdateState.config.timeInSeconds * 1e3
@@ -1566,7 +1688,7 @@ var GraphSession = class {
1566
1688
  */
1567
1689
  recordEvent(eventType, agentId, data) {
1568
1690
  if (this.isEnded) {
1569
- logger5.debug(
1691
+ logger6.debug(
1570
1692
  {
1571
1693
  sessionId: this.sessionId,
1572
1694
  eventType,
@@ -1586,7 +1708,7 @@ var GraphSession = class {
1586
1708
  if (eventType === "artifact_saved" && data.pendingGeneration) {
1587
1709
  const artifactId = data.artifactId;
1588
1710
  if (this.pendingArtifacts.size >= this.MAX_PENDING_ARTIFACTS) {
1589
- logger5.warn(
1711
+ logger6.warn(
1590
1712
  {
1591
1713
  sessionId: this.sessionId,
1592
1714
  artifactId,
@@ -1607,7 +1729,7 @@ var GraphSession = class {
1607
1729
  this.artifactProcessingErrors.set(artifactId, errorCount);
1608
1730
  if (errorCount >= this.MAX_ARTIFACT_RETRIES) {
1609
1731
  this.pendingArtifacts.delete(artifactId);
1610
- logger5.error(
1732
+ logger6.error(
1611
1733
  {
1612
1734
  sessionId: this.sessionId,
1613
1735
  artifactId,
@@ -1619,7 +1741,7 @@ var GraphSession = class {
1619
1741
  "Artifact processing failed after max retries, giving up"
1620
1742
  );
1621
1743
  } else {
1622
- logger5.warn(
1744
+ logger6.warn(
1623
1745
  {
1624
1746
  sessionId: this.sessionId,
1625
1747
  artifactId,
@@ -1641,14 +1763,14 @@ var GraphSession = class {
1641
1763
  */
1642
1764
  checkStatusUpdates() {
1643
1765
  if (this.isEnded) {
1644
- logger5.debug(
1766
+ logger6.debug(
1645
1767
  { sessionId: this.sessionId },
1646
1768
  "Session has ended - skipping status update check"
1647
1769
  );
1648
1770
  return;
1649
1771
  }
1650
1772
  if (!this.statusUpdateState) {
1651
- logger5.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
1773
+ logger6.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
1652
1774
  return;
1653
1775
  }
1654
1776
  const statusUpdateState = this.statusUpdateState;
@@ -1659,11 +1781,11 @@ var GraphSession = class {
1659
1781
  */
1660
1782
  async checkAndSendTimeBasedUpdate() {
1661
1783
  if (this.isEnded) {
1662
- logger5.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
1784
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
1663
1785
  return;
1664
1786
  }
1665
1787
  if (!this.statusUpdateState) {
1666
- logger5.debug(
1788
+ logger6.debug(
1667
1789
  { sessionId: this.sessionId },
1668
1790
  "No status updates configured for time-based check"
1669
1791
  );
@@ -1676,7 +1798,7 @@ var GraphSession = class {
1676
1798
  try {
1677
1799
  await this.generateAndSendUpdate();
1678
1800
  } catch (error) {
1679
- logger5.error(
1801
+ logger6.error(
1680
1802
  {
1681
1803
  sessionId: this.sessionId,
1682
1804
  error: error instanceof Error ? error.message : "Unknown error"
@@ -1769,29 +1891,29 @@ var GraphSession = class {
1769
1891
  */
1770
1892
  async generateAndSendUpdate() {
1771
1893
  if (this.isEnded) {
1772
- logger5.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
1894
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
1773
1895
  return;
1774
1896
  }
1775
1897
  if (this.isTextStreaming) {
1776
- logger5.debug(
1898
+ logger6.debug(
1777
1899
  { sessionId: this.sessionId },
1778
1900
  "Text is currently streaming - skipping status update"
1779
1901
  );
1780
1902
  return;
1781
1903
  }
1782
1904
  if (this.isGeneratingUpdate) {
1783
- logger5.debug(
1905
+ logger6.debug(
1784
1906
  { sessionId: this.sessionId },
1785
1907
  "Update already in progress - skipping duplicate generation"
1786
1908
  );
1787
1909
  return;
1788
1910
  }
1789
1911
  if (!this.statusUpdateState) {
1790
- logger5.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
1912
+ logger6.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
1791
1913
  return;
1792
1914
  }
1793
1915
  if (!this.graphId) {
1794
- logger5.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
1916
+ logger6.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
1795
1917
  return;
1796
1918
  }
1797
1919
  const newEventCount = this.events.length - this.statusUpdateState.lastEventCount;
@@ -1804,7 +1926,7 @@ var GraphSession = class {
1804
1926
  try {
1805
1927
  const streamHelper = getStreamHelper(this.sessionId);
1806
1928
  if (!streamHelper) {
1807
- logger5.warn(
1929
+ logger6.warn(
1808
1930
  { sessionId: this.sessionId },
1809
1931
  "No stream helper found - cannot send status update"
1810
1932
  );
@@ -1825,7 +1947,7 @@ var GraphSession = class {
1825
1947
  if (result.operations && result.operations.length > 0) {
1826
1948
  for (const op of result.operations) {
1827
1949
  if (!op || !op.type || !op.data || Object.keys(op.data).length === 0) {
1828
- logger5.warn(
1950
+ logger6.warn(
1829
1951
  {
1830
1952
  sessionId: this.sessionId,
1831
1953
  operation: op
@@ -1878,7 +2000,7 @@ var GraphSession = class {
1878
2000
  this.previousSummaries.shift();
1879
2001
  }
1880
2002
  if (!operation || !operation.type || !operation.ctx) {
1881
- logger5.warn(
2003
+ logger6.warn(
1882
2004
  {
1883
2005
  sessionId: this.sessionId,
1884
2006
  operation
@@ -1893,7 +2015,7 @@ var GraphSession = class {
1893
2015
  this.statusUpdateState.lastEventCount = this.events.length;
1894
2016
  }
1895
2017
  } catch (error) {
1896
- logger5.error(
2018
+ logger6.error(
1897
2019
  {
1898
2020
  sessionId: this.sessionId,
1899
2021
  error: error instanceof Error ? error.message : "Unknown error",
@@ -1931,7 +2053,7 @@ var GraphSession = class {
1931
2053
  this.releaseUpdateLock();
1932
2054
  }
1933
2055
  } catch (error) {
1934
- logger5.error(
2056
+ logger6.error(
1935
2057
  {
1936
2058
  sessionId: this.sessionId,
1937
2059
  error: error instanceof Error ? error.message : "Unknown error"
@@ -2008,7 +2130,7 @@ User's Question/Context:
2008
2130
  ${conversationHistory}
2009
2131
  ` : "";
2010
2132
  } catch (error) {
2011
- logger5.warn(
2133
+ logger6.warn(
2012
2134
  { sessionId: this.sessionId, error },
2013
2135
  "Failed to fetch conversation history for status update"
2014
2136
  );
@@ -2060,7 +2182,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2060
2182
  return text.trim();
2061
2183
  } catch (error) {
2062
2184
  agentsCore.setSpanWithError(span, error);
2063
- logger5.error({ error }, "Failed to generate summary, using fallback");
2185
+ logger6.error({ error }, "Failed to generate summary, using fallback");
2064
2186
  return this.generateFallbackSummary(newEvents, elapsedTime);
2065
2187
  } finally {
2066
2188
  span.end();
@@ -2106,7 +2228,7 @@ User's Question/Context:
2106
2228
  ${conversationHistory}
2107
2229
  ` : "";
2108
2230
  } catch (error) {
2109
- logger5.warn(
2231
+ logger6.warn(
2110
2232
  { sessionId: this.sessionId, error },
2111
2233
  "Failed to fetch conversation history for structured status update"
2112
2234
  );
@@ -2149,9 +2271,11 @@ Rules:
2149
2271
  - Labels MUST contain the ACTUAL information discovered ("Found X", "Learned Y", "Discovered Z requires A")
2150
2272
  - DO NOT use action words like "Searching", "Processing", "Analyzing" - state what was FOUND
2151
2273
  - Include specific details, numbers, requirements, or insights discovered
2152
- - You are ONE AI (no agents/delegations)
2153
- - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE INTERNAL OPERATIONS!
2154
- - Bad examples: "Searching docs", "Processing request", "Status update", or not using the no_relevant_updates: e.g. "No New Updates", "No new info to report"
2274
+ - You are ONE unified AI system - NEVER mention agents, transfers, delegations, or routing
2275
+ - CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", or any internal system terminology in labels
2276
+ - Present all operations as seamless actions by a single system
2277
+ - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE ALL INTERNAL OPERATIONS!
2278
+ - Bad examples: "Transferring to search agent", "Delegating task", "Routing request", "Processing request", or not using the no_relevant_updates
2155
2279
  - Good examples: "Slack bot needs admin privileges", "Found 3-step OAuth flow required", "Channel limit is 500 per workspace", or use the no_relevant_updates component if nothing new to report.
2156
2280
 
2157
2281
  REMEMBER YOU CAN ONLY USE 'no_relevant_updates' ALONE! IT CANNOT BE CONCATENATED WITH OTHER STATUS UPDATES!
@@ -2205,7 +2329,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2205
2329
  return { operations };
2206
2330
  } catch (error) {
2207
2331
  agentsCore.setSpanWithError(span, error);
2208
- logger5.error({ error }, "Failed to generate structured update, using fallback");
2332
+ logger6.error({ error }, "Failed to generate structured update, using fallback");
2209
2333
  return { operations: [] };
2210
2334
  } finally {
2211
2335
  span.end();
@@ -2312,8 +2436,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2312
2436
  case "transfer": {
2313
2437
  const data = event.data;
2314
2438
  activities.push(
2315
- `\u{1F504} **Transfer**: ${data.fromAgent} \u2192 ${data.targetAgent}
2316
- ${data.reason ? `Reason: ${data.reason}` : "Control transfer"}
2439
+ `\u{1F504} **Continuing**: ${data.reason || "Processing request"}
2317
2440
  ${data.context ? `Context: ${JSON.stringify(data.context, null, 2)}` : ""}`
2318
2441
  );
2319
2442
  break;
@@ -2321,8 +2444,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2321
2444
  case "delegation_sent": {
2322
2445
  const data = event.data;
2323
2446
  activities.push(
2324
- `\u{1F4E4} **Delegation Sent** [${data.delegationId}]: ${data.fromAgent} \u2192 ${data.targetAgent}
2325
- Task: ${data.taskDescription}
2447
+ `\u{1F4E4} **Processing**: ${data.taskDescription}
2326
2448
  ${data.context ? `Context: ${JSON.stringify(data.context, null, 2)}` : ""}`
2327
2449
  );
2328
2450
  break;
@@ -2330,7 +2452,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2330
2452
  case "delegation_returned": {
2331
2453
  const data = event.data;
2332
2454
  activities.push(
2333
- `\u{1F4E5} **Delegation Returned** [${data.delegationId}]: ${data.fromAgent} \u2190 ${data.targetAgent}
2455
+ `\u{1F4E5} **Completed subtask**
2334
2456
  Result: ${JSON.stringify(data.result, null, 2)}`
2335
2457
  );
2336
2458
  break;
@@ -2349,16 +2471,16 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2349
2471
  case "agent_reasoning": {
2350
2472
  const data = event.data;
2351
2473
  activities.push(
2352
- `\u2699\uFE0F **Reasoning**: reasoning
2353
- Full Details: ${JSON.stringify(data.parts, null, 2)}`
2474
+ `\u2699\uFE0F **Analyzing request**
2475
+ Details: ${JSON.stringify(data.parts, null, 2)}`
2354
2476
  );
2355
2477
  break;
2356
2478
  }
2357
2479
  case "agent_generate": {
2358
2480
  const data = event.data;
2359
2481
  activities.push(
2360
- `\u2699\uFE0F **Generation**: ${data.generationType}
2361
- Full Details: ${JSON.stringify(data.parts, null, 2)}`
2482
+ `\u2699\uFE0F **Preparing response**
2483
+ Details: ${JSON.stringify(data.parts, null, 2)}`
2362
2484
  );
2363
2485
  break;
2364
2486
  }
@@ -2532,7 +2654,7 @@ Make it specific and relevant.`;
2532
2654
  taskId: artifactData.taskId,
2533
2655
  artifacts: [artifactToSave]
2534
2656
  });
2535
- logger5.info(
2657
+ logger6.info(
2536
2658
  {
2537
2659
  sessionId: this.sessionId,
2538
2660
  artifactId: artifactData.artifactId,
@@ -2549,7 +2671,7 @@ Make it specific and relevant.`;
2549
2671
  span.setStatus({ code: api.SpanStatusCode.OK });
2550
2672
  } catch (error) {
2551
2673
  agentsCore.setSpanWithError(span, error);
2552
- logger5.error(
2674
+ logger6.error(
2553
2675
  {
2554
2676
  sessionId: this.sessionId,
2555
2677
  artifactId: artifactData.artifactId,
@@ -2585,7 +2707,7 @@ Make it specific and relevant.`;
2585
2707
  taskId: artifactData.taskId,
2586
2708
  artifacts: [fallbackArtifact]
2587
2709
  });
2588
- logger5.info(
2710
+ logger6.info(
2589
2711
  {
2590
2712
  sessionId: this.sessionId,
2591
2713
  artifactId: artifactData.artifactId
@@ -2594,7 +2716,7 @@ Make it specific and relevant.`;
2594
2716
  );
2595
2717
  }
2596
2718
  } catch (fallbackError) {
2597
- logger5.error(
2719
+ logger6.error(
2598
2720
  {
2599
2721
  sessionId: this.sessionId,
2600
2722
  artifactId: artifactData.artifactId,
@@ -2621,7 +2743,7 @@ var GraphSessionManager = class {
2621
2743
  const sessionId = messageId;
2622
2744
  const session = new GraphSession(sessionId, messageId, graphId, tenantId, projectId);
2623
2745
  this.sessions.set(sessionId, session);
2624
- logger5.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
2746
+ logger6.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
2625
2747
  return sessionId;
2626
2748
  }
2627
2749
  /**
@@ -2632,7 +2754,7 @@ var GraphSessionManager = class {
2632
2754
  if (session) {
2633
2755
  session.initializeStatusUpdates(config2, summarizerModel);
2634
2756
  } else {
2635
- logger5.error(
2757
+ logger6.error(
2636
2758
  {
2637
2759
  sessionId,
2638
2760
  availableSessions: Array.from(this.sessions.keys())
@@ -2653,7 +2775,7 @@ var GraphSessionManager = class {
2653
2775
  recordEvent(sessionId, eventType, agentId, data) {
2654
2776
  const session = this.sessions.get(sessionId);
2655
2777
  if (!session) {
2656
- logger5.warn({ sessionId }, "Attempted to record event in non-existent session");
2778
+ logger6.warn({ sessionId }, "Attempted to record event in non-existent session");
2657
2779
  return;
2658
2780
  }
2659
2781
  session.recordEvent(eventType, agentId, data);
@@ -2664,12 +2786,12 @@ var GraphSessionManager = class {
2664
2786
  endSession(sessionId) {
2665
2787
  const session = this.sessions.get(sessionId);
2666
2788
  if (!session) {
2667
- logger5.warn({ sessionId }, "Attempted to end non-existent session");
2789
+ logger6.warn({ sessionId }, "Attempted to end non-existent session");
2668
2790
  return [];
2669
2791
  }
2670
2792
  const events = session.getEvents();
2671
2793
  const summary = session.getSummary();
2672
- logger5.info({ sessionId, summary }, "GraphSession ended");
2794
+ logger6.info({ sessionId, summary }, "GraphSession ended");
2673
2795
  session.cleanup();
2674
2796
  this.sessions.delete(sessionId);
2675
2797
  return events;
@@ -2698,7 +2820,7 @@ var graphSessionManager = new GraphSessionManager();
2698
2820
 
2699
2821
  // src/utils/artifact-parser.ts
2700
2822
  init_dbClient();
2701
- var logger6 = agentsCore.getLogger("ArtifactParser");
2823
+ var logger7 = agentsCore.getLogger("ArtifactParser");
2702
2824
  var _ArtifactParser = class _ArtifactParser {
2703
2825
  constructor(tenantId) {
2704
2826
  this.tenantId = tenantId;
@@ -2714,9 +2836,7 @@ var _ArtifactParser = class _ArtifactParser {
2714
2836
  * More robust detection that handles streaming fragments
2715
2837
  */
2716
2838
  hasIncompleteArtifact(text) {
2717
- return /^.*<(?:artifact(?::ref)?|a(?:r(?:t(?:i(?:f(?:a(?:c(?:t(?::(?:r(?:e(?:f)?)?)?)?)?)?)?)?)?)?)?)?$/.test(
2718
- text
2719
- ) || /^.*<artifact:ref(?:[^>]*)$/.test(text) || // Incomplete artifact:ref at end
2839
+ return /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/.test(text) || /<artifact:ref[^>]+$/.test(text) || // Incomplete artifact ref at end
2720
2840
  this.findSafeTextBoundary(text) < text.length;
2721
2841
  }
2722
2842
  /**
@@ -2725,10 +2845,10 @@ var _ArtifactParser = class _ArtifactParser {
2725
2845
  */
2726
2846
  findSafeTextBoundary(text) {
2727
2847
  const endPatterns = [
2728
- /^.*<artifact:ref(?:[^/>]+(?:[^>]*[^/])?)?$/,
2848
+ /<artifact:ref(?![^>]*\/>).*$/,
2729
2849
  // artifact:ref that doesn't end with />
2730
- /^.*<(?:artifact(?::ref)?|a(?:r(?:t(?:i(?:f(?:a(?:c(?:t(?::(?:r(?:e(?:f)?)?)?)?)?)?)?)?)?)?)?)?$/
2731
- // Safe partial artifact pattern
2850
+ /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/
2851
+ // Any partial artifact pattern at end
2732
2852
  ];
2733
2853
  for (const pattern of endPatterns) {
2734
2854
  const match = text.match(pattern);
@@ -2764,7 +2884,7 @@ var _ArtifactParser = class _ArtifactParser {
2764
2884
  id: taskId
2765
2885
  });
2766
2886
  if (!task) {
2767
- logger6.warn({ taskId }, "Task not found when fetching artifacts");
2887
+ logger7.warn({ taskId }, "Task not found when fetching artifacts");
2768
2888
  continue;
2769
2889
  }
2770
2890
  const taskArtifacts = await agentsCore.getLedgerArtifacts(dbClient_default)({
@@ -2776,9 +2896,9 @@ var _ArtifactParser = class _ArtifactParser {
2776
2896
  artifacts.set(key, artifact);
2777
2897
  }
2778
2898
  }
2779
- logger6.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
2899
+ logger7.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
2780
2900
  } catch (error) {
2781
- logger6.error({ error, contextId }, "Error loading context artifacts");
2901
+ logger7.error({ error, contextId }, "Error loading context artifacts");
2782
2902
  }
2783
2903
  return artifacts;
2784
2904
  }
@@ -2881,7 +3001,7 @@ var _ArtifactParser = class _ArtifactParser {
2881
3001
  id: taskId
2882
3002
  });
2883
3003
  if (!task) {
2884
- logger6.warn({ taskId }, "Task not found when fetching artifact");
3004
+ logger7.warn({ taskId }, "Task not found when fetching artifact");
2885
3005
  return null;
2886
3006
  }
2887
3007
  const artifacts = await agentsCore.getLedgerArtifacts(dbClient_default)({
@@ -2893,7 +3013,7 @@ var _ArtifactParser = class _ArtifactParser {
2893
3013
  return this.formatArtifactData(artifacts[0], artifactId, taskId);
2894
3014
  }
2895
3015
  } catch (error) {
2896
- logger6.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
3016
+ logger7.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
2897
3017
  }
2898
3018
  return null;
2899
3019
  }
@@ -2929,11 +3049,11 @@ var _ArtifactParser = class _ArtifactParser {
2929
3049
  __publicField(_ArtifactParser, "ARTIFACT_REGEX", /<artifact:ref\s+id="([^"]*?)"\s+task="([^"]*?)"\s*\/>/gs);
2930
3050
  __publicField(_ArtifactParser, "ARTIFACT_CHECK_REGEX", /<artifact:ref\s+(?=.*id="[^"]+")(?=.*task="[^"]+")[^>]*\/>/);
2931
3051
  // Regex for catching any partial artifact pattern (< + any prefix of "artifact:ref")
2932
- __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:(r(e(f?)?)?)?)?)?)?)?)?)?)?)?)?$/g);
3052
+ __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/g);
2933
3053
  var ArtifactParser = _ArtifactParser;
2934
3054
 
2935
3055
  // src/utils/incremental-stream-parser.ts
2936
- var logger7 = agentsCore.getLogger("IncrementalStreamParser");
3056
+ var logger8 = agentsCore.getLogger("IncrementalStreamParser");
2937
3057
  var IncrementalStreamParser = class {
2938
3058
  constructor(streamHelper, tenantId, contextId) {
2939
3059
  __publicField(this, "buffer", "");
@@ -2993,13 +3113,19 @@ var IncrementalStreamParser = class {
2993
3113
  if (part.type === "tool-call-delta" && part.toolName === targetToolName) {
2994
3114
  const delta = part.argsTextDelta || "";
2995
3115
  if (jsonBuffer.length + delta.length > MAX_BUFFER_SIZE) {
2996
- logger7.warn({ bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE }, "JSON buffer exceeded maximum size, truncating");
3116
+ logger8.warn(
3117
+ { bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE },
3118
+ "JSON buffer exceeded maximum size, truncating"
3119
+ );
2997
3120
  jsonBuffer = jsonBuffer.slice(-MAX_BUFFER_SIZE / 2);
2998
3121
  }
2999
3122
  jsonBuffer += delta;
3000
3123
  for (const char of delta) {
3001
3124
  if (componentBuffer.length > MAX_BUFFER_SIZE) {
3002
- logger7.warn({ bufferSize: componentBuffer.length, maxSize: MAX_BUFFER_SIZE }, "Component buffer exceeded maximum size, resetting");
3125
+ logger8.warn(
3126
+ { bufferSize: componentBuffer.length, maxSize: MAX_BUFFER_SIZE },
3127
+ "Component buffer exceeded maximum size, resetting"
3128
+ );
3003
3129
  componentBuffer = "";
3004
3130
  depth = 0;
3005
3131
  continue;
@@ -3014,7 +3140,7 @@ var IncrementalStreamParser = class {
3014
3140
  if (componentMatch) {
3015
3141
  const MAX_COMPONENT_SIZE = 1024 * 1024;
3016
3142
  if (componentMatch[0].length > MAX_COMPONENT_SIZE) {
3017
- logger7.warn(
3143
+ logger8.warn(
3018
3144
  {
3019
3145
  size: componentMatch[0].length,
3020
3146
  maxSize: MAX_COMPONENT_SIZE
@@ -3027,7 +3153,7 @@ var IncrementalStreamParser = class {
3027
3153
  try {
3028
3154
  const component = JSON.parse(componentMatch[0]);
3029
3155
  if (typeof component !== "object" || !component.id) {
3030
- logger7.warn({ component }, "Invalid component structure, skipping");
3156
+ logger8.warn({ component }, "Invalid component structure, skipping");
3031
3157
  componentBuffer = "";
3032
3158
  continue;
3033
3159
  }
@@ -3040,7 +3166,7 @@ var IncrementalStreamParser = class {
3040
3166
  componentsStreamed++;
3041
3167
  componentBuffer = "";
3042
3168
  } catch (e) {
3043
- logger7.debug({ error: e }, "Failed to parse component, continuing to accumulate");
3169
+ logger8.debug({ error: e }, "Failed to parse component, continuing to accumulate");
3044
3170
  }
3045
3171
  }
3046
3172
  }
@@ -3057,7 +3183,7 @@ var IncrementalStreamParser = class {
3057
3183
  break;
3058
3184
  }
3059
3185
  }
3060
- logger7.debug({ componentsStreamed }, "Finished streaming components");
3186
+ logger8.debug({ componentsStreamed }, "Finished streaming components");
3061
3187
  }
3062
3188
  /**
3063
3189
  * Legacy method for backward compatibility - defaults to text processing
@@ -3201,7 +3327,7 @@ var IncrementalStreamParser = class {
3201
3327
  };
3202
3328
 
3203
3329
  // src/utils/response-formatter.ts
3204
- var logger8 = agentsCore.getLogger("ResponseFormatter");
3330
+ var logger9 = agentsCore.getLogger("ResponseFormatter");
3205
3331
  var ResponseFormatter = class {
3206
3332
  constructor(tenantId) {
3207
3333
  __publicField(this, "artifactParser");
@@ -3232,7 +3358,7 @@ var ResponseFormatter = class {
3232
3358
  return { parts };
3233
3359
  } catch (error) {
3234
3360
  agentsCore.setSpanWithError(span, error);
3235
- logger8.error({ error, responseObject }, "Error formatting object response");
3361
+ logger9.error({ error, responseObject }, "Error formatting object response");
3236
3362
  return {
3237
3363
  parts: [{ kind: "data", data: responseObject }]
3238
3364
  };
@@ -3283,7 +3409,7 @@ var ResponseFormatter = class {
3283
3409
  return { parts };
3284
3410
  } catch (error) {
3285
3411
  agentsCore.setSpanWithError(span, error);
3286
- logger8.error({ error, responseText }, "Error formatting response");
3412
+ logger9.error({ error, responseText }, "Error formatting response");
3287
3413
  return { text: responseText };
3288
3414
  } finally {
3289
3415
  span.end();
@@ -3328,7 +3454,7 @@ var ResponseFormatter = class {
3328
3454
  }
3329
3455
  }
3330
3456
  };
3331
- var logger9 = agentsCore.getLogger("ToolSessionManager");
3457
+ var logger10 = agentsCore.getLogger("ToolSessionManager");
3332
3458
  var _ToolSessionManager = class _ToolSessionManager {
3333
3459
  // 5 minutes
3334
3460
  constructor() {
@@ -3357,7 +3483,7 @@ var _ToolSessionManager = class _ToolSessionManager {
3357
3483
  createdAt: Date.now()
3358
3484
  };
3359
3485
  this.sessions.set(sessionId, session);
3360
- logger9.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
3486
+ logger10.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
3361
3487
  return sessionId;
3362
3488
  }
3363
3489
  /**
@@ -3366,7 +3492,7 @@ var _ToolSessionManager = class _ToolSessionManager {
3366
3492
  recordToolResult(sessionId, toolResult) {
3367
3493
  const session = this.sessions.get(sessionId);
3368
3494
  if (!session) {
3369
- logger9.warn(
3495
+ logger10.warn(
3370
3496
  { sessionId, toolCallId: toolResult.toolCallId },
3371
3497
  "Tool result recorded for unknown session"
3372
3498
  );
@@ -3380,12 +3506,12 @@ var _ToolSessionManager = class _ToolSessionManager {
3380
3506
  getToolResult(sessionId, toolCallId) {
3381
3507
  const session = this.sessions.get(sessionId);
3382
3508
  if (!session) {
3383
- logger9.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
3509
+ logger10.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
3384
3510
  return void 0;
3385
3511
  }
3386
3512
  const result = session.toolResults.get(toolCallId);
3387
3513
  if (!result) {
3388
- logger9.warn(
3514
+ logger10.warn(
3389
3515
  {
3390
3516
  sessionId,
3391
3517
  toolCallId,
@@ -3424,10 +3550,10 @@ var _ToolSessionManager = class _ToolSessionManager {
3424
3550
  }
3425
3551
  for (const sessionId of expiredSessions) {
3426
3552
  this.sessions.delete(sessionId);
3427
- logger9.debug({ sessionId }, "Cleaned up expired tool session");
3553
+ logger10.debug({ sessionId }, "Cleaned up expired tool session");
3428
3554
  }
3429
3555
  if (expiredSessions.length > 0) {
3430
- logger9.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
3556
+ logger10.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
3431
3557
  }
3432
3558
  }
3433
3559
  };
@@ -3436,7 +3562,7 @@ var ToolSessionManager = _ToolSessionManager;
3436
3562
  var toolSessionManager = ToolSessionManager.getInstance();
3437
3563
 
3438
3564
  // src/agents/artifactTools.ts
3439
- var logger10 = agentsCore.getLogger("artifactTools");
3565
+ var logger11 = agentsCore.getLogger("artifactTools");
3440
3566
  function buildKeyNestingMap(data, prefix = "", map = /* @__PURE__ */ new Map()) {
3441
3567
  if (typeof data === "object" && data !== null) {
3442
3568
  if (Array.isArray(data)) {
@@ -3657,7 +3783,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3657
3783
  execute: async ({ toolCallId, baseSelector, propSelectors, ...rest }, _context) => {
3658
3784
  const artifactType = "artifactType" in rest ? rest.artifactType : void 0;
3659
3785
  if (!sessionId) {
3660
- logger10.warn({ toolCallId }, "No session ID provided to save_tool_result");
3786
+ logger11.warn({ toolCallId }, "No session ID provided to save_tool_result");
3661
3787
  return {
3662
3788
  saved: false,
3663
3789
  error: `[toolCallId: ${toolCallId}] No session context available`,
@@ -3667,7 +3793,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3667
3793
  }
3668
3794
  const toolResult = toolSessionManager.getToolResult(sessionId, toolCallId);
3669
3795
  if (!toolResult) {
3670
- logger10.warn({ toolCallId, sessionId }, "Tool result not found in session");
3796
+ logger11.warn({ toolCallId, sessionId }, "Tool result not found in session");
3671
3797
  return {
3672
3798
  saved: false,
3673
3799
  error: `[toolCallId: ${toolCallId}] Tool result not found`,
@@ -3680,7 +3806,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3680
3806
  const baseData = jmespath__default.default.search(parsedResult, baseSelector);
3681
3807
  if (!baseData || Array.isArray(baseData) && baseData.length === 0) {
3682
3808
  const debugInfo = analyzeSelectorFailure(parsedResult, baseSelector);
3683
- logger10.warn(
3809
+ logger11.warn(
3684
3810
  {
3685
3811
  baseSelector,
3686
3812
  toolCallId,
@@ -3723,7 +3849,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3723
3849
  const fallbackValue = item[propName];
3724
3850
  if (fallbackValue !== null && fallbackValue !== void 0) {
3725
3851
  extractedItem[propName] = fallbackValue;
3726
- logger10.info(
3852
+ logger11.info(
3727
3853
  { propName, propSelector, context },
3728
3854
  `PropSelector failed, used fallback direct property access`
3729
3855
  );
@@ -3735,7 +3861,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3735
3861
  const fallbackValue = item[propName];
3736
3862
  if (fallbackValue !== null && fallbackValue !== void 0) {
3737
3863
  extractedItem[propName] = fallbackValue;
3738
- logger10.warn(
3864
+ logger11.warn(
3739
3865
  { propName, propSelector, context, error: error.message },
3740
3866
  `PropSelector syntax error, used fallback direct property access`
3741
3867
  );
@@ -3848,7 +3974,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3848
3974
  warnings
3849
3975
  };
3850
3976
  } catch (error) {
3851
- logger10.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3977
+ logger11.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3852
3978
  return {
3853
3979
  saved: false,
3854
3980
  error: `[toolCallId: ${toolCallId}] ${error instanceof Error ? error.message : "Unknown error"}`,
@@ -3860,7 +3986,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3860
3986
  }
3861
3987
 
3862
3988
  // src/a2a/client.ts
3863
- var logger11 = agentsCore.getLogger("a2aClient");
3989
+ var logger12 = agentsCore.getLogger("a2aClient");
3864
3990
  var DEFAULT_BACKOFF = {
3865
3991
  initialInterval: 500,
3866
3992
  maxInterval: 6e4,
@@ -4066,7 +4192,7 @@ var A2AClient = class {
4066
4192
  try {
4067
4193
  const res = await fn();
4068
4194
  if (attempt > 0) {
4069
- logger11.info(
4195
+ logger12.info(
4070
4196
  {
4071
4197
  attempts: attempt + 1,
4072
4198
  elapsedTime: Date.now() - start
@@ -4081,7 +4207,7 @@ var A2AClient = class {
4081
4207
  }
4082
4208
  const elapsed = Date.now() - start;
4083
4209
  if (elapsed > maxElapsedTime) {
4084
- logger11.warn(
4210
+ logger12.warn(
4085
4211
  {
4086
4212
  attempts: attempt + 1,
4087
4213
  elapsedTime: elapsed,
@@ -4102,7 +4228,7 @@ var A2AClient = class {
4102
4228
  retryInterval = initialInterval * attempt ** exponent + Math.random() * 1e3;
4103
4229
  }
4104
4230
  const delayMs = Math.min(retryInterval, maxInterval);
4105
- logger11.info(
4231
+ logger12.info(
4106
4232
  {
4107
4233
  attempt: attempt + 1,
4108
4234
  delayMs,
@@ -4187,7 +4313,7 @@ var A2AClient = class {
4187
4313
  }
4188
4314
  const rpcResponse = await httpResponse.json();
4189
4315
  if (rpcResponse.id !== requestId2) {
4190
- logger11.warn(
4316
+ logger12.warn(
4191
4317
  {
4192
4318
  method,
4193
4319
  expectedId: requestId2,
@@ -4386,7 +4512,7 @@ var A2AClient = class {
4386
4512
  try {
4387
4513
  while (true) {
4388
4514
  const { done, value } = await reader.read();
4389
- logger11.info({ done, value }, "parseA2ASseStream");
4515
+ logger12.info({ done, value }, "parseA2ASseStream");
4390
4516
  if (done) {
4391
4517
  if (eventDataBuffer.trim()) {
4392
4518
  const result = this._processSseEventData(
@@ -4475,7 +4601,7 @@ var A2AClient = class {
4475
4601
  // src/agents/relationTools.ts
4476
4602
  init_conversations();
4477
4603
  init_dbClient();
4478
- var logger12 = agentsCore.getLogger("relationships Tools");
4604
+ var logger13 = agentsCore.getLogger("relationships Tools");
4479
4605
  var generateTransferToolDescription = (config2) => {
4480
4606
  return `Hand off the conversation to agent ${config2.id}.
4481
4607
 
@@ -4513,7 +4639,7 @@ var createTransferToAgentTool = ({
4513
4639
  "transfer.to_agent_id": transferConfig.id ?? "unknown"
4514
4640
  });
4515
4641
  }
4516
- logger12.info(
4642
+ logger13.info(
4517
4643
  {
4518
4644
  transferTo: transferConfig.id ?? "unknown",
4519
4645
  fromAgent: callingAgentId
@@ -4661,7 +4787,7 @@ function createDelegateToAgentTool({
4661
4787
  ...isInternal ? { fromAgentId: callingAgentId } : { fromExternalAgentId: callingAgentId }
4662
4788
  }
4663
4789
  };
4664
- logger12.info({ messageToSend }, "messageToSend");
4790
+ logger13.info({ messageToSend }, "messageToSend");
4665
4791
  await agentsCore.createMessage(dbClient_default)({
4666
4792
  id: nanoid.nanoid(),
4667
4793
  tenantId,
@@ -4723,7 +4849,7 @@ function createDelegateToAgentTool({
4723
4849
  }
4724
4850
 
4725
4851
  // src/agents/SystemPromptBuilder.ts
4726
- var logger13 = agentsCore.getLogger("SystemPromptBuilder");
4852
+ var logger14 = agentsCore.getLogger("SystemPromptBuilder");
4727
4853
  var SystemPromptBuilder = class {
4728
4854
  constructor(version, versionConfig) {
4729
4855
  this.version = version;
@@ -4739,9 +4865,12 @@ var SystemPromptBuilder = class {
4739
4865
  this.templates.set(name, content);
4740
4866
  }
4741
4867
  this.loaded = true;
4742
- logger13.debug({ templateCount: this.templates.size, version: this.version }, `Loaded ${this.templates.size} templates for version ${this.version}`);
4868
+ logger14.debug(
4869
+ { templateCount: this.templates.size, version: this.version },
4870
+ `Loaded ${this.templates.size} templates for version ${this.version}`
4871
+ );
4743
4872
  } catch (error) {
4744
- logger13.error({ error }, `Failed to load templates for version ${this.version}`);
4873
+ logger14.error({ error }, `Failed to load templates for version ${this.version}`);
4745
4874
  throw new Error(`Template loading failed: ${error}`);
4746
4875
  }
4747
4876
  }
@@ -5143,7 +5272,7 @@ function hasToolCallWithPrefix(prefix) {
5143
5272
  return false;
5144
5273
  };
5145
5274
  }
5146
- var logger14 = agentsCore.getLogger("Agent");
5275
+ var logger15 = agentsCore.getLogger("Agent");
5147
5276
  var CONSTANTS = {
5148
5277
  MAX_GENERATION_STEPS: 12,
5149
5278
  PHASE_1_TIMEOUT_MS: 27e4,
@@ -5396,14 +5525,14 @@ var Agent = class {
5396
5525
  for (const toolSet of tools) {
5397
5526
  for (const [toolName, originalTool] of Object.entries(toolSet)) {
5398
5527
  if (!isValidTool(originalTool)) {
5399
- logger14.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5528
+ logger15.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5400
5529
  continue;
5401
5530
  }
5402
5531
  const sessionWrappedTool = ai.tool({
5403
5532
  description: originalTool.description,
5404
5533
  inputSchema: originalTool.inputSchema,
5405
5534
  execute: async (args, { toolCallId }) => {
5406
- logger14.debug({ toolName, toolCallId }, "MCP Tool Called");
5535
+ logger15.debug({ toolName, toolCallId }, "MCP Tool Called");
5407
5536
  try {
5408
5537
  const result = await originalTool.execute(args, { toolCallId });
5409
5538
  toolSessionManager.recordToolResult(sessionId, {
@@ -5415,7 +5544,7 @@ var Agent = class {
5415
5544
  });
5416
5545
  return { result, toolCallId };
5417
5546
  } catch (error) {
5418
- logger14.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5547
+ logger15.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5419
5548
  throw error;
5420
5549
  }
5421
5550
  }
@@ -5500,7 +5629,7 @@ var Agent = class {
5500
5629
  selectedTools
5501
5630
  };
5502
5631
  }
5503
- logger14.info(
5632
+ logger15.info(
5504
5633
  {
5505
5634
  toolName: tool4.name,
5506
5635
  credentialReferenceId,
@@ -5540,7 +5669,7 @@ var Agent = class {
5540
5669
  async getResolvedContext(conversationId, requestContext) {
5541
5670
  try {
5542
5671
  if (!this.config.contextConfigId) {
5543
- logger14.debug({ graphId: this.config.graphId }, "No context config found for graph");
5672
+ logger15.debug({ graphId: this.config.graphId }, "No context config found for graph");
5544
5673
  return null;
5545
5674
  }
5546
5675
  const contextConfig = await agentsCore.getContextConfigById(dbClient_default)({
@@ -5548,7 +5677,7 @@ var Agent = class {
5548
5677
  id: this.config.contextConfigId
5549
5678
  });
5550
5679
  if (!contextConfig) {
5551
- logger14.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5680
+ logger15.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5552
5681
  return null;
5553
5682
  }
5554
5683
  if (!this.contextResolver) {
@@ -5565,7 +5694,7 @@ var Agent = class {
5565
5694
  $now: (/* @__PURE__ */ new Date()).toISOString(),
5566
5695
  $env: process.env
5567
5696
  };
5568
- logger14.debug(
5697
+ logger15.debug(
5569
5698
  {
5570
5699
  conversationId,
5571
5700
  contextConfigId: contextConfig.id,
@@ -5579,7 +5708,7 @@ var Agent = class {
5579
5708
  );
5580
5709
  return contextWithBuiltins;
5581
5710
  } catch (error) {
5582
- logger14.error(
5711
+ logger15.error(
5583
5712
  {
5584
5713
  conversationId,
5585
5714
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5603,7 +5732,7 @@ var Agent = class {
5603
5732
  });
5604
5733
  return graphDefinition?.graphPrompt || void 0;
5605
5734
  } catch (error) {
5606
- logger14.warn(
5735
+ logger15.warn(
5607
5736
  {
5608
5737
  graphId: this.config.graphId,
5609
5738
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5630,7 +5759,7 @@ var Agent = class {
5630
5759
  }
5631
5760
  return !!(graphDefinition.artifactComponents && Object.keys(graphDefinition.artifactComponents).length > 0);
5632
5761
  } catch (error) {
5633
- logger14.warn(
5762
+ logger15.warn(
5634
5763
  {
5635
5764
  graphId: this.config.graphId,
5636
5765
  tenantId: this.config.tenantId,
@@ -5690,7 +5819,7 @@ Key requirements:
5690
5819
  preserveUnresolved: false
5691
5820
  });
5692
5821
  } catch (error) {
5693
- logger14.error(
5822
+ logger15.error(
5694
5823
  {
5695
5824
  conversationId,
5696
5825
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5735,7 +5864,7 @@ Key requirements:
5735
5864
  preserveUnresolved: false
5736
5865
  });
5737
5866
  } catch (error) {
5738
- logger14.error(
5867
+ logger15.error(
5739
5868
  {
5740
5869
  conversationId,
5741
5870
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5763,7 +5892,7 @@ Key requirements:
5763
5892
  artifactId: z5.z.string().describe("The unique identifier of the artifact to get.")
5764
5893
  }),
5765
5894
  execute: async ({ artifactId }) => {
5766
- logger14.info({ artifactId }, "get_artifact executed");
5895
+ logger15.info({ artifactId }, "get_artifact executed");
5767
5896
  const artifact = await agentsCore.getLedgerArtifacts(dbClient_default)({
5768
5897
  scopes: {
5769
5898
  tenantId: this.config.tenantId,
@@ -5830,7 +5959,7 @@ Key requirements:
5830
5959
  graphId: this.config.graphId
5831
5960
  });
5832
5961
  } catch (error) {
5833
- logger14.error(
5962
+ logger15.error(
5834
5963
  { error, graphId: this.config.graphId },
5835
5964
  "Failed to check graph artifact components"
5836
5965
  );
@@ -5934,7 +6063,7 @@ Key requirements:
5934
6063
  const configuredTimeout = modelSettings.maxDuration ? Math.min(modelSettings.maxDuration * 1e3, MAX_ALLOWED_TIMEOUT_MS) : shouldStreamPhase1 ? CONSTANTS.PHASE_1_TIMEOUT_MS : CONSTANTS.NON_STREAMING_PHASE_1_TIMEOUT_MS;
5935
6064
  const timeoutMs = Math.min(configuredTimeout, MAX_ALLOWED_TIMEOUT_MS);
5936
6065
  if (modelSettings.maxDuration && modelSettings.maxDuration * 1e3 > MAX_ALLOWED_TIMEOUT_MS) {
5937
- logger14.warn(
6066
+ logger15.warn(
5938
6067
  {
5939
6068
  requestedTimeout: modelSettings.maxDuration * 1e3,
5940
6069
  appliedTimeout: timeoutMs,
@@ -5976,7 +6105,7 @@ Key requirements:
5976
6105
  }
5977
6106
  );
5978
6107
  } catch (error) {
5979
- logger14.debug({ error }, "Failed to track agent reasoning");
6108
+ logger15.debug({ error }, "Failed to track agent reasoning");
5980
6109
  }
5981
6110
  }
5982
6111
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -6059,7 +6188,7 @@ Key requirements:
6059
6188
  }
6060
6189
  );
6061
6190
  } catch (error) {
6062
- logger14.debug({ error }, "Failed to track agent reasoning");
6191
+ logger15.debug({ error }, "Failed to track agent reasoning");
6063
6192
  }
6064
6193
  }
6065
6194
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -6104,7 +6233,7 @@ Key requirements:
6104
6233
  return;
6105
6234
  }
6106
6235
  if (toolName === "save_artifact_tool" || toolName === "save_tool_result") {
6107
- logger14.info({ result }, "save_artifact_tool or save_tool_result");
6236
+ logger15.info({ result }, "save_artifact_tool or save_tool_result");
6108
6237
  if (result.output.artifacts) {
6109
6238
  for (const artifact of result.output.artifacts) {
6110
6239
  const artifactId = artifact?.artifactId || "N/A";
@@ -6278,7 +6407,9 @@ async function resolveModelConfig(graphId, agent) {
6278
6407
  summarizer: agent.models?.summarizer || project.models.summarizer || project.models.base
6279
6408
  };
6280
6409
  }
6281
- throw new Error("Base model configuration is required. Please configure models at the project level.");
6410
+ throw new Error(
6411
+ "Base model configuration is required. Please configure models at the project level."
6412
+ );
6282
6413
  }
6283
6414
 
6284
6415
  // src/agents/generateTaskHandler.ts
@@ -6292,7 +6423,7 @@ function parseEmbeddedJson(data) {
6292
6423
  }
6293
6424
  });
6294
6425
  }
6295
- var logger15 = agentsCore.getLogger("generateTaskHandler");
6426
+ var logger16 = agentsCore.getLogger("generateTaskHandler");
6296
6427
  var createTaskHandler = (config2, credentialStoreRegistry) => {
6297
6428
  return async (task) => {
6298
6429
  try {
@@ -6342,7 +6473,33 @@ var createTaskHandler = (config2, credentialStoreRegistry) => {
6342
6473
  agentId: config2.agentId
6343
6474
  })
6344
6475
  ]);
6345
- logger15.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
6476
+ logger16.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
6477
+ const enhancedInternalRelations = await Promise.all(
6478
+ internalRelations.map(async (relation) => {
6479
+ try {
6480
+ const relatedAgent = await agentsCore.getAgentById(dbClient_default)({
6481
+ scopes: { tenantId: config2.tenantId, projectId: config2.projectId },
6482
+ agentId: relation.id
6483
+ });
6484
+ if (relatedAgent) {
6485
+ const relatedAgentRelations = await agentsCore.getRelatedAgentsForGraph(dbClient_default)({
6486
+ scopes: { tenantId: config2.tenantId, projectId: config2.projectId },
6487
+ graphId: config2.graphId,
6488
+ agentId: relation.id
6489
+ });
6490
+ const enhancedDescription = generateDescriptionWithTransfers(
6491
+ relation.description || "",
6492
+ relatedAgentRelations.internalRelations,
6493
+ relatedAgentRelations.externalRelations
6494
+ );
6495
+ return { ...relation, description: enhancedDescription };
6496
+ }
6497
+ } catch (error) {
6498
+ logger16.warn({ agentId: relation.id, error }, "Failed to enhance agent description");
6499
+ }
6500
+ return relation;
6501
+ })
6502
+ );
6346
6503
  const agentPrompt = "prompt" in config2.agentSchema ? config2.agentSchema.prompt : "";
6347
6504
  const models = "models" in config2.agentSchema ? config2.agentSchema.models : void 0;
6348
6505
  const stopWhen = "stopWhen" in config2.agentSchema ? config2.agentSchema.stopWhen : void 0;
@@ -6359,7 +6516,7 @@ var createTaskHandler = (config2, credentialStoreRegistry) => {
6359
6516
  agentPrompt,
6360
6517
  models: models || void 0,
6361
6518
  stopWhen: stopWhen || void 0,
6362
- agentRelations: internalRelations.map((relation) => ({
6519
+ agentRelations: enhancedInternalRelations.map((relation) => ({
6363
6520
  id: relation.id,
6364
6521
  tenantId: config2.tenantId,
6365
6522
  projectId: config2.projectId,
@@ -6373,7 +6530,7 @@ var createTaskHandler = (config2, credentialStoreRegistry) => {
6373
6530
  agentRelations: [],
6374
6531
  transferRelations: []
6375
6532
  })),
6376
- transferRelations: internalRelations.filter((relation) => relation.relationType === "transfer").map((relation) => ({
6533
+ transferRelations: enhancedInternalRelations.filter((relation) => relation.relationType === "transfer").map((relation) => ({
6377
6534
  baseUrl: config2.baseUrl,
6378
6535
  apiKey: config2.apiKey,
6379
6536
  id: relation.id,
@@ -6389,7 +6546,7 @@ var createTaskHandler = (config2, credentialStoreRegistry) => {
6389
6546
  })),
6390
6547
  delegateRelations: [
6391
6548
  // Internal delegate relations
6392
- ...internalRelations.filter((relation) => relation.relationType === "delegate").map((relation) => ({
6549
+ ...enhancedInternalRelations.filter((relation) => relation.relationType === "delegate").map((relation) => ({
6393
6550
  type: "internal",
6394
6551
  config: {
6395
6552
  id: relation.id,
@@ -6442,7 +6599,7 @@ var createTaskHandler = (config2, credentialStoreRegistry) => {
6442
6599
  const taskIdMatch = task.id.match(/^task_([^-]+-[^-]+-\d+)-/);
6443
6600
  if (taskIdMatch) {
6444
6601
  contextId = taskIdMatch[1];
6445
- logger15.info(
6602
+ logger16.info(
6446
6603
  {
6447
6604
  taskId: task.id,
6448
6605
  extractedContextId: contextId,
@@ -6458,7 +6615,7 @@ var createTaskHandler = (config2, credentialStoreRegistry) => {
6458
6615
  const isDelegation = task.context?.metadata?.isDelegation === true;
6459
6616
  agent.setDelegationStatus(isDelegation);
6460
6617
  if (isDelegation) {
6461
- logger15.info(
6618
+ logger16.info(
6462
6619
  { agentId: config2.agentId, taskId: task.id },
6463
6620
  "Delegated agent - streaming disabled"
6464
6621
  );
@@ -6664,86 +6821,11 @@ async function getRegisteredGraph(executionContext) {
6664
6821
  const agentFrameworkBaseUrl = `${baseUrl}/agents`;
6665
6822
  return hydrateGraph({ dbGraph, baseUrl: agentFrameworkBaseUrl, apiKey });
6666
6823
  }
6667
- init_dbClient();
6668
- agentsCore.getLogger("agents");
6669
- async function hydrateAgent({
6670
- dbAgent,
6671
- graphId,
6672
- baseUrl,
6673
- apiKey,
6674
- credentialStoreRegistry
6675
- }) {
6676
- try {
6677
- const taskHandlerConfig = await createTaskHandlerConfig({
6678
- tenantId: dbAgent.tenantId,
6679
- projectId: dbAgent.projectId,
6680
- graphId,
6681
- agentId: dbAgent.id,
6682
- baseUrl,
6683
- apiKey
6684
- });
6685
- const taskHandler = createTaskHandler(taskHandlerConfig, credentialStoreRegistry);
6686
- const agentCard = {
6687
- name: dbAgent.name,
6688
- description: dbAgent.description || "AI Agent",
6689
- url: baseUrl ? `${baseUrl}/a2a` : "",
6690
- version: "1.0.0",
6691
- capabilities: {
6692
- streaming: true,
6693
- // Enable streaming for A2A compliance
6694
- pushNotifications: false,
6695
- stateTransitionHistory: false
6696
- },
6697
- defaultInputModes: ["text", "text/plain"],
6698
- defaultOutputModes: ["text", "text/plain"],
6699
- skills: [],
6700
- // Add provider info if available
6701
- ...baseUrl && {
6702
- provider: {
6703
- organization: "Inkeep",
6704
- url: baseUrl
6705
- }
6706
- }
6707
- };
6708
- return {
6709
- agentId: dbAgent.id,
6710
- tenantId: dbAgent.tenantId,
6711
- projectId: dbAgent.projectId,
6712
- graphId,
6713
- agentCard,
6714
- taskHandler
6715
- };
6716
- } catch (error) {
6717
- console.error(`\u274C Failed to hydrate agent ${dbAgent.id}:`, error);
6718
- throw error;
6719
- }
6720
- }
6721
- async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
6722
- const { tenantId, projectId, graphId, agentId, baseUrl, apiKey } = executionContext;
6723
- if (!agentId) {
6724
- throw new Error("Agent ID is required");
6725
- }
6726
- const dbAgent = await agentsCore.getAgentById(dbClient_default)({
6727
- scopes: { tenantId, projectId },
6728
- agentId
6729
- });
6730
- if (!dbAgent) {
6731
- return null;
6732
- }
6733
- const agentFrameworkBaseUrl = `${baseUrl}/agents`;
6734
- return hydrateAgent({
6735
- dbAgent,
6736
- graphId,
6737
- baseUrl: agentFrameworkBaseUrl,
6738
- credentialStoreRegistry,
6739
- apiKey
6740
- });
6741
- }
6742
6824
 
6743
6825
  // src/routes/agents.ts
6744
6826
  init_dbClient();
6745
6827
  var app = new zodOpenapi.OpenAPIHono();
6746
- var logger16 = agentsCore.getLogger("agents");
6828
+ var logger17 = agentsCore.getLogger("agents");
6747
6829
  app.openapi(
6748
6830
  zodOpenapi.createRoute({
6749
6831
  method: "get",
@@ -6781,7 +6863,7 @@ app.openapi(
6781
6863
  tracestate: c.req.header("tracestate"),
6782
6864
  baggage: c.req.header("baggage")
6783
6865
  };
6784
- logger16.info(
6866
+ logger17.info(
6785
6867
  {
6786
6868
  otelHeaders,
6787
6869
  path: c.req.path,
@@ -6792,7 +6874,7 @@ app.openapi(
6792
6874
  const executionContext = agentsCore.getRequestExecutionContext(c);
6793
6875
  const { tenantId, projectId, graphId, agentId } = executionContext;
6794
6876
  if (agentId) {
6795
- logger16.info(
6877
+ logger17.info(
6796
6878
  {
6797
6879
  message: "getRegisteredAgent (agent-level)",
6798
6880
  tenantId,
@@ -6804,13 +6886,13 @@ app.openapi(
6804
6886
  );
6805
6887
  const credentialStores = c.get("credentialStores");
6806
6888
  const agent = await getRegisteredAgent(executionContext, credentialStores);
6807
- logger16.info({ agent }, "agent registered: well-known agent.json");
6889
+ logger17.info({ agent }, "agent registered: well-known agent.json");
6808
6890
  if (!agent) {
6809
6891
  return c.json({ error: "Agent not found" }, 404);
6810
6892
  }
6811
6893
  return c.json(agent.agentCard);
6812
6894
  } else {
6813
- logger16.info(
6895
+ logger17.info(
6814
6896
  {
6815
6897
  message: "getRegisteredGraph (graph-level)",
6816
6898
  tenantId,
@@ -6833,7 +6915,7 @@ app.post("/a2a", async (c) => {
6833
6915
  tracestate: c.req.header("tracestate"),
6834
6916
  baggage: c.req.header("baggage")
6835
6917
  };
6836
- logger16.info(
6918
+ logger17.info(
6837
6919
  {
6838
6920
  otelHeaders,
6839
6921
  path: c.req.path,
@@ -6844,7 +6926,7 @@ app.post("/a2a", async (c) => {
6844
6926
  const executionContext = agentsCore.getRequestExecutionContext(c);
6845
6927
  const { tenantId, projectId, graphId, agentId } = executionContext;
6846
6928
  if (agentId) {
6847
- logger16.info(
6929
+ logger17.info(
6848
6930
  {
6849
6931
  message: "a2a (agent-level)",
6850
6932
  tenantId,
@@ -6868,7 +6950,7 @@ app.post("/a2a", async (c) => {
6868
6950
  }
6869
6951
  return a2aHandler(c, agent);
6870
6952
  } else {
6871
- logger16.info(
6953
+ logger17.info(
6872
6954
  {
6873
6955
  message: "a2a (graph-level)",
6874
6956
  tenantId,
@@ -6914,14 +6996,14 @@ init_dbClient();
6914
6996
 
6915
6997
  // src/a2a/transfer.ts
6916
6998
  init_dbClient();
6917
- var logger17 = agentsCore.getLogger("Transfer");
6999
+ var logger18 = agentsCore.getLogger("Transfer");
6918
7000
  async function executeTransfer({
6919
7001
  tenantId,
6920
7002
  threadId,
6921
7003
  projectId,
6922
7004
  targetAgentId
6923
7005
  }) {
6924
- logger17.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
7006
+ logger18.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
6925
7007
  await agentsCore.setActiveAgentForThread(dbClient_default)({
6926
7008
  scopes: { tenantId, projectId },
6927
7009
  threadId,
@@ -7116,7 +7198,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7116
7198
  __publicField(this, "queuedOperations", []);
7117
7199
  // Timing tracking for text sequences (text-end to text-start gap)
7118
7200
  __publicField(this, "lastTextEndTimestamp", 0);
7119
- __publicField(this, "TEXT_GAP_THRESHOLD", 1e3);
7201
+ __publicField(this, "TEXT_GAP_THRESHOLD", 50);
7120
7202
  // milliseconds - if gap between text sequences is less than this, queue operations
7121
7203
  // Connection management and forced cleanup
7122
7204
  __publicField(this, "connectionDropTimer");
@@ -7465,7 +7547,7 @@ function createMCPStreamHelper() {
7465
7547
 
7466
7548
  // src/handlers/executionHandler.ts
7467
7549
  init_dbClient();
7468
- var logger18 = agentsCore.getLogger("ExecutionHandler");
7550
+ var logger19 = agentsCore.getLogger("ExecutionHandler");
7469
7551
  var ExecutionHandler = class {
7470
7552
  constructor() {
7471
7553
  // Hardcoded error limit - separate from configurable stopWhen
@@ -7490,7 +7572,7 @@ var ExecutionHandler = class {
7490
7572
  const { tenantId, projectId, graphId, apiKey, baseUrl } = executionContext;
7491
7573
  registerStreamHelper(requestId2, sseHelper);
7492
7574
  graphSessionManager.createSession(requestId2, graphId, tenantId, projectId);
7493
- logger18.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
7575
+ logger19.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
7494
7576
  let graphConfig = null;
7495
7577
  try {
7496
7578
  graphConfig = await agentsCore.getFullGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
@@ -7502,7 +7584,7 @@ var ExecutionHandler = class {
7502
7584
  );
7503
7585
  }
7504
7586
  } catch (error) {
7505
- logger18.error(
7587
+ logger19.error(
7506
7588
  {
7507
7589
  error: error instanceof Error ? error.message : "Unknown error",
7508
7590
  stack: error instanceof Error ? error.stack : void 0
@@ -7518,7 +7600,7 @@ var ExecutionHandler = class {
7518
7600
  try {
7519
7601
  await sseHelper.writeOperation(agentInitializingOp(requestId2, graphId));
7520
7602
  const taskId = `task_${conversationId}-${requestId2}`;
7521
- logger18.info(
7603
+ logger19.info(
7522
7604
  { taskId, currentAgentId, conversationId, requestId: requestId2 },
7523
7605
  "Attempting to create or reuse existing task"
7524
7606
  );
@@ -7541,7 +7623,7 @@ var ExecutionHandler = class {
7541
7623
  agent_id: currentAgentId
7542
7624
  }
7543
7625
  });
7544
- logger18.info(
7626
+ logger19.info(
7545
7627
  {
7546
7628
  taskId,
7547
7629
  createdTaskMetadata: Array.isArray(task) ? task[0]?.metadata : task?.metadata
@@ -7550,27 +7632,27 @@ var ExecutionHandler = class {
7550
7632
  );
7551
7633
  } catch (error) {
7552
7634
  if (error?.message?.includes("UNIQUE constraint failed") || error?.message?.includes("PRIMARY KEY constraint failed") || error?.code === "SQLITE_CONSTRAINT_PRIMARYKEY") {
7553
- logger18.info(
7635
+ logger19.info(
7554
7636
  { taskId, error: error.message },
7555
7637
  "Task already exists, fetching existing task"
7556
7638
  );
7557
7639
  const existingTask = await agentsCore.getTask(dbClient_default)({ id: taskId });
7558
7640
  if (existingTask) {
7559
7641
  task = existingTask;
7560
- logger18.info(
7642
+ logger19.info(
7561
7643
  { taskId, existingTask },
7562
7644
  "Successfully reused existing task from race condition"
7563
7645
  );
7564
7646
  } else {
7565
- logger18.error({ taskId, error }, "Task constraint failed but task not found");
7647
+ logger19.error({ taskId, error }, "Task constraint failed but task not found");
7566
7648
  throw error;
7567
7649
  }
7568
7650
  } else {
7569
- logger18.error({ taskId, error }, "Failed to create task due to non-constraint error");
7651
+ logger19.error({ taskId, error }, "Failed to create task due to non-constraint error");
7570
7652
  throw error;
7571
7653
  }
7572
7654
  }
7573
- logger18.debug(
7655
+ logger19.debug(
7574
7656
  {
7575
7657
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7576
7658
  executionType: "create_initial_task",
@@ -7588,7 +7670,7 @@ var ExecutionHandler = class {
7588
7670
  const maxTransfers = graphConfig?.stopWhen?.transferCountIs ?? 10;
7589
7671
  while (iterations < maxTransfers) {
7590
7672
  iterations++;
7591
- logger18.info(
7673
+ logger19.info(
7592
7674
  { iterations, currentAgentId, graphId, conversationId, fromAgentId },
7593
7675
  `Execution loop iteration ${iterations} with agent ${currentAgentId}, transfer from: ${fromAgentId || "none"}`
7594
7676
  );
@@ -7596,10 +7678,10 @@ var ExecutionHandler = class {
7596
7678
  scopes: { tenantId, projectId },
7597
7679
  conversationId
7598
7680
  });
7599
- logger18.info({ activeAgent }, "activeAgent");
7681
+ logger19.info({ activeAgent }, "activeAgent");
7600
7682
  if (activeAgent && activeAgent.activeAgentId !== currentAgentId) {
7601
7683
  currentAgentId = activeAgent.activeAgentId;
7602
- logger18.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
7684
+ logger19.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
7603
7685
  }
7604
7686
  const agentBaseUrl = `${baseUrl}/agents`;
7605
7687
  const a2aClient = new A2AClient(agentBaseUrl, {
@@ -7640,13 +7722,13 @@ var ExecutionHandler = class {
7640
7722
  });
7641
7723
  if (!messageResponse?.result) {
7642
7724
  errorCount++;
7643
- logger18.error(
7725
+ logger19.error(
7644
7726
  { currentAgentId, iterations, errorCount },
7645
7727
  `No response from agent ${currentAgentId} on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
7646
7728
  );
7647
7729
  if (errorCount >= this.MAX_ERRORS) {
7648
7730
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7649
- logger18.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7731
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7650
7732
  await sseHelper.writeError(errorMessage2);
7651
7733
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7652
7734
  if (task) {
@@ -7672,7 +7754,7 @@ var ExecutionHandler = class {
7672
7754
  const transferResponse = messageResponse.result;
7673
7755
  const targetAgentId = transferResponse.artifacts?.[0]?.parts?.[0]?.data?.targetAgentId;
7674
7756
  const transferReason = transferResponse.artifacts?.[0]?.parts?.[1]?.text;
7675
- logger18.info({ targetAgentId, transferReason }, "transfer response");
7757
+ logger19.info({ targetAgentId, transferReason }, "transfer response");
7676
7758
  currentMessage = `<transfer_context> ${transferReason} </transfer_context>`;
7677
7759
  const { success, targetAgentId: newAgentId } = await executeTransfer({
7678
7760
  projectId,
@@ -7683,7 +7765,7 @@ var ExecutionHandler = class {
7683
7765
  if (success) {
7684
7766
  fromAgentId = currentAgentId;
7685
7767
  currentAgentId = newAgentId;
7686
- logger18.info(
7768
+ logger19.info(
7687
7769
  {
7688
7770
  transferFrom: fromAgentId,
7689
7771
  transferTo: currentAgentId,
@@ -7701,7 +7783,7 @@ var ExecutionHandler = class {
7701
7783
  const graphSessionData = graphSessionManager.getSession(requestId2);
7702
7784
  if (graphSessionData) {
7703
7785
  const sessionSummary = graphSessionData.getSummary();
7704
- logger18.info(sessionSummary, "GraphSession data after completion");
7786
+ logger19.info(sessionSummary, "GraphSession data after completion");
7705
7787
  }
7706
7788
  let textContent = "";
7707
7789
  for (const part of responseParts) {
@@ -7755,22 +7837,22 @@ var ExecutionHandler = class {
7755
7837
  }
7756
7838
  });
7757
7839
  const updateTaskEnd = Date.now();
7758
- logger18.info(
7840
+ logger19.info(
7759
7841
  { duration: updateTaskEnd - updateTaskStart },
7760
7842
  "Completed updateTask operation"
7761
7843
  );
7762
7844
  await sseHelper.writeOperation(completionOp(currentAgentId, iterations));
7763
7845
  await sseHelper.complete();
7764
- logger18.info({}, "Ending GraphSession and cleaning up");
7846
+ logger19.info({}, "Ending GraphSession and cleaning up");
7765
7847
  graphSessionManager.endSession(requestId2);
7766
- logger18.info({}, "Cleaning up streamHelper");
7848
+ logger19.info({}, "Cleaning up streamHelper");
7767
7849
  unregisterStreamHelper(requestId2);
7768
7850
  let response;
7769
7851
  if (sseHelper instanceof MCPStreamHelper) {
7770
7852
  const captured = sseHelper.getCapturedResponse();
7771
7853
  response = captured.text || "No response content";
7772
7854
  }
7773
- logger18.info({}, "ExecutionHandler returning success");
7855
+ logger19.info({}, "ExecutionHandler returning success");
7774
7856
  return { success: true, iterations, response };
7775
7857
  } catch (error) {
7776
7858
  agentsCore.setSpanWithError(span, error);
@@ -7781,13 +7863,13 @@ var ExecutionHandler = class {
7781
7863
  });
7782
7864
  }
7783
7865
  errorCount++;
7784
- logger18.warn(
7866
+ logger19.warn(
7785
7867
  { iterations, errorCount },
7786
7868
  `No valid response or transfer on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
7787
7869
  );
7788
7870
  if (errorCount >= this.MAX_ERRORS) {
7789
7871
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7790
- logger18.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7872
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7791
7873
  await sseHelper.writeError(errorMessage2);
7792
7874
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7793
7875
  if (task) {
@@ -7809,7 +7891,7 @@ var ExecutionHandler = class {
7809
7891
  }
7810
7892
  }
7811
7893
  const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
7812
- logger18.error({ maxTransfers, iterations }, errorMessage);
7894
+ logger19.error({ maxTransfers, iterations }, errorMessage);
7813
7895
  await sseHelper.writeError(errorMessage);
7814
7896
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
7815
7897
  if (task) {
@@ -7829,7 +7911,7 @@ var ExecutionHandler = class {
7829
7911
  unregisterStreamHelper(requestId2);
7830
7912
  return { success: false, error: errorMessage, iterations };
7831
7913
  } catch (error) {
7832
- logger18.error({ error }, "Error in execution handler");
7914
+ logger19.error({ error }, "Error in execution handler");
7833
7915
  const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
7834
7916
  await sseHelper.writeError(`Execution error: ${errorMessage}`);
7835
7917
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
@@ -7855,7 +7937,7 @@ var ExecutionHandler = class {
7855
7937
 
7856
7938
  // src/routes/chat.ts
7857
7939
  var app2 = new zodOpenapi.OpenAPIHono();
7858
- var logger19 = agentsCore.getLogger("completionsHandler");
7940
+ var logger20 = agentsCore.getLogger("completionsHandler");
7859
7941
  var chatCompletionsRoute = zodOpenapi.createRoute({
7860
7942
  method: "post",
7861
7943
  path: "/completions",
@@ -7973,7 +8055,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7973
8055
  tracestate: c.req.header("tracestate"),
7974
8056
  baggage: c.req.header("baggage")
7975
8057
  };
7976
- logger19.info(
8058
+ logger20.info(
7977
8059
  {
7978
8060
  otelHeaders,
7979
8061
  path: c.req.path,
@@ -8059,7 +8141,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8059
8141
  dbClient_default,
8060
8142
  credentialStores
8061
8143
  );
8062
- logger19.info(
8144
+ logger20.info(
8063
8145
  {
8064
8146
  tenantId,
8065
8147
  graphId,
@@ -8105,7 +8187,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8105
8187
  return streaming.streamSSE(c, async (stream2) => {
8106
8188
  const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
8107
8189
  await sseHelper.writeRole();
8108
- logger19.info({ agentId }, "Starting execution");
8190
+ logger20.info({ agentId }, "Starting execution");
8109
8191
  const executionHandler = new ExecutionHandler();
8110
8192
  const result = await executionHandler.execute({
8111
8193
  executionContext,
@@ -8115,7 +8197,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8115
8197
  requestId: requestId2,
8116
8198
  sseHelper
8117
8199
  });
8118
- logger19.info(
8200
+ logger20.info(
8119
8201
  { result },
8120
8202
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
8121
8203
  );
@@ -8151,7 +8233,7 @@ var chat_default = app2;
8151
8233
  // src/routes/chatDataStream.ts
8152
8234
  init_dbClient();
8153
8235
  var app3 = new zodOpenapi.OpenAPIHono();
8154
- var logger20 = agentsCore.getLogger("chatDataStream");
8236
+ var logger21 = agentsCore.getLogger("chatDataStream");
8155
8237
  var chatDataStreamRoute = zodOpenapi.createRoute({
8156
8238
  method: "post",
8157
8239
  path: "/chat",
@@ -8256,7 +8338,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8256
8338
  );
8257
8339
  const lastUserMessage = body.messages.filter((m) => m.role === "user").slice(-1)[0];
8258
8340
  const userText = typeof lastUserMessage?.content === "string" ? lastUserMessage.content : lastUserMessage?.parts?.map((p) => p.text).join("") || "";
8259
- logger20.info({ userText, lastUserMessage }, "userText");
8341
+ logger21.info({ userText, lastUserMessage }, "userText");
8260
8342
  const messageSpan = api.trace.getActiveSpan();
8261
8343
  if (messageSpan) {
8262
8344
  messageSpan.setAttributes({
@@ -8298,7 +8380,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8298
8380
  await streamHelper.writeError("Unable to process request");
8299
8381
  }
8300
8382
  } catch (err) {
8301
- logger20.error({ err }, "Streaming error");
8383
+ logger21.error({ err }, "Streaming error");
8302
8384
  await streamHelper.writeError("Internal server error");
8303
8385
  } finally {
8304
8386
  if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
@@ -8319,7 +8401,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8319
8401
  )
8320
8402
  );
8321
8403
  } catch (error) {
8322
- logger20.error({ error }, "chatDataStream error");
8404
+ logger21.error({ error }, "chatDataStream error");
8323
8405
  return c.json({ error: "Failed to process chat completion" }, 500);
8324
8406
  }
8325
8407
  });
@@ -8330,7 +8412,7 @@ init_dbClient();
8330
8412
  function createMCPSchema(schema) {
8331
8413
  return schema;
8332
8414
  }
8333
- var logger21 = agentsCore.getLogger("mcp");
8415
+ var logger22 = agentsCore.getLogger("mcp");
8334
8416
  var _MockResponseSingleton = class _MockResponseSingleton {
8335
8417
  constructor() {
8336
8418
  __publicField(this, "mockRes");
@@ -8385,21 +8467,21 @@ var createSpoofInitMessage = (mcpProtocolVersion) => ({
8385
8467
  id: 0
8386
8468
  });
8387
8469
  var spoofTransportInitialization = async (transport, req, sessionId, mcpProtocolVersion) => {
8388
- logger21.info({ sessionId }, "Spoofing initialization message to set transport state");
8470
+ logger22.info({ sessionId }, "Spoofing initialization message to set transport state");
8389
8471
  const spoofInitMessage = createSpoofInitMessage(mcpProtocolVersion);
8390
8472
  const mockRes = MockResponseSingleton.getInstance().getMockResponse();
8391
8473
  try {
8392
8474
  await transport.handleRequest(req, mockRes, spoofInitMessage);
8393
- logger21.info({ sessionId }, "Successfully spoofed initialization");
8475
+ logger22.info({ sessionId }, "Successfully spoofed initialization");
8394
8476
  } catch (spoofError) {
8395
- logger21.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
8477
+ logger22.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
8396
8478
  }
8397
8479
  };
8398
8480
  var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8399
8481
  const sessionId = req.headers["mcp-session-id"];
8400
- logger21.info({ sessionId }, "Received MCP session ID");
8482
+ logger22.info({ sessionId }, "Received MCP session ID");
8401
8483
  if (!sessionId) {
8402
- logger21.info({ body }, "Missing session ID");
8484
+ logger22.info({ body }, "Missing session ID");
8403
8485
  res.writeHead(400).end(
8404
8486
  JSON.stringify({
8405
8487
  jsonrpc: "2.0",
@@ -8425,7 +8507,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8425
8507
  scopes: { tenantId, projectId },
8426
8508
  conversationId: sessionId
8427
8509
  });
8428
- logger21.info(
8510
+ logger22.info(
8429
8511
  {
8430
8512
  sessionId,
8431
8513
  conversationFound: !!conversation,
@@ -8436,7 +8518,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8436
8518
  "Conversation lookup result"
8437
8519
  );
8438
8520
  if (!conversation || conversation.metadata?.sessionData?.sessionType !== "mcp" || conversation.metadata?.sessionData?.graphId !== graphId) {
8439
- logger21.info(
8521
+ logger22.info(
8440
8522
  { sessionId, conversationId: conversation?.id },
8441
8523
  "MCP session not found or invalid"
8442
8524
  );
@@ -8497,7 +8579,7 @@ var executeAgentQuery = async (executionContext, conversationId, query, defaultA
8497
8579
  requestId: requestId2,
8498
8580
  sseHelper: mcpStreamHelper
8499
8581
  });
8500
- logger21.info(
8582
+ logger22.info(
8501
8583
  { result },
8502
8584
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
8503
8585
  );
@@ -8571,7 +8653,7 @@ var getServer = async (requestContext, executionContext, conversationId, credent
8571
8653
  dbClient_default,
8572
8654
  credentialStores
8573
8655
  );
8574
- logger21.info(
8656
+ logger22.info(
8575
8657
  {
8576
8658
  tenantId,
8577
8659
  graphId,
@@ -8632,7 +8714,7 @@ var validateRequestParameters = (c) => {
8632
8714
  };
8633
8715
  var handleInitializationRequest = async (body, executionContext, validatedContext, req, res, c, credentialStores) => {
8634
8716
  const { tenantId, projectId, graphId } = executionContext;
8635
- logger21.info({ body }, "Received initialization request");
8717
+ logger22.info({ body }, "Received initialization request");
8636
8718
  const sessionId = nanoid.nanoid();
8637
8719
  const agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
8638
8720
  scopes: { tenantId, projectId },
@@ -8663,7 +8745,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8663
8745
  }
8664
8746
  }
8665
8747
  });
8666
- logger21.info(
8748
+ logger22.info(
8667
8749
  { sessionId, conversationId: conversation.id },
8668
8750
  "Created MCP session as conversation"
8669
8751
  );
@@ -8672,9 +8754,9 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8672
8754
  });
8673
8755
  const server = await getServer(validatedContext, executionContext, sessionId, credentialStores);
8674
8756
  await server.connect(transport);
8675
- logger21.info({ sessionId }, "Server connected for initialization");
8757
+ logger22.info({ sessionId }, "Server connected for initialization");
8676
8758
  res.setHeader("Mcp-Session-Id", sessionId);
8677
- logger21.info(
8759
+ logger22.info(
8678
8760
  {
8679
8761
  sessionId,
8680
8762
  bodyMethod: body?.method,
@@ -8683,7 +8765,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8683
8765
  "About to handle initialization request"
8684
8766
  );
8685
8767
  await transport.handleRequest(req, res, body);
8686
- logger21.info({ sessionId }, "Successfully handled initialization request");
8768
+ logger22.info({ sessionId }, "Successfully handled initialization request");
8687
8769
  return fetchToNode.toFetchResponse(res);
8688
8770
  };
8689
8771
  var handleExistingSessionRequest = async (body, executionContext, validatedContext, req, res, credentialStores) => {
@@ -8711,8 +8793,8 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
8711
8793
  sessionId,
8712
8794
  conversation.metadata?.session_data?.mcpProtocolVersion
8713
8795
  );
8714
- logger21.info({ sessionId }, "Server connected and transport initialized");
8715
- logger21.info(
8796
+ logger22.info({ sessionId }, "Server connected and transport initialized");
8797
+ logger22.info(
8716
8798
  {
8717
8799
  sessionId,
8718
8800
  bodyKeys: Object.keys(body || {}),
@@ -8726,9 +8808,9 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
8726
8808
  );
8727
8809
  try {
8728
8810
  await transport.handleRequest(req, res, body);
8729
- logger21.info({ sessionId }, "Successfully handled MCP request");
8811
+ logger22.info({ sessionId }, "Successfully handled MCP request");
8730
8812
  } catch (transportError) {
8731
- logger21.error(
8813
+ logger22.error(
8732
8814
  {
8733
8815
  sessionId,
8734
8816
  error: transportError,
@@ -8779,13 +8861,13 @@ app4.openapi(
8779
8861
  }
8780
8862
  const { executionContext } = paramValidation;
8781
8863
  const body = c.get("requestBody") || {};
8782
- logger21.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
8864
+ logger22.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
8783
8865
  const isInitRequest = body.method === "initialize";
8784
8866
  const { req, res } = fetchToNode.toReqRes(c.req.raw);
8785
8867
  const validatedContext = c.get("validatedContext") || {};
8786
8868
  const credentialStores = c.get("credentialStores");
8787
- logger21.info({ validatedContext }, "Validated context");
8788
- logger21.info({ req }, "request");
8869
+ logger22.info({ validatedContext }, "Validated context");
8870
+ logger22.info({ req }, "request");
8789
8871
  if (isInitRequest) {
8790
8872
  return await handleInitializationRequest(
8791
8873
  body,
@@ -8807,7 +8889,7 @@ app4.openapi(
8807
8889
  );
8808
8890
  }
8809
8891
  } catch (e) {
8810
- logger21.error(
8892
+ logger22.error(
8811
8893
  {
8812
8894
  error: e instanceof Error ? e.message : e,
8813
8895
  stack: e instanceof Error ? e.stack : void 0
@@ -8819,7 +8901,7 @@ app4.openapi(
8819
8901
  }
8820
8902
  );
8821
8903
  app4.get("/", async (c) => {
8822
- logger21.info({}, "Received GET MCP request");
8904
+ logger22.info({}, "Received GET MCP request");
8823
8905
  return c.json(
8824
8906
  {
8825
8907
  jsonrpc: "2.0",
@@ -8833,7 +8915,7 @@ app4.get("/", async (c) => {
8833
8915
  );
8834
8916
  });
8835
8917
  app4.delete("/", async (c) => {
8836
- logger21.info({}, "Received DELETE MCP request");
8918
+ logger22.info({}, "Received DELETE MCP request");
8837
8919
  return c.json(
8838
8920
  {
8839
8921
  jsonrpc: "2.0",
@@ -8844,7 +8926,7 @@ app4.delete("/", async (c) => {
8844
8926
  );
8845
8927
  });
8846
8928
  var mcp_default = app4;
8847
- var logger22 = agentsCore.getLogger("agents-run-api");
8929
+ var logger23 = agentsCore.getLogger("agents-run-api");
8848
8930
  function createExecutionHono(serverConfig, credentialStores) {
8849
8931
  const app6 = new zodOpenapi.OpenAPIHono();
8850
8932
  app6.use("*", otel.otel());
@@ -8860,7 +8942,7 @@ function createExecutionHono(serverConfig, credentialStores) {
8860
8942
  const body = await c.req.json();
8861
8943
  c.set("requestBody", body);
8862
8944
  } catch (error) {
8863
- logger22.debug({ error }, "Failed to parse JSON body, continuing without parsed body");
8945
+ logger23.debug({ error }, "Failed to parse JSON body, continuing without parsed body");
8864
8946
  }
8865
8947
  }
8866
8948
  return next();
@@ -8911,8 +8993,8 @@ function createExecutionHono(serverConfig, credentialStores) {
8911
8993
  if (!isExpectedError) {
8912
8994
  const errorMessage = err instanceof Error ? err.message : String(err);
8913
8995
  const errorStack = err instanceof Error ? err.stack : void 0;
8914
- if (logger22) {
8915
- logger22.error(
8996
+ if (logger23) {
8997
+ logger23.error(
8916
8998
  {
8917
8999
  error: err,
8918
9000
  message: errorMessage,
@@ -8924,8 +9006,8 @@ function createExecutionHono(serverConfig, credentialStores) {
8924
9006
  );
8925
9007
  }
8926
9008
  } else {
8927
- if (logger22) {
8928
- logger22.error(
9009
+ if (logger23) {
9010
+ logger23.error(
8929
9011
  {
8930
9012
  error: err,
8931
9013
  path: c.req.path,
@@ -8942,8 +9024,8 @@ function createExecutionHono(serverConfig, credentialStores) {
8942
9024
  const response = err.getResponse();
8943
9025
  return response;
8944
9026
  } catch (responseError) {
8945
- if (logger22) {
8946
- logger22.error({ error: responseError }, "Error while handling HTTPException response");
9027
+ if (logger23) {
9028
+ logger23.error({ error: responseError }, "Error while handling HTTPException response");
8947
9029
  }
8948
9030
  }
8949
9031
  }
@@ -8977,7 +9059,7 @@ function createExecutionHono(serverConfig, credentialStores) {
8977
9059
  app6.use("*", async (c, next) => {
8978
9060
  const executionContext = c.get("executionContext");
8979
9061
  if (!executionContext) {
8980
- logger22.debug({}, "Empty execution context");
9062
+ logger23.debug({}, "Empty execution context");
8981
9063
  return next();
8982
9064
  }
8983
9065
  const { tenantId, projectId, graphId } = executionContext;
@@ -8986,7 +9068,7 @@ function createExecutionHono(serverConfig, credentialStores) {
8986
9068
  if (requestBody) {
8987
9069
  conversationId = requestBody.conversationId;
8988
9070
  if (!conversationId) {
8989
- logger22.debug({ requestBody }, "No conversation ID found in request body");
9071
+ logger23.debug({ requestBody }, "No conversation ID found in request body");
8990
9072
  }
8991
9073
  }
8992
9074
  const entries = Object.fromEntries(
@@ -9001,7 +9083,7 @@ function createExecutionHono(serverConfig, credentialStores) {
9001
9083
  })
9002
9084
  );
9003
9085
  if (!Object.keys(entries).length) {
9004
- logger22.debug({}, "Empty entries for baggage");
9086
+ logger23.debug({}, "Empty entries for baggage");
9005
9087
  return next();
9006
9088
  }
9007
9089
  const bag = Object.entries(entries).reduce(