@inkeep/agents-run-api 0.1.10 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/dist/index.cjs +549 -463
  2. package/dist/index.js +549 -463
  3. package/package.json +6 -3
package/dist/index.js CHANGED
@@ -1,13 +1,14 @@
  import { env, __publicField, dbClient_default, getFormattedConversationHistory, createDefaultConversationHistoryConfig, saveA2AMessageResponse } from './chunk-HO5J26MO.js';
  import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
  import { BaggageSpanProcessor, ALLOW_ALL_BAGGAGE_KEYS } from '@opentelemetry/baggage-span-processor';
+ import { AsyncLocalStorageContextManager } from '@opentelemetry/context-async-hooks';
+ import { CompositePropagator, W3CTraceContextPropagator, W3CBaggagePropagator } from '@opentelemetry/core';
  import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
+ import { resourceFromAttributes } from '@opentelemetry/resources';
  import { NodeSDK } from '@opentelemetry/sdk-node';
  import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
  import { ATTR_SERVICE_NAME } from '@opentelemetry/semantic-conventions';
- import { resourceFromAttributes } from '@opentelemetry/resources';
- import { AsyncLocalStorageContextManager } from '@opentelemetry/context-async-hooks';
- import { getLogger, getTracer, HeadersScopeSchema, getRequestExecutionContext, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, getAgentGraph, createTask, updateTask, updateConversation, handleApiError, setSpanWithError, TaskState, setActiveAgentForThread, getConversation, getRelatedAgentsForGraph, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, validateAndGetApiKey, getProject, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getContextConfigById, getFullGraphDefinition, TemplateEngine, graphHasArtifactComponents, MCPTransportType, getExternalAgent } from '@inkeep/agents-core';
+ import { getLogger, getTracer, HeadersScopeSchema, getRequestExecutionContext, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, getAgentGraph, createTask, updateTask, setSpanWithError, updateConversation, handleApiError, TaskState, setActiveAgentForThread, getConversation, getRelatedAgentsForGraph, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, validateAndGetApiKey, getProject, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getContextConfigById, getFullGraphDefinition, TemplateEngine, graphHasArtifactComponents, MCPTransportType, getExternalAgent } from '@inkeep/agents-core';
  import { OpenAPIHono, createRoute, z as z$1 } from '@hono/zod-openapi';
  import { trace, propagation, context, SpanStatusCode } from '@opentelemetry/api';
  import { Hono } from 'hono';
@@ -23,12 +24,14 @@ import destr from 'destr';
  import traverse from 'traverse';
  import { createUIMessageStream, JsonToSseTransformStream, parsePartialJson, generateText, generateObject, tool, streamText } from 'ai';
  import { createAnthropic, anthropic } from '@ai-sdk/anthropic';
+ import { createGoogleGenerativeAI, google } from '@ai-sdk/google';
  import { createOpenAI, openai } from '@ai-sdk/openai';
  import jmespath from 'jmespath';
  import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
  import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js';
  import { z as z$2 } from 'zod/v3';
  import { toReqRes, toFetchResponse } from 'fetch-to-node';
+ import { otel } from '@hono/otel';

  var maxExportBatchSize = env.OTEL_MAX_EXPORT_BATCH_SIZE ?? (env.ENVIRONMENT === "development" ? 1 : 512);
  var otlpExporter = new OTLPTraceExporter();
@@ -41,6 +44,9 @@ var resource = resourceFromAttributes({
  var sdk = new NodeSDK({
  resource,
  contextManager: new AsyncLocalStorageContextManager(),
+ textMapPropagator: new CompositePropagator({
+ propagators: [new W3CTraceContextPropagator(), new W3CBaggagePropagator()]
+ }),
  spanProcessors: [new BaggageSpanProcessor(ALLOW_ALL_BAGGAGE_KEYS), batchProcessor],
  instrumentations: [
  getNodeAutoInstrumentations({
@@ -161,6 +167,7 @@ var apiKeyAuth = () => createMiddleware(async (c, next) => {
  return;
  } else if (apiKey) {
  const executionContext = await extractContextFromApiKey(apiKey);
+ executionContext.agentId = agentId;
  c.set("executionContext", executionContext);
  logger.info({}, "API key authenticated successfully");
  await next();
@@ -178,12 +185,14 @@ var apiKeyAuth = () => createMiddleware(async (c, next) => {
  }
  try {
  const executionContext = await extractContextFromApiKey(apiKey);
+ executionContext.agentId = agentId;
  c.set("executionContext", executionContext);
  logger.debug(
  {
  tenantId: executionContext.tenantId,
  projectId: executionContext.projectId,
- graphId: executionContext.graphId
+ graphId: executionContext.graphId,
+ agentId: executionContext.agentId
  },
  "API key authenticated successfully"
  );
@@ -850,6 +859,127 @@ async function handleTasksResubscribe(c, agent, request) {
  });
  }
  }
+ getLogger("agents");
+ function createAgentCard({
+ dbAgent,
+ baseUrl
+ }) {
+ const description = dbAgent.description || "AI Agent";
+ return {
+ name: dbAgent.name,
+ description,
+ url: baseUrl ? `${baseUrl}/a2a` : "",
+ version: "1.0.0",
+ capabilities: {
+ streaming: true,
+ // Enable streaming for A2A compliance
+ pushNotifications: false,
+ stateTransitionHistory: false
+ },
+ defaultInputModes: ["text", "text/plain"],
+ defaultOutputModes: ["text", "text/plain"],
+ skills: [],
+ // Add provider info if available
+ ...baseUrl && {
+ provider: {
+ organization: "Inkeep",
+ url: baseUrl
+ }
+ }
+ };
+ }
+ function generateDescriptionWithTransfers(baseDescription, internalRelations, externalRelations) {
+ const transfers = [
+ ...internalRelations.filter((rel) => rel.relationType === "transfer"),
+ ...externalRelations.filter((rel) => rel.relationType === "transfer")
+ ];
+ const delegates = [
+ ...internalRelations.filter((rel) => rel.relationType === "delegate"),
+ ...externalRelations.filter((rel) => rel.relationType === "delegate")
+ ];
+ if (transfers.length === 0 && delegates.length === 0) {
+ return baseDescription;
+ }
+ let enhancedDescription = baseDescription;
+ if (transfers.length > 0) {
+ const transferList = transfers.map((rel) => {
+ const name = rel.externalAgent?.name || rel.name;
+ const desc = rel.externalAgent?.description || rel.description || "";
+ return `- ${name}: ${desc}`;
+ }).join("\n");
+ enhancedDescription += `
+
+ Can transfer to:
+ ${transferList}`;
+ }
+ if (delegates.length > 0) {
+ const delegateList = delegates.map((rel) => {
+ const name = rel.externalAgent?.name || rel.name;
+ const desc = rel.externalAgent?.description || rel.description || "";
+ return `- ${name}: ${desc}`;
+ }).join("\n");
+ enhancedDescription += `
+
+ Can delegate to:
+ ${delegateList}`;
+ }
+ return enhancedDescription;
+ }
+ async function hydrateAgent({
+ dbAgent,
+ graphId,
+ baseUrl,
+ apiKey,
+ credentialStoreRegistry
+ }) {
+ try {
+ const taskHandlerConfig = await createTaskHandlerConfig({
+ tenantId: dbAgent.tenantId,
+ projectId: dbAgent.projectId,
+ graphId,
+ agentId: dbAgent.id,
+ baseUrl,
+ apiKey
+ });
+ const taskHandler = createTaskHandler(taskHandlerConfig, credentialStoreRegistry);
+ const agentCard = createAgentCard({
+ dbAgent,
+ baseUrl
+ });
+ return {
+ agentId: dbAgent.id,
+ tenantId: dbAgent.tenantId,
+ projectId: dbAgent.projectId,
+ graphId,
+ agentCard,
+ taskHandler
+ };
+ } catch (error) {
+ console.error(`\u274C Failed to hydrate agent ${dbAgent.id}:`, error);
+ throw error;
+ }
+ }
+ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
+ const { tenantId, projectId, graphId, agentId, baseUrl, apiKey } = executionContext;
+ if (!agentId) {
+ throw new Error("Agent ID is required");
+ }
+ const dbAgent = await getAgentById(dbClient_default)({
+ scopes: { tenantId, projectId },
+ agentId
+ });
+ if (!dbAgent) {
+ return null;
+ }
+ const agentFrameworkBaseUrl = `${baseUrl}/agents`;
+ return hydrateAgent({
+ dbAgent,
+ graphId,
+ baseUrl: agentFrameworkBaseUrl,
+ credentialStoreRegistry,
+ apiKey
+ });
+ }
  function agentInitializingOp(sessionId, graphId) {
  return {
  type: "agent_initializing",
@@ -886,10 +1016,10 @@ function statusUpdateOp(ctx) {
  ctx
  };
  }
- var logger3 = getLogger("DataComponentSchema");
+ var logger4 = getLogger("DataComponentSchema");
  function jsonSchemaToZod(jsonSchema) {
  if (!jsonSchema || typeof jsonSchema !== "object") {
- logger3.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
+ logger4.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
  return z.string();
  }
  switch (jsonSchema.type) {
@@ -916,7 +1046,7 @@ function jsonSchemaToZod(jsonSchema) {
  case "null":
  return z.null();
  default:
- logger3.warn(
+ logger4.warn(
  {
  unsupportedType: jsonSchema.type,
  schema: jsonSchema
@@ -970,8 +1100,40 @@ __publicField(_ArtifactReferenceSchema, "ARTIFACT_PROPS_SCHEMA", {
  required: ["artifact_id", "task_id"]
  });
  var ArtifactReferenceSchema = _ArtifactReferenceSchema;
- var logger4 = getLogger("ModelFactory");
+ var logger5 = getLogger("ModelFactory");
  var _ModelFactory = class _ModelFactory {
+ /**
+ * Create a provider instance with custom configuration
+ */
+ static createProvider(provider, config) {
+ switch (provider) {
+ case "anthropic":
+ return createAnthropic(config);
+ case "openai":
+ return createOpenAI(config);
+ case "google":
+ return createGoogleGenerativeAI(config);
+ default:
+ throw new Error(`Unsupported provider: ${provider}`);
+ }
+ }
+ /**
+ * Extract provider configuration from providerOptions
+ * Only includes settings that go to the provider constructor (baseURL, apiKey, etc.)
+ */
+ static extractProviderConfig(providerOptions) {
+ if (!providerOptions) {
+ return {};
+ }
+ const providerConfig = {};
+ if (providerOptions.baseUrl || providerOptions.baseURL) {
+ providerConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
+ }
+ if (providerOptions.gateway) {
+ Object.assign(providerConfig, providerOptions.gateway);
+ }
+ return providerConfig;
+ }
  /**
  * Create a language model instance from configuration
  * Throws error if no config provided - models must be configured at project level
@@ -985,7 +1147,7 @@ var _ModelFactory = class _ModelFactory {
  const modelSettings = config;
  const modelString = modelSettings.model.trim();
  const { provider, modelName } = _ModelFactory.parseModelString(modelString);
- logger4.debug(
+ logger5.debug(
  {
  provider,
  model: modelName,
@@ -994,49 +1156,40 @@ var _ModelFactory = class _ModelFactory {
  },
  "Creating language model from config"
  );
- try {
- switch (provider) {
- case "anthropic":
- return _ModelFactory.createAnthropicModel(modelName, modelSettings.providerOptions);
- case "openai":
- return _ModelFactory.createOpenAIModel(modelName, modelSettings.providerOptions);
- default:
- throw new Error(
- `Unsupported provider: ${provider}. Supported providers are: ${_ModelFactory.SUPPORTED_PROVIDERS.join(", ")}`
- );
- }
- } catch (error) {
- logger4.error(
- {
- provider,
- model: modelName,
- error: error instanceof Error ? error.message : "Unknown error"
- },
- "Failed to create model"
- );
- throw new Error(
- `Failed to create model ${modelString}: ${error instanceof Error ? error.message : "Unknown error"}`
- );
+ const providerConfig = _ModelFactory.extractProviderConfig(modelSettings.providerOptions);
+ if (Object.keys(providerConfig).length > 0) {
+ logger5.info({ config: providerConfig }, `Applying custom ${provider} provider configuration`);
+ const customProvider = _ModelFactory.createProvider(provider, providerConfig);
+ return customProvider.languageModel(modelName);
+ }
+ switch (provider) {
+ case "anthropic":
+ return anthropic(modelName);
+ case "openai":
+ return openai(modelName);
+ case "google":
+ return google(modelName);
+ default:
+ throw new Error(`Unsupported provider: ${provider}`);
  }
  }
  /**
  * Parse model string to extract provider and model name
- * Examples: "anthropic/claude-4-sonnet" -> { provider: "anthropic", modelName: "claude-4-sonnet" }
- * "claude-4-sonnet" -> { provider: "anthropic", modelName: "claude-4-sonnet" } (default to anthropic)
+ * Examples: "anthropic/claude-sonnet-4" -> { provider: "anthropic", modelName: "claude-sonnet-4" }
+ * "claude-sonnet-4" -> { provider: "anthropic", modelName: "claude-sonnet-4" } (default to anthropic)
  */
  static parseModelString(modelString) {
  if (modelString.includes("/")) {
  const [provider, ...modelParts] = modelString.split("/");
  const normalizedProvider = provider.toLowerCase();
  if (!_ModelFactory.SUPPORTED_PROVIDERS.includes(normalizedProvider)) {
- logger4.warn(
+ logger5.error(
  { provider: normalizedProvider, modelName: modelParts.join("/") },
  "Unsupported provider detected, falling back to anthropic"
  );
- return {
- provider: "anthropic",
- modelName: modelParts.join("/")
- };
+ throw new Error(
+ `Unsupported provider: ${normalizedProvider}. Please provide a model in the format of provider/model-name.`
+ );
  }
  return {
  provider: normalizedProvider,
@@ -1044,51 +1197,9 @@ var _ModelFactory = class _ModelFactory {
  // In case model name has slashes
  };
  }
- return {
- provider: "anthropic",
- modelName: modelString
- };
- }
- /**
- * Create an Anthropic model instance
- */
- static createAnthropicModel(modelName, providerOptions) {
- const anthropicConfig = {};
- if (providerOptions?.baseUrl || providerOptions?.baseURL) {
- anthropicConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
- }
- if (providerOptions?.gateway) {
- logger4.info(
- { gateway: providerOptions.gateway },
- "Setting up AI Gateway for Anthropic model"
- );
- Object.assign(anthropicConfig, providerOptions.gateway);
- }
- if (Object.keys(anthropicConfig).length > 0) {
- logger4.info({ config: anthropicConfig }, "Applying custom Anthropic provider configuration");
- const provider = createAnthropic(anthropicConfig);
- return provider(modelName);
- }
- return anthropic(modelName);
- }
- /**
- * Create an OpenAI model instance
- */
- static createOpenAIModel(modelName, providerOptions) {
- const openaiConfig = {};
- if (providerOptions?.baseUrl || providerOptions?.baseURL) {
- openaiConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
- }
- if (providerOptions?.gateway) {
- logger4.info({ gateway: providerOptions.gateway }, "Setting up AI Gateway for OpenAI model");
- Object.assign(openaiConfig, providerOptions.gateway);
- }
- if (Object.keys(openaiConfig).length > 0) {
- logger4.info({ config: openaiConfig }, "Applying custom OpenAI provider configuration");
- const provider = createOpenAI(openaiConfig);
- return provider(modelName);
- }
- return openai(modelName);
+ throw new Error(
+ `Invalid model provided: ${modelString}. Please provide a model in the format of provider/model-name.`
+ );
  }
  /**
  * Get generation parameters from provider options
@@ -1113,7 +1224,7 @@ var _ModelFactory = class _ModelFactory {
  * Includes maxDuration if specified in provider options (in seconds, following Vercel standard)
  */
  static prepareGenerationConfig(modelSettings) {
- const modelString = modelSettings?.model?.trim() || "anthropic/claude-4-sonnet-20250514";
+ const modelString = modelSettings?.model?.trim();
  const model = _ModelFactory.createModel({
  model: modelString,
  providerOptions: modelSettings?.providerOptions
@@ -1154,7 +1265,7 @@ var _ModelFactory = class _ModelFactory {
  /**
  * Supported providers for security validation
  */
- __publicField(_ModelFactory, "SUPPORTED_PROVIDERS", ["anthropic", "openai"]);
+ __publicField(_ModelFactory, "SUPPORTED_PROVIDERS", ["anthropic", "openai", "google"]);
  var ModelFactory = _ModelFactory;
  var tracer = getTracer("agents-run-api");

@@ -1174,7 +1285,7 @@ function unregisterStreamHelper(requestId2) {
  }

  // src/utils/graph-session.ts
- var logger5 = getLogger("GraphSession");
+ var logger6 = getLogger("GraphSession");
  var GraphSession = class {
  // Track scheduled timeouts for cleanup
  constructor(sessionId, messageId, graphId, tenantId, projectId) {
@@ -1198,7 +1309,7 @@ var GraphSession = class {
  __publicField(this, "MAX_PENDING_ARTIFACTS", 100);
  // Prevent unbounded growth
  __publicField(this, "scheduledTimeouts");
- logger5.debug({ sessionId, messageId, graphId }, "GraphSession created");
+ logger6.debug({ sessionId, messageId, graphId }, "GraphSession created");
  }
  /**
  * Initialize status updates for this session
@@ -1212,15 +1323,15 @@ var GraphSession = class {
  summarizerModel,
  baseModel,
  config: {
- numEvents: config.numEvents || 10,
- timeInSeconds: config.timeInSeconds || 30,
+ numEvents: config.numEvents || 1,
+ timeInSeconds: config.timeInSeconds || 2,
  ...config
  }
  };
  if (this.statusUpdateState.config.timeInSeconds) {
  this.statusUpdateTimer = setInterval(async () => {
  if (!this.statusUpdateState || this.isEnded) {
- logger5.debug(
+ logger6.debug(
  { sessionId: this.sessionId },
  "Timer triggered but session already cleaned up or ended"
  );
@@ -1232,7 +1343,7 @@ var GraphSession = class {
  }
  await this.checkAndSendTimeBasedUpdate();
  }, this.statusUpdateState.config.timeInSeconds * 1e3);
- logger5.info(
+ logger6.info(
  {
  sessionId: this.sessionId,
  intervalMs: this.statusUpdateState.config.timeInSeconds * 1e3
@@ -1246,7 +1357,7 @@ var GraphSession = class {
  */
  recordEvent(eventType, agentId, data) {
  if (this.isEnded) {
- logger5.debug(
+ logger6.debug(
  {
  sessionId: this.sessionId,
  eventType,
@@ -1266,7 +1377,7 @@ var GraphSession = class {
  if (eventType === "artifact_saved" && data.pendingGeneration) {
  const artifactId = data.artifactId;
  if (this.pendingArtifacts.size >= this.MAX_PENDING_ARTIFACTS) {
- logger5.warn(
+ logger6.warn(
  {
  sessionId: this.sessionId,
  artifactId,
@@ -1287,7 +1398,7 @@ var GraphSession = class {
  this.artifactProcessingErrors.set(artifactId, errorCount);
  if (errorCount >= this.MAX_ARTIFACT_RETRIES) {
  this.pendingArtifacts.delete(artifactId);
- logger5.error(
+ logger6.error(
  {
  sessionId: this.sessionId,
  artifactId,
@@ -1299,7 +1410,7 @@ var GraphSession = class {
  "Artifact processing failed after max retries, giving up"
  );
  } else {
- logger5.warn(
+ logger6.warn(
  {
  sessionId: this.sessionId,
  artifactId,
@@ -1321,14 +1432,14 @@ var GraphSession = class {
  */
  checkStatusUpdates() {
  if (this.isEnded) {
- logger5.debug(
+ logger6.debug(
  { sessionId: this.sessionId },
  "Session has ended - skipping status update check"
  );
  return;
  }
  if (!this.statusUpdateState) {
- logger5.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
+ logger6.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
  return;
  }
  const statusUpdateState = this.statusUpdateState;
@@ -1339,11 +1450,11 @@ var GraphSession = class {
  */
  async checkAndSendTimeBasedUpdate() {
  if (this.isEnded) {
- logger5.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
  return;
  }
  if (!this.statusUpdateState) {
- logger5.debug(
+ logger6.debug(
  { sessionId: this.sessionId },
  "No status updates configured for time-based check"
  );
@@ -1356,7 +1467,7 @@ var GraphSession = class {
  try {
  await this.generateAndSendUpdate();
  } catch (error) {
- logger5.error(
+ logger6.error(
  {
  sessionId: this.sessionId,
  error: error instanceof Error ? error.message : "Unknown error"
@@ -1449,29 +1560,29 @@ var GraphSession = class {
  */
  async generateAndSendUpdate() {
  if (this.isEnded) {
- logger5.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
  return;
  }
  if (this.isTextStreaming) {
- logger5.debug(
+ logger6.debug(
  { sessionId: this.sessionId },
  "Text is currently streaming - skipping status update"
  );
  return;
  }
  if (this.isGeneratingUpdate) {
- logger5.debug(
+ logger6.debug(
  { sessionId: this.sessionId },
  "Update already in progress - skipping duplicate generation"
  );
  return;
  }
  if (!this.statusUpdateState) {
- logger5.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
+ logger6.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
  return;
  }
  if (!this.graphId) {
- logger5.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
+ logger6.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
  return;
  }
  const newEventCount = this.events.length - this.statusUpdateState.lastEventCount;
@@ -1484,7 +1595,7 @@ var GraphSession = class {
  try {
  const streamHelper = getStreamHelper(this.sessionId);
  if (!streamHelper) {
- logger5.warn(
+ logger6.warn(
  { sessionId: this.sessionId },
  "No stream helper found - cannot send status update"
  );
@@ -1505,7 +1616,7 @@ var GraphSession = class {
  if (result.operations && result.operations.length > 0) {
  for (const op of result.operations) {
  if (!op || !op.type || !op.data || Object.keys(op.data).length === 0) {
- logger5.warn(
+ logger6.warn(
  {
  sessionId: this.sessionId,
  operation: op
@@ -1558,7 +1669,7 @@ var GraphSession = class {
  this.previousSummaries.shift();
  }
  if (!operation || !operation.type || !operation.ctx) {
- logger5.warn(
+ logger6.warn(
  {
  sessionId: this.sessionId,
  operation
@@ -1573,7 +1684,7 @@ var GraphSession = class {
  this.statusUpdateState.lastEventCount = this.events.length;
  }
  } catch (error) {
- logger5.error(
+ logger6.error(
  {
  sessionId: this.sessionId,
  error: error instanceof Error ? error.message : "Unknown error",
@@ -1611,7 +1722,7 @@ var GraphSession = class {
  this.releaseUpdateLock();
  }
  } catch (error) {
- logger5.error(
+ logger6.error(
  {
  sessionId: this.sessionId,
  error: error instanceof Error ? error.message : "Unknown error"
@@ -1688,7 +1799,7 @@ User's Question/Context:
  ${conversationHistory}
  ` : "";
  } catch (error) {
- logger5.warn(
+ logger6.warn(
  { sessionId: this.sessionId, error },
  "Failed to fetch conversation history for status update"
  );
@@ -1740,7 +1851,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  return text.trim();
  } catch (error) {
  setSpanWithError(span, error);
- logger5.error({ error }, "Failed to generate summary, using fallback");
+ logger6.error({ error }, "Failed to generate summary, using fallback");
  return this.generateFallbackSummary(newEvents, elapsedTime);
  } finally {
  span.end();
@@ -1786,7 +1897,7 @@ User's Question/Context:
  ${conversationHistory}
  ` : "";
  } catch (error) {
- logger5.warn(
+ logger6.warn(
  { sessionId: this.sessionId, error },
  "Failed to fetch conversation history for structured status update"
  );
@@ -1829,9 +1940,11 @@ Rules:
  - Labels MUST contain the ACTUAL information discovered ("Found X", "Learned Y", "Discovered Z requires A")
  - DO NOT use action words like "Searching", "Processing", "Analyzing" - state what was FOUND
  - Include specific details, numbers, requirements, or insights discovered
- - You are ONE AI (no agents/delegations)
- - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE INTERNAL OPERATIONS!
- - Bad examples: "Searching docs", "Processing request", "Status update", or not using the no_relevant_updates: e.g. "No New Updates", "No new info to report"
+ - You are ONE unified AI system - NEVER mention agents, transfers, delegations, or routing
+ - CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", or any internal system terminology in labels
+ - Present all operations as seamless actions by a single system
+ - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE ALL INTERNAL OPERATIONS!
+ - Bad examples: "Transferring to search agent", "Delegating task", "Routing request", "Processing request", or not using the no_relevant_updates
  - Good examples: "Slack bot needs admin privileges", "Found 3-step OAuth flow required", "Channel limit is 500 per workspace", or use the no_relevant_updates component if nothing new to report.

  REMEMBER YOU CAN ONLY USE 'no_relevant_updates' ALONE! IT CANNOT BE CONCATENATED WITH OTHER STATUS UPDATES!
@@ -1885,7 +1998,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  return { operations };
  } catch (error) {
  setSpanWithError(span, error);
- logger5.error({ error }, "Failed to generate structured update, using fallback");
+ logger6.error({ error }, "Failed to generate structured update, using fallback");
  return { operations: [] };
  } finally {
  span.end();
@@ -1992,8 +2105,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  case "transfer": {
  const data = event.data;
  activities.push(
- `\u{1F504} **Transfer**: ${data.fromAgent} \u2192 ${data.targetAgent}
- ${data.reason ? `Reason: ${data.reason}` : "Control transfer"}
+ `\u{1F504} **Continuing**: ${data.reason || "Processing request"}
  ${data.context ? `Context: ${JSON.stringify(data.context, null, 2)}` : ""}`
  );
  break;
@@ -2001,8 +2113,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  case "delegation_sent": {
  const data = event.data;
  activities.push(
- `\u{1F4E4} **Delegation Sent** [${data.delegationId}]: ${data.fromAgent} \u2192 ${data.targetAgent}
- Task: ${data.taskDescription}
+ `\u{1F4E4} **Processing**: ${data.taskDescription}
  ${data.context ? `Context: ${JSON.stringify(data.context, null, 2)}` : ""}`
  );
  break;
@@ -2010,7 +2121,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  case "delegation_returned": {
  const data = event.data;
  activities.push(
- `\u{1F4E5} **Delegation Returned** [${data.delegationId}]: ${data.fromAgent} \u2190 ${data.targetAgent}
+ `\u{1F4E5} **Completed subtask**
  Result: ${JSON.stringify(data.result, null, 2)}`
  );
  break;
@@ -2029,16 +2140,16 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  case "agent_reasoning": {
  const data = event.data;
  activities.push(
- `\u2699\uFE0F **Reasoning**: reasoning
- Full Details: ${JSON.stringify(data.parts, null, 2)}`
+ `\u2699\uFE0F **Analyzing request**
+ Details: ${JSON.stringify(data.parts, null, 2)}`
  );
  break;
  }
  case "agent_generate": {
  const data = event.data;
  activities.push(
- `\u2699\uFE0F **Generation**: ${data.generationType}
- Full Details: ${JSON.stringify(data.parts, null, 2)}`
+ `\u2699\uFE0F **Preparing response**
+ Details: ${JSON.stringify(data.parts, null, 2)}`
  );
  break;
  }
@@ -2212,7 +2323,7 @@ Make it specific and relevant.`;
  taskId: artifactData.taskId,
  artifacts: [artifactToSave]
  });
- logger5.info(
+ logger6.info(
  {
  sessionId: this.sessionId,
  artifactId: artifactData.artifactId,
@@ -2229,7 +2340,7 @@ Make it specific and relevant.`;
  span.setStatus({ code: SpanStatusCode.OK });
  } catch (error) {
  setSpanWithError(span, error);
- logger5.error(
+ logger6.error(
  {
  sessionId: this.sessionId,
  artifactId: artifactData.artifactId,
@@ -2265,7 +2376,7 @@ Make it specific and relevant.`;
  taskId: artifactData.taskId,
  artifacts: [fallbackArtifact]
  });
- logger5.info(
+ logger6.info(
  {
  sessionId: this.sessionId,
  artifactId: artifactData.artifactId
@@ -2274,7 +2385,7 @@ Make it specific and relevant.`;
  );
  }
  } catch (fallbackError) {
- logger5.error(
+ logger6.error(
  {
  sessionId: this.sessionId,
  artifactId: artifactData.artifactId,
@@ -2301,7 +2412,7 @@ var GraphSessionManager = class {
  const sessionId = messageId;
  const session = new GraphSession(sessionId, messageId, graphId, tenantId, projectId);
  this.sessions.set(sessionId, session);
- logger5.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
+ logger6.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
  return sessionId;
  }
  /**
@@ -2312,7 +2423,7 @@ var GraphSessionManager = class {
  if (session) {
  session.initializeStatusUpdates(config, summarizerModel);
  } else {
- logger5.error(
+ logger6.error(
  {
  sessionId,
  availableSessions: Array.from(this.sessions.keys())
@@ -2333,7 +2444,7 @@ var GraphSessionManager = class {
  recordEvent(sessionId, eventType, agentId, data) {
  const session = this.sessions.get(sessionId);
  if (!session) {
- logger5.warn({ sessionId }, "Attempted to record event in non-existent session");
+ logger6.warn({ sessionId }, "Attempted to record event in non-existent session");
  return;
  }
  session.recordEvent(eventType, agentId, data);
@@ -2344,12 +2455,12 @@ var GraphSessionManager = class {
  endSession(sessionId) {
  const session = this.sessions.get(sessionId);
  if (!session) {
- logger5.warn({ sessionId }, "Attempted to end non-existent session");
+ logger6.warn({ sessionId }, "Attempted to end non-existent session");
  return [];
  }
  const events = session.getEvents();
  const summary = session.getSummary();
- logger5.info({ sessionId, summary }, "GraphSession ended");
+ logger6.info({ sessionId, summary }, "GraphSession ended");
  session.cleanup();
  this.sessions.delete(sessionId);
  return events;
@@ -2375,7 +2486,7 @@ var GraphSessionManager = class {
  }
  };
  var graphSessionManager = new GraphSessionManager();
- var logger6 = getLogger("ArtifactParser");
+ var logger7 = getLogger("ArtifactParser");
  var _ArtifactParser = class _ArtifactParser {
  constructor(tenantId) {
  this.tenantId = tenantId;
@@ -2391,9 +2502,7 @@ var _ArtifactParser = class _ArtifactParser {
  * More robust detection that handles streaming fragments
  */
  hasIncompleteArtifact(text) {
- return /^.*<(?:artifact(?::ref)?|a(?:r(?:t(?:i(?:f(?:a(?:c(?:t(?::(?:r(?:e(?:f)?)?)?)?)?)?)?)?)?)?)?)?$/.test(
- text
- ) || /^.*<artifact:ref(?:[^>]*)$/.test(text) || // Incomplete artifact:ref at end
+ return /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/.test(text) || /<artifact:ref[^>]+$/.test(text) || // Incomplete artifact ref at end
  this.findSafeTextBoundary(text) < text.length;
  }
  /**
@@ -2402,10 +2511,10 @@ var _ArtifactParser = class _ArtifactParser {
  */
  findSafeTextBoundary(text) {
  const endPatterns = [
- /^.*<artifact:ref(?:[^/>]+(?:[^>]*[^/])?)?$/,
+ /<artifact:ref(?![^>]*\/>).*$/,
  // artifact:ref that doesn't end with />
- /^.*<(?:artifact(?::ref)?|a(?:r(?:t(?:i(?:f(?:a(?:c(?:t(?::(?:r(?:e(?:f)?)?)?)?)?)?)?)?)?)?)?)?$/
- // Safe partial artifact pattern
+ /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/
+ // Any partial artifact pattern at end
  ];
  for (const pattern of endPatterns) {
  const match = text.match(pattern);
@@ -2441,7 +2550,7 @@ var _ArtifactParser = class _ArtifactParser {
  id: taskId
  });
  if (!task) {
- logger6.warn({ taskId }, "Task not found when fetching artifacts");
+ logger7.warn({ taskId }, "Task not found when fetching artifacts");
  continue;
  }
  const taskArtifacts = await getLedgerArtifacts(dbClient_default)({
@@ -2453,9 +2562,9 @@ var _ArtifactParser = class _ArtifactParser {
  artifacts.set(key, artifact);
  }
  }
- logger6.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
+ logger7.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
  } catch (error) {
- logger6.error({ error, contextId }, "Error loading context artifacts");
+ logger7.error({ error, contextId }, "Error loading context artifacts");
  }
  return artifacts;
  }
@@ -2558,7 +2667,7 @@ var _ArtifactParser = class _ArtifactParser {
  id: taskId
  });
  if (!task) {
- logger6.warn({ taskId }, "Task not found when fetching artifact");
+ logger7.warn({ taskId }, "Task not found when fetching artifact");
  return null;
  }
  const artifacts = await getLedgerArtifacts(dbClient_default)({
@@ -2570,7 +2679,7 @@ var _ArtifactParser = class _ArtifactParser {
  return this.formatArtifactData(artifacts[0], artifactId, taskId);
  }
  } catch (error) {
- logger6.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
+ logger7.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
  }
  return null;
  }
@@ -2606,11 +2715,11 @@ var _ArtifactParser = class _ArtifactParser {
  __publicField(_ArtifactParser, "ARTIFACT_REGEX", /<artifact:ref\s+id="([^"]*?)"\s+task="([^"]*?)"\s*\/>/gs);
  __publicField(_ArtifactParser, "ARTIFACT_CHECK_REGEX", /<artifact:ref\s+(?=.*id="[^"]+")(?=.*task="[^"]+")[^>]*\/>/);
  // Regex for catching any partial artifact pattern (< + any prefix of "artifact:ref")
- __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:(r(e(f?)?)?)?)?)?)?)?)?)?)?)?$/g);
+ __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:?(r(e(f)?)?)?)?)?)?)?)?)?)?)?)?$/g);
  var ArtifactParser = _ArtifactParser;

  // src/utils/incremental-stream-parser.ts
- var logger7 = getLogger("IncrementalStreamParser");
+ var logger8 = getLogger("IncrementalStreamParser");
  var IncrementalStreamParser = class {
  constructor(streamHelper, tenantId, contextId) {
  __publicField(this, "buffer", "");
@@ -2670,13 +2779,19 @@ var IncrementalStreamParser = class {
  if (part.type === "tool-call-delta" && part.toolName === targetToolName) {
  const delta = part.argsTextDelta || "";
  if (jsonBuffer.length + delta.length > MAX_BUFFER_SIZE) {
- logger7.warn("JSON buffer exceeded maximum size, truncating");
+ logger8.warn(
+ { bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE },
+ "JSON buffer exceeded maximum size, truncating"
+ );
  jsonBuffer = jsonBuffer.slice(-MAX_BUFFER_SIZE / 2);
  }
  jsonBuffer += delta;
  for (const char of delta) {
  if (componentBuffer.length > MAX_BUFFER_SIZE) {
- logger7.warn("Component buffer exceeded maximum size, resetting");
+ logger8.warn(
+ { bufferSize: componentBuffer.length, maxSize: MAX_BUFFER_SIZE },
+ "Component buffer exceeded maximum size, resetting"
+ );
  componentBuffer = "";
  depth = 0;
  continue;
@@ -2691,7 +2806,7 @@ var IncrementalStreamParser = class {
  if (componentMatch) {
  const MAX_COMPONENT_SIZE = 1024 * 1024;
  if (componentMatch[0].length > MAX_COMPONENT_SIZE) {
- logger7.warn(
+ logger8.warn(
  {
  size: componentMatch[0].length,
  maxSize: MAX_COMPONENT_SIZE
@@ -2704,7 +2819,7 @@ var IncrementalStreamParser = class {
  try {
  const component = JSON.parse(componentMatch[0]);
  if (typeof component !== "object" || !component.id) {
- logger7.warn("Invalid component structure, skipping");
+ logger8.warn({ component }, "Invalid component structure, skipping");
  componentBuffer = "";
  continue;
  }
@@ -2717,7 +2832,7 @@ var IncrementalStreamParser = class {
  componentsStreamed++;
  componentBuffer = "";
  } catch (e) {
- logger7.debug({ error: e }, "Failed to parse component, continuing to accumulate");
+ logger8.debug({ error: e }, "Failed to parse component, continuing to accumulate");
  }
  }
  }
@@ -2734,7 +2849,7 @@ var IncrementalStreamParser = class {
  break;
  }
  }
- logger7.debug({ componentsStreamed }, "Finished streaming components");
+ logger8.debug({ componentsStreamed }, "Finished streaming components");
  }
  /**
  * Legacy method for backward compatibility - defaults to text processing
@@ -2878,7 +2993,7 @@ var IncrementalStreamParser = class {
  };

  // src/utils/response-formatter.ts
- var logger8 = getLogger("ResponseFormatter");
+ var logger9 = getLogger("ResponseFormatter");
  var ResponseFormatter = class {
  constructor(tenantId) {
  __publicField(this, "artifactParser");
@@ -2909,7 +3024,7 @@ var ResponseFormatter = class {
  return { parts };
  } catch (error) {
  setSpanWithError(span, error);
- logger8.error({ error, responseObject }, "Error formatting object response");
+ logger9.error({ error, responseObject }, "Error formatting object response");
  return {
  parts: [{ kind: "data", data: responseObject }]
  };
@@ -2960,7 +3075,7 @@ var ResponseFormatter = class {
  return { parts };
  } catch (error) {
  setSpanWithError(span, error);
- logger8.error({ error, responseText }, "Error formatting response");
+ logger9.error({ error, responseText }, "Error formatting response");
  return { text: responseText };
  } finally {
  span.end();
@@ -3005,7 +3120,7 @@ var ResponseFormatter = class {
  }
  }
  };
- var logger9 = getLogger("ToolSessionManager");
+ var logger10 = getLogger("ToolSessionManager");
  var _ToolSessionManager = class _ToolSessionManager {
  // 5 minutes
  constructor() {
@@ -3034,7 +3149,7 @@ var _ToolSessionManager = class _ToolSessionManager {
  createdAt: Date.now()
  };
  this.sessions.set(sessionId, session);
- logger9.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
+ logger10.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
  return sessionId;
  }
  /**
@@ -3043,7 +3158,7 @@ var _ToolSessionManager = class _ToolSessionManager {
  recordToolResult(sessionId, toolResult) {
  const session = this.sessions.get(sessionId);
  if (!session) {
- logger9.warn(
+ logger10.warn(
  { sessionId, toolCallId: toolResult.toolCallId },
  "Tool result recorded for unknown session"
  );
@@ -3057,12 +3172,12 @@ var _ToolSessionManager = class _ToolSessionManager {
  getToolResult(sessionId, toolCallId) {
  const session = this.sessions.get(sessionId);
  if (!session) {
- logger9.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
+ logger10.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
  return void 0;
  }
  const result = session.toolResults.get(toolCallId);
  if (!result) {
- logger9.warn(
+ logger10.warn(
  {
  sessionId,
  toolCallId,
@@ -3101,10 +3216,10 @@ var _ToolSessionManager = class _ToolSessionManager {
  }
  for (const sessionId of expiredSessions) {
  this.sessions.delete(sessionId);
- logger9.debug({ sessionId }, "Cleaned up expired tool session");
+ logger10.debug({ sessionId }, "Cleaned up expired tool session");
  }
  if (expiredSessions.length > 0) {
- logger9.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
+ logger10.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
  }
  }
  };
@@ -3113,7 +3228,7 @@ var ToolSessionManager = _ToolSessionManager;
  var toolSessionManager = ToolSessionManager.getInstance();

  // src/agents/artifactTools.ts
- var logger10 = getLogger("artifactTools");
+ var logger11 = getLogger("artifactTools");
  function buildKeyNestingMap(data, prefix = "", map = /* @__PURE__ */ new Map()) {
  if (typeof data === "object" && data !== null) {
  if (Array.isArray(data)) {
@@ -3216,7 +3331,7 @@ function createPropSelectorsSchema(artifactComponents) {
  Object.entries(summaryProps.properties).forEach(([propName, propDef]) => {
  const propDescription = propDef?.description || propDef?.title || `${propName} property`;
  propSchema[propName] = z4.string().describe(
- `JMESPath selector for ${propName} (${propDescription}) - summary version, relative to base selector`
+ `JMESPath selector for ${propName} (${propDescription}) - summary version, MUST be relative to your baseSelector target level. Access fields WITHIN the items your baseSelector returns.`
  );
  });
  }
@@ -3228,7 +3343,7 @@ function createPropSelectorsSchema(artifactComponents) {
  if (!propSchema[propName]) {
  const propDescription = propDef?.description || propDef?.title || `${propName} property`;
  propSchema[propName] = z4.string().describe(
- `JMESPath selector for ${propName} (${propDescription}) - full version, relative to base selector`
+ `JMESPath selector for ${propName} (${propDescription}) - MUST be relative to your baseSelector target level. If baseSelector stops at a document, this accesses fields WITHIN that document. Examples: "title", "content.body", "metadata.author"`
  );
  }
  });
@@ -3242,7 +3357,26 @@ function createPropSelectorsSchema(artifactComponents) {
  return z4.union(propSelectorSchemas);
  }
  return z4.record(z4.string(), z4.string()).describe(
- "Prop selectors mapping schema properties to JMESPath expressions relative to base selector"
+ `Prop selectors mapping schema properties to JMESPath expressions relative to base selector. Each path is relative to the item(s) your baseSelector returns.
+
+ \u{1F3AF} CRITICAL: PropSelectors work ONLY on the data your baseSelector returns!
+ If baseSelector = "result.docs[0]" \u2192 propSelectors access fields INSIDE that doc
+ If baseSelector = "result.docs[0].content[0]" \u2192 propSelectors access fields INSIDE that content item
+
+ \u2705 CORRECT EXAMPLES (paths relative to baseSelector target):
+ \u2022 baseSelector: "result.documents[?type=='article']" \u2192 propSelectors: {"title": "title", "url": "url"}
+ \u2022 baseSelector: "result.content[0].text" \u2192 propSelectors: {"content": "content[0].text", "source": "content[0].source"}
+ \u2022 baseSelector: "result.items" \u2192 propSelectors: {"name": "profile.name", "email": "contact.email"}
+
+ \u274C WRONG EXAMPLES (accessing data not at baseSelector level):
+ \u2022 baseSelector: "result.docs[0].content[0]" \u2192 propSelectors: {"title": "title"} \u2190 title is at doc level, not content level!
+ \u2022 baseSelector: "result.source.content" \u2192 propSelectors: {"title": "content[4].text"} \u2190 baseSelector ends at array, can't index into it!
+ \u2022 baseSelector: "result.items" \u2192 propSelectors: {"title": "documents[0].title"} \u2190 going deeper when baseSelector should handle depth
+
+ \u274C NEVER USE LITERAL VALUES:
+ {"title": "Robert Tran", "url": "https://linkedin.com/..."}
+
+ \u{1F4A1} TIP: Match your baseSelector depth to where the properties you need actually exist!`
  );
  }
  function createInputSchema(artifactComponents) {
@@ -3251,7 +3385,18 @@ function createInputSchema(artifactComponents) {
  "EXACT toolCallId from a previous tool execution - copy it exactly from the tool call result. NEVER invent or make up tool call IDs."
  ),
  baseSelector: z4.string().describe(
- `JMESPath selector to get to the main data array/object. ALWAYS start with "result." Example: "result.content[?type=='text']"`
+ `JMESPath selector to get to the main data array/object. ALWAYS start with "result." That is a mandatory prefix.
+
+ Data structures are COMPLEX and NESTED. Examples:
+ \u2022 "result.content[0].text.content[2]" - parsed JSON in text field
+ \u2022 "result.structuredContent.content[1]" - direct structured data
+ \u2022 "result.data.items[?type=='doc']" - filtered array
+
+ \u{1F6A8} CRITICAL: If you need data from array[4], your baseSelector must END at array[4], NOT at the array itself!
+ \u2705 CORRECT: "result.source.content[4]" \u2192 propSelectors can access fields in that item
+ \u274C WRONG: "result.source.content" \u2192 propSelectors can't use content[4] because baseSelector already selected the array
+
+ \u{1F525} IF YOUR PATH FAILS: READ THE ERROR MESSAGE! It tells you the correct path! \u{1F525}`
  ),
  propSelectors: createPropSelectorsSchema(artifactComponents)
  });
@@ -3270,6 +3415,9 @@ function createSaveToolResultTool(sessionId, streamRequestId, agentId, artifactC
  return tool({
  description: `Save tool results as structured artifacts. Each artifact should represent ONE SPECIFIC, IMPORTANT, and UNIQUE document or data item.

+ \u26A1 CRITICAL: JSON-like text content in tool results is AUTOMATICALLY PARSED into proper JSON objects - treat all data as structured, not text strings.
+ \u{1F6A8} CRITICAL: Data structures are deeply nested. When your path fails, READ THE ERROR MESSAGE - it shows the correct path!
+
  AVAILABLE ARTIFACT TYPES:
  ${availableTypesWithDescriptions}

@@ -3281,26 +3429,6 @@ Each artifact you save becomes a SEPARATE DATA COMPONENT in the structured respo
  \u2705 UNIQUE with distinct value from other artifacts
  \u2705 RENDERED AS INDIVIDUAL DATA COMPONENT in the UI

- \u274C DO NOT save multiple different items in one artifact unless they are EXTREMELY SIMILAR
- \u274C DO NOT batch unrelated items together - each item becomes its own data component
- \u274C DO NOT save generic collections - break them into individual data components
-
- \u{1F3AF} STRUCTURED DATA COMPONENT PRINCIPLE:
- Each artifact save creates ONE data component that will be rendered separately in the UI. If you have 5 important items, save them as 5 separate artifacts to create 5 separate data components for better user experience.
-
- THINK: "What is the ONE most important piece of information here that deserves its own data component?"
-
- EXAMPLES OF GOOD INDIVIDUAL ARTIFACTS (SEPARATE DATA COMPONENTS):
- - Nick Gomez's founder profile (specific person) \u2192 Individual data component
- - The /users/create API endpoint documentation (specific endpoint) \u2192 Individual data component
- - Error message for authentication failure (specific error type) \u2192 Individual data component
- - Configuration for Redis caching (specific config topic) \u2192 Individual data component
-
- EXAMPLES OF BAD BATCHING:
- \u274C "All team members" \u2192 Should be separate artifacts for each important member (separate data components)
- \u274C "All API endpoints" \u2192 Should be separate artifacts for each distinct endpoint (separate data components)
- \u274C "All error types" \u2192 Should be separate artifacts for each error category (separate data components)
-
  USAGE PATTERN:
  1. baseSelector: Navigate through nested structures to target ONE SPECIFIC item
  - Navigate through all necessary levels: "result.data.items.nested[?condition]"
@@ -3310,9 +3438,11 @@ USAGE PATTERN:
  - NOT: "result.items[*]" (too broad, gets everything)

  2. propSelectors: Extract properties relative to your selected item
- - Always relative to the single item that baseSelector returns
- - Simple paths from that item: { prop1: "field_x", prop2: "nested.field_y", prop3: "deep.nested.field_z" }
- - The tool handles array iteration - your selectors work on individual items
+ - \u{1F3AF} CRITICAL: Always relative to the single item that baseSelector returns
+ - If baseSelector ends at a document \u2192 propSelectors access document fields
+ - If baseSelector ends at content[0] \u2192 propSelectors access content[0] fields
+ - Simple paths from that exact level: { prop1: "field_x", prop2: "nested.field_y" }
+ - \u274C DON'T try to go back up or deeper - adjust your baseSelector instead!

  3. Result: ONE artifact representing ONE important, unique item \u2192 ONE data component

@@ -3321,20 +3451,12 @@ USAGE PATTERN:
3321
3451
  - Focus on getting to the right level with baseSelector, then keep propSelectors simple
3322
3452
  - Test your baseSelector: Does it return exactly the items you want?
3323
3453
 
3324
- \u26A0\uFE0F STRICT SELECTIVITY RULES FOR DATA COMPONENTS:
3325
- - ALWAYS ask: "Is this ONE specific, important thing that deserves its own data component?"
3326
- - If the answer is no, don't save it or find a more specific selector
3327
- - Multiple similar items = Multiple separate artifact saves (use the tool multiple times) \u2192 Multiple data components
3328
- - Each artifact should be independently valuable and uniquely identifiable \u2192 Each data component stands alone
3329
- - BETTER to save 3 individual, specific artifacts (3 data components) than 1 generic collection (1 data component)
3330
-
3331
- \u{1F504} MULTIPLE ARTIFACTS = MULTIPLE DATA COMPONENTS:
3332
- Remember: Each time you call this tool, you create a separate data component. Call it multiple times for multiple items to create a rich, structured response with individual data components for each important piece of information.`,
3454
+ Please use Error Messages to Debug when there is an error in the tool call.`,
3333
3455
  inputSchema,
3334
3456
  execute: async ({ toolCallId, baseSelector, propSelectors, ...rest }, _context) => {
3335
3457
  const artifactType = "artifactType" in rest ? rest.artifactType : void 0;
3336
3458
  if (!sessionId) {
3337
- logger10.warn({ toolCallId }, "No session ID provided to save_tool_result");
3459
+ logger11.warn({ toolCallId }, "No session ID provided to save_tool_result");
3338
3460
  return {
3339
3461
  saved: false,
3340
3462
  error: `[toolCallId: ${toolCallId}] No session context available`,
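The baseSelector / propSelectors contract spelled out in the prompt text above is easier to see with a concrete call. A minimal sketch follows, assuming a hypothetical tool-result shape — the data and selector strings are illustrative only, not taken from the package; the one grounded detail is that the bundle evaluates both with `jmespath.search`:

```ts
import jmespath from "jmespath";

// Hypothetical recorded tool result for one toolCallId (shape assumed for illustration).
const parsedResult = {
  result: {
    data: {
      items: [
        { type: "profile", name: "Ada Example", bio: { summary: "Founder profile" } },
        { type: "doc", name: "API reference" },
      ],
    },
  },
};

// baseSelector: navigate to exactly ONE specific item.
const baseSelector = "result.data.items[?type=='profile'] | [0]";
const item = jmespath.search(parsedResult, baseSelector);

// propSelectors: simple paths evaluated relative to the item baseSelector returned.
const propSelectors = { name: "name", summary: "bio.summary" };
const extracted = Object.fromEntries(
  Object.entries(propSelectors).map(([prop, sel]) => [prop, jmespath.search(item, sel)])
);
// extracted => { name: "Ada Example", summary: "Founder profile" }
// Saving a second, different item means a second save_tool_result call → a second data component.
```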
@@ -3344,7 +3466,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3344
3466
  }
3345
3467
  const toolResult = toolSessionManager.getToolResult(sessionId, toolCallId);
3346
3468
  if (!toolResult) {
3347
- logger10.warn({ toolCallId, sessionId }, "Tool result not found in session");
3469
+ logger11.warn({ toolCallId, sessionId }, "Tool result not found in session");
3348
3470
  return {
3349
3471
  saved: false,
3350
3472
  error: `[toolCallId: ${toolCallId}] Tool result not found`,
@@ -3357,7 +3479,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3357
3479
  const baseData = jmespath.search(parsedResult, baseSelector);
3358
3480
  if (!baseData || Array.isArray(baseData) && baseData.length === 0) {
3359
3481
  const debugInfo = analyzeSelectorFailure(parsedResult, baseSelector);
3360
- logger10.warn(
3482
+ logger11.warn(
3361
3483
  {
3362
3484
  baseSelector,
3363
3485
  toolCallId,
@@ -3400,7 +3522,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3400
3522
  const fallbackValue = item[propName];
3401
3523
  if (fallbackValue !== null && fallbackValue !== void 0) {
3402
3524
  extractedItem[propName] = fallbackValue;
3403
- logger10.info(
3525
+ logger11.info(
3404
3526
  { propName, propSelector, context },
3405
3527
  `PropSelector failed, used fallback direct property access`
3406
3528
  );
@@ -3412,7 +3534,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3412
3534
  const fallbackValue = item[propName];
3413
3535
  if (fallbackValue !== null && fallbackValue !== void 0) {
3414
3536
  extractedItem[propName] = fallbackValue;
3415
- logger10.warn(
3537
+ logger11.warn(
3416
3538
  { propName, propSelector, context, error: error.message },
3417
3539
  `PropSelector syntax error, used fallback direct property access`
3418
3540
  );
@@ -3525,7 +3647,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3525
3647
  warnings
3526
3648
  };
3527
3649
  } catch (error) {
3528
- logger10.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3650
+ logger11.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3529
3651
  return {
3530
3652
  saved: false,
3531
3653
  error: `[toolCallId: ${toolCallId}] ${error instanceof Error ? error.message : "Unknown error"}`,
@@ -3537,7 +3659,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3537
3659
  }
3538
3660
 
3539
3661
  // src/a2a/client.ts
3540
- var logger11 = getLogger("a2aClient");
3662
+ var logger12 = getLogger("a2aClient");
3541
3663
  var DEFAULT_BACKOFF = {
3542
3664
  initialInterval: 500,
3543
3665
  maxInterval: 6e4,
@@ -3743,7 +3865,7 @@ var A2AClient = class {
3743
3865
  try {
3744
3866
  const res = await fn();
3745
3867
  if (attempt > 0) {
3746
- logger11.info(
3868
+ logger12.info(
3747
3869
  {
3748
3870
  attempts: attempt + 1,
3749
3871
  elapsedTime: Date.now() - start
@@ -3758,7 +3880,7 @@ var A2AClient = class {
3758
3880
  }
3759
3881
  const elapsed = Date.now() - start;
3760
3882
  if (elapsed > maxElapsedTime) {
3761
- logger11.warn(
3883
+ logger12.warn(
3762
3884
  {
3763
3885
  attempts: attempt + 1,
3764
3886
  elapsedTime: elapsed,
@@ -3779,7 +3901,7 @@ var A2AClient = class {
3779
3901
  retryInterval = initialInterval * attempt ** exponent + Math.random() * 1e3;
3780
3902
  }
3781
3903
  const delayMs = Math.min(retryInterval, maxInterval);
3782
- logger11.info(
3904
+ logger12.info(
3783
3905
  {
3784
3906
  attempt: attempt + 1,
3785
3907
  delayMs,
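For reference, the retry delay produced by the loop above can be computed in isolation. `initialInterval` and `maxInterval` come from `DEFAULT_BACKOFF` earlier in this diff; the `exponent` value is not visible in these hunks, so the figure below is a placeholder, not the package's actual setting:

```ts
// Sketch of the A2AClient retry delay formula shown in the hunk above.
const initialInterval = 500; // ms (DEFAULT_BACKOFF.initialInterval)
const maxInterval = 60_000;  // ms (DEFAULT_BACKOFF.maxInterval, 6e4)
const exponent = 1.5;        // assumed for illustration; not shown in this diff

for (let attempt = 1; attempt <= 5; attempt++) {
  // retryInterval = initialInterval * attempt ** exponent + up to 1s of jitter
  const retryInterval = initialInterval * attempt ** exponent + Math.random() * 1000;
  const delayMs = Math.min(retryInterval, maxInterval);
  console.log(`attempt ${attempt}: wait ~${Math.round(delayMs)} ms`);
}
```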
@@ -3864,7 +3986,7 @@ var A2AClient = class {
3864
3986
  }
3865
3987
  const rpcResponse = await httpResponse.json();
3866
3988
  if (rpcResponse.id !== requestId2) {
3867
- logger11.warn(
3989
+ logger12.warn(
3868
3990
  {
3869
3991
  method,
3870
3992
  expectedId: requestId2,
@@ -4063,7 +4185,7 @@ var A2AClient = class {
4063
4185
  try {
4064
4186
  while (true) {
4065
4187
  const { done, value } = await reader.read();
4066
- logger11.info({ done, value }, "parseA2ASseStream");
4188
+ logger12.info({ done, value }, "parseA2ASseStream");
4067
4189
  if (done) {
4068
4190
  if (eventDataBuffer.trim()) {
4069
4191
  const result = this._processSseEventData(
@@ -4150,7 +4272,7 @@ var A2AClient = class {
4150
4272
  };
4151
4273
 
4152
4274
  // src/agents/relationTools.ts
4153
- var logger12 = getLogger("relationships Tools");
4275
+ var logger13 = getLogger("relationships Tools");
4154
4276
  var generateTransferToolDescription = (config) => {
4155
4277
  return `Hand off the conversation to agent ${config.id}.
4156
4278
 
@@ -4188,7 +4310,7 @@ var createTransferToAgentTool = ({
4188
4310
  "transfer.to_agent_id": transferConfig.id ?? "unknown"
4189
4311
  });
4190
4312
  }
4191
- logger12.info(
4313
+ logger13.info(
4192
4314
  {
4193
4315
  transferTo: transferConfig.id ?? "unknown",
4194
4316
  fromAgent: callingAgentId
@@ -4298,7 +4420,7 @@ function createDelegateToAgentTool({
4298
4420
  }
4299
4421
  } else {
4300
4422
  resolvedHeaders = {
4301
- Authorization: `Bearer ${delegateConfig.config.apiKey}`,
4423
+ Authorization: `Bearer ${metadata.apiKey}`,
4302
4424
  "x-inkeep-tenant-id": tenantId,
4303
4425
  "x-inkeep-project-id": projectId,
4304
4426
  "x-inkeep-graph-id": graphId,
@@ -4336,7 +4458,7 @@ function createDelegateToAgentTool({
4336
4458
  ...isInternal ? { fromAgentId: callingAgentId } : { fromExternalAgentId: callingAgentId }
4337
4459
  }
4338
4460
  };
4339
- logger12.info({ messageToSend }, "messageToSend");
4461
+ logger13.info({ messageToSend }, "messageToSend");
4340
4462
  await createMessage(dbClient_default)({
4341
4463
  id: nanoid(),
4342
4464
  tenantId,
@@ -4398,7 +4520,7 @@ function createDelegateToAgentTool({
4398
4520
  }
4399
4521
 
4400
4522
  // src/agents/SystemPromptBuilder.ts
4401
- var logger13 = getLogger("SystemPromptBuilder");
4523
+ var logger14 = getLogger("SystemPromptBuilder");
4402
4524
  var SystemPromptBuilder = class {
4403
4525
  constructor(version, versionConfig) {
4404
4526
  this.version = version;
@@ -4414,9 +4536,12 @@ var SystemPromptBuilder = class {
4414
4536
  this.templates.set(name, content);
4415
4537
  }
4416
4538
  this.loaded = true;
4417
- logger13.debug(`Loaded ${this.templates.size} templates for version ${this.version}`);
4539
+ logger14.debug(
4540
+ { templateCount: this.templates.size, version: this.version },
4541
+ `Loaded ${this.templates.size} templates for version ${this.version}`
4542
+ );
4418
4543
  } catch (error) {
4419
- logger13.error({ error }, `Failed to load templates for version ${this.version}`);
4544
+ logger14.error({ error }, `Failed to load templates for version ${this.version}`);
4420
4545
  throw new Error(`Template loading failed: ${error}`);
4421
4546
  }
4422
4547
  }
@@ -4818,7 +4943,7 @@ function hasToolCallWithPrefix(prefix) {
4818
4943
  return false;
4819
4944
  };
4820
4945
  }
4821
- var logger14 = getLogger("Agent");
4946
+ var logger15 = getLogger("Agent");
4822
4947
  var CONSTANTS = {
4823
4948
  MAX_GENERATION_STEPS: 12,
4824
4949
  PHASE_1_TIMEOUT_MS: 27e4,
@@ -5071,14 +5196,14 @@ var Agent = class {
5071
5196
  for (const toolSet of tools) {
5072
5197
  for (const [toolName, originalTool] of Object.entries(toolSet)) {
5073
5198
  if (!isValidTool(originalTool)) {
5074
- logger14.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5199
+ logger15.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5075
5200
  continue;
5076
5201
  }
5077
5202
  const sessionWrappedTool = tool({
5078
5203
  description: originalTool.description,
5079
5204
  inputSchema: originalTool.inputSchema,
5080
5205
  execute: async (args, { toolCallId }) => {
5081
- logger14.debug({ toolName, toolCallId }, "MCP Tool Called");
5206
+ logger15.debug({ toolName, toolCallId }, "MCP Tool Called");
5082
5207
  try {
5083
5208
  const result = await originalTool.execute(args, { toolCallId });
5084
5209
  toolSessionManager.recordToolResult(sessionId, {
@@ -5090,7 +5215,7 @@ var Agent = class {
5090
5215
  });
5091
5216
  return { result, toolCallId };
5092
5217
  } catch (error) {
5093
- logger14.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5218
+ logger15.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5094
5219
  throw error;
5095
5220
  }
5096
5221
  }
@@ -5175,7 +5300,7 @@ var Agent = class {
5175
5300
  selectedTools
5176
5301
  };
5177
5302
  }
5178
- logger14.info(
5303
+ logger15.info(
5179
5304
  {
5180
5305
  toolName: tool4.name,
5181
5306
  credentialReferenceId,
@@ -5215,7 +5340,7 @@ var Agent = class {
5215
5340
  async getResolvedContext(conversationId, requestContext) {
5216
5341
  try {
5217
5342
  if (!this.config.contextConfigId) {
5218
- logger14.debug({ graphId: this.config.graphId }, "No context config found for graph");
5343
+ logger15.debug({ graphId: this.config.graphId }, "No context config found for graph");
5219
5344
  return null;
5220
5345
  }
5221
5346
  const contextConfig = await getContextConfigById(dbClient_default)({
@@ -5223,7 +5348,7 @@ var Agent = class {
5223
5348
  id: this.config.contextConfigId
5224
5349
  });
5225
5350
  if (!contextConfig) {
5226
- logger14.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5351
+ logger15.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5227
5352
  return null;
5228
5353
  }
5229
5354
  if (!this.contextResolver) {
@@ -5240,7 +5365,7 @@ var Agent = class {
5240
5365
  $now: (/* @__PURE__ */ new Date()).toISOString(),
5241
5366
  $env: process.env
5242
5367
  };
5243
- logger14.debug(
5368
+ logger15.debug(
5244
5369
  {
5245
5370
  conversationId,
5246
5371
  contextConfigId: contextConfig.id,
@@ -5254,7 +5379,7 @@ var Agent = class {
5254
5379
  );
5255
5380
  return contextWithBuiltins;
5256
5381
  } catch (error) {
5257
- logger14.error(
5382
+ logger15.error(
5258
5383
  {
5259
5384
  conversationId,
5260
5385
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5278,7 +5403,7 @@ var Agent = class {
5278
5403
  });
5279
5404
  return graphDefinition?.graphPrompt || void 0;
5280
5405
  } catch (error) {
5281
- logger14.warn(
5406
+ logger15.warn(
5282
5407
  {
5283
5408
  graphId: this.config.graphId,
5284
5409
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5305,7 +5430,7 @@ var Agent = class {
5305
5430
  }
5306
5431
  return !!(graphDefinition.artifactComponents && Object.keys(graphDefinition.artifactComponents).length > 0);
5307
5432
  } catch (error) {
5308
- logger14.warn(
5433
+ logger15.warn(
5309
5434
  {
5310
5435
  graphId: this.config.graphId,
5311
5436
  tenantId: this.config.tenantId,
@@ -5365,7 +5490,7 @@ Key requirements:
5365
5490
  preserveUnresolved: false
5366
5491
  });
5367
5492
  } catch (error) {
5368
- logger14.error(
5493
+ logger15.error(
5369
5494
  {
5370
5495
  conversationId,
5371
5496
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5410,7 +5535,7 @@ Key requirements:
5410
5535
  preserveUnresolved: false
5411
5536
  });
5412
5537
  } catch (error) {
5413
- logger14.error(
5538
+ logger15.error(
5414
5539
  {
5415
5540
  conversationId,
5416
5541
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5438,7 +5563,7 @@ Key requirements:
5438
5563
  artifactId: z.string().describe("The unique identifier of the artifact to get.")
5439
5564
  }),
5440
5565
  execute: async ({ artifactId }) => {
5441
- logger14.info({ artifactId }, "get_artifact executed");
5566
+ logger15.info({ artifactId }, "get_artifact executed");
5442
5567
  const artifact = await getLedgerArtifacts(dbClient_default)({
5443
5568
  scopes: {
5444
5569
  tenantId: this.config.tenantId,
@@ -5505,7 +5630,7 @@ Key requirements:
5505
5630
  graphId: this.config.graphId
5506
5631
  });
5507
5632
  } catch (error) {
5508
- logger14.error(
5633
+ logger15.error(
5509
5634
  { error, graphId: this.config.graphId },
5510
5635
  "Failed to check graph artifact components"
5511
5636
  );
@@ -5609,7 +5734,7 @@ Key requirements:
5609
5734
  const configuredTimeout = modelSettings.maxDuration ? Math.min(modelSettings.maxDuration * 1e3, MAX_ALLOWED_TIMEOUT_MS) : shouldStreamPhase1 ? CONSTANTS.PHASE_1_TIMEOUT_MS : CONSTANTS.NON_STREAMING_PHASE_1_TIMEOUT_MS;
5610
5735
  const timeoutMs = Math.min(configuredTimeout, MAX_ALLOWED_TIMEOUT_MS);
5611
5736
  if (modelSettings.maxDuration && modelSettings.maxDuration * 1e3 > MAX_ALLOWED_TIMEOUT_MS) {
5612
- logger14.warn(
5737
+ logger15.warn(
5613
5738
  {
5614
5739
  requestedTimeout: modelSettings.maxDuration * 1e3,
5615
5740
  appliedTimeout: timeoutMs,
@@ -5651,7 +5776,7 @@ Key requirements:
5651
5776
  }
5652
5777
  );
5653
5778
  } catch (error) {
5654
- logger14.debug("Failed to track agent reasoning");
5779
+ logger15.debug({ error }, "Failed to track agent reasoning");
5655
5780
  }
5656
5781
  }
5657
5782
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -5734,7 +5859,7 @@ Key requirements:
5734
5859
  }
5735
5860
  );
5736
5861
  } catch (error) {
5737
- logger14.debug("Failed to track agent reasoning");
5862
+ logger15.debug({ error }, "Failed to track agent reasoning");
5738
5863
  }
5739
5864
  }
5740
5865
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -5779,7 +5904,7 @@ Key requirements:
5779
5904
  return;
5780
5905
  }
5781
5906
  if (toolName === "save_artifact_tool" || toolName === "save_tool_result") {
5782
- logger14.info({ result }, "save_artifact_tool or save_tool_result");
5907
+ logger15.info({ result }, "save_artifact_tool or save_tool_result");
5783
5908
  if (result.output.artifacts) {
5784
5909
  for (const artifact of result.output.artifacts) {
5785
5910
  const artifactId = artifact?.artifactId || "N/A";
@@ -5854,7 +5979,7 @@ ${output}`;
5854
5979
  { role: "user", content: userMessage },
5855
5980
  ...reasoningFlow,
5856
5981
  {
5857
- role: "system",
5982
+ role: "user",
5858
5983
  content: await this.buildPhase2SystemPrompt()
5859
5984
  }
5860
5985
  ],
@@ -5950,7 +6075,9 @@ async function resolveModelConfig(graphId, agent) {
5950
6075
  summarizer: agent.models?.summarizer || project.models.summarizer || project.models.base
5951
6076
  };
5952
6077
  }
5953
- throw new Error("Base model configuration is required. Please configure models at the project level.");
6078
+ throw new Error(
6079
+ "Base model configuration is required. Please configure models at the project level."
6080
+ );
5954
6081
  }
5955
6082
 
5956
6083
  // src/agents/generateTaskHandler.ts
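The reformatted error above is the tail of `resolveModelConfig`, which falls back from agent-level model settings to project-level ones (the fallback chain is visible in the `summarizer` line of that hunk). A minimal sketch of the precedence, assuming a simple `{ base, summarizer }` shape — field names and value types beyond the two shown in the diff are assumptions:

```ts
// Sketch only: mirrors the fallback order visible in the resolveModelConfig hunk above.
interface ModelMap {
  base?: string;       // value type assumed for illustration
  summarizer?: string;
}

function resolveModels(agentModels: ModelMap | undefined, projectModels: ModelMap | undefined) {
  const base = agentModels?.base ?? projectModels?.base;
  if (!base) {
    // Same error the bundle throws when no base model is configured anywhere.
    throw new Error(
      "Base model configuration is required. Please configure models at the project level."
    );
  }
  return {
    base,
    summarizer: agentModels?.summarizer ?? projectModels?.summarizer ?? base,
  };
}

// Example: agent overrides nothing, project defines only a base model (name made up).
resolveModels(undefined, { base: "gpt-example" }); // => { base: "gpt-example", summarizer: "gpt-example" }
```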
@@ -5964,7 +6091,7 @@ function parseEmbeddedJson(data) {
5964
6091
  }
5965
6092
  });
5966
6093
  }
5967
- var logger15 = getLogger("generateTaskHandler");
6094
+ var logger16 = getLogger("generateTaskHandler");
5968
6095
  var createTaskHandler = (config, credentialStoreRegistry) => {
5969
6096
  return async (task) => {
5970
6097
  try {
@@ -6014,7 +6141,33 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6014
6141
  agentId: config.agentId
6015
6142
  })
6016
6143
  ]);
6017
- logger15.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
6144
+ logger16.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
6145
+ const enhancedInternalRelations = await Promise.all(
6146
+ internalRelations.map(async (relation) => {
6147
+ try {
6148
+ const relatedAgent = await getAgentById(dbClient_default)({
6149
+ scopes: { tenantId: config.tenantId, projectId: config.projectId },
6150
+ agentId: relation.id
6151
+ });
6152
+ if (relatedAgent) {
6153
+ const relatedAgentRelations = await getRelatedAgentsForGraph(dbClient_default)({
6154
+ scopes: { tenantId: config.tenantId, projectId: config.projectId },
6155
+ graphId: config.graphId,
6156
+ agentId: relation.id
6157
+ });
6158
+ const enhancedDescription = generateDescriptionWithTransfers(
6159
+ relation.description || "",
6160
+ relatedAgentRelations.internalRelations,
6161
+ relatedAgentRelations.externalRelations
6162
+ );
6163
+ return { ...relation, description: enhancedDescription };
6164
+ }
6165
+ } catch (error) {
6166
+ logger16.warn({ agentId: relation.id, error }, "Failed to enhance agent description");
6167
+ }
6168
+ return relation;
6169
+ })
6170
+ );
6018
6171
  const agentPrompt = "prompt" in config.agentSchema ? config.agentSchema.prompt : "";
6019
6172
  const models = "models" in config.agentSchema ? config.agentSchema.models : void 0;
6020
6173
  const stopWhen = "stopWhen" in config.agentSchema ? config.agentSchema.stopWhen : void 0;
@@ -6031,7 +6184,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6031
6184
  agentPrompt,
6032
6185
  models: models || void 0,
6033
6186
  stopWhen: stopWhen || void 0,
6034
- agentRelations: internalRelations.map((relation) => ({
6187
+ agentRelations: enhancedInternalRelations.map((relation) => ({
6035
6188
  id: relation.id,
6036
6189
  tenantId: config.tenantId,
6037
6190
  projectId: config.projectId,
@@ -6045,7 +6198,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6045
6198
  agentRelations: [],
6046
6199
  transferRelations: []
6047
6200
  })),
6048
- transferRelations: internalRelations.filter((relation) => relation.relationType === "transfer").map((relation) => ({
6201
+ transferRelations: enhancedInternalRelations.filter((relation) => relation.relationType === "transfer").map((relation) => ({
6049
6202
  baseUrl: config.baseUrl,
6050
6203
  apiKey: config.apiKey,
6051
6204
  id: relation.id,
@@ -6061,7 +6214,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6061
6214
  })),
6062
6215
  delegateRelations: [
6063
6216
  // Internal delegate relations
6064
- ...internalRelations.filter((relation) => relation.relationType === "delegate").map((relation) => ({
6217
+ ...enhancedInternalRelations.filter((relation) => relation.relationType === "delegate").map((relation) => ({
6065
6218
  type: "internal",
6066
6219
  config: {
6067
6220
  id: relation.id,
@@ -6114,7 +6267,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6114
6267
  const taskIdMatch = task.id.match(/^task_([^-]+-[^-]+-\d+)-/);
6115
6268
  if (taskIdMatch) {
6116
6269
  contextId = taskIdMatch[1];
6117
- logger15.info(
6270
+ logger16.info(
6118
6271
  {
6119
6272
  taskId: task.id,
6120
6273
  extractedContextId: contextId,
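The regex in the hunk above recovers a conversation id from a delegated task's id; elsewhere in this bundle, task ids are built as `task_${conversationId}-${requestId}`. A quick check of what the capture group yields — the ids below are made up for illustration:

```ts
// Pattern copied verbatim from the hunk above.
const pattern = /^task_([^-]+-[^-]+-\d+)-/;

// Hypothetical ids, assuming a conversation id shaped like "<seg>-<seg>-<digits>".
const taskId = "task_conv-abc123-1712345678-req_42";
const match = taskId.match(pattern);

console.log(match?.[1]); // "conv-abc123-1712345678" → used as the contextId / conversationId
```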
@@ -6130,7 +6283,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6130
6283
  const isDelegation = task.context?.metadata?.isDelegation === true;
6131
6284
  agent.setDelegationStatus(isDelegation);
6132
6285
  if (isDelegation) {
6133
- logger15.info(
6286
+ logger16.info(
6134
6287
  { agentId: config.agentId, taskId: task.id },
6135
6288
  "Delegated agent - streaming disabled"
6136
6289
  );
@@ -6142,7 +6295,8 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6142
6295
  taskId: task.id,
6143
6296
  threadId: contextId,
6144
6297
  // using conversationId as threadId for now
6145
- streamRequestId
6298
+ streamRequestId,
6299
+ ...config.apiKey ? { apiKey: config.apiKey } : {}
6146
6300
  }
6147
6301
  });
6148
6302
  const stepContents = response.steps && Array.isArray(response.steps) ? response.steps.flatMap((step) => {
@@ -6334,83 +6488,10 @@ async function getRegisteredGraph(executionContext) {
6334
6488
  const agentFrameworkBaseUrl = `${baseUrl}/agents`;
6335
6489
  return hydrateGraph({ dbGraph, baseUrl: agentFrameworkBaseUrl, apiKey });
6336
6490
  }
6337
- getLogger("agents");
6338
- async function hydrateAgent({
6339
- dbAgent,
6340
- graphId,
6341
- baseUrl,
6342
- apiKey,
6343
- credentialStoreRegistry
6344
- }) {
6345
- try {
6346
- const taskHandlerConfig = await createTaskHandlerConfig({
6347
- tenantId: dbAgent.tenantId,
6348
- projectId: dbAgent.projectId,
6349
- graphId,
6350
- agentId: dbAgent.id,
6351
- baseUrl,
6352
- apiKey
6353
- });
6354
- const taskHandler = createTaskHandler(taskHandlerConfig, credentialStoreRegistry);
6355
- const agentCard = {
6356
- name: dbAgent.name,
6357
- description: dbAgent.description || "AI Agent",
6358
- url: baseUrl ? `${baseUrl}/a2a` : "",
6359
- version: "1.0.0",
6360
- capabilities: {
6361
- streaming: true,
6362
- // Enable streaming for A2A compliance
6363
- pushNotifications: false,
6364
- stateTransitionHistory: false
6365
- },
6366
- defaultInputModes: ["text", "text/plain"],
6367
- defaultOutputModes: ["text", "text/plain"],
6368
- skills: [],
6369
- // Add provider info if available
6370
- ...baseUrl && {
6371
- provider: {
6372
- organization: "Inkeep",
6373
- url: baseUrl
6374
- }
6375
- }
6376
- };
6377
- return {
6378
- agentId: dbAgent.id,
6379
- tenantId: dbAgent.tenantId,
6380
- projectId: dbAgent.projectId,
6381
- graphId,
6382
- agentCard,
6383
- taskHandler
6384
- };
6385
- } catch (error) {
6386
- console.error(`\u274C Failed to hydrate agent ${dbAgent.id}:`, error);
6387
- throw error;
6388
- }
6389
- }
6390
- async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
6391
- const { tenantId, projectId, graphId, agentId, baseUrl } = executionContext;
6392
- if (!agentId) {
6393
- throw new Error("Agent ID is required");
6394
- }
6395
- const dbAgent = await getAgentById(dbClient_default)({
6396
- scopes: { tenantId, projectId },
6397
- agentId
6398
- });
6399
- if (!dbAgent) {
6400
- return null;
6401
- }
6402
- const agentFrameworkBaseUrl = `${baseUrl}/agents`;
6403
- return hydrateAgent({
6404
- dbAgent,
6405
- graphId,
6406
- baseUrl: agentFrameworkBaseUrl,
6407
- credentialStoreRegistry
6408
- });
6409
- }
6410
6491
 
6411
6492
  // src/routes/agents.ts
6412
6493
  var app = new OpenAPIHono();
6413
- var logger16 = getLogger("agents");
6494
+ var logger17 = getLogger("agents");
6414
6495
  app.openapi(
6415
6496
  createRoute({
6416
6497
  method: "get",
@@ -6448,7 +6529,7 @@ app.openapi(
6448
6529
  tracestate: c.req.header("tracestate"),
6449
6530
  baggage: c.req.header("baggage")
6450
6531
  };
6451
- logger16.info(
6532
+ logger17.info(
6452
6533
  {
6453
6534
  otelHeaders,
6454
6535
  path: c.req.path,
@@ -6459,7 +6540,7 @@ app.openapi(
6459
6540
  const executionContext = getRequestExecutionContext(c);
6460
6541
  const { tenantId, projectId, graphId, agentId } = executionContext;
6461
6542
  if (agentId) {
6462
- logger16.info(
6543
+ logger17.info(
6463
6544
  {
6464
6545
  message: "getRegisteredAgent (agent-level)",
6465
6546
  tenantId,
@@ -6471,13 +6552,13 @@ app.openapi(
6471
6552
  );
6472
6553
  const credentialStores = c.get("credentialStores");
6473
6554
  const agent = await getRegisteredAgent(executionContext, credentialStores);
6474
- logger16.info({ agent }, "agent registered: well-known agent.json");
6555
+ logger17.info({ agent }, "agent registered: well-known agent.json");
6475
6556
  if (!agent) {
6476
6557
  return c.json({ error: "Agent not found" }, 404);
6477
6558
  }
6478
6559
  return c.json(agent.agentCard);
6479
6560
  } else {
6480
- logger16.info(
6561
+ logger17.info(
6481
6562
  {
6482
6563
  message: "getRegisteredGraph (graph-level)",
6483
6564
  tenantId,
@@ -6500,7 +6581,7 @@ app.post("/a2a", async (c) => {
6500
6581
  tracestate: c.req.header("tracestate"),
6501
6582
  baggage: c.req.header("baggage")
6502
6583
  };
6503
- logger16.info(
6584
+ logger17.info(
6504
6585
  {
6505
6586
  otelHeaders,
6506
6587
  path: c.req.path,
@@ -6511,7 +6592,7 @@ app.post("/a2a", async (c) => {
6511
6592
  const executionContext = getRequestExecutionContext(c);
6512
6593
  const { tenantId, projectId, graphId, agentId } = executionContext;
6513
6594
  if (agentId) {
6514
- logger16.info(
6595
+ logger17.info(
6515
6596
  {
6516
6597
  message: "a2a (agent-level)",
6517
6598
  tenantId,
@@ -6535,7 +6616,7 @@ app.post("/a2a", async (c) => {
6535
6616
  }
6536
6617
  return a2aHandler(c, agent);
6537
6618
  } else {
6538
- logger16.info(
6619
+ logger17.info(
6539
6620
  {
6540
6621
  message: "a2a (graph-level)",
6541
6622
  tenantId,
@@ -6575,14 +6656,14 @@ app.post("/a2a", async (c) => {
6575
6656
  }
6576
6657
  });
6577
6658
  var agents_default = app;
6578
- var logger17 = getLogger("Transfer");
6659
+ var logger18 = getLogger("Transfer");
6579
6660
  async function executeTransfer({
6580
6661
  tenantId,
6581
6662
  threadId,
6582
6663
  projectId,
6583
6664
  targetAgentId
6584
6665
  }) {
6585
- logger17.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
6666
+ logger18.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
6586
6667
  await setActiveAgentForThread(dbClient_default)({
6587
6668
  scopes: { tenantId, projectId },
6588
6669
  threadId,
@@ -6777,7 +6858,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
6777
6858
  __publicField(this, "queuedOperations", []);
6778
6859
  // Timing tracking for text sequences (text-end to text-start gap)
6779
6860
  __publicField(this, "lastTextEndTimestamp", 0);
6780
- __publicField(this, "TEXT_GAP_THRESHOLD", 1e3);
6861
+ __publicField(this, "TEXT_GAP_THRESHOLD", 50);
6781
6862
  // milliseconds - if gap between text sequences is less than this, queue operations
6782
6863
  // Connection management and forced cleanup
6783
6864
  __publicField(this, "connectionDropTimer");
@@ -7123,7 +7204,7 @@ var MCPStreamHelper = class {
7123
7204
  function createMCPStreamHelper() {
7124
7205
  return new MCPStreamHelper();
7125
7206
  }
7126
- var logger18 = getLogger("ExecutionHandler");
7207
+ var logger19 = getLogger("ExecutionHandler");
7127
7208
  var ExecutionHandler = class {
7128
7209
  constructor() {
7129
7210
  // Hardcoded error limit - separate from configurable stopWhen
@@ -7148,7 +7229,7 @@ var ExecutionHandler = class {
7148
7229
  const { tenantId, projectId, graphId, apiKey, baseUrl } = executionContext;
7149
7230
  registerStreamHelper(requestId2, sseHelper);
7150
7231
  graphSessionManager.createSession(requestId2, graphId, tenantId, projectId);
7151
- logger18.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
7232
+ logger19.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
7152
7233
  let graphConfig = null;
7153
7234
  try {
7154
7235
  graphConfig = await getFullGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
@@ -7160,7 +7241,7 @@ var ExecutionHandler = class {
7160
7241
  );
7161
7242
  }
7162
7243
  } catch (error) {
7163
- logger18.error(
7244
+ logger19.error(
7164
7245
  {
7165
7246
  error: error instanceof Error ? error.message : "Unknown error",
7166
7247
  stack: error instanceof Error ? error.stack : void 0
@@ -7176,7 +7257,7 @@ var ExecutionHandler = class {
7176
7257
  try {
7177
7258
  await sseHelper.writeOperation(agentInitializingOp(requestId2, graphId));
7178
7259
  const taskId = `task_${conversationId}-${requestId2}`;
7179
- logger18.info(
7260
+ logger19.info(
7180
7261
  { taskId, currentAgentId, conversationId, requestId: requestId2 },
7181
7262
  "Attempting to create or reuse existing task"
7182
7263
  );
@@ -7199,7 +7280,7 @@ var ExecutionHandler = class {
7199
7280
  agent_id: currentAgentId
7200
7281
  }
7201
7282
  });
7202
- logger18.info(
7283
+ logger19.info(
7203
7284
  {
7204
7285
  taskId,
7205
7286
  createdTaskMetadata: Array.isArray(task) ? task[0]?.metadata : task?.metadata
@@ -7208,27 +7289,27 @@ var ExecutionHandler = class {
7208
7289
  );
7209
7290
  } catch (error) {
7210
7291
  if (error?.message?.includes("UNIQUE constraint failed") || error?.message?.includes("PRIMARY KEY constraint failed") || error?.code === "SQLITE_CONSTRAINT_PRIMARYKEY") {
7211
- logger18.info(
7292
+ logger19.info(
7212
7293
  { taskId, error: error.message },
7213
7294
  "Task already exists, fetching existing task"
7214
7295
  );
7215
7296
  const existingTask = await getTask(dbClient_default)({ id: taskId });
7216
7297
  if (existingTask) {
7217
7298
  task = existingTask;
7218
- logger18.info(
7299
+ logger19.info(
7219
7300
  { taskId, existingTask },
7220
7301
  "Successfully reused existing task from race condition"
7221
7302
  );
7222
7303
  } else {
7223
- logger18.error({ taskId, error }, "Task constraint failed but task not found");
7304
+ logger19.error({ taskId, error }, "Task constraint failed but task not found");
7224
7305
  throw error;
7225
7306
  }
7226
7307
  } else {
7227
- logger18.error({ taskId, error }, "Failed to create task due to non-constraint error");
7308
+ logger19.error({ taskId, error }, "Failed to create task due to non-constraint error");
7228
7309
  throw error;
7229
7310
  }
7230
7311
  }
7231
- logger18.debug(
7312
+ logger19.debug(
7232
7313
  {
7233
7314
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7234
7315
  executionType: "create_initial_task",
@@ -7246,7 +7327,7 @@ var ExecutionHandler = class {
7246
7327
  const maxTransfers = graphConfig?.stopWhen?.transferCountIs ?? 10;
7247
7328
  while (iterations < maxTransfers) {
7248
7329
  iterations++;
7249
- logger18.info(
7330
+ logger19.info(
7250
7331
  { iterations, currentAgentId, graphId, conversationId, fromAgentId },
7251
7332
  `Execution loop iteration ${iterations} with agent ${currentAgentId}, transfer from: ${fromAgentId || "none"}`
7252
7333
  );
@@ -7254,10 +7335,10 @@ var ExecutionHandler = class {
7254
7335
  scopes: { tenantId, projectId },
7255
7336
  conversationId
7256
7337
  });
7257
- logger18.info({ activeAgent }, "activeAgent");
7338
+ logger19.info({ activeAgent }, "activeAgent");
7258
7339
  if (activeAgent && activeAgent.activeAgentId !== currentAgentId) {
7259
7340
  currentAgentId = activeAgent.activeAgentId;
7260
- logger18.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
7341
+ logger19.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
7261
7342
  }
7262
7343
  const agentBaseUrl = `${baseUrl}/agents`;
7263
7344
  const a2aClient = new A2AClient(agentBaseUrl, {
@@ -7298,13 +7379,13 @@ var ExecutionHandler = class {
7298
7379
  });
7299
7380
  if (!messageResponse?.result) {
7300
7381
  errorCount++;
7301
- logger18.error(
7382
+ logger19.error(
7302
7383
  { currentAgentId, iterations, errorCount },
7303
7384
  `No response from agent ${currentAgentId} on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
7304
7385
  );
7305
7386
  if (errorCount >= this.MAX_ERRORS) {
7306
7387
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7307
- logger18.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7388
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7308
7389
  await sseHelper.writeError(errorMessage2);
7309
7390
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7310
7391
  if (task) {
@@ -7330,7 +7411,7 @@ var ExecutionHandler = class {
7330
7411
  const transferResponse = messageResponse.result;
7331
7412
  const targetAgentId = transferResponse.artifacts?.[0]?.parts?.[0]?.data?.targetAgentId;
7332
7413
  const transferReason = transferResponse.artifacts?.[0]?.parts?.[1]?.text;
7333
- logger18.info({ targetAgentId, transferReason }, "transfer response");
7414
+ logger19.info({ targetAgentId, transferReason }, "transfer response");
7334
7415
  currentMessage = `<transfer_context> ${transferReason} </transfer_context>`;
7335
7416
  const { success, targetAgentId: newAgentId } = await executeTransfer({
7336
7417
  projectId,
@@ -7341,7 +7422,7 @@ var ExecutionHandler = class {
7341
7422
  if (success) {
7342
7423
  fromAgentId = currentAgentId;
7343
7424
  currentAgentId = newAgentId;
7344
- logger18.info(
7425
+ logger19.info(
7345
7426
  {
7346
7427
  transferFrom: fromAgentId,
7347
7428
  transferTo: currentAgentId,
@@ -7359,7 +7440,7 @@ var ExecutionHandler = class {
7359
7440
  const graphSessionData = graphSessionManager.getSession(requestId2);
7360
7441
  if (graphSessionData) {
7361
7442
  const sessionSummary = graphSessionData.getSummary();
7362
- logger18.info(sessionSummary, "GraphSession data after completion");
7443
+ logger19.info(sessionSummary, "GraphSession data after completion");
7363
7444
  }
7364
7445
  let textContent = "";
7365
7446
  for (const part of responseParts) {
@@ -7368,78 +7449,84 @@ var ExecutionHandler = class {
7368
7449
  textContent += part.text;
7369
7450
  }
7370
7451
  }
7371
- const activeSpan = trace.getActiveSpan();
7372
- if (activeSpan) {
7373
- activeSpan.setAttributes({
7374
- "ai.response.content": textContent || "No response content",
7375
- "ai.response.timestamp": (/* @__PURE__ */ new Date()).toISOString(),
7376
- "ai.agent.name": currentAgentId
7377
- });
7378
- }
7379
- await createMessage(dbClient_default)({
7380
- id: nanoid(),
7381
- tenantId,
7382
- projectId,
7383
- conversationId,
7384
- role: "agent",
7385
- content: {
7386
- text: textContent || void 0,
7387
- parts: responseParts.map((part) => ({
7388
- type: part.kind === "text" ? "text" : "data",
7389
- text: part.kind === "text" ? part.text : void 0,
7390
- data: part.kind === "data" ? JSON.stringify(part.data) : void 0
7391
- }))
7392
- },
7393
- visibility: "user-facing",
7394
- messageType: "chat",
7395
- agentId: currentAgentId,
7396
- fromAgentId: currentAgentId,
7397
- taskId: task.id
7398
- });
7399
- const updateTaskStart = Date.now();
7400
- await updateTask(dbClient_default)({
7401
- taskId: task.id,
7402
- data: {
7403
- status: "completed",
7404
- metadata: {
7405
- ...task.metadata,
7406
- completed_at: (/* @__PURE__ */ new Date()).toISOString(),
7407
- response: {
7408
- text: textContent,
7409
- parts: responseParts,
7410
- hasText: !!textContent,
7411
- hasData: responseParts.some((p) => p.kind === "data")
7452
+ return tracer.startActiveSpan("execution_handler.execute", {}, async (span) => {
7453
+ try {
7454
+ span.setAttributes({
7455
+ "ai.response.content": textContent || "No response content",
7456
+ "ai.response.timestamp": (/* @__PURE__ */ new Date()).toISOString(),
7457
+ "ai.agent.name": currentAgentId
7458
+ });
7459
+ await createMessage(dbClient_default)({
7460
+ id: nanoid(),
7461
+ tenantId,
7462
+ projectId,
7463
+ conversationId,
7464
+ role: "agent",
7465
+ content: {
7466
+ text: textContent || void 0,
7467
+ parts: responseParts.map((part) => ({
7468
+ type: part.kind === "text" ? "text" : "data",
7469
+ text: part.kind === "text" ? part.text : void 0,
7470
+ data: part.kind === "data" ? JSON.stringify(part.data) : void 0
7471
+ }))
7472
+ },
7473
+ visibility: "user-facing",
7474
+ messageType: "chat",
7475
+ agentId: currentAgentId,
7476
+ fromAgentId: currentAgentId,
7477
+ taskId: task.id
7478
+ });
7479
+ const updateTaskStart = Date.now();
7480
+ await updateTask(dbClient_default)({
7481
+ taskId: task.id,
7482
+ data: {
7483
+ status: "completed",
7484
+ metadata: {
7485
+ ...task.metadata,
7486
+ completed_at: (/* @__PURE__ */ new Date()).toISOString(),
7487
+ response: {
7488
+ text: textContent,
7489
+ parts: responseParts,
7490
+ hasText: !!textContent,
7491
+ hasData: responseParts.some((p) => p.kind === "data")
7492
+ }
7493
+ }
7412
7494
  }
7495
+ });
7496
+ const updateTaskEnd = Date.now();
7497
+ logger19.info(
7498
+ { duration: updateTaskEnd - updateTaskStart },
7499
+ "Completed updateTask operation"
7500
+ );
7501
+ await sseHelper.writeOperation(completionOp(currentAgentId, iterations));
7502
+ await sseHelper.complete();
7503
+ logger19.info({}, "Ending GraphSession and cleaning up");
7504
+ graphSessionManager.endSession(requestId2);
7505
+ logger19.info({}, "Cleaning up streamHelper");
7506
+ unregisterStreamHelper(requestId2);
7507
+ let response;
7508
+ if (sseHelper instanceof MCPStreamHelper) {
7509
+ const captured = sseHelper.getCapturedResponse();
7510
+ response = captured.text || "No response content";
7413
7511
  }
7512
+ logger19.info({}, "ExecutionHandler returning success");
7513
+ return { success: true, iterations, response };
7514
+ } catch (error) {
7515
+ setSpanWithError(span, error);
7516
+ throw error;
7517
+ } finally {
7518
+ span.end();
7414
7519
  }
7415
7520
  });
7416
- const updateTaskEnd = Date.now();
7417
- logger18.info(
7418
- { duration: updateTaskEnd - updateTaskStart },
7419
- "Completed updateTask operation"
7420
- );
7421
- await sseHelper.writeOperation(completionOp(currentAgentId, iterations));
7422
- await sseHelper.complete();
7423
- logger18.info({}, "Ending GraphSession and cleaning up");
7424
- graphSessionManager.endSession(requestId2);
7425
- logger18.info({}, "Cleaning up streamHelper");
7426
- unregisterStreamHelper(requestId2);
7427
- let response;
7428
- if (sseHelper instanceof MCPStreamHelper) {
7429
- const captured = sseHelper.getCapturedResponse();
7430
- response = captured.text || "No response content";
7431
- }
7432
- logger18.info({}, "ExecutionHandler returning success");
7433
- return { success: true, iterations, response };
7434
7521
  }
7435
7522
  errorCount++;
7436
- logger18.warn(
7523
+ logger19.warn(
7437
7524
  { iterations, errorCount },
7438
7525
  `No valid response or transfer on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
7439
7526
  );
7440
7527
  if (errorCount >= this.MAX_ERRORS) {
7441
7528
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7442
- logger18.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7529
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7443
7530
  await sseHelper.writeError(errorMessage2);
7444
7531
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7445
7532
  if (task) {
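The rewritten completion path above moves from annotating whatever span happens to be active to opening an explicit `execution_handler.execute` span and ending it in a `finally` block. A generic sketch of that OpenTelemetry pattern, with an assumed tracer name and attributes — the bundle records errors through its own `setSpanWithError` helper rather than `recordException`:

```ts
import { trace } from "@opentelemetry/api";

const tracer = trace.getTracer("agents-run-api"); // tracer name assumed for illustration

await tracer.startActiveSpan("execution_handler.execute", {}, async (span) => {
  try {
    span.setAttributes({ "ai.agent.name": "example-agent" });
    // ... persist the agent message, mark the task completed, flush the stream ...
  } catch (error) {
    span.recordException(error as Error); // bundle equivalent: setSpanWithError(span, error)
    throw error;
  } finally {
    span.end(); // span always ends, even when the body throws
  }
});
```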
@@ -7461,7 +7548,7 @@ var ExecutionHandler = class {
7461
7548
  }
7462
7549
  }
7463
7550
  const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
7464
- logger18.error({ maxTransfers, iterations }, errorMessage);
7551
+ logger19.error({ maxTransfers, iterations }, errorMessage);
7465
7552
  await sseHelper.writeError(errorMessage);
7466
7553
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
7467
7554
  if (task) {
@@ -7481,7 +7568,7 @@ var ExecutionHandler = class {
7481
7568
  unregisterStreamHelper(requestId2);
7482
7569
  return { success: false, error: errorMessage, iterations };
7483
7570
  } catch (error) {
7484
- logger18.error({ error }, "Error in execution handler");
7571
+ logger19.error({ error }, "Error in execution handler");
7485
7572
  const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
7486
7573
  await sseHelper.writeError(`Execution error: ${errorMessage}`);
7487
7574
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
@@ -7507,7 +7594,7 @@ var ExecutionHandler = class {
7507
7594
 
7508
7595
  // src/routes/chat.ts
7509
7596
  var app2 = new OpenAPIHono();
7510
- var logger19 = getLogger("completionsHandler");
7597
+ var logger20 = getLogger("completionsHandler");
7511
7598
  var chatCompletionsRoute = createRoute({
7512
7599
  method: "post",
7513
7600
  path: "/completions",
@@ -7625,7 +7712,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7625
7712
  tracestate: c.req.header("tracestate"),
7626
7713
  baggage: c.req.header("baggage")
7627
7714
  };
7628
- logger19.info(
7715
+ logger20.info(
7629
7716
  {
7630
7717
  otelHeaders,
7631
7718
  path: c.req.path,
@@ -7711,7 +7798,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7711
7798
  dbClient_default,
7712
7799
  credentialStores
7713
7800
  );
7714
- logger19.info(
7801
+ logger20.info(
7715
7802
  {
7716
7803
  tenantId,
7717
7804
  graphId,
@@ -7757,7 +7844,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7757
7844
  return streamSSE(c, async (stream2) => {
7758
7845
  const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
7759
7846
  await sseHelper.writeRole();
7760
- logger19.info({ agentId }, "Starting execution");
7847
+ logger20.info({ agentId }, "Starting execution");
7761
7848
  const executionHandler = new ExecutionHandler();
7762
7849
  const result = await executionHandler.execute({
7763
7850
  executionContext,
@@ -7767,7 +7854,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7767
7854
  requestId: requestId2,
7768
7855
  sseHelper
7769
7856
  });
7770
- logger19.info(
7857
+ logger20.info(
7771
7858
  { result },
7772
7859
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
7773
7860
  );
@@ -7800,7 +7887,7 @@ var getMessageText = (content) => {
7800
7887
  };
7801
7888
  var chat_default = app2;
7802
7889
  var app3 = new OpenAPIHono();
7803
- var logger20 = getLogger("chatDataStream");
7890
+ var logger21 = getLogger("chatDataStream");
7804
7891
  var chatDataStreamRoute = createRoute({
7805
7892
  method: "post",
7806
7893
  path: "/chat",
@@ -7905,7 +7992,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
7905
7992
  );
7906
7993
  const lastUserMessage = body.messages.filter((m) => m.role === "user").slice(-1)[0];
7907
7994
  const userText = typeof lastUserMessage?.content === "string" ? lastUserMessage.content : lastUserMessage?.parts?.map((p) => p.text).join("") || "";
7908
- logger20.info({ userText, lastUserMessage }, "userText");
7995
+ logger21.info({ userText, lastUserMessage }, "userText");
7909
7996
  const messageSpan = trace.getActiveSpan();
7910
7997
  if (messageSpan) {
7911
7998
  messageSpan.setAttributes({
@@ -7947,7 +8034,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
7947
8034
  await streamHelper.writeError("Unable to process request");
7948
8035
  }
7949
8036
  } catch (err) {
7950
- logger20.error({ err }, "Streaming error");
8037
+ logger21.error({ err }, "Streaming error");
7951
8038
  await streamHelper.writeError("Internal server error");
7952
8039
  } finally {
7953
8040
  if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
@@ -7968,7 +8055,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
7968
8055
  )
7969
8056
  );
7970
8057
  } catch (error) {
7971
- logger20.error({ error }, "chatDataStream error");
8058
+ logger21.error({ error }, "chatDataStream error");
7972
8059
  return c.json({ error: "Failed to process chat completion" }, 500);
7973
8060
  }
7974
8061
  });
@@ -7976,7 +8063,7 @@ var chatDataStream_default = app3;
7976
8063
  function createMCPSchema(schema) {
7977
8064
  return schema;
7978
8065
  }
7979
- var logger21 = getLogger("mcp");
8066
+ var logger22 = getLogger("mcp");
7980
8067
  var _MockResponseSingleton = class _MockResponseSingleton {
7981
8068
  constructor() {
7982
8069
  __publicField(this, "mockRes");
@@ -8031,21 +8118,21 @@ var createSpoofInitMessage = (mcpProtocolVersion) => ({
8031
8118
  id: 0
8032
8119
  });
8033
8120
  var spoofTransportInitialization = async (transport, req, sessionId, mcpProtocolVersion) => {
8034
- logger21.info({ sessionId }, "Spoofing initialization message to set transport state");
8121
+ logger22.info({ sessionId }, "Spoofing initialization message to set transport state");
8035
8122
  const spoofInitMessage = createSpoofInitMessage(mcpProtocolVersion);
8036
8123
  const mockRes = MockResponseSingleton.getInstance().getMockResponse();
8037
8124
  try {
8038
8125
  await transport.handleRequest(req, mockRes, spoofInitMessage);
8039
- logger21.info({ sessionId }, "Successfully spoofed initialization");
8126
+ logger22.info({ sessionId }, "Successfully spoofed initialization");
8040
8127
  } catch (spoofError) {
8041
- logger21.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
8128
+ logger22.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
8042
8129
  }
8043
8130
  };
8044
8131
  var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8045
8132
  const sessionId = req.headers["mcp-session-id"];
8046
- logger21.info({ sessionId }, "Received MCP session ID");
8133
+ logger22.info({ sessionId }, "Received MCP session ID");
8047
8134
  if (!sessionId) {
8048
- logger21.info({ body }, "Missing session ID");
8135
+ logger22.info({ body }, "Missing session ID");
8049
8136
  res.writeHead(400).end(
8050
8137
  JSON.stringify({
8051
8138
  jsonrpc: "2.0",
@@ -8071,7 +8158,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8071
8158
  scopes: { tenantId, projectId },
8072
8159
  conversationId: sessionId
8073
8160
  });
8074
- logger21.info(
8161
+ logger22.info(
8075
8162
  {
8076
8163
  sessionId,
8077
8164
  conversationFound: !!conversation,
@@ -8082,7 +8169,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8082
8169
  "Conversation lookup result"
8083
8170
  );
8084
8171
  if (!conversation || conversation.metadata?.sessionData?.sessionType !== "mcp" || conversation.metadata?.sessionData?.graphId !== graphId) {
8085
- logger21.info(
8172
+ logger22.info(
8086
8173
  { sessionId, conversationId: conversation?.id },
8087
8174
  "MCP session not found or invalid"
8088
8175
  );
@@ -8143,7 +8230,7 @@ var executeAgentQuery = async (executionContext, conversationId, query, defaultA
8143
8230
  requestId: requestId2,
8144
8231
  sseHelper: mcpStreamHelper
8145
8232
  });
8146
- logger21.info(
8233
+ logger22.info(
8147
8234
  { result },
8148
8235
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
8149
8236
  );
@@ -8217,7 +8304,7 @@ var getServer = async (requestContext, executionContext, conversationId, credent
8217
8304
  dbClient_default,
8218
8305
  credentialStores
8219
8306
  );
8220
- logger21.info(
8307
+ logger22.info(
8221
8308
  {
8222
8309
  tenantId,
8223
8310
  graphId,
@@ -8278,7 +8365,7 @@ var validateRequestParameters = (c) => {
8278
8365
  };
8279
8366
  var handleInitializationRequest = async (body, executionContext, validatedContext, req, res, c, credentialStores) => {
8280
8367
  const { tenantId, projectId, graphId } = executionContext;
8281
- logger21.info({ body }, "Received initialization request");
8368
+ logger22.info({ body }, "Received initialization request");
8282
8369
  const sessionId = nanoid();
8283
8370
  const agentGraph = await getAgentGraphWithDefaultAgent(dbClient_default)({
8284
8371
  scopes: { tenantId, projectId },
@@ -8309,7 +8396,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8309
8396
  }
8310
8397
  }
8311
8398
  });
8312
- logger21.info(
8399
+ logger22.info(
8313
8400
  { sessionId, conversationId: conversation.id },
8314
8401
  "Created MCP session as conversation"
8315
8402
  );
@@ -8318,9 +8405,9 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8318
8405
  });
8319
8406
  const server = await getServer(validatedContext, executionContext, sessionId, credentialStores);
8320
8407
  await server.connect(transport);
8321
- logger21.info({ sessionId }, "Server connected for initialization");
8408
+ logger22.info({ sessionId }, "Server connected for initialization");
8322
8409
  res.setHeader("Mcp-Session-Id", sessionId);
8323
- logger21.info(
8410
+ logger22.info(
8324
8411
  {
8325
8412
  sessionId,
8326
8413
  bodyMethod: body?.method,
@@ -8329,7 +8416,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8329
8416
  "About to handle initialization request"
8330
8417
  );
8331
8418
  await transport.handleRequest(req, res, body);
8332
- logger21.info({ sessionId }, "Successfully handled initialization request");
8419
+ logger22.info({ sessionId }, "Successfully handled initialization request");
8333
8420
  return toFetchResponse(res);
8334
8421
  };
8335
8422
  var handleExistingSessionRequest = async (body, executionContext, validatedContext, req, res, credentialStores) => {
@@ -8357,8 +8444,8 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
8357
8444
  sessionId,
8358
8445
  conversation.metadata?.session_data?.mcpProtocolVersion
8359
8446
  );
8360
- logger21.info({ sessionId }, "Server connected and transport initialized");
8361
- logger21.info(
8447
+ logger22.info({ sessionId }, "Server connected and transport initialized");
8448
+ logger22.info(
8362
8449
  {
8363
8450
  sessionId,
8364
8451
  bodyKeys: Object.keys(body || {}),
@@ -8372,9 +8459,9 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
8372
8459
  );
8373
8460
  try {
8374
8461
  await transport.handleRequest(req, res, body);
8375
- logger21.info({ sessionId }, "Successfully handled MCP request");
8462
+ logger22.info({ sessionId }, "Successfully handled MCP request");
8376
8463
  } catch (transportError) {
8377
- logger21.error(
8464
+ logger22.error(
8378
8465
  {
8379
8466
  sessionId,
8380
8467
  error: transportError,
@@ -8425,13 +8512,13 @@ app4.openapi(
8425
8512
  }
8426
8513
  const { executionContext } = paramValidation;
8427
8514
  const body = c.get("requestBody") || {};
8428
- logger21.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
8515
+ logger22.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
8429
8516
  const isInitRequest = body.method === "initialize";
8430
8517
  const { req, res } = toReqRes(c.req.raw);
8431
8518
  const validatedContext = c.get("validatedContext") || {};
8432
8519
  const credentialStores = c.get("credentialStores");
8433
- logger21.info({ validatedContext }, "Validated context");
8434
- logger21.info({ req }, "request");
8520
+ logger22.info({ validatedContext }, "Validated context");
8521
+ logger22.info({ req }, "request");
8435
8522
  if (isInitRequest) {
8436
8523
  return await handleInitializationRequest(
8437
8524
  body,
@@ -8453,7 +8540,7 @@ app4.openapi(
8453
8540
  );
8454
8541
  }
8455
8542
  } catch (e) {
8456
- logger21.error(
8543
+ logger22.error(
8457
8544
  {
8458
8545
  error: e instanceof Error ? e.message : e,
8459
8546
  stack: e instanceof Error ? e.stack : void 0
@@ -8465,7 +8552,7 @@ app4.openapi(
8465
8552
  }
8466
8553
  );
8467
8554
  app4.get("/", async (c) => {
8468
- logger21.info({}, "Received GET MCP request");
8555
+ logger22.info({}, "Received GET MCP request");
8469
8556
  return c.json(
8470
8557
  {
8471
8558
  jsonrpc: "2.0",
@@ -8479,7 +8566,7 @@ app4.get("/", async (c) => {
8479
8566
  );
8480
8567
  });
8481
8568
  app4.delete("/", async (c) => {
8482
- logger21.info({}, "Received DELETE MCP request");
8569
+ logger22.info({}, "Received DELETE MCP request");
8483
8570
  return c.json(
8484
8571
  {
8485
8572
  jsonrpc: "2.0",
@@ -8490,11 +8577,10 @@ app4.delete("/", async (c) => {
8490
8577
  );
8491
8578
  });
8492
8579
  var mcp_default = app4;
8493
-
8494
- // src/app.ts
8495
- var logger22 = getLogger("agents-run-api");
8580
+ var logger23 = getLogger("agents-run-api");
8496
8581
  function createExecutionHono(serverConfig, credentialStores) {
8497
8582
  const app6 = new OpenAPIHono();
8583
+ app6.use("*", otel());
8498
8584
  app6.use("*", requestId());
8499
8585
  app6.use("*", async (c, next) => {
8500
8586
  c.set("serverConfig", serverConfig);
@@ -8507,7 +8593,7 @@ function createExecutionHono(serverConfig, credentialStores) {
8507
8593
  const body = await c.req.json();
8508
8594
  c.set("requestBody", body);
8509
8595
  } catch (error) {
8510
- logger22.debug({ error }, "Failed to parse JSON body, continuing without parsed body");
8596
+ logger23.debug({ error }, "Failed to parse JSON body, continuing without parsed body");
8511
8597
  }
8512
8598
  }
8513
8599
  return next();
@@ -8558,8 +8644,8 @@ function createExecutionHono(serverConfig, credentialStores) {
8558
8644
  if (!isExpectedError) {
8559
8645
  const errorMessage = err instanceof Error ? err.message : String(err);
8560
8646
  const errorStack = err instanceof Error ? err.stack : void 0;
8561
- if (logger22) {
8562
- logger22.error(
8647
+ if (logger23) {
8648
+ logger23.error(
8563
8649
  {
8564
8650
  error: err,
8565
8651
  message: errorMessage,
@@ -8571,8 +8657,8 @@ function createExecutionHono(serverConfig, credentialStores) {
8571
8657
  );
8572
8658
  }
8573
8659
  } else {
8574
- if (logger22) {
8575
- logger22.error(
8660
+ if (logger23) {
8661
+ logger23.error(
8576
8662
  {
8577
8663
  error: err,
8578
8664
  path: c.req.path,
@@ -8589,8 +8675,8 @@ function createExecutionHono(serverConfig, credentialStores) {
8589
8675
  const response = err.getResponse();
8590
8676
  return response;
8591
8677
  } catch (responseError) {
8592
- if (logger22) {
8593
- logger22.error({ error: responseError }, "Error while handling HTTPException response");
8678
+ if (logger23) {
8679
+ logger23.error({ error: responseError }, "Error while handling HTTPException response");
8594
8680
  }
8595
8681
  }
8596
8682
  }
@@ -8624,7 +8710,7 @@ function createExecutionHono(serverConfig, credentialStores) {
8624
8710
  app6.use("*", async (c, next) => {
8625
8711
  const executionContext = c.get("executionContext");
8626
8712
  if (!executionContext) {
8627
- logger22.debug({}, "Empty execution context");
8713
+ logger23.debug({}, "Empty execution context");
8628
8714
  return next();
8629
8715
  }
8630
8716
  const { tenantId, projectId, graphId } = executionContext;
@@ -8633,7 +8719,7 @@ function createExecutionHono(serverConfig, credentialStores) {
8633
8719
  if (requestBody) {
8634
8720
  conversationId = requestBody.conversationId;
8635
8721
  if (!conversationId) {
8636
- logger22.debug({ requestBody }, "No conversation ID found in request body");
8722
+ logger23.debug({ requestBody }, "No conversation ID found in request body");
8637
8723
  }
8638
8724
  }
8639
8725
  const entries = Object.fromEntries(
@@ -8648,7 +8734,7 @@ function createExecutionHono(serverConfig, credentialStores) {
8648
8734
  })
8649
8735
  );
8650
8736
  if (!Object.keys(entries).length) {
8651
- logger22.debug({}, "Empty entries for baggage");
8737
+ logger23.debug({}, "Empty entries for baggage");
8652
8738
  return next();
8653
8739
  }
8654
8740
  const bag = Object.entries(entries).reduce(