@inkeep/agents-run-api 0.0.0-dev-20250911195722 → 0.0.0-dev-20250911212652

This diff shows the changes between two publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,6 +1,10 @@
1
- import { env, dbClient_default, getFormattedConversationHistory, createDefaultConversationHistoryConfig, saveA2AMessageResponse } from './chunk-JIWNRFDU.js';
2
- import { __publicField } from './chunk-PKBMQBKP.js';
3
- import { getLogger as getLogger$1, HeadersScopeSchema, getRequestExecutionContext, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, getAgentGraph, createTask, updateTask, updateConversation, handleApiError, TaskState, setActiveAgentForThread, getConversation, getRelatedAgentsForGraph, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, validateAndGetApiKey, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getContextConfigById, getFullGraphDefinition, TemplateEngine, graphHasArtifactComponents, MCPTransportType, getExternalAgent } from '@inkeep/agents-core';
1
+ import { env, __publicField, dbClient_default, getFormattedConversationHistory, createDefaultConversationHistoryConfig, saveA2AMessageResponse } from './chunk-2MQ324HB.js';
2
+ import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
3
+ import { BaggageSpanProcessor, ALLOW_ALL_BAGGAGE_KEYS } from '@opentelemetry/baggage-span-processor';
4
+ import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';
5
+ import { NodeSDK } from '@opentelemetry/sdk-node';
6
+ import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-node';
7
+ import { getLogger as getLogger$1, getTracer, HeadersScopeSchema, getRequestExecutionContext, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, getAgentGraph, createTask, updateTask, updateConversation, handleApiError, setSpanWithError, TaskState, setActiveAgentForThread, getConversation, getRelatedAgentsForGraph, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, validateAndGetApiKey, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getContextConfigById, getFullGraphDefinition, TemplateEngine, graphHasArtifactComponents, MCPTransportType, getExternalAgent } from '@inkeep/agents-core';
4
8
  import { Hono } from 'hono';
5
9
  import { OpenAPIHono, createRoute, z as z$1 } from '@hono/zod-openapi';
6
10
  import { trace, propagation, context, SpanStatusCode } from '@opentelemetry/api';
@@ -29,6 +33,61 @@ import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/
29
33
  import { z as z$2 } from 'zod/v3';
30
34
  import { toReqRes, toFetchResponse } from 'fetch-to-node';
31
35
 
36
+ var otlpUrl = env.OTEL_EXPORTER_OTLP_ENDPOINT;
37
+ var otlpExporter = new OTLPTraceExporter({ url: otlpUrl });
38
+ var FanOutSpanProcessor = class {
39
+ constructor(inner) {
40
+ this.inner = inner;
41
+ }
42
+ onStart(span, parent) {
43
+ this.inner.forEach((p) => p.onStart(span, parent));
44
+ }
45
+ onEnd(span) {
46
+ this.inner.forEach((p) => p.onEnd(span));
47
+ }
48
+ forceFlush() {
49
+ return Promise.all(this.inner.map((p) => p.forceFlush?.())).then(() => {
50
+ });
51
+ }
52
+ shutdown() {
53
+ return Promise.all(this.inner.map((p) => p.shutdown?.())).then(() => {
54
+ });
55
+ }
56
+ };
57
+ var maxExportBatchSize = env.OTEL_MAX_EXPORT_BATCH_SIZE ?? (env.ENVIRONMENT === "development" ? 1 : 512);
58
+ var spanProcessor = new FanOutSpanProcessor([
59
+ new BaggageSpanProcessor(ALLOW_ALL_BAGGAGE_KEYS),
60
+ new BatchSpanProcessor(otlpExporter, {
61
+ maxExportBatchSize
62
+ })
63
+ ]);
64
+ var sdk = new NodeSDK({
65
+ serviceName: "inkeep-agents-run-api",
66
+ spanProcessor,
67
+ instrumentations: [
68
+ getNodeAutoInstrumentations({
69
+ "@opentelemetry/instrumentation-http": {
70
+ enabled: true,
71
+ requestHook: (span, request) => {
72
+ const url = request?.url ?? request?.path;
73
+ if (!url) return;
74
+ const u = new URL(url, "http://localhost");
75
+ span.updateName(`${request?.method || "UNKNOWN"} ${u.pathname}`);
76
+ }
77
+ },
78
+ "@opentelemetry/instrumentation-undici": {
79
+ requestHook: (span) => {
80
+ const method = span.attributes?.["http.request.method"];
81
+ const host = span.attributes?.["server.address"];
82
+ const path = span.attributes?.["url.path"];
83
+ if (method && path)
84
+ span.updateName(host ? `${method} ${host}${path}` : `${method} ${path}`);
85
+ }
86
+ }
87
+ })
88
+ ]
89
+ });
90
+ sdk.start();
32
91
  var isDevelopment = env.ENVIRONMENT === "development";
33
92
  var loggerConfig = {
34
93
  level: env.LOG_LEVEL,
@@ -840,79 +899,11 @@ async function handleTasksResubscribe(c, agent, request) {
840
899
  });
841
900
  }
842
901
  }
843
- var FORCE_FLUSH_ENVIRONMENTS = ["development"];
844
- var logger4 = getLogger("tracer");
845
- var BASE = "inkeep-chat";
846
- var SERVICE_NAME = "inkeep-chat";
847
- var SERVICE_VERSION = "1.0.0";
848
- var createSpanName = (suffix) => `${BASE}.${suffix}`;
849
- var createNoOpSpan = () => ({
850
- setAttributes: () => ({}),
851
- recordException: () => ({}),
852
- setStatus: () => ({}),
853
- addEvent: () => ({}),
854
- end: () => {
855
- },
856
- isRecording: () => false,
857
- setAttribute: () => ({}),
858
- updateName: () => ({}),
859
- spanContext: () => ({
860
- traceId: "00000000000000000000000000000000",
861
- spanId: "0000000000000000",
862
- traceFlags: 0
863
- }),
864
- addLink: () => ({}),
865
- addLinks: () => ({})
866
- });
867
- var noopTracer = {
868
- startActiveSpan(_name, arg1, arg2, arg3) {
869
- const fn = typeof arg1 === "function" ? arg1 : typeof arg2 === "function" ? arg2 : arg3;
870
- if (!fn) throw new Error("No callback function provided");
871
- return fn(createNoOpSpan());
872
- },
873
- startSpan(_name, _options) {
874
- return createNoOpSpan();
875
- }
876
- };
877
- var globalTracerInstance = null;
878
- function handleSpanError(span, error, logger24, logMessage) {
879
- const errorMessage = error instanceof Error ? error.message : String(error);
880
- span.recordException(error);
881
- span.setStatus({
882
- code: SpanStatusCode.ERROR,
883
- message: errorMessage
884
- });
885
- }
886
- function getGlobalTracer() {
887
- if (!globalTracerInstance) {
888
- try {
889
- globalTracerInstance = trace.getTracer(SERVICE_NAME, SERVICE_VERSION);
890
- } catch (_error) {
891
- logger4.debug("OpenTelemetry tracer not available, using no-op tracer");
892
- globalTracerInstance = noopTracer;
893
- }
894
- }
895
- return globalTracerInstance;
896
- }
897
- async function forceFlushTracer() {
898
- const isOtelTracesForceFlushEnabled = env.OTEL_TRACES_FORCE_FLUSH_ENABLED;
899
- const isForceFlushEnvironment = env.ENVIRONMENT && FORCE_FLUSH_ENVIRONMENTS.includes(env.ENVIRONMENT);
900
- const shouldForceFlush = isOtelTracesForceFlushEnabled === true || isOtelTracesForceFlushEnabled == null && isForceFlushEnvironment;
901
- if (!shouldForceFlush) {
902
- return;
903
- }
904
- try {
905
- const { spanProcessor } = await import('./instrumentation-KKYHA3A3.js');
906
- if (spanProcessor && typeof spanProcessor.forceFlush === "function") {
907
- await spanProcessor.forceFlush();
908
- logger4.debug("Span processor force flush completed");
909
- } else {
910
- logger4.debug("Span processor does not support force flush or is not available");
911
- }
912
- } catch (error) {
913
- logger4.warn({ error }, "Failed to force flush tracer");
914
- }
915
- }
902
+
903
+ // package.json
904
+ var package_default = {
905
+ version: "0.1.3"};
906
+ var tracer = getTracer("agents-run-api", package_default.version);
916
907
  function agentInitializingOp(sessionId, graphId) {
917
908
  return {
918
909
  type: "agent_initializing",
@@ -949,10 +940,10 @@ function statusUpdateOp(ctx) {
949
940
  ctx
950
941
  };
951
942
  }
952
- var logger5 = getLogger("DataComponentSchema");
943
+ var logger4 = getLogger("DataComponentSchema");
953
944
  function jsonSchemaToZod(jsonSchema) {
954
945
  if (!jsonSchema || typeof jsonSchema !== "object") {
955
- logger5.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
946
+ logger4.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
956
947
  return z.string();
957
948
  }
958
949
  switch (jsonSchema.type) {
@@ -979,7 +970,7 @@ function jsonSchemaToZod(jsonSchema) {
979
970
  case "null":
980
971
  return z.null();
981
972
  default:
982
- logger5.warn(
973
+ logger4.warn(
983
974
  {
984
975
  unsupportedType: jsonSchema.type,
985
976
  schema: jsonSchema
@@ -1033,7 +1024,7 @@ __publicField(_ArtifactReferenceSchema, "ARTIFACT_PROPS_SCHEMA", {
1033
1024
  required: ["artifact_id", "task_id"]
1034
1025
  });
1035
1026
  var ArtifactReferenceSchema = _ArtifactReferenceSchema;
1036
- var logger6 = getLogger("ModelFactory");
1027
+ var logger5 = getLogger("ModelFactory");
1037
1028
  var _ModelFactory = class _ModelFactory {
1038
1029
  /**
1039
1030
  * Create a language model instance from configuration
@@ -1046,7 +1037,7 @@ var _ModelFactory = class _ModelFactory {
1046
1037
  const modelSettings = config;
1047
1038
  const modelString = modelSettings.model.trim();
1048
1039
  const { provider, modelName } = _ModelFactory.parseModelString(modelString);
1049
- logger6.debug(
1040
+ logger5.debug(
1050
1041
  {
1051
1042
  provider,
1052
1043
  model: modelName,
@@ -1065,7 +1056,7 @@ var _ModelFactory = class _ModelFactory {
1065
1056
  throw new Error(`Unsupported provider: ${provider}. Supported providers are: ${_ModelFactory.SUPPORTED_PROVIDERS.join(", ")}`);
1066
1057
  }
1067
1058
  } catch (error) {
1068
- logger6.error(
1059
+ logger5.error(
1069
1060
  {
1070
1061
  provider,
1071
1062
  model: modelName,
@@ -1086,7 +1077,7 @@ var _ModelFactory = class _ModelFactory {
1086
1077
  const [provider, ...modelParts] = modelString.split("/");
1087
1078
  const normalizedProvider = provider.toLowerCase();
1088
1079
  if (!_ModelFactory.SUPPORTED_PROVIDERS.includes(normalizedProvider)) {
1089
- logger6.warn(
1080
+ logger5.warn(
1090
1081
  { provider: normalizedProvider, modelName: modelParts.join("/") },
1091
1082
  "Unsupported provider detected, falling back to anthropic"
1092
1083
  );
@@ -1115,14 +1106,14 @@ var _ModelFactory = class _ModelFactory {
1115
1106
  anthropicConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
1116
1107
  }
1117
1108
  if (providerOptions?.gateway) {
1118
- logger6.info(
1109
+ logger5.info(
1119
1110
  { gateway: providerOptions.gateway },
1120
1111
  "Setting up AI Gateway for Anthropic model"
1121
1112
  );
1122
1113
  Object.assign(anthropicConfig, providerOptions.gateway);
1123
1114
  }
1124
1115
  if (Object.keys(anthropicConfig).length > 0) {
1125
- logger6.info({ config: anthropicConfig }, "Applying custom Anthropic provider configuration");
1116
+ logger5.info({ config: anthropicConfig }, "Applying custom Anthropic provider configuration");
1126
1117
  const provider = createAnthropic(anthropicConfig);
1127
1118
  return provider(modelName);
1128
1119
  }
@@ -1137,11 +1128,11 @@ var _ModelFactory = class _ModelFactory {
1137
1128
  openaiConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
1138
1129
  }
1139
1130
  if (providerOptions?.gateway) {
1140
- logger6.info({ gateway: providerOptions.gateway }, "Setting up AI Gateway for OpenAI model");
1131
+ logger5.info({ gateway: providerOptions.gateway }, "Setting up AI Gateway for OpenAI model");
1141
1132
  Object.assign(openaiConfig, providerOptions.gateway);
1142
1133
  }
1143
1134
  if (Object.keys(openaiConfig).length > 0) {
1144
- logger6.info({ config: openaiConfig }, "Applying custom OpenAI provider configuration");
1135
+ logger5.info({ config: openaiConfig }, "Applying custom OpenAI provider configuration");
1145
1136
  const provider = createOpenAI(openaiConfig);
1146
1137
  return provider(modelName);
1147
1138
  }
@@ -1230,8 +1221,7 @@ function unregisterStreamHelper(requestId2) {
1230
1221
  }
1231
1222
 
1232
1223
  // src/utils/graph-session.ts
1233
- var logger7 = getLogger("GraphSession");
1234
- var tracer = getGlobalTracer();
1224
+ var logger6 = getLogger("GraphSession");
1235
1225
  var GraphSession = class {
1236
1226
  // Track scheduled timeouts for cleanup
1237
1227
  constructor(sessionId, messageId, graphId, tenantId, projectId) {
@@ -1255,7 +1245,7 @@ var GraphSession = class {
1255
1245
  __publicField(this, "MAX_PENDING_ARTIFACTS", 100);
1256
1246
  // Prevent unbounded growth
1257
1247
  __publicField(this, "scheduledTimeouts");
1258
- logger7.debug({ sessionId, messageId, graphId }, "GraphSession created");
1248
+ logger6.debug({ sessionId, messageId, graphId }, "GraphSession created");
1259
1249
  }
1260
1250
  /**
1261
1251
  * Initialize status updates for this session
@@ -1277,7 +1267,7 @@ var GraphSession = class {
1277
1267
  if (this.statusUpdateState.config.timeInSeconds) {
1278
1268
  this.statusUpdateTimer = setInterval(async () => {
1279
1269
  if (!this.statusUpdateState || this.isEnded) {
1280
- logger7.debug(
1270
+ logger6.debug(
1281
1271
  { sessionId: this.sessionId },
1282
1272
  "Timer triggered but session already cleaned up or ended"
1283
1273
  );
@@ -1289,7 +1279,7 @@ var GraphSession = class {
1289
1279
  }
1290
1280
  await this.checkAndSendTimeBasedUpdate();
1291
1281
  }, this.statusUpdateState.config.timeInSeconds * 1e3);
1292
- logger7.info(
1282
+ logger6.info(
1293
1283
  {
1294
1284
  sessionId: this.sessionId,
1295
1285
  intervalMs: this.statusUpdateState.config.timeInSeconds * 1e3
@@ -1303,7 +1293,7 @@ var GraphSession = class {
1303
1293
  */
1304
1294
  recordEvent(eventType, agentId, data) {
1305
1295
  if (this.isEnded) {
1306
- logger7.debug(
1296
+ logger6.debug(
1307
1297
  {
1308
1298
  sessionId: this.sessionId,
1309
1299
  eventType,
@@ -1323,7 +1313,7 @@ var GraphSession = class {
1323
1313
  if (eventType === "artifact_saved" && data.pendingGeneration) {
1324
1314
  const artifactId = data.artifactId;
1325
1315
  if (this.pendingArtifacts.size >= this.MAX_PENDING_ARTIFACTS) {
1326
- logger7.warn({
1316
+ logger6.warn({
1327
1317
  sessionId: this.sessionId,
1328
1318
  artifactId,
1329
1319
  pendingCount: this.pendingArtifacts.size,
@@ -1341,7 +1331,7 @@ var GraphSession = class {
1341
1331
  this.artifactProcessingErrors.set(artifactId, errorCount);
1342
1332
  if (errorCount >= this.MAX_ARTIFACT_RETRIES) {
1343
1333
  this.pendingArtifacts.delete(artifactId);
1344
- logger7.error({
1334
+ logger6.error({
1345
1335
  sessionId: this.sessionId,
1346
1336
  artifactId,
1347
1337
  errorCount,
@@ -1350,7 +1340,7 @@ var GraphSession = class {
1350
1340
  stack: error instanceof Error ? error.stack : void 0
1351
1341
  }, "Artifact processing failed after max retries, giving up");
1352
1342
  } else {
1353
- logger7.warn({
1343
+ logger6.warn({
1354
1344
  sessionId: this.sessionId,
1355
1345
  artifactId,
1356
1346
  errorCount,
@@ -1369,14 +1359,14 @@ var GraphSession = class {
1369
1359
  */
1370
1360
  checkStatusUpdates() {
1371
1361
  if (this.isEnded) {
1372
- logger7.debug(
1362
+ logger6.debug(
1373
1363
  { sessionId: this.sessionId },
1374
1364
  "Session has ended - skipping status update check"
1375
1365
  );
1376
1366
  return;
1377
1367
  }
1378
1368
  if (!this.statusUpdateState) {
1379
- logger7.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
1369
+ logger6.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
1380
1370
  return;
1381
1371
  }
1382
1372
  const statusUpdateState = this.statusUpdateState;
@@ -1387,11 +1377,11 @@ var GraphSession = class {
1387
1377
  */
1388
1378
  async checkAndSendTimeBasedUpdate() {
1389
1379
  if (this.isEnded) {
1390
- logger7.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
1380
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
1391
1381
  return;
1392
1382
  }
1393
1383
  if (!this.statusUpdateState) {
1394
- logger7.debug(
1384
+ logger6.debug(
1395
1385
  { sessionId: this.sessionId },
1396
1386
  "No status updates configured for time-based check"
1397
1387
  );
@@ -1404,7 +1394,7 @@ var GraphSession = class {
1404
1394
  try {
1405
1395
  await this.generateAndSendUpdate();
1406
1396
  } catch (error) {
1407
- logger7.error(
1397
+ logger6.error(
1408
1398
  {
1409
1399
  sessionId: this.sessionId,
1410
1400
  error: error instanceof Error ? error.message : "Unknown error"
@@ -1497,29 +1487,29 @@ var GraphSession = class {
1497
1487
  */
1498
1488
  async generateAndSendUpdate() {
1499
1489
  if (this.isEnded) {
1500
- logger7.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
1490
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
1501
1491
  return;
1502
1492
  }
1503
1493
  if (this.isTextStreaming) {
1504
- logger7.debug(
1494
+ logger6.debug(
1505
1495
  { sessionId: this.sessionId },
1506
1496
  "Text is currently streaming - skipping status update"
1507
1497
  );
1508
1498
  return;
1509
1499
  }
1510
1500
  if (this.isGeneratingUpdate) {
1511
- logger7.debug(
1501
+ logger6.debug(
1512
1502
  { sessionId: this.sessionId },
1513
1503
  "Update already in progress - skipping duplicate generation"
1514
1504
  );
1515
1505
  return;
1516
1506
  }
1517
1507
  if (!this.statusUpdateState) {
1518
- logger7.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
1508
+ logger6.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
1519
1509
  return;
1520
1510
  }
1521
1511
  if (!this.graphId) {
1522
- logger7.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
1512
+ logger6.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
1523
1513
  return;
1524
1514
  }
1525
1515
  const newEventCount = this.events.length - this.statusUpdateState.lastEventCount;
@@ -1532,7 +1522,7 @@ var GraphSession = class {
1532
1522
  try {
1533
1523
  const streamHelper = getStreamHelper(this.sessionId);
1534
1524
  if (!streamHelper) {
1535
- logger7.warn(
1525
+ logger6.warn(
1536
1526
  { sessionId: this.sessionId },
1537
1527
  "No stream helper found - cannot send status update"
1538
1528
  );
@@ -1553,7 +1543,7 @@ var GraphSession = class {
1553
1543
  if (result.operations && result.operations.length > 0) {
1554
1544
  for (const op of result.operations) {
1555
1545
  if (!op || !op.type || !op.data || Object.keys(op.data).length === 0) {
1556
- logger7.warn(
1546
+ logger6.warn(
1557
1547
  {
1558
1548
  sessionId: this.sessionId,
1559
1549
  operation: op
@@ -1606,7 +1596,7 @@ var GraphSession = class {
1606
1596
  this.previousSummaries.shift();
1607
1597
  }
1608
1598
  if (!operation || !operation.type || !operation.ctx) {
1609
- logger7.warn(
1599
+ logger6.warn(
1610
1600
  {
1611
1601
  sessionId: this.sessionId,
1612
1602
  operation
@@ -1621,7 +1611,7 @@ var GraphSession = class {
1621
1611
  this.statusUpdateState.lastEventCount = this.events.length;
1622
1612
  }
1623
1613
  } catch (error) {
1624
- logger7.error(
1614
+ logger6.error(
1625
1615
  {
1626
1616
  sessionId: this.sessionId,
1627
1617
  error: error instanceof Error ? error.message : "Unknown error",
@@ -1659,7 +1649,7 @@ var GraphSession = class {
1659
1649
  this.releaseUpdateLock();
1660
1650
  }
1661
1651
  } catch (error) {
1662
- logger7.error(
1652
+ logger6.error(
1663
1653
  {
1664
1654
  sessionId: this.sessionId,
1665
1655
  error: error instanceof Error ? error.message : "Unknown error"
@@ -1704,7 +1694,7 @@ var GraphSession = class {
1704
1694
  */
1705
1695
  async generateProgressSummary(newEvents, elapsedTime, summarizerModel, previousSummaries = []) {
1706
1696
  return tracer.startActiveSpan(
1707
- createSpanName("graph_session.generate_progress_summary"),
1697
+ "graph_session.generate_progress_summary",
1708
1698
  {
1709
1699
  attributes: {
1710
1700
  "graph_session.id": this.sessionId,
@@ -1736,7 +1726,7 @@ User's Question/Context:
1736
1726
  ${conversationHistory}
1737
1727
  ` : "";
1738
1728
  } catch (error) {
1739
- logger7.warn(
1729
+ logger6.warn(
1740
1730
  { sessionId: this.sessionId, error },
1741
1731
  "Failed to fetch conversation history for status update"
1742
1732
  );
@@ -1785,8 +1775,8 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
1785
1775
  span.setStatus({ code: SpanStatusCode.OK });
1786
1776
  return text.trim();
1787
1777
  } catch (error) {
1788
- handleSpanError(span, error);
1789
- logger7.error({ error }, "Failed to generate summary, using fallback");
1778
+ setSpanWithError(span, error);
1779
+ logger6.error({ error }, "Failed to generate summary, using fallback");
1790
1780
  return this.generateFallbackSummary(newEvents, elapsedTime);
1791
1781
  } finally {
1792
1782
  span.end();
@@ -1799,7 +1789,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
1799
1789
  */
1800
1790
  async generateStructuredStatusUpdate(newEvents, elapsedTime, statusComponents, summarizerModel, previousSummaries = []) {
1801
1791
  return tracer.startActiveSpan(
1802
- createSpanName("graph_session.generate_structured_update"),
1792
+ "graph_session.generate_structured_update",
1803
1793
  {
1804
1794
  attributes: {
1805
1795
  "graph_session.id": this.sessionId,
@@ -1832,7 +1822,7 @@ User's Question/Context:
1832
1822
  ${conversationHistory}
1833
1823
  ` : "";
1834
1824
  } catch (error) {
1835
- logger7.warn(
1825
+ logger6.warn(
1836
1826
  { sessionId: this.sessionId, error },
1837
1827
  "Failed to fetch conversation history for structured status update"
1838
1828
  );
@@ -1928,8 +1918,8 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
1928
1918
  span.setStatus({ code: SpanStatusCode.OK });
1929
1919
  return { operations };
1930
1920
  } catch (error) {
1931
- handleSpanError(span, error);
1932
- logger7.error({ error }, "Failed to generate structured update, using fallback");
1921
+ setSpanWithError(span, error);
1922
+ logger6.error({ error }, "Failed to generate structured update, using fallback");
1933
1923
  return { operations: [] };
1934
1924
  } finally {
1935
1925
  span.end();
@@ -2114,7 +2104,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2114
2104
  */
2115
2105
  async processArtifact(artifactData) {
2116
2106
  return tracer.startActiveSpan(
2117
- createSpanName("graph_session.process_artifact"),
2107
+ "graph_session.process_artifact",
2118
2108
  {
2119
2109
  attributes: {
2120
2110
  "graph_session.id": this.sessionId,
@@ -2145,7 +2135,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2145
2135
  );
2146
2136
  }
2147
2137
  span.setAttributes({ "validation.passed": true });
2148
- const { getFormattedConversationHistory: getFormattedConversationHistory2 } = await import('./conversations-YTJWHN67.js');
2138
+ const { getFormattedConversationHistory: getFormattedConversationHistory2 } = await import('./conversations-WZLXOMZH.js');
2149
2139
  const conversationHistory = await getFormattedConversationHistory2({
2150
2140
  tenantId: artifactData.tenantId,
2151
2141
  projectId: artifactData.projectId,
@@ -2187,7 +2177,7 @@ Make it specific and relevant.`;
2187
2177
  description: z.string().max(150).describe("Brief description of the artifact's relevance to the user's question")
2188
2178
  });
2189
2179
  const { object: result } = await tracer.startActiveSpan(
2190
- createSpanName("graph_session.generate_artifact_metadata"),
2180
+ "graph_session.generate_artifact_metadata",
2191
2181
  {
2192
2182
  attributes: {
2193
2183
  "llm.model": this.statusUpdateState?.summarizerModel?.model,
@@ -2220,7 +2210,7 @@ Make it specific and relevant.`;
2220
2210
  generationSpan.setStatus({ code: SpanStatusCode.OK });
2221
2211
  return result2;
2222
2212
  } catch (error) {
2223
- handleSpanError(generationSpan, error);
2213
+ setSpanWithError(generationSpan, error);
2224
2214
  throw error;
2225
2215
  } finally {
2226
2216
  generationSpan.end();
@@ -2254,7 +2244,7 @@ Make it specific and relevant.`;
2254
2244
  taskId: artifactData.taskId,
2255
2245
  artifacts: [artifactToSave]
2256
2246
  });
2257
- logger7.info(
2247
+ logger6.info(
2258
2248
  {
2259
2249
  sessionId: this.sessionId,
2260
2250
  artifactId: artifactData.artifactId,
@@ -2270,8 +2260,8 @@ Make it specific and relevant.`;
2270
2260
  });
2271
2261
  span.setStatus({ code: SpanStatusCode.OK });
2272
2262
  } catch (error) {
2273
- handleSpanError(span, error);
2274
- logger7.error(
2263
+ setSpanWithError(span, error);
2264
+ logger6.error(
2275
2265
  {
2276
2266
  sessionId: this.sessionId,
2277
2267
  artifactId: artifactData.artifactId,
@@ -2307,7 +2297,7 @@ Make it specific and relevant.`;
2307
2297
  taskId: artifactData.taskId,
2308
2298
  artifacts: [fallbackArtifact]
2309
2299
  });
2310
- logger7.info(
2300
+ logger6.info(
2311
2301
  {
2312
2302
  sessionId: this.sessionId,
2313
2303
  artifactId: artifactData.artifactId
@@ -2316,7 +2306,7 @@ Make it specific and relevant.`;
2316
2306
  );
2317
2307
  }
2318
2308
  } catch (fallbackError) {
2319
- logger7.error(
2309
+ logger6.error(
2320
2310
  {
2321
2311
  sessionId: this.sessionId,
2322
2312
  artifactId: artifactData.artifactId,
@@ -2343,7 +2333,7 @@ var GraphSessionManager = class {
2343
2333
  const sessionId = messageId;
2344
2334
  const session = new GraphSession(sessionId, messageId, graphId, tenantId, projectId);
2345
2335
  this.sessions.set(sessionId, session);
2346
- logger7.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
2336
+ logger6.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
2347
2337
  return sessionId;
2348
2338
  }
2349
2339
  /**
@@ -2354,7 +2344,7 @@ var GraphSessionManager = class {
2354
2344
  if (session) {
2355
2345
  session.initializeStatusUpdates(config, summarizerModel);
2356
2346
  } else {
2357
- logger7.error(
2347
+ logger6.error(
2358
2348
  {
2359
2349
  sessionId,
2360
2350
  availableSessions: Array.from(this.sessions.keys())
@@ -2375,7 +2365,7 @@ var GraphSessionManager = class {
2375
2365
  recordEvent(sessionId, eventType, agentId, data) {
2376
2366
  const session = this.sessions.get(sessionId);
2377
2367
  if (!session) {
2378
- logger7.warn({ sessionId }, "Attempted to record event in non-existent session");
2368
+ logger6.warn({ sessionId }, "Attempted to record event in non-existent session");
2379
2369
  return;
2380
2370
  }
2381
2371
  session.recordEvent(eventType, agentId, data);
@@ -2386,12 +2376,12 @@ var GraphSessionManager = class {
2386
2376
  endSession(sessionId) {
2387
2377
  const session = this.sessions.get(sessionId);
2388
2378
  if (!session) {
2389
- logger7.warn({ sessionId }, "Attempted to end non-existent session");
2379
+ logger6.warn({ sessionId }, "Attempted to end non-existent session");
2390
2380
  return [];
2391
2381
  }
2392
2382
  const events = session.getEvents();
2393
2383
  const summary = session.getSummary();
2394
- logger7.info({ sessionId, summary }, "GraphSession ended");
2384
+ logger6.info({ sessionId, summary }, "GraphSession ended");
2395
2385
  session.cleanup();
2396
2386
  this.sessions.delete(sessionId);
2397
2387
  return events;
@@ -2417,7 +2407,7 @@ var GraphSessionManager = class {
2417
2407
  }
2418
2408
  };
2419
2409
  var graphSessionManager = new GraphSessionManager();
2420
- var logger8 = getLogger("ArtifactParser");
2410
+ var logger7 = getLogger("ArtifactParser");
2421
2411
  var _ArtifactParser = class _ArtifactParser {
2422
2412
  constructor(tenantId) {
2423
2413
  this.tenantId = tenantId;
@@ -2481,7 +2471,7 @@ var _ArtifactParser = class _ArtifactParser {
2481
2471
  id: taskId
2482
2472
  });
2483
2473
  if (!task) {
2484
- logger8.warn({ taskId }, "Task not found when fetching artifacts");
2474
+ logger7.warn({ taskId }, "Task not found when fetching artifacts");
2485
2475
  continue;
2486
2476
  }
2487
2477
  const taskArtifacts = await getLedgerArtifacts(dbClient_default)({
@@ -2493,9 +2483,9 @@ var _ArtifactParser = class _ArtifactParser {
2493
2483
  artifacts.set(key, artifact);
2494
2484
  }
2495
2485
  }
2496
- logger8.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
2486
+ logger7.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
2497
2487
  } catch (error) {
2498
- logger8.error({ error, contextId }, "Error loading context artifacts");
2488
+ logger7.error({ error, contextId }, "Error loading context artifacts");
2499
2489
  }
2500
2490
  return artifacts;
2501
2491
  }
@@ -2598,7 +2588,7 @@ var _ArtifactParser = class _ArtifactParser {
2598
2588
  id: taskId
2599
2589
  });
2600
2590
  if (!task) {
2601
- logger8.warn({ taskId }, "Task not found when fetching artifact");
2591
+ logger7.warn({ taskId }, "Task not found when fetching artifact");
2602
2592
  return null;
2603
2593
  }
2604
2594
  const artifacts = await getLedgerArtifacts(dbClient_default)({
@@ -2610,7 +2600,7 @@ var _ArtifactParser = class _ArtifactParser {
2610
2600
  return this.formatArtifactData(artifacts[0], artifactId, taskId);
2611
2601
  }
2612
2602
  } catch (error) {
2613
- logger8.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
2603
+ logger7.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
2614
2604
  }
2615
2605
  return null;
2616
2606
  }
@@ -2650,7 +2640,7 @@ __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:
2650
2640
  var ArtifactParser = _ArtifactParser;
2651
2641
 
2652
2642
  // src/utils/incremental-stream-parser.ts
2653
- var logger9 = getLogger("IncrementalStreamParser");
2643
+ var logger8 = getLogger("IncrementalStreamParser");
2654
2644
  var IncrementalStreamParser = class {
2655
2645
  constructor(streamHelper, tenantId, contextId) {
2656
2646
  __publicField(this, "buffer", "");
@@ -2710,13 +2700,13 @@ var IncrementalStreamParser = class {
2710
2700
  if (part.type === "tool-call-delta" && part.toolName === targetToolName) {
2711
2701
  const delta = part.argsTextDelta || "";
2712
2702
  if (jsonBuffer.length + delta.length > MAX_BUFFER_SIZE) {
2713
- logger9.warn("JSON buffer exceeded maximum size, truncating");
2703
+ logger8.warn("JSON buffer exceeded maximum size, truncating");
2714
2704
  jsonBuffer = jsonBuffer.slice(-MAX_BUFFER_SIZE / 2);
2715
2705
  }
2716
2706
  jsonBuffer += delta;
2717
2707
  for (const char of delta) {
2718
2708
  if (componentBuffer.length > MAX_BUFFER_SIZE) {
2719
- logger9.warn("Component buffer exceeded maximum size, resetting");
2709
+ logger8.warn("Component buffer exceeded maximum size, resetting");
2720
2710
  componentBuffer = "";
2721
2711
  depth = 0;
2722
2712
  continue;
@@ -2731,7 +2721,7 @@ var IncrementalStreamParser = class {
2731
2721
  if (componentMatch) {
2732
2722
  const MAX_COMPONENT_SIZE = 1024 * 1024;
2733
2723
  if (componentMatch[0].length > MAX_COMPONENT_SIZE) {
2734
- logger9.warn(
2724
+ logger8.warn(
2735
2725
  {
2736
2726
  size: componentMatch[0].length,
2737
2727
  maxSize: MAX_COMPONENT_SIZE
@@ -2744,7 +2734,7 @@ var IncrementalStreamParser = class {
2744
2734
  try {
2745
2735
  const component = JSON.parse(componentMatch[0]);
2746
2736
  if (typeof component !== "object" || !component.id) {
2747
- logger9.warn("Invalid component structure, skipping");
2737
+ logger8.warn("Invalid component structure, skipping");
2748
2738
  componentBuffer = "";
2749
2739
  continue;
2750
2740
  }
@@ -2757,7 +2747,7 @@ var IncrementalStreamParser = class {
2757
2747
  componentsStreamed++;
2758
2748
  componentBuffer = "";
2759
2749
  } catch (e) {
2760
- logger9.debug({ error: e }, "Failed to parse component, continuing to accumulate");
2750
+ logger8.debug({ error: e }, "Failed to parse component, continuing to accumulate");
2761
2751
  }
2762
2752
  }
2763
2753
  }
@@ -2774,7 +2764,7 @@ var IncrementalStreamParser = class {
2774
2764
  break;
2775
2765
  }
2776
2766
  }
2777
- logger9.debug({ componentsStreamed }, "Finished streaming components");
2767
+ logger8.debug({ componentsStreamed }, "Finished streaming components");
2778
2768
  }
2779
2769
  /**
2780
2770
  * Legacy method for backward compatibility - defaults to text processing
@@ -2916,12 +2906,9 @@ var IncrementalStreamParser = class {
2916
2906
  }
2917
2907
  }
2918
2908
  };
2919
- var logger10 = getLogger("ResponseFormatter");
2920
- var RESPONSE_FORMATTER_SERVICE = "responseFormatter";
2921
- function getResponseFormatterTracer() {
2922
- const tracerProvider = trace.getTracerProvider();
2923
- return tracerProvider.getTracer(RESPONSE_FORMATTER_SERVICE, SERVICE_VERSION);
2924
- }
2909
+
2910
+ // src/utils/response-formatter.ts
2911
+ var logger9 = getLogger("ResponseFormatter");
2925
2912
  var ResponseFormatter = class {
2926
2913
  constructor(tenantId) {
2927
2914
  __publicField(this, "artifactParser");
@@ -2931,8 +2918,7 @@ var ResponseFormatter = class {
2931
2918
  * Process structured object response and replace artifact markers with actual artifacts
2932
2919
  */
2933
2920
  async formatObjectResponse(responseObject, contextId) {
2934
- const tracer3 = getResponseFormatterTracer();
2935
- return tracer3.startActiveSpan("response.formatObject", async (span) => {
2921
+ return tracer.startActiveSpan("response.format_object_response", async (span) => {
2936
2922
  try {
2937
2923
  const artifactMap = await this.artifactParser.getContextArtifacts(contextId);
2938
2924
  span.setAttributes({
@@ -2952,8 +2938,8 @@ var ResponseFormatter = class {
2952
2938
  });
2953
2939
  return { parts };
2954
2940
  } catch (error) {
2955
- span.recordException(error);
2956
- logger10.error({ error, responseObject }, "Error formatting object response");
2941
+ setSpanWithError(span, error);
2942
+ logger9.error({ error, responseObject }, "Error formatting object response");
2957
2943
  return {
2958
2944
  parts: [{ kind: "data", data: responseObject }]
2959
2945
  };
@@ -2966,8 +2952,7 @@ var ResponseFormatter = class {
2966
2952
  * Process agent response and convert artifact markers to data parts
2967
2953
  */
2968
2954
  async formatResponse(responseText, contextId) {
2969
- const tracer3 = getResponseFormatterTracer();
2970
- return tracer3.startActiveSpan("response.format", async (span) => {
2955
+ return tracer.startActiveSpan("response.format_response", async (span) => {
2971
2956
  try {
2972
2957
  span.setAttributes({
2973
2958
  "response.hasArtifactMarkers": this.artifactParser.hasArtifactMarkers(responseText),
@@ -3004,9 +2989,8 @@ var ResponseFormatter = class {
3004
2989
  });
3005
2990
  return { parts };
3006
2991
  } catch (error) {
3007
- span.recordException(error);
3008
- span.setStatus({ code: 2, message: error.message });
3009
- logger10.error({ error, responseText }, "Error formatting response");
2992
+ setSpanWithError(span, error);
2993
+ logger9.error({ error, responseText }, "Error formatting response");
3010
2994
  return { text: responseText };
3011
2995
  } finally {
3012
2996
  span.end();
@@ -3051,7 +3035,7 @@ var ResponseFormatter = class {
3051
3035
  }
3052
3036
  }
3053
3037
  };
3054
- var logger11 = getLogger("ToolSessionManager");
3038
+ var logger10 = getLogger("ToolSessionManager");
3055
3039
  var _ToolSessionManager = class _ToolSessionManager {
3056
3040
  // 5 minutes
3057
3041
  constructor() {
@@ -3080,7 +3064,7 @@ var _ToolSessionManager = class _ToolSessionManager {
3080
3064
  createdAt: Date.now()
3081
3065
  };
3082
3066
  this.sessions.set(sessionId, session);
3083
- logger11.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
3067
+ logger10.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
3084
3068
  return sessionId;
3085
3069
  }
3086
3070
  /**
@@ -3089,7 +3073,7 @@ var _ToolSessionManager = class _ToolSessionManager {
3089
3073
  recordToolResult(sessionId, toolResult) {
3090
3074
  const session = this.sessions.get(sessionId);
3091
3075
  if (!session) {
3092
- logger11.warn(
3076
+ logger10.warn(
3093
3077
  { sessionId, toolCallId: toolResult.toolCallId },
3094
3078
  "Tool result recorded for unknown session"
3095
3079
  );
@@ -3103,12 +3087,12 @@ var _ToolSessionManager = class _ToolSessionManager {
3103
3087
  getToolResult(sessionId, toolCallId) {
3104
3088
  const session = this.sessions.get(sessionId);
3105
3089
  if (!session) {
3106
- logger11.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
3090
+ logger10.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
3107
3091
  return void 0;
3108
3092
  }
3109
3093
  const result = session.toolResults.get(toolCallId);
3110
3094
  if (!result) {
3111
- logger11.warn(
3095
+ logger10.warn(
3112
3096
  {
3113
3097
  sessionId,
3114
3098
  toolCallId,
@@ -3147,10 +3131,10 @@ var _ToolSessionManager = class _ToolSessionManager {
3147
3131
  }
3148
3132
  for (const sessionId of expiredSessions) {
3149
3133
  this.sessions.delete(sessionId);
3150
- logger11.debug({ sessionId }, "Cleaned up expired tool session");
3134
+ logger10.debug({ sessionId }, "Cleaned up expired tool session");
3151
3135
  }
3152
3136
  if (expiredSessions.length > 0) {
3153
- logger11.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
3137
+ logger10.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
3154
3138
  }
3155
3139
  }
3156
3140
  };
@@ -3159,7 +3143,7 @@ var ToolSessionManager = _ToolSessionManager;
3159
3143
  var toolSessionManager = ToolSessionManager.getInstance();
3160
3144
 
3161
3145
  // src/agents/artifactTools.ts
3162
- var logger12 = getLogger("artifactTools");
3146
+ var logger11 = getLogger("artifactTools");
3163
3147
  function buildKeyNestingMap(data, prefix = "", map = /* @__PURE__ */ new Map()) {
3164
3148
  if (typeof data === "object" && data !== null) {
3165
3149
  if (Array.isArray(data)) {
@@ -3380,7 +3364,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3380
3364
  execute: async ({ toolCallId, baseSelector, propSelectors, ...rest }, _context) => {
3381
3365
  const artifactType = "artifactType" in rest ? rest.artifactType : void 0;
3382
3366
  if (!sessionId) {
3383
- logger12.warn({ toolCallId }, "No session ID provided to save_tool_result");
3367
+ logger11.warn({ toolCallId }, "No session ID provided to save_tool_result");
3384
3368
  return {
3385
3369
  saved: false,
3386
3370
  error: `[toolCallId: ${toolCallId}] No session context available`,
@@ -3390,7 +3374,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3390
3374
  }
3391
3375
  const toolResult = toolSessionManager.getToolResult(sessionId, toolCallId);
3392
3376
  if (!toolResult) {
3393
- logger12.warn({ toolCallId, sessionId }, "Tool result not found in session");
3377
+ logger11.warn({ toolCallId, sessionId }, "Tool result not found in session");
3394
3378
  return {
3395
3379
  saved: false,
3396
3380
  error: `[toolCallId: ${toolCallId}] Tool result not found`,
@@ -3403,7 +3387,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3403
3387
  const baseData = jmespath.search(parsedResult, baseSelector);
3404
3388
  if (!baseData || Array.isArray(baseData) && baseData.length === 0) {
3405
3389
  const debugInfo = analyzeSelectorFailure(parsedResult, baseSelector);
3406
- logger12.warn(
3390
+ logger11.warn(
3407
3391
  {
3408
3392
  baseSelector,
3409
3393
  toolCallId,
@@ -3446,7 +3430,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3446
3430
  const fallbackValue = item[propName];
3447
3431
  if (fallbackValue !== null && fallbackValue !== void 0) {
3448
3432
  extractedItem[propName] = fallbackValue;
3449
- logger12.info(
3433
+ logger11.info(
3450
3434
  { propName, propSelector, context },
3451
3435
  `PropSelector failed, used fallback direct property access`
3452
3436
  );
@@ -3458,7 +3442,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3458
3442
  const fallbackValue = item[propName];
3459
3443
  if (fallbackValue !== null && fallbackValue !== void 0) {
3460
3444
  extractedItem[propName] = fallbackValue;
3461
- logger12.warn(
3445
+ logger11.warn(
3462
3446
  { propName, propSelector, context, error: error.message },
3463
3447
  `PropSelector syntax error, used fallback direct property access`
3464
3448
  );
@@ -3571,7 +3555,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3571
3555
  warnings
3572
3556
  };
3573
3557
  } catch (error) {
3574
- logger12.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3558
+ logger11.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3575
3559
  return {
3576
3560
  saved: false,
3577
3561
  error: `[toolCallId: ${toolCallId}] ${error instanceof Error ? error.message : "Unknown error"}`,
@@ -3583,7 +3567,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3583
3567
  }
3584
3568
 
3585
3569
  // src/a2a/client.ts
3586
- var logger13 = getLogger("a2aClient");
3570
+ var logger12 = getLogger("a2aClient");
3587
3571
  var DEFAULT_BACKOFF = {
3588
3572
  initialInterval: 500,
3589
3573
  maxInterval: 6e4,
@@ -3789,7 +3773,7 @@ var A2AClient = class {
3789
3773
  try {
3790
3774
  const res = await fn();
3791
3775
  if (attempt > 0) {
3792
- logger13.info(
3776
+ logger12.info(
3793
3777
  {
3794
3778
  attempts: attempt + 1,
3795
3779
  elapsedTime: Date.now() - start
@@ -3804,7 +3788,7 @@ var A2AClient = class {
3804
3788
  }
3805
3789
  const elapsed = Date.now() - start;
3806
3790
  if (elapsed > maxElapsedTime) {
3807
- logger13.warn(
3791
+ logger12.warn(
3808
3792
  {
3809
3793
  attempts: attempt + 1,
3810
3794
  elapsedTime: elapsed,
@@ -3825,7 +3809,7 @@ var A2AClient = class {
3825
3809
  retryInterval = initialInterval * attempt ** exponent + Math.random() * 1e3;
3826
3810
  }
3827
3811
  const delayMs = Math.min(retryInterval, maxInterval);
3828
- logger13.info(
3812
+ logger12.info(
3829
3813
  {
3830
3814
  attempt: attempt + 1,
3831
3815
  delayMs,
@@ -3910,7 +3894,7 @@ var A2AClient = class {
3910
3894
  }
3911
3895
  const rpcResponse = await httpResponse.json();
3912
3896
  if (rpcResponse.id !== requestId2) {
3913
- logger13.warn(
3897
+ logger12.warn(
3914
3898
  {
3915
3899
  method,
3916
3900
  expectedId: requestId2,
@@ -4109,7 +4093,7 @@ var A2AClient = class {
4109
4093
  try {
4110
4094
  while (true) {
4111
4095
  const { done, value } = await reader.read();
4112
- logger13.info({ done, value }, "parseA2ASseStream");
4096
+ logger12.info({ done, value }, "parseA2ASseStream");
4113
4097
  if (done) {
4114
4098
  if (eventDataBuffer.trim()) {
4115
4099
  const result = this._processSseEventData(
@@ -4196,7 +4180,7 @@ var A2AClient = class {
4196
4180
  };
4197
4181
 
4198
4182
  // src/agents/relationTools.ts
4199
- var logger14 = getLogger("relationships Tools");
4183
+ var logger13 = getLogger("relationships Tools");
4200
4184
  var generateTransferToolDescription = (config) => {
4201
4185
  return `Hand off the conversation to agent ${config.id}.
4202
4186
 
@@ -4234,7 +4218,7 @@ var createTransferToAgentTool = ({
4234
4218
  "transfer.to_agent_id": transferConfig.id ?? "unknown"
4235
4219
  });
4236
4220
  }
4237
- logger14.info(
4221
+ logger13.info(
4238
4222
  {
4239
4223
  transferTo: transferConfig.id ?? "unknown",
4240
4224
  fromAgent: callingAgentId
@@ -4382,7 +4366,7 @@ function createDelegateToAgentTool({
4382
4366
  ...isInternal ? { fromAgentId: callingAgentId } : { fromExternalAgentId: callingAgentId }
4383
4367
  }
4384
4368
  };
4385
- logger14.info({ messageToSend }, "messageToSend");
4369
+ logger13.info({ messageToSend }, "messageToSend");
4386
4370
  await createMessage(dbClient_default)({
4387
4371
  id: nanoid(),
4388
4372
  tenantId,
@@ -4442,7 +4426,7 @@ function createDelegateToAgentTool({
4442
4426
  }
4443
4427
  });
4444
4428
  }
4445
- var logger15 = getLogger("SystemPromptBuilder");
4429
+ var logger14 = getLogger("SystemPromptBuilder");
4446
4430
  var SystemPromptBuilder = class {
4447
4431
  constructor(version, versionConfig) {
4448
4432
  this.version = version;
@@ -4466,9 +4450,9 @@ var SystemPromptBuilder = class {
4466
4450
  this.templates.set(name, content);
4467
4451
  }
4468
4452
  this.loaded = true;
4469
- logger15.debug(`Loaded ${this.templates.size} templates for version ${this.version}`);
4453
+ logger14.debug(`Loaded ${this.templates.size} templates for version ${this.version}`);
4470
4454
  } catch (error) {
4471
- logger15.error({ error }, `Failed to load templates for version ${this.version}`);
4455
+ logger14.error({ error }, `Failed to load templates for version ${this.version}`);
4472
4456
  throw new Error(`Template loading failed: ${error}`);
4473
4457
  }
4474
4458
  }
@@ -4804,8 +4788,7 @@ function hasToolCallWithPrefix(prefix) {
4804
4788
  return false;
4805
4789
  };
4806
4790
  }
4807
- var logger16 = getLogger("Agent");
4808
- var tracer2 = getGlobalTracer();
4791
+ var logger15 = getLogger("Agent");
4809
4792
  var CONSTANTS = {
4810
4793
  MAX_GENERATION_STEPS: 12,
4811
4794
  PHASE_1_TIMEOUT_MS: 27e4,
@@ -5058,14 +5041,14 @@ var Agent = class {
5058
5041
  for (const toolSet of tools) {
5059
5042
  for (const [toolName, originalTool] of Object.entries(toolSet)) {
5060
5043
  if (!isValidTool(originalTool)) {
5061
- logger16.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5044
+ logger15.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5062
5045
  continue;
5063
5046
  }
5064
5047
  const sessionWrappedTool = tool({
5065
5048
  description: originalTool.description,
5066
5049
  inputSchema: originalTool.inputSchema,
5067
5050
  execute: async (args, { toolCallId }) => {
5068
- logger16.debug({ toolName, toolCallId }, "MCP Tool Called");
5051
+ logger15.debug({ toolName, toolCallId }, "MCP Tool Called");
5069
5052
  try {
5070
5053
  const result = await originalTool.execute(args, { toolCallId });
5071
5054
  toolSessionManager.recordToolResult(sessionId, {
@@ -5077,7 +5060,7 @@ var Agent = class {
5077
5060
  });
5078
5061
  return { result, toolCallId };
5079
5062
  } catch (error) {
5080
- logger16.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5063
+ logger15.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5081
5064
  throw error;
5082
5065
  }
5083
5066
  }
@@ -5162,7 +5145,7 @@ var Agent = class {
5162
5145
  selectedTools
5163
5146
  };
5164
5147
  }
5165
- logger16.info(
5148
+ logger15.info(
5166
5149
  {
5167
5150
  toolName: tool4.name,
5168
5151
  credentialReferenceId,
@@ -5202,7 +5185,7 @@ var Agent = class {
5202
5185
  async getResolvedContext(conversationId, requestContext) {
5203
5186
  try {
5204
5187
  if (!this.config.contextConfigId) {
5205
- logger16.debug({ graphId: this.config.graphId }, "No context config found for graph");
5188
+ logger15.debug({ graphId: this.config.graphId }, "No context config found for graph");
5206
5189
  return null;
5207
5190
  }
5208
5191
  const contextConfig = await getContextConfigById(dbClient_default)({
@@ -5210,7 +5193,7 @@ var Agent = class {
5210
5193
  id: this.config.contextConfigId
5211
5194
  });
5212
5195
  if (!contextConfig) {
5213
- logger16.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5196
+ logger15.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5214
5197
  return null;
5215
5198
  }
5216
5199
  if (!this.contextResolver) {
@@ -5227,7 +5210,7 @@ var Agent = class {
5227
5210
  $now: (/* @__PURE__ */ new Date()).toISOString(),
5228
5211
  $env: process.env
5229
5212
  };
5230
- logger16.debug(
5213
+ logger15.debug(
5231
5214
  {
5232
5215
  conversationId,
5233
5216
  contextConfigId: contextConfig.id,
@@ -5241,7 +5224,7 @@ var Agent = class {
5241
5224
  );
5242
5225
  return contextWithBuiltins;
5243
5226
  } catch (error) {
5244
- logger16.error(
5227
+ logger15.error(
5245
5228
  {
5246
5229
  conversationId,
5247
5230
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5265,7 +5248,7 @@ var Agent = class {
5265
5248
  });
5266
5249
  return graphDefinition?.graphPrompt || void 0;
5267
5250
  } catch (error) {
5268
- logger16.warn(
5251
+ logger15.warn(
5269
5252
  {
5270
5253
  graphId: this.config.graphId,
5271
5254
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5292,7 +5275,7 @@ var Agent = class {
5292
5275
  }
5293
5276
  return !!(graphDefinition.artifactComponents && Object.keys(graphDefinition.artifactComponents).length > 0);
5294
5277
  } catch (error) {
5295
- logger16.warn(
5278
+ logger15.warn(
5296
5279
  {
5297
5280
  graphId: this.config.graphId,
5298
5281
  tenantId: this.config.tenantId,
@@ -5352,7 +5335,7 @@ Key requirements:
5352
5335
  preserveUnresolved: false
5353
5336
  });
5354
5337
  } catch (error) {
5355
- logger16.error(
5338
+ logger15.error(
5356
5339
  {
5357
5340
  conversationId,
5358
5341
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5397,7 +5380,7 @@ Key requirements:
5397
5380
  preserveUnresolved: false
5398
5381
  });
5399
5382
  } catch (error) {
5400
- logger16.error(
5383
+ logger15.error(
5401
5384
  {
5402
5385
  conversationId,
5403
5386
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5425,7 +5408,7 @@ Key requirements:
5425
5408
  artifactId: z.string().describe("The unique identifier of the artifact to get.")
5426
5409
  }),
5427
5410
  execute: async ({ artifactId }) => {
5428
- logger16.info({ artifactId }, "get_artifact executed");
5411
+ logger15.info({ artifactId }, "get_artifact executed");
5429
5412
  const artifact = await getLedgerArtifacts(dbClient_default)({
5430
5413
  scopes: {
5431
5414
  tenantId: this.config.tenantId,
@@ -5492,7 +5475,7 @@ Key requirements:
5492
5475
  graphId: this.config.graphId
5493
5476
  });
5494
5477
  } catch (error) {
5495
- logger16.error(
5478
+ logger15.error(
5496
5479
  { error, graphId: this.config.graphId },
5497
5480
  "Failed to check graph artifact components"
5498
5481
  );
@@ -5500,7 +5483,7 @@ Key requirements:
5500
5483
  }
5501
5484
  }
5502
5485
  async generate(userMessage, runtimeContext) {
5503
- return tracer2.startActiveSpan(createSpanName("agent.generate"), async (span) => {
5486
+ return tracer.startActiveSpan("agent.generate", async (span) => {
5504
5487
  const contextId = runtimeContext?.contextId || "default";
5505
5488
  const taskId = runtimeContext?.metadata?.taskId || "unknown";
5506
5489
  const sessionId = toolSessionManager.createSession(
@@ -5524,8 +5507,8 @@ Key requirements:
5524
5507
  functionTools,
5525
5508
  relationTools,
5526
5509
  defaultTools
5527
- ] = await tracer2.startActiveSpan(
5528
- createSpanName("agent.load_tools"),
5510
+ ] = await tracer.startActiveSpan(
5511
+ "agent.load_tools",
5529
5512
  {
5530
5513
  attributes: {
5531
5514
  "agent.name": this.config.name,
@@ -5547,11 +5530,10 @@ Key requirements:
5547
5530
  childSpan.setStatus({ code: SpanStatusCode.OK });
5548
5531
  return result;
5549
5532
  } catch (err) {
5550
- handleSpanError(childSpan, err);
5533
+ setSpanWithError(childSpan, err);
5551
5534
  throw err;
5552
5535
  } finally {
5553
5536
  childSpan.end();
5554
- await forceFlushTracer();
5555
5537
  }
5556
5538
  }
5557
5539
  );
@@ -5597,7 +5579,7 @@ Key requirements:
5597
5579
  const configuredTimeout = modelSettings.maxDuration ? Math.min(modelSettings.maxDuration * 1e3, MAX_ALLOWED_TIMEOUT_MS) : shouldStreamPhase1 ? CONSTANTS.PHASE_1_TIMEOUT_MS : CONSTANTS.NON_STREAMING_PHASE_1_TIMEOUT_MS;
5598
5580
  const timeoutMs = Math.min(configuredTimeout, MAX_ALLOWED_TIMEOUT_MS);
5599
5581
  if (modelSettings.maxDuration && modelSettings.maxDuration * 1e3 > MAX_ALLOWED_TIMEOUT_MS) {
5600
- logger16.warn(
5582
+ logger15.warn(
5601
5583
  {
5602
5584
  requestedTimeout: modelSettings.maxDuration * 1e3,
5603
5585
  appliedTimeout: timeoutMs,
@@ -5639,7 +5621,7 @@ Key requirements:
5639
5621
  }
5640
5622
  );
5641
5623
  } catch (error) {
5642
- logger16.debug("Failed to track agent reasoning");
5624
+ logger15.debug("Failed to track agent reasoning");
5643
5625
  }
5644
5626
  }
5645
5627
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -5722,7 +5704,7 @@ Key requirements:
5722
5704
  }
5723
5705
  );
5724
5706
  } catch (error) {
5725
- logger16.debug("Failed to track agent reasoning");
5707
+ logger15.debug("Failed to track agent reasoning");
5726
5708
  }
5727
5709
  }
5728
5710
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -5767,7 +5749,7 @@ Key requirements:
5767
5749
  return;
5768
5750
  }
5769
5751
  if (toolName === "save_artifact_tool" || toolName === "save_tool_result") {
5770
- logger16.info({ result }, "save_artifact_tool or save_tool_result");
5752
+ logger15.info({ result }, "save_artifact_tool or save_tool_result");
5771
5753
  if (result.output.artifacts) {
5772
5754
  for (const artifact of result.output.artifacts) {
5773
5755
  const artifactId = artifact?.artifactId || "N/A";
@@ -5873,7 +5855,6 @@ ${output}`;
5873
5855
  }
5874
5856
  span.setStatus({ code: SpanStatusCode.OK });
5875
5857
  span.end();
5876
- await forceFlushTracer();
5877
5858
  let formattedContent = response.formattedContent || null;
5878
5859
  if (!formattedContent) {
5879
5860
  if (response.object) {
@@ -5903,14 +5884,8 @@ ${output}`;
5903
5884
  return formattedResponse;
5904
5885
  } catch (error) {
5905
5886
  toolSessionManager.endSession(sessionId);
5906
- span.recordException(error);
5907
- span.setStatus({
5908
- code: SpanStatusCode.ERROR,
5909
- message: error.message
5910
- });
5887
+ setSpanWithError(span, error);
5911
5888
  span.end();
5912
- await forceFlushTracer();
5913
- getLogger("Agent").error(error, "Agent generate error");
5914
5889
  throw error;
5915
5890
  }
5916
5891
  });
@@ -5928,7 +5903,7 @@ function parseEmbeddedJson(data) {
5928
5903
  }
5929
5904
  });
5930
5905
  }
5931
- var logger17 = getLogger("generateTaskHandler");
5906
+ var logger16 = getLogger("generateTaskHandler");
5932
5907
  var createTaskHandler = (config, credentialStoreRegistry) => {
5933
5908
  return async (task) => {
5934
5909
  try {
@@ -5978,7 +5953,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
5978
5953
  agentId: config.agentId
5979
5954
  })
5980
5955
  ]);
5981
- logger17.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
5956
+ logger16.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
5982
5957
  const agentPrompt = "prompt" in config.agentSchema ? config.agentSchema.prompt : "";
5983
5958
  const models = "models" in config.agentSchema ? config.agentSchema.models : void 0;
5984
5959
  const stopWhen = "stopWhen" in config.agentSchema ? config.agentSchema.stopWhen : void 0;
@@ -6078,7 +6053,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6078
6053
  const taskIdMatch = task.id.match(/^task_([^-]+-[^-]+-\d+)-/);
6079
6054
  if (taskIdMatch) {
6080
6055
  contextId = taskIdMatch[1];
6081
- logger17.info(
6056
+ logger16.info(
6082
6057
  {
6083
6058
  taskId: task.id,
6084
6059
  extractedContextId: contextId,
@@ -6094,7 +6069,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6094
6069
  const isDelegation = task.context?.metadata?.isDelegation === true;
6095
6070
  agent.setDelegationStatus(isDelegation);
6096
6071
  if (isDelegation) {
6097
- logger17.info(
6072
+ logger16.info(
6098
6073
  { agentId: config.agentId, taskId: task.id },
6099
6074
  "Delegated agent - streaming disabled"
6100
6075
  );
@@ -6374,7 +6349,7 @@ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
6374
6349
 
6375
6350
  // src/routes/agents.ts
6376
6351
  var app = new OpenAPIHono();
6377
- var logger18 = getLogger("agents");
6352
+ var logger17 = getLogger("agents");
6378
6353
  app.openapi(
6379
6354
  createRoute({
6380
6355
  method: "get",
@@ -6412,7 +6387,7 @@ app.openapi(
6412
6387
  tracestate: c.req.header("tracestate"),
6413
6388
  baggage: c.req.header("baggage")
6414
6389
  };
6415
- logger18.info(
6390
+ logger17.info(
6416
6391
  {
6417
6392
  otelHeaders,
6418
6393
  path: c.req.path,
@@ -6423,7 +6398,7 @@ app.openapi(
6423
6398
  const executionContext = getRequestExecutionContext(c);
6424
6399
  const { tenantId, projectId, graphId, agentId } = executionContext;
6425
6400
  if (agentId) {
6426
- logger18.info(
6401
+ logger17.info(
6427
6402
  {
6428
6403
  message: "getRegisteredAgent (agent-level)",
6429
6404
  tenantId,
@@ -6435,13 +6410,13 @@ app.openapi(
6435
6410
  );
6436
6411
  const credentialStores = c.get("credentialStores");
6437
6412
  const agent = await getRegisteredAgent(executionContext, credentialStores);
6438
- logger18.info({ agent }, "agent registered: well-known agent.json");
6413
+ logger17.info({ agent }, "agent registered: well-known agent.json");
6439
6414
  if (!agent) {
6440
6415
  return c.json({ error: "Agent not found" }, 404);
6441
6416
  }
6442
6417
  return c.json(agent.agentCard);
6443
6418
  } else {
6444
- logger18.info(
6419
+ logger17.info(
6445
6420
  {
6446
6421
  message: "getRegisteredGraph (graph-level)",
6447
6422
  tenantId,
@@ -6464,7 +6439,7 @@ app.post("/a2a", async (c) => {
6464
6439
  tracestate: c.req.header("tracestate"),
6465
6440
  baggage: c.req.header("baggage")
6466
6441
  };
6467
- logger18.info(
6442
+ logger17.info(
6468
6443
  {
6469
6444
  otelHeaders,
6470
6445
  path: c.req.path,
@@ -6475,7 +6450,7 @@ app.post("/a2a", async (c) => {
6475
6450
  const executionContext = getRequestExecutionContext(c);
6476
6451
  const { tenantId, projectId, graphId, agentId } = executionContext;
6477
6452
  if (agentId) {
6478
- logger18.info(
6453
+ logger17.info(
6479
6454
  {
6480
6455
  message: "a2a (agent-level)",
6481
6456
  tenantId,
@@ -6499,7 +6474,7 @@ app.post("/a2a", async (c) => {
6499
6474
  }
6500
6475
  return a2aHandler(c, agent);
6501
6476
  } else {
6502
- logger18.info(
6477
+ logger17.info(
6503
6478
  {
6504
6479
  message: "a2a (graph-level)",
6505
6480
  tenantId,
@@ -6539,14 +6514,14 @@ app.post("/a2a", async (c) => {
6539
6514
  }
6540
6515
  });
6541
6516
  var agents_default = app;
6542
- var logger19 = getLogger("Transfer");
6517
+ var logger18 = getLogger("Transfer");
6543
6518
  async function executeTransfer({
6544
6519
  tenantId,
6545
6520
  threadId,
6546
6521
  projectId,
6547
6522
  targetAgentId
6548
6523
  }) {
6549
- logger19.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
6524
+ logger18.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
6550
6525
  await setActiveAgentForThread(dbClient_default)({
6551
6526
  scopes: { tenantId, projectId },
6552
6527
  threadId,
@@ -7085,7 +7060,7 @@ var MCPStreamHelper = class {
7085
7060
  function createMCPStreamHelper() {
7086
7061
  return new MCPStreamHelper();
7087
7062
  }
7088
- var logger20 = getLogger("ExecutionHandler");
7063
+ var logger19 = getLogger("ExecutionHandler");
7089
7064
  var ExecutionHandler = class {
7090
7065
  constructor() {
7091
7066
  // Hardcoded error limit - separate from configurable stopWhen
@@ -7110,7 +7085,7 @@ var ExecutionHandler = class {
7110
7085
  const { tenantId, projectId, graphId, apiKey, baseUrl } = executionContext;
7111
7086
  registerStreamHelper(requestId2, sseHelper);
7112
7087
  graphSessionManager.createSession(requestId2, graphId, tenantId, projectId);
7113
- logger20.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
7088
+ logger19.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
7114
7089
  let graphConfig = null;
7115
7090
  try {
7116
7091
  graphConfig = await getFullGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
@@ -7122,7 +7097,7 @@ var ExecutionHandler = class {
7122
7097
  );
7123
7098
  }
7124
7099
  } catch (error) {
7125
- logger20.error(
7100
+ logger19.error(
7126
7101
  {
7127
7102
  error: error instanceof Error ? error.message : "Unknown error",
7128
7103
  stack: error instanceof Error ? error.stack : void 0
@@ -7138,7 +7113,7 @@ var ExecutionHandler = class {
7138
7113
  try {
7139
7114
  await sseHelper.writeOperation(agentInitializingOp(requestId2, graphId));
7140
7115
  const taskId = `task_${conversationId}-${requestId2}`;
7141
- logger20.info(
7116
+ logger19.info(
7142
7117
  { taskId, currentAgentId, conversationId, requestId: requestId2 },
7143
7118
  "Attempting to create or reuse existing task"
7144
7119
  );
@@ -7161,7 +7136,7 @@ var ExecutionHandler = class {
7161
7136
  agent_id: currentAgentId
7162
7137
  }
7163
7138
  });
7164
- logger20.info(
7139
+ logger19.info(
7165
7140
  {
7166
7141
  taskId,
7167
7142
  createdTaskMetadata: Array.isArray(task) ? task[0]?.metadata : task?.metadata
@@ -7170,21 +7145,21 @@ var ExecutionHandler = class {
7170
7145
  );
7171
7146
  } catch (error) {
7172
7147
  if (error?.message?.includes("UNIQUE constraint failed") || error?.message?.includes("PRIMARY KEY constraint failed") || error?.code === "SQLITE_CONSTRAINT_PRIMARYKEY") {
7173
- logger20.info({ taskId, error: error.message }, "Task already exists, fetching existing task");
7148
+ logger19.info({ taskId, error: error.message }, "Task already exists, fetching existing task");
7174
7149
  const existingTask = await getTask(dbClient_default)({ id: taskId });
7175
7150
  if (existingTask) {
7176
7151
  task = existingTask;
7177
- logger20.info({ taskId, existingTask }, "Successfully reused existing task from race condition");
7152
+ logger19.info({ taskId, existingTask }, "Successfully reused existing task from race condition");
7178
7153
  } else {
7179
- logger20.error({ taskId, error }, "Task constraint failed but task not found");
7154
+ logger19.error({ taskId, error }, "Task constraint failed but task not found");
7180
7155
  throw error;
7181
7156
  }
7182
7157
  } else {
7183
- logger20.error({ taskId, error }, "Failed to create task due to non-constraint error");
7158
+ logger19.error({ taskId, error }, "Failed to create task due to non-constraint error");
7184
7159
  throw error;
7185
7160
  }
7186
7161
  }
7187
- logger20.debug(
7162
+ logger19.debug(
7188
7163
  {
7189
7164
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7190
7165
  executionType: "create_initial_task",
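
The task-creation hunks above catch SQLite UNIQUE / PRIMARY KEY constraint failures and fall back to reusing the task row that already exists. A generic sketch of that create-or-reuse pattern, with hypothetical parameter names and no ties to the package's actual helpers:

// Generic sketch, not package code: attempt the insert, and if the failure
// looks like a uniqueness conflict, read back the row that won the race.
async function createOrReuse<T>(
  create: () => Promise<T>,
  fetchExisting: () => Promise<T | undefined>,
  isConflict: (err: unknown) => boolean,
): Promise<T> {
  try {
    return await create();
  } catch (err) {
    if (!isConflict(err)) throw err;
    const existing = await fetchExisting();
    if (existing) return existing;
    throw err; // conflict reported but no row found: surface the original error
  }
}
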
@@ -7202,7 +7177,7 @@ var ExecutionHandler = class {
7202
7177
  const maxTransfers = graphConfig?.stopWhen?.transferCountIs ?? 10;
7203
7178
  while (iterations < maxTransfers) {
7204
7179
  iterations++;
7205
- logger20.info(
7180
+ logger19.info(
7206
7181
  { iterations, currentAgentId, graphId, conversationId, fromAgentId },
7207
7182
  `Execution loop iteration ${iterations} with agent ${currentAgentId}, transfer from: ${fromAgentId || "none"}`
7208
7183
  );
@@ -7210,10 +7185,10 @@ var ExecutionHandler = class {
7210
7185
  scopes: { tenantId, projectId },
7211
7186
  conversationId
7212
7187
  });
7213
- logger20.info({ activeAgent }, "activeAgent");
7188
+ logger19.info({ activeAgent }, "activeAgent");
7214
7189
  if (activeAgent && activeAgent.activeAgentId !== currentAgentId) {
7215
7190
  currentAgentId = activeAgent.activeAgentId;
7216
- logger20.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
7191
+ logger19.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
7217
7192
  }
7218
7193
  const agentBaseUrl = `${baseUrl}/agents`;
7219
7194
  const a2aClient = new A2AClient(agentBaseUrl, {
@@ -7254,13 +7229,13 @@ var ExecutionHandler = class {
7254
7229
  });
7255
7230
  if (!messageResponse?.result) {
7256
7231
  errorCount++;
7257
- logger20.error(
7232
+ logger19.error(
7258
7233
  { currentAgentId, iterations, errorCount },
7259
7234
  `No response from agent ${currentAgentId} on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
7260
7235
  );
7261
7236
  if (errorCount >= this.MAX_ERRORS) {
7262
7237
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7263
- logger20.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7238
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7264
7239
  await sseHelper.writeError(errorMessage2);
7265
7240
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7266
7241
  if (task) {
@@ -7286,7 +7261,7 @@ var ExecutionHandler = class {
7286
7261
  const transferResponse = messageResponse.result;
7287
7262
  const targetAgentId = transferResponse.artifacts?.[0]?.parts?.[0]?.data?.targetAgentId;
7288
7263
  const transferReason = transferResponse.artifacts?.[0]?.parts?.[1]?.text;
7289
- logger20.info({ targetAgentId, transferReason }, "transfer response");
7264
+ logger19.info({ targetAgentId, transferReason }, "transfer response");
7290
7265
  currentMessage = `<transfer_context> ${transferReason} </transfer_context>`;
7291
7266
  const { success, targetAgentId: newAgentId } = await executeTransfer({
7292
7267
  projectId,
@@ -7297,7 +7272,7 @@ var ExecutionHandler = class {
7297
7272
  if (success) {
7298
7273
  fromAgentId = currentAgentId;
7299
7274
  currentAgentId = newAgentId;
7300
- logger20.info(
7275
+ logger19.info(
7301
7276
  {
7302
7277
  transferFrom: fromAgentId,
7303
7278
  transferTo: currentAgentId,
@@ -7315,7 +7290,7 @@ var ExecutionHandler = class {
7315
7290
  const graphSessionData = graphSessionManager.getSession(requestId2);
7316
7291
  if (graphSessionData) {
7317
7292
  const sessionSummary = graphSessionData.getSummary();
7318
- logger20.info(sessionSummary, "GraphSession data after completion");
7293
+ logger19.info(sessionSummary, "GraphSession data after completion");
7319
7294
  }
7320
7295
  let textContent = "";
7321
7296
  for (const part of responseParts) {
@@ -7370,32 +7345,32 @@ var ExecutionHandler = class {
7370
7345
  }
7371
7346
  });
7372
7347
  const updateTaskEnd = Date.now();
7373
- logger20.info(
7348
+ logger19.info(
7374
7349
  { duration: updateTaskEnd - updateTaskStart },
7375
7350
  "Completed updateTask operation"
7376
7351
  );
7377
7352
  await sseHelper.writeOperation(completionOp(currentAgentId, iterations));
7378
7353
  await sseHelper.complete();
7379
- logger20.info("Ending GraphSession and cleaning up");
7354
+ logger19.info("Ending GraphSession and cleaning up");
7380
7355
  graphSessionManager.endSession(requestId2);
7381
- logger20.info("Cleaning up streamHelper");
7356
+ logger19.info("Cleaning up streamHelper");
7382
7357
  unregisterStreamHelper(requestId2);
7383
7358
  let response;
7384
7359
  if (sseHelper instanceof MCPStreamHelper) {
7385
7360
  const captured = sseHelper.getCapturedResponse();
7386
7361
  response = captured.text || "No response content";
7387
7362
  }
7388
- logger20.info("ExecutionHandler returning success");
7363
+ logger19.info("ExecutionHandler returning success");
7389
7364
  return { success: true, iterations, response };
7390
7365
  }
7391
7366
  errorCount++;
7392
- logger20.warn(
7367
+ logger19.warn(
7393
7368
  { iterations, errorCount },
7394
7369
  `No valid response or transfer on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
7395
7370
  );
7396
7371
  if (errorCount >= this.MAX_ERRORS) {
7397
7372
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7398
- logger20.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7373
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7399
7374
  await sseHelper.writeError(errorMessage2);
7400
7375
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7401
7376
  if (task) {
@@ -7417,7 +7392,7 @@ var ExecutionHandler = class {
7417
7392
  }
7418
7393
  }
7419
7394
  const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
7420
- logger20.error({ maxTransfers, iterations }, errorMessage);
7395
+ logger19.error({ maxTransfers, iterations }, errorMessage);
7421
7396
  await sseHelper.writeError(errorMessage);
7422
7397
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
7423
7398
  if (task) {
@@ -7437,7 +7412,7 @@ var ExecutionHandler = class {
7437
7412
  unregisterStreamHelper(requestId2);
7438
7413
  return { success: false, error: errorMessage, iterations };
7439
7414
  } catch (error) {
7440
- logger20.error({ error }, "Error in execution handler");
7415
+ logger19.error({ error }, "Error in execution handler");
7441
7416
  const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
7442
7417
  await sseHelper.writeError(`Execution error: ${errorMessage}`);
7443
7418
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
@@ -7463,7 +7438,7 @@ var ExecutionHandler = class {
7463
7438
 
7464
7439
  // src/routes/chat.ts
7465
7440
  var app2 = new OpenAPIHono();
7466
- var logger21 = getLogger("completionsHandler");
7441
+ var logger20 = getLogger("completionsHandler");
7467
7442
  var chatCompletionsRoute = createRoute({
7468
7443
  method: "post",
7469
7444
  path: "/completions",
@@ -7581,7 +7556,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7581
7556
  tracestate: c.req.header("tracestate"),
7582
7557
  baggage: c.req.header("baggage")
7583
7558
  };
7584
- logger21.info(
7559
+ logger20.info(
7585
7560
  {
7586
7561
  otelHeaders,
7587
7562
  path: c.req.path,
@@ -7667,7 +7642,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7667
7642
  dbClient_default,
7668
7643
  credentialStores
7669
7644
  );
7670
- logger21.info(
7645
+ logger20.info(
7671
7646
  {
7672
7647
  tenantId,
7673
7648
  graphId,
@@ -7713,7 +7688,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7713
7688
  return streamSSE(c, async (stream2) => {
7714
7689
  const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
7715
7690
  await sseHelper.writeRole();
7716
- logger21.info({ agentId }, "Starting execution");
7691
+ logger20.info({ agentId }, "Starting execution");
7717
7692
  const executionHandler = new ExecutionHandler();
7718
7693
  const result = await executionHandler.execute({
7719
7694
  executionContext,
@@ -7723,7 +7698,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
7723
7698
  requestId: requestId2,
7724
7699
  sseHelper
7725
7700
  });
7726
- logger21.info(
7701
+ logger20.info(
7727
7702
  { result },
7728
7703
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
7729
7704
  );
@@ -7756,7 +7731,7 @@ var getMessageText = (content) => {
7756
7731
  };
7757
7732
  var chat_default = app2;
7758
7733
  var app3 = new OpenAPIHono();
7759
- var logger22 = getLogger("chatDataStream");
7734
+ var logger21 = getLogger("chatDataStream");
7760
7735
  var chatDataStreamRoute = createRoute({
7761
7736
  method: "post",
7762
7737
  path: "/chat",
@@ -7861,7 +7836,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
7861
7836
  );
7862
7837
  const lastUserMessage = body.messages.filter((m) => m.role === "user").slice(-1)[0];
7863
7838
  const userText = typeof lastUserMessage?.content === "string" ? lastUserMessage.content : lastUserMessage?.parts?.map((p) => p.text).join("") || "";
7864
- logger22.info({ userText, lastUserMessage }, "userText");
7839
+ logger21.info({ userText, lastUserMessage }, "userText");
7865
7840
  const messageSpan = trace.getActiveSpan();
7866
7841
  if (messageSpan) {
7867
7842
  messageSpan.setAttributes({
@@ -7903,7 +7878,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
7903
7878
  await streamHelper.writeError("Unable to process request");
7904
7879
  }
7905
7880
  } catch (err) {
7906
- logger22.error({ err }, "Streaming error");
7881
+ logger21.error({ err }, "Streaming error");
7907
7882
  await streamHelper.writeError("Internal server error");
7908
7883
  } finally {
7909
7884
  if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
@@ -7924,7 +7899,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
7924
7899
  )
7925
7900
  );
7926
7901
  } catch (error) {
7927
- logger22.error({ error }, "chatDataStream error");
7902
+ logger21.error({ error }, "chatDataStream error");
7928
7903
  return c.json({ error: "Failed to process chat completion" }, 500);
7929
7904
  }
7930
7905
  });
@@ -7932,7 +7907,7 @@ var chatDataStream_default = app3;
7932
7907
  function createMCPSchema(schema) {
7933
7908
  return schema;
7934
7909
  }
7935
- var logger23 = getLogger("mcp");
7910
+ var logger22 = getLogger("mcp");
7936
7911
  var _MockResponseSingleton = class _MockResponseSingleton {
7937
7912
  constructor() {
7938
7913
  __publicField(this, "mockRes");
@@ -7987,21 +7962,21 @@ var createSpoofInitMessage = (mcpProtocolVersion) => ({
7987
7962
  id: 0
7988
7963
  });
7989
7964
  var spoofTransportInitialization = async (transport, req, sessionId, mcpProtocolVersion) => {
7990
- logger23.info({ sessionId }, "Spoofing initialization message to set transport state");
7965
+ logger22.info({ sessionId }, "Spoofing initialization message to set transport state");
7991
7966
  const spoofInitMessage = createSpoofInitMessage(mcpProtocolVersion);
7992
7967
  const mockRes = MockResponseSingleton.getInstance().getMockResponse();
7993
7968
  try {
7994
7969
  await transport.handleRequest(req, mockRes, spoofInitMessage);
7995
- logger23.info({ sessionId }, "Successfully spoofed initialization");
7970
+ logger22.info({ sessionId }, "Successfully spoofed initialization");
7996
7971
  } catch (spoofError) {
7997
- logger23.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
7972
+ logger22.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
7998
7973
  }
7999
7974
  };
8000
7975
  var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8001
7976
  const sessionId = req.headers["mcp-session-id"];
8002
- logger23.info({ sessionId }, "Received MCP session ID");
7977
+ logger22.info({ sessionId }, "Received MCP session ID");
8003
7978
  if (!sessionId) {
8004
- logger23.info({ body }, "Missing session ID");
7979
+ logger22.info({ body }, "Missing session ID");
8005
7980
  res.writeHead(400).end(
8006
7981
  JSON.stringify({
8007
7982
  jsonrpc: "2.0",
@@ -8027,7 +8002,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8027
8002
  scopes: { tenantId, projectId },
8028
8003
  conversationId: sessionId
8029
8004
  });
8030
- logger23.info(
8005
+ logger22.info(
8031
8006
  {
8032
8007
  sessionId,
8033
8008
  conversationFound: !!conversation,
@@ -8038,7 +8013,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
8038
8013
  "Conversation lookup result"
8039
8014
  );
8040
8015
  if (!conversation || conversation.metadata?.sessionData?.sessionType !== "mcp" || conversation.metadata?.sessionData?.graphId !== graphId) {
8041
- logger23.info(
8016
+ logger22.info(
8042
8017
  { sessionId, conversationId: conversation?.id },
8043
8018
  "MCP session not found or invalid"
8044
8019
  );
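
The validateSession hunks above accept an MCP request only when its mcp-session-id header resolves to a conversation whose metadata marks it as an MCP session for the same graph. A standalone sketch of that predicate, with hypothetical type and function names that mirror the check shown in the diff:

// Hypothetical types and name; mirrors the condition in the hunk above.
interface ConversationLike {
  metadata?: { sessionData?: { sessionType?: string; graphId?: string } };
}

function isValidMcpSession(
  conversation: ConversationLike | undefined,
  graphId: string,
): boolean {
  return (
    conversation !== undefined &&
    conversation.metadata?.sessionData?.sessionType === 'mcp' &&
    conversation.metadata?.sessionData?.graphId === graphId
  );
}
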
@@ -8099,7 +8074,7 @@ var executeAgentQuery = async (executionContext, conversationId, query, defaultA
8099
8074
  requestId: requestId2,
8100
8075
  sseHelper: mcpStreamHelper
8101
8076
  });
8102
- logger23.info(
8077
+ logger22.info(
8103
8078
  { result },
8104
8079
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
8105
8080
  );
@@ -8173,7 +8148,7 @@ var getServer = async (requestContext, executionContext, conversationId, credent
8173
8148
  dbClient_default,
8174
8149
  credentialStores
8175
8150
  );
8176
- logger23.info(
8151
+ logger22.info(
8177
8152
  {
8178
8153
  tenantId,
8179
8154
  graphId,
@@ -8234,7 +8209,7 @@ var validateRequestParameters = (c) => {
8234
8209
  };
8235
8210
  var handleInitializationRequest = async (body, executionContext, validatedContext, req, res, c, credentialStores) => {
8236
8211
  const { tenantId, projectId, graphId } = executionContext;
8237
- logger23.info({ body }, "Received initialization request");
8212
+ logger22.info({ body }, "Received initialization request");
8238
8213
  const sessionId = nanoid();
8239
8214
  const agentGraph = await getAgentGraphWithDefaultAgent(dbClient_default)({
8240
8215
  scopes: { tenantId, projectId },
@@ -8265,7 +8240,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8265
8240
  }
8266
8241
  }
8267
8242
  });
8268
- logger23.info(
8243
+ logger22.info(
8269
8244
  { sessionId, conversationId: conversation.id },
8270
8245
  "Created MCP session as conversation"
8271
8246
  );
@@ -8274,9 +8249,9 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8274
8249
  });
8275
8250
  const server = await getServer(validatedContext, executionContext, sessionId, credentialStores);
8276
8251
  await server.connect(transport);
8277
- logger23.info({ sessionId }, "Server connected for initialization");
8252
+ logger22.info({ sessionId }, "Server connected for initialization");
8278
8253
  res.setHeader("Mcp-Session-Id", sessionId);
8279
- logger23.info(
8254
+ logger22.info(
8280
8255
  {
8281
8256
  sessionId,
8282
8257
  bodyMethod: body?.method,
@@ -8285,7 +8260,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8285
8260
  "About to handle initialization request"
8286
8261
  );
8287
8262
  await transport.handleRequest(req, res, body);
8288
- logger23.info({ sessionId }, "Successfully handled initialization request");
8263
+ logger22.info({ sessionId }, "Successfully handled initialization request");
8289
8264
  return toFetchResponse(res);
8290
8265
  };
8291
8266
  var handleExistingSessionRequest = async (body, executionContext, validatedContext, req, res, credentialStores) => {
@@ -8313,8 +8288,8 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
8313
8288
  sessionId,
8314
8289
  conversation.metadata?.session_data?.mcpProtocolVersion
8315
8290
  );
8316
- logger23.info({ sessionId }, "Server connected and transport initialized");
8317
- logger23.info(
8291
+ logger22.info({ sessionId }, "Server connected and transport initialized");
8292
+ logger22.info(
8318
8293
  {
8319
8294
  sessionId,
8320
8295
  bodyKeys: Object.keys(body || {}),
@@ -8328,9 +8303,9 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
8328
8303
  );
8329
8304
  try {
8330
8305
  await transport.handleRequest(req, res, body);
8331
- logger23.info({ sessionId }, "Successfully handled MCP request");
8306
+ logger22.info({ sessionId }, "Successfully handled MCP request");
8332
8307
  } catch (transportError) {
8333
- logger23.error(
8308
+ logger22.error(
8334
8309
  {
8335
8310
  sessionId,
8336
8311
  error: transportError,
@@ -8381,13 +8356,13 @@ app4.openapi(
8381
8356
  }
8382
8357
  const { executionContext } = paramValidation;
8383
8358
  const body = await c.req.json();
8384
- logger23.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
8359
+ logger22.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
8385
8360
  const isInitRequest = body.method === "initialize";
8386
8361
  const { req, res } = toReqRes(c.req.raw);
8387
8362
  const validatedContext = c.get("validatedContext") || {};
8388
8363
  const credentialStores = c.get("credentialStores");
8389
- logger23.info({ validatedContext }, "Validated context");
8390
- logger23.info({ req }, "request");
8364
+ logger22.info({ validatedContext }, "Validated context");
8365
+ logger22.info({ req }, "request");
8391
8366
  if (isInitRequest) {
8392
8367
  return await handleInitializationRequest(
8393
8368
  body,
@@ -8409,7 +8384,7 @@ app4.openapi(
8409
8384
  );
8410
8385
  }
8411
8386
  } catch (e) {
8412
- logger23.error(
8387
+ logger22.error(
8413
8388
  {
8414
8389
  error: e instanceof Error ? e.message : e,
8415
8390
  stack: e instanceof Error ? e.stack : void 0
@@ -8421,7 +8396,7 @@ app4.openapi(
8421
8396
  }
8422
8397
  );
8423
8398
  app4.get("/", async (c) => {
8424
- logger23.info("Received GET MCP request");
8399
+ logger22.info("Received GET MCP request");
8425
8400
  return c.json(
8426
8401
  {
8427
8402
  jsonrpc: "2.0",
@@ -8435,7 +8410,7 @@ app4.get("/", async (c) => {
8435
8410
  );
8436
8411
  });
8437
8412
  app4.delete("/", async (c) => {
8438
- logger23.info("Received DELETE MCP request");
8413
+ logger22.info("Received DELETE MCP request");
8439
8414
  return c.json(
8440
8415
  {
8441
8416
  jsonrpc: "2.0",
@@ -8515,9 +8490,9 @@ function createExecutionHono(serverConfig, credentialStores) {
8515
8490
  if (!isExpectedError) {
8516
8491
  const errorMessage = err instanceof Error ? err.message : String(err);
8517
8492
  const errorStack = err instanceof Error ? err.stack : void 0;
8518
- const logger24 = getLogger();
8519
- if (logger24) {
8520
- logger24.error(
8493
+ const logger23 = getLogger();
8494
+ if (logger23) {
8495
+ logger23.error(
8521
8496
  {
8522
8497
  error: err,
8523
8498
  message: errorMessage,
@@ -8529,9 +8504,9 @@ function createExecutionHono(serverConfig, credentialStores) {
8529
8504
  );
8530
8505
  }
8531
8506
  } else {
8532
- const logger24 = getLogger();
8533
- if (logger24) {
8534
- logger24.error(
8507
+ const logger23 = getLogger();
8508
+ if (logger23) {
8509
+ logger23.error(
8535
8510
  {
8536
8511
  error: err,
8537
8512
  path: c.req.path,
@@ -8548,9 +8523,9 @@ function createExecutionHono(serverConfig, credentialStores) {
8548
8523
  const response = err.getResponse();
8549
8524
  return response;
8550
8525
  } catch (responseError) {
8551
- const logger24 = getLogger();
8552
- if (logger24) {
8553
- logger24.error({ error: responseError }, "Error while handling HTTPException response");
8526
+ const logger23 = getLogger();
8527
+ if (logger23) {
8528
+ logger23.error({ error: responseError }, "Error while handling HTTPException response");
8554
8529
  }
8555
8530
  }
8556
8531
  }