@inkeep/agents-run-api 0.0.0-dev-20250911192304 → 0.0.0-dev-20250911210702

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,6 +1,10 @@
- import { env, dbClient_default, getFormattedConversationHistory, createDefaultConversationHistoryConfig, saveA2AMessageResponse } from './chunk-JIWNRFDU.js';
- import { __publicField } from './chunk-PKBMQBKP.js';
- import { getLogger as getLogger$1, HeadersScopeSchema, getRequestExecutionContext, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, getAgentGraph, createTask, updateTask, updateConversation, handleApiError, TaskState, setActiveAgentForThread, getConversation, getRelatedAgentsForGraph, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, validateAndGetApiKey, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getContextConfigById, getFullGraphDefinition, TemplateEngine, graphHasArtifactComponents, MCPTransportType, getExternalAgent } from '@inkeep/agents-core';
+ import { env, __publicField, dbClient_default, getFormattedConversationHistory, createDefaultConversationHistoryConfig, saveA2AMessageResponse } from './chunk-2MQ324HB.js';
+ import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
+ import { BaggageSpanProcessor, ALLOW_ALL_BAGGAGE_KEYS } from '@opentelemetry/baggage-span-processor';
+ import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';
+ import { NodeSDK } from '@opentelemetry/sdk-node';
+ import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-node';
+ import { getLogger as getLogger$1, getTracer, HeadersScopeSchema, getRequestExecutionContext, getAgentGraphWithDefaultAgent, contextValidationMiddleware, getFullGraph, createOrGetConversation, getActiveAgentForConversation, setActiveAgentForConversation, getAgentById, handleContextResolution, createMessage, commonGetErrorResponses, createDefaultCredentialStores, CredentialStoreRegistry, listTaskIdsByContextId, getTask, getLedgerArtifacts, getAgentGraph, createTask, updateTask, updateConversation, handleApiError, setSpanWithError, TaskState, setActiveAgentForThread, getConversation, getRelatedAgentsForGraph, getToolsForAgent, getDataComponentsForAgent, getArtifactComponentsForAgent, validateAndGetApiKey, ContextResolver, CredentialStuffer, MCPServerType, getCredentialReference, McpClient, getContextConfigById, getFullGraphDefinition, TemplateEngine, graphHasArtifactComponents, MCPTransportType, getExternalAgent } from '@inkeep/agents-core';
  import { Hono } from 'hono';
  import { OpenAPIHono, createRoute, z as z$1 } from '@hono/zod-openapi';
  import { trace, propagation, context, SpanStatusCode } from '@opentelemetry/api';
@@ -29,6 +33,61 @@ import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/
  import { z as z$2 } from 'zod/v3';
  import { toReqRes, toFetchResponse } from 'fetch-to-node';

+ var otlpUrl = env.OTEL_EXPORTER_OTLP_ENDPOINT;
+ var otlpExporter = new OTLPTraceExporter({ url: otlpUrl });
+ var FanOutSpanProcessor = class {
+ constructor(inner) {
+ this.inner = inner;
+ }
+ onStart(span, parent) {
+ this.inner.forEach((p) => p.onStart(span, parent));
+ }
+ onEnd(span) {
+ this.inner.forEach((p) => p.onEnd(span));
+ }
+ forceFlush() {
+ return Promise.all(this.inner.map((p) => p.forceFlush?.())).then(() => {
+ });
+ }
+ shutdown() {
+ return Promise.all(this.inner.map((p) => p.shutdown?.())).then(() => {
+ });
+ }
+ };
+ var maxExportBatchSize = env.OTEL_MAX_EXPORT_BATCH_SIZE ?? (env.ENVIRONMENT === "development" ? 1 : 512);
+ var spanProcessor = new FanOutSpanProcessor([
+ new BaggageSpanProcessor(ALLOW_ALL_BAGGAGE_KEYS),
+ new BatchSpanProcessor(otlpExporter, {
+ maxExportBatchSize
+ })
+ ]);
+ var sdk = new NodeSDK({
+ serviceName: "inkeep-agents-run-api",
+ spanProcessor,
+ instrumentations: [
+ getNodeAutoInstrumentations({
+ "@opentelemetry/instrumentation-http": {
+ enabled: true,
+ requestHook: (span, request) => {
+ const url = request?.url ?? request?.path;
+ if (!url) return;
+ const u = new URL(url, "http://localhost");
+ span.updateName(`${request?.method || "UNKNOWN"} ${u.pathname}`);
+ }
+ },
+ "@opentelemetry/instrumentation-undici": {
+ requestHook: (span) => {
+ const method = span.attributes?.["http.request.method"];
+ const host = span.attributes?.["server.address"];
+ const path = span.attributes?.["url.path"];
+ if (method && path)
+ span.updateName(host ? `${method} ${host}${path}` : `${method} ${path}`);
+ }
+ }
+ })
+ ]
+ });
+ sdk.start();
  var isDevelopment = env.ENVIRONMENT === "development";
  var loggerConfig = {
  level: env.LOG_LEVEL,
@@ -840,79 +899,11 @@ async function handleTasksResubscribe(c, agent, request) {
  });
  }
  }
- var FORCE_FLUSH_ENVIRONMENTS = ["development"];
- var logger4 = getLogger("tracer");
- var BASE = "inkeep-chat";
- var SERVICE_NAME = "inkeep-chat";
- var SERVICE_VERSION = "1.0.0";
- var createSpanName = (suffix) => `${BASE}.${suffix}`;
- var createNoOpSpan = () => ({
- setAttributes: () => ({}),
- recordException: () => ({}),
- setStatus: () => ({}),
- addEvent: () => ({}),
- end: () => {
- },
- isRecording: () => false,
- setAttribute: () => ({}),
- updateName: () => ({}),
- spanContext: () => ({
- traceId: "00000000000000000000000000000000",
- spanId: "0000000000000000",
- traceFlags: 0
- }),
- addLink: () => ({}),
- addLinks: () => ({})
- });
- var noopTracer = {
- startActiveSpan(_name, arg1, arg2, arg3) {
- const fn = typeof arg1 === "function" ? arg1 : typeof arg2 === "function" ? arg2 : arg3;
- if (!fn) throw new Error("No callback function provided");
- return fn(createNoOpSpan());
- },
- startSpan(_name, _options) {
- return createNoOpSpan();
- }
- };
- var globalTracerInstance = null;
- function handleSpanError(span, error, logger24, logMessage) {
- const errorMessage = error instanceof Error ? error.message : String(error);
- span.recordException(error);
- span.setStatus({
- code: SpanStatusCode.ERROR,
- message: errorMessage
- });
- }
- function getGlobalTracer() {
- if (!globalTracerInstance) {
- try {
- globalTracerInstance = trace.getTracer(SERVICE_NAME, SERVICE_VERSION);
- } catch (_error) {
- logger4.debug("OpenTelemetry tracer not available, using no-op tracer");
- globalTracerInstance = noopTracer;
- }
- }
- return globalTracerInstance;
- }
- async function forceFlushTracer() {
- const isOtelTracesForceFlushEnabled = env.OTEL_TRACES_FORCE_FLUSH_ENABLED;
- const isForceFlushEnvironment = env.ENVIRONMENT && FORCE_FLUSH_ENVIRONMENTS.includes(env.ENVIRONMENT);
- const shouldForceFlush = isOtelTracesForceFlushEnabled === true || isOtelTracesForceFlushEnabled == null && isForceFlushEnvironment;
- if (!shouldForceFlush) {
- return;
- }
- try {
- const { spanProcessor } = await import('./instrumentation-KKYHA3A3.js');
- if (spanProcessor && typeof spanProcessor.forceFlush === "function") {
- await spanProcessor.forceFlush();
- logger4.debug("Span processor force flush completed");
- } else {
- logger4.debug("Span processor does not support force flush or is not available");
- }
- } catch (error) {
- logger4.warn({ error }, "Failed to force flush tracer");
- }
- }
+
+ // package.json
+ var package_default = {
+ version: "0.1.3"};
+ var tracer = getTracer("agents-run-api", package_default.version);
  function agentInitializingOp(sessionId, graphId) {
  return {
  type: "agent_initializing",
@@ -949,10 +940,10 @@ function statusUpdateOp(ctx) {
949
940
  ctx
950
941
  };
951
942
  }
952
- var logger5 = getLogger("DataComponentSchema");
943
+ var logger4 = getLogger("DataComponentSchema");
953
944
  function jsonSchemaToZod(jsonSchema) {
954
945
  if (!jsonSchema || typeof jsonSchema !== "object") {
955
- logger5.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
946
+ logger4.warn({ jsonSchema }, "Invalid JSON schema provided, using string fallback");
956
947
  return z.string();
957
948
  }
958
949
  switch (jsonSchema.type) {
@@ -979,7 +970,7 @@ function jsonSchemaToZod(jsonSchema) {
979
970
  case "null":
980
971
  return z.null();
981
972
  default:
982
- logger5.warn(
973
+ logger4.warn(
983
974
  {
984
975
  unsupportedType: jsonSchema.type,
985
976
  schema: jsonSchema
@@ -1033,7 +1024,7 @@ __publicField(_ArtifactReferenceSchema, "ARTIFACT_PROPS_SCHEMA", {
1033
1024
  required: ["artifact_id", "task_id"]
1034
1025
  });
1035
1026
  var ArtifactReferenceSchema = _ArtifactReferenceSchema;
1036
- var logger6 = getLogger("ModelFactory");
1027
+ var logger5 = getLogger("ModelFactory");
1037
1028
  var _ModelFactory = class _ModelFactory {
1038
1029
  /**
1039
1030
  * Create a language model instance from configuration
@@ -1046,7 +1037,7 @@ var _ModelFactory = class _ModelFactory {
1046
1037
  const modelSettings = config;
1047
1038
  const modelString = modelSettings.model.trim();
1048
1039
  const { provider, modelName } = _ModelFactory.parseModelString(modelString);
1049
- logger6.debug(
1040
+ logger5.debug(
1050
1041
  {
1051
1042
  provider,
1052
1043
  model: modelName,
@@ -1065,7 +1056,7 @@ var _ModelFactory = class _ModelFactory {
1065
1056
  throw new Error(`Unsupported provider: ${provider}. Supported providers are: ${_ModelFactory.SUPPORTED_PROVIDERS.join(", ")}`);
1066
1057
  }
1067
1058
  } catch (error) {
1068
- logger6.error(
1059
+ logger5.error(
1069
1060
  {
1070
1061
  provider,
1071
1062
  model: modelName,
@@ -1086,7 +1077,7 @@ var _ModelFactory = class _ModelFactory {
1086
1077
  const [provider, ...modelParts] = modelString.split("/");
1087
1078
  const normalizedProvider = provider.toLowerCase();
1088
1079
  if (!_ModelFactory.SUPPORTED_PROVIDERS.includes(normalizedProvider)) {
1089
- logger6.warn(
1080
+ logger5.warn(
1090
1081
  { provider: normalizedProvider, modelName: modelParts.join("/") },
1091
1082
  "Unsupported provider detected, falling back to anthropic"
1092
1083
  );
@@ -1115,14 +1106,14 @@ var _ModelFactory = class _ModelFactory {
1115
1106
  anthropicConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
1116
1107
  }
1117
1108
  if (providerOptions?.gateway) {
1118
- logger6.info(
1109
+ logger5.info(
1119
1110
  { gateway: providerOptions.gateway },
1120
1111
  "Setting up AI Gateway for Anthropic model"
1121
1112
  );
1122
1113
  Object.assign(anthropicConfig, providerOptions.gateway);
1123
1114
  }
1124
1115
  if (Object.keys(anthropicConfig).length > 0) {
1125
- logger6.info({ config: anthropicConfig }, "Applying custom Anthropic provider configuration");
1116
+ logger5.info({ config: anthropicConfig }, "Applying custom Anthropic provider configuration");
1126
1117
  const provider = createAnthropic(anthropicConfig);
1127
1118
  return provider(modelName);
1128
1119
  }
@@ -1137,11 +1128,11 @@ var _ModelFactory = class _ModelFactory {
1137
1128
  openaiConfig.baseURL = providerOptions.baseUrl || providerOptions.baseURL;
1138
1129
  }
1139
1130
  if (providerOptions?.gateway) {
1140
- logger6.info({ gateway: providerOptions.gateway }, "Setting up AI Gateway for OpenAI model");
1131
+ logger5.info({ gateway: providerOptions.gateway }, "Setting up AI Gateway for OpenAI model");
1141
1132
  Object.assign(openaiConfig, providerOptions.gateway);
1142
1133
  }
1143
1134
  if (Object.keys(openaiConfig).length > 0) {
1144
- logger6.info({ config: openaiConfig }, "Applying custom OpenAI provider configuration");
1135
+ logger5.info({ config: openaiConfig }, "Applying custom OpenAI provider configuration");
1145
1136
  const provider = createOpenAI(openaiConfig);
1146
1137
  return provider(modelName);
1147
1138
  }
@@ -1230,8 +1221,7 @@ function unregisterStreamHelper(requestId2) {
1230
1221
  }
1231
1222
 
1232
1223
  // src/utils/graph-session.ts
1233
- var logger7 = getLogger("GraphSession");
1234
- var tracer = getGlobalTracer();
1224
+ var logger6 = getLogger("GraphSession");
1235
1225
  var GraphSession = class {
1236
1226
  // Track scheduled timeouts for cleanup
1237
1227
  constructor(sessionId, messageId, graphId, tenantId, projectId) {
@@ -1255,7 +1245,7 @@ var GraphSession = class {
1255
1245
  __publicField(this, "MAX_PENDING_ARTIFACTS", 100);
1256
1246
  // Prevent unbounded growth
1257
1247
  __publicField(this, "scheduledTimeouts");
1258
- logger7.debug({ sessionId, messageId, graphId }, "GraphSession created");
1248
+ logger6.debug({ sessionId, messageId, graphId }, "GraphSession created");
1259
1249
  }
1260
1250
  /**
1261
1251
  * Initialize status updates for this session
@@ -1277,7 +1267,7 @@ var GraphSession = class {
1277
1267
  if (this.statusUpdateState.config.timeInSeconds) {
1278
1268
  this.statusUpdateTimer = setInterval(async () => {
1279
1269
  if (!this.statusUpdateState || this.isEnded) {
1280
- logger7.debug(
1270
+ logger6.debug(
1281
1271
  { sessionId: this.sessionId },
1282
1272
  "Timer triggered but session already cleaned up or ended"
1283
1273
  );
@@ -1289,7 +1279,7 @@ var GraphSession = class {
1289
1279
  }
1290
1280
  await this.checkAndSendTimeBasedUpdate();
1291
1281
  }, this.statusUpdateState.config.timeInSeconds * 1e3);
1292
- logger7.info(
1282
+ logger6.info(
1293
1283
  {
1294
1284
  sessionId: this.sessionId,
1295
1285
  intervalMs: this.statusUpdateState.config.timeInSeconds * 1e3
@@ -1303,7 +1293,7 @@ var GraphSession = class {
1303
1293
  */
1304
1294
  recordEvent(eventType, agentId, data) {
1305
1295
  if (this.isEnded) {
1306
- logger7.debug(
1296
+ logger6.debug(
1307
1297
  {
1308
1298
  sessionId: this.sessionId,
1309
1299
  eventType,
@@ -1323,7 +1313,7 @@ var GraphSession = class {
1323
1313
  if (eventType === "artifact_saved" && data.pendingGeneration) {
1324
1314
  const artifactId = data.artifactId;
1325
1315
  if (this.pendingArtifacts.size >= this.MAX_PENDING_ARTIFACTS) {
1326
- logger7.warn({
1316
+ logger6.warn({
1327
1317
  sessionId: this.sessionId,
1328
1318
  artifactId,
1329
1319
  pendingCount: this.pendingArtifacts.size,
@@ -1341,7 +1331,7 @@ var GraphSession = class {
1341
1331
  this.artifactProcessingErrors.set(artifactId, errorCount);
1342
1332
  if (errorCount >= this.MAX_ARTIFACT_RETRIES) {
1343
1333
  this.pendingArtifacts.delete(artifactId);
1344
- logger7.error({
1334
+ logger6.error({
1345
1335
  sessionId: this.sessionId,
1346
1336
  artifactId,
1347
1337
  errorCount,
@@ -1350,7 +1340,7 @@ var GraphSession = class {
1350
1340
  stack: error instanceof Error ? error.stack : void 0
1351
1341
  }, "Artifact processing failed after max retries, giving up");
1352
1342
  } else {
1353
- logger7.warn({
1343
+ logger6.warn({
1354
1344
  sessionId: this.sessionId,
1355
1345
  artifactId,
1356
1346
  errorCount,
@@ -1369,14 +1359,14 @@ var GraphSession = class {
1369
1359
  */
1370
1360
  checkStatusUpdates() {
1371
1361
  if (this.isEnded) {
1372
- logger7.debug(
1362
+ logger6.debug(
1373
1363
  { sessionId: this.sessionId },
1374
1364
  "Session has ended - skipping status update check"
1375
1365
  );
1376
1366
  return;
1377
1367
  }
1378
1368
  if (!this.statusUpdateState) {
1379
- logger7.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
1369
+ logger6.debug({ sessionId: this.sessionId }, "No status update state - skipping check");
1380
1370
  return;
1381
1371
  }
1382
1372
  const statusUpdateState = this.statusUpdateState;
@@ -1387,11 +1377,11 @@ var GraphSession = class {
1387
1377
  */
1388
1378
  async checkAndSendTimeBasedUpdate() {
1389
1379
  if (this.isEnded) {
1390
- logger7.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
1380
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - skipping time-based update");
1391
1381
  return;
1392
1382
  }
1393
1383
  if (!this.statusUpdateState) {
1394
- logger7.debug(
1384
+ logger6.debug(
1395
1385
  { sessionId: this.sessionId },
1396
1386
  "No status updates configured for time-based check"
1397
1387
  );
@@ -1404,7 +1394,7 @@ var GraphSession = class {
1404
1394
  try {
1405
1395
  await this.generateAndSendUpdate();
1406
1396
  } catch (error) {
1407
- logger7.error(
1397
+ logger6.error(
1408
1398
  {
1409
1399
  sessionId: this.sessionId,
1410
1400
  error: error instanceof Error ? error.message : "Unknown error"
@@ -1497,29 +1487,29 @@ var GraphSession = class {
1497
1487
  */
1498
1488
  async generateAndSendUpdate() {
1499
1489
  if (this.isEnded) {
1500
- logger7.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
1490
+ logger6.debug({ sessionId: this.sessionId }, "Session has ended - not generating update");
1501
1491
  return;
1502
1492
  }
1503
1493
  if (this.isTextStreaming) {
1504
- logger7.debug(
1494
+ logger6.debug(
1505
1495
  { sessionId: this.sessionId },
1506
1496
  "Text is currently streaming - skipping status update"
1507
1497
  );
1508
1498
  return;
1509
1499
  }
1510
1500
  if (this.isGeneratingUpdate) {
1511
- logger7.debug(
1501
+ logger6.debug(
1512
1502
  { sessionId: this.sessionId },
1513
1503
  "Update already in progress - skipping duplicate generation"
1514
1504
  );
1515
1505
  return;
1516
1506
  }
1517
1507
  if (!this.statusUpdateState) {
1518
- logger7.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
1508
+ logger6.warn({ sessionId: this.sessionId }, "No status update state - cannot generate update");
1519
1509
  return;
1520
1510
  }
1521
1511
  if (!this.graphId) {
1522
- logger7.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
1512
+ logger6.warn({ sessionId: this.sessionId }, "No graph ID - cannot generate update");
1523
1513
  return;
1524
1514
  }
1525
1515
  const newEventCount = this.events.length - this.statusUpdateState.lastEventCount;
@@ -1532,7 +1522,7 @@ var GraphSession = class {
1532
1522
  try {
1533
1523
  const streamHelper = getStreamHelper(this.sessionId);
1534
1524
  if (!streamHelper) {
1535
- logger7.warn(
1525
+ logger6.warn(
1536
1526
  { sessionId: this.sessionId },
1537
1527
  "No stream helper found - cannot send status update"
1538
1528
  );
@@ -1553,7 +1543,7 @@ var GraphSession = class {
1553
1543
  if (result.operations && result.operations.length > 0) {
1554
1544
  for (const op of result.operations) {
1555
1545
  if (!op || !op.type || !op.data || Object.keys(op.data).length === 0) {
1556
- logger7.warn(
1546
+ logger6.warn(
1557
1547
  {
1558
1548
  sessionId: this.sessionId,
1559
1549
  operation: op
@@ -1606,7 +1596,7 @@ var GraphSession = class {
1606
1596
  this.previousSummaries.shift();
1607
1597
  }
1608
1598
  if (!operation || !operation.type || !operation.ctx) {
1609
- logger7.warn(
1599
+ logger6.warn(
1610
1600
  {
1611
1601
  sessionId: this.sessionId,
1612
1602
  operation
@@ -1621,7 +1611,7 @@ var GraphSession = class {
1621
1611
  this.statusUpdateState.lastEventCount = this.events.length;
1622
1612
  }
1623
1613
  } catch (error) {
1624
- logger7.error(
1614
+ logger6.error(
1625
1615
  {
1626
1616
  sessionId: this.sessionId,
1627
1617
  error: error instanceof Error ? error.message : "Unknown error",
@@ -1659,7 +1649,7 @@ var GraphSession = class {
1659
1649
  this.releaseUpdateLock();
1660
1650
  }
1661
1651
  } catch (error) {
1662
- logger7.error(
1652
+ logger6.error(
1663
1653
  {
1664
1654
  sessionId: this.sessionId,
1665
1655
  error: error instanceof Error ? error.message : "Unknown error"
@@ -1704,7 +1694,7 @@ var GraphSession = class {
1704
1694
  */
1705
1695
  async generateProgressSummary(newEvents, elapsedTime, summarizerModel, previousSummaries = []) {
1706
1696
  return tracer.startActiveSpan(
1707
- createSpanName("graph_session.generate_progress_summary"),
1697
+ "graph_session.generate_progress_summary",
1708
1698
  {
1709
1699
  attributes: {
1710
1700
  "graph_session.id": this.sessionId,
@@ -1736,7 +1726,7 @@ User's Question/Context:
1736
1726
  ${conversationHistory}
1737
1727
  ` : "";
1738
1728
  } catch (error) {
1739
- logger7.warn(
1729
+ logger6.warn(
1740
1730
  { sessionId: this.sessionId, error },
1741
1731
  "Failed to fetch conversation history for status update"
1742
1732
  );
@@ -1785,8 +1775,8 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
1785
1775
  span.setStatus({ code: SpanStatusCode.OK });
1786
1776
  return text.trim();
1787
1777
  } catch (error) {
1788
- handleSpanError(span, error);
1789
- logger7.error({ error }, "Failed to generate summary, using fallback");
1778
+ setSpanWithError(span, error);
1779
+ logger6.error({ error }, "Failed to generate summary, using fallback");
1790
1780
  return this.generateFallbackSummary(newEvents, elapsedTime);
1791
1781
  } finally {
1792
1782
  span.end();
@@ -1799,7 +1789,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
1799
1789
  */
1800
1790
  async generateStructuredStatusUpdate(newEvents, elapsedTime, statusComponents, summarizerModel, previousSummaries = []) {
1801
1791
  return tracer.startActiveSpan(
1802
- createSpanName("graph_session.generate_structured_update"),
1792
+ "graph_session.generate_structured_update",
1803
1793
  {
1804
1794
  attributes: {
1805
1795
  "graph_session.id": this.sessionId,
@@ -1832,7 +1822,7 @@ User's Question/Context:
1832
1822
  ${conversationHistory}
1833
1823
  ` : "";
1834
1824
  } catch (error) {
1835
- logger7.warn(
1825
+ logger6.warn(
1836
1826
  { sessionId: this.sessionId, error },
1837
1827
  "Failed to fetch conversation history for structured status update"
1838
1828
  );
@@ -1928,8 +1918,8 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
1928
1918
  span.setStatus({ code: SpanStatusCode.OK });
1929
1919
  return { operations };
1930
1920
  } catch (error) {
1931
- handleSpanError(span, error);
1932
- logger7.error({ error }, "Failed to generate structured update, using fallback");
1921
+ setSpanWithError(span, error);
1922
+ logger6.error({ error }, "Failed to generate structured update, using fallback");
1933
1923
  return { operations: [] };
1934
1924
  } finally {
1935
1925
  span.end();
@@ -2114,7 +2104,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2114
2104
  */
2115
2105
  async processArtifact(artifactData) {
2116
2106
  return tracer.startActiveSpan(
2117
- createSpanName("graph_session.process_artifact"),
2107
+ "graph_session.process_artifact",
2118
2108
  {
2119
2109
  attributes: {
2120
2110
  "graph_session.id": this.sessionId,
@@ -2145,7 +2135,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2145
2135
  );
2146
2136
  }
2147
2137
  span.setAttributes({ "validation.passed": true });
2148
- const { getFormattedConversationHistory: getFormattedConversationHistory2 } = await import('./conversations-YTJWHN67.js');
2138
+ const { getFormattedConversationHistory: getFormattedConversationHistory2 } = await import('./conversations-WZLXOMZH.js');
2149
2139
  const conversationHistory = await getFormattedConversationHistory2({
2150
2140
  tenantId: artifactData.tenantId,
2151
2141
  projectId: artifactData.projectId,
@@ -2187,7 +2177,7 @@ Make it specific and relevant.`;
2187
2177
  description: z.string().max(150).describe("Brief description of the artifact's relevance to the user's question")
2188
2178
  });
2189
2179
  const { object: result } = await tracer.startActiveSpan(
2190
- createSpanName("graph_session.generate_artifact_metadata"),
2180
+ "graph_session.generate_artifact_metadata",
2191
2181
  {
2192
2182
  attributes: {
2193
2183
  "llm.model": this.statusUpdateState?.summarizerModel?.model,
@@ -2220,7 +2210,7 @@ Make it specific and relevant.`;
2220
2210
  generationSpan.setStatus({ code: SpanStatusCode.OK });
2221
2211
  return result2;
2222
2212
  } catch (error) {
2223
- handleSpanError(generationSpan, error);
2213
+ setSpanWithError(generationSpan, error);
2224
2214
  throw error;
2225
2215
  } finally {
2226
2216
  generationSpan.end();
@@ -2254,7 +2244,7 @@ Make it specific and relevant.`;
2254
2244
  taskId: artifactData.taskId,
2255
2245
  artifacts: [artifactToSave]
2256
2246
  });
2257
- logger7.info(
2247
+ logger6.info(
2258
2248
  {
2259
2249
  sessionId: this.sessionId,
2260
2250
  artifactId: artifactData.artifactId,
@@ -2270,8 +2260,8 @@ Make it specific and relevant.`;
2270
2260
  });
2271
2261
  span.setStatus({ code: SpanStatusCode.OK });
2272
2262
  } catch (error) {
2273
- handleSpanError(span, error);
2274
- logger7.error(
2263
+ setSpanWithError(span, error);
2264
+ logger6.error(
2275
2265
  {
2276
2266
  sessionId: this.sessionId,
2277
2267
  artifactId: artifactData.artifactId,
@@ -2307,7 +2297,7 @@ Make it specific and relevant.`;
2307
2297
  taskId: artifactData.taskId,
2308
2298
  artifacts: [fallbackArtifact]
2309
2299
  });
2310
- logger7.info(
2300
+ logger6.info(
2311
2301
  {
2312
2302
  sessionId: this.sessionId,
2313
2303
  artifactId: artifactData.artifactId
@@ -2316,7 +2306,7 @@ Make it specific and relevant.`;
2316
2306
  );
2317
2307
  }
2318
2308
  } catch (fallbackError) {
2319
- logger7.error(
2309
+ logger6.error(
2320
2310
  {
2321
2311
  sessionId: this.sessionId,
2322
2312
  artifactId: artifactData.artifactId,
@@ -2343,7 +2333,7 @@ var GraphSessionManager = class {
2343
2333
  const sessionId = messageId;
2344
2334
  const session = new GraphSession(sessionId, messageId, graphId, tenantId, projectId);
2345
2335
  this.sessions.set(sessionId, session);
2346
- logger7.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
2336
+ logger6.info({ sessionId, messageId, graphId, tenantId, projectId }, "GraphSession created");
2347
2337
  return sessionId;
2348
2338
  }
2349
2339
  /**
@@ -2354,7 +2344,7 @@ var GraphSessionManager = class {
2354
2344
  if (session) {
2355
2345
  session.initializeStatusUpdates(config, summarizerModel);
2356
2346
  } else {
2357
- logger7.error(
2347
+ logger6.error(
2358
2348
  {
2359
2349
  sessionId,
2360
2350
  availableSessions: Array.from(this.sessions.keys())
@@ -2375,7 +2365,7 @@ var GraphSessionManager = class {
2375
2365
  recordEvent(sessionId, eventType, agentId, data) {
2376
2366
  const session = this.sessions.get(sessionId);
2377
2367
  if (!session) {
2378
- logger7.warn({ sessionId }, "Attempted to record event in non-existent session");
2368
+ logger6.warn({ sessionId }, "Attempted to record event in non-existent session");
2379
2369
  return;
2380
2370
  }
2381
2371
  session.recordEvent(eventType, agentId, data);
@@ -2386,12 +2376,12 @@ var GraphSessionManager = class {
2386
2376
  endSession(sessionId) {
2387
2377
  const session = this.sessions.get(sessionId);
2388
2378
  if (!session) {
2389
- logger7.warn({ sessionId }, "Attempted to end non-existent session");
2379
+ logger6.warn({ sessionId }, "Attempted to end non-existent session");
2390
2380
  return [];
2391
2381
  }
2392
2382
  const events = session.getEvents();
2393
2383
  const summary = session.getSummary();
2394
- logger7.info({ sessionId, summary }, "GraphSession ended");
2384
+ logger6.info({ sessionId, summary }, "GraphSession ended");
2395
2385
  session.cleanup();
2396
2386
  this.sessions.delete(sessionId);
2397
2387
  return events;
@@ -2417,7 +2407,7 @@ var GraphSessionManager = class {
2417
2407
  }
2418
2408
  };
2419
2409
  var graphSessionManager = new GraphSessionManager();
2420
- var logger8 = getLogger("ArtifactParser");
2410
+ var logger7 = getLogger("ArtifactParser");
2421
2411
  var _ArtifactParser = class _ArtifactParser {
2422
2412
  constructor(tenantId) {
2423
2413
  this.tenantId = tenantId;
@@ -2481,7 +2471,7 @@ var _ArtifactParser = class _ArtifactParser {
2481
2471
  id: taskId
2482
2472
  });
2483
2473
  if (!task) {
2484
- logger8.warn({ taskId }, "Task not found when fetching artifacts");
2474
+ logger7.warn({ taskId }, "Task not found when fetching artifacts");
2485
2475
  continue;
2486
2476
  }
2487
2477
  const taskArtifacts = await getLedgerArtifacts(dbClient_default)({
@@ -2493,9 +2483,9 @@ var _ArtifactParser = class _ArtifactParser {
2493
2483
  artifacts.set(key, artifact);
2494
2484
  }
2495
2485
  }
2496
- logger8.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
2486
+ logger7.debug({ contextId, count: artifacts.size }, "Loaded context artifacts");
2497
2487
  } catch (error) {
2498
- logger8.error({ error, contextId }, "Error loading context artifacts");
2488
+ logger7.error({ error, contextId }, "Error loading context artifacts");
2499
2489
  }
2500
2490
  return artifacts;
2501
2491
  }
@@ -2598,7 +2588,7 @@ var _ArtifactParser = class _ArtifactParser {
2598
2588
  id: taskId
2599
2589
  });
2600
2590
  if (!task) {
2601
- logger8.warn({ taskId }, "Task not found when fetching artifact");
2591
+ logger7.warn({ taskId }, "Task not found when fetching artifact");
2602
2592
  return null;
2603
2593
  }
2604
2594
  const artifacts = await getLedgerArtifacts(dbClient_default)({
@@ -2610,7 +2600,7 @@ var _ArtifactParser = class _ArtifactParser {
2610
2600
  return this.formatArtifactData(artifacts[0], artifactId, taskId);
2611
2601
  }
2612
2602
  } catch (error) {
2613
- logger8.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
2603
+ logger7.warn({ artifactId, taskId, error }, "Failed to fetch artifact");
2614
2604
  }
2615
2605
  return null;
2616
2606
  }
@@ -2650,7 +2640,7 @@ __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:
2650
2640
  var ArtifactParser = _ArtifactParser;
2651
2641
 
2652
2642
  // src/utils/incremental-stream-parser.ts
2653
- var logger9 = getLogger("IncrementalStreamParser");
2643
+ var logger8 = getLogger("IncrementalStreamParser");
2654
2644
  var IncrementalStreamParser = class {
2655
2645
  constructor(streamHelper, tenantId, contextId) {
2656
2646
  __publicField(this, "buffer", "");
@@ -2710,13 +2700,13 @@ var IncrementalStreamParser = class {
2710
2700
  if (part.type === "tool-call-delta" && part.toolName === targetToolName) {
2711
2701
  const delta = part.argsTextDelta || "";
2712
2702
  if (jsonBuffer.length + delta.length > MAX_BUFFER_SIZE) {
2713
- logger9.warn("JSON buffer exceeded maximum size, truncating");
2703
+ logger8.warn("JSON buffer exceeded maximum size, truncating");
2714
2704
  jsonBuffer = jsonBuffer.slice(-MAX_BUFFER_SIZE / 2);
2715
2705
  }
2716
2706
  jsonBuffer += delta;
2717
2707
  for (const char of delta) {
2718
2708
  if (componentBuffer.length > MAX_BUFFER_SIZE) {
2719
- logger9.warn("Component buffer exceeded maximum size, resetting");
2709
+ logger8.warn("Component buffer exceeded maximum size, resetting");
2720
2710
  componentBuffer = "";
2721
2711
  depth = 0;
2722
2712
  continue;
@@ -2731,7 +2721,7 @@ var IncrementalStreamParser = class {
2731
2721
  if (componentMatch) {
2732
2722
  const MAX_COMPONENT_SIZE = 1024 * 1024;
2733
2723
  if (componentMatch[0].length > MAX_COMPONENT_SIZE) {
2734
- logger9.warn(
2724
+ logger8.warn(
2735
2725
  {
2736
2726
  size: componentMatch[0].length,
2737
2727
  maxSize: MAX_COMPONENT_SIZE
@@ -2744,7 +2734,7 @@ var IncrementalStreamParser = class {
2744
2734
  try {
2745
2735
  const component = JSON.parse(componentMatch[0]);
2746
2736
  if (typeof component !== "object" || !component.id) {
2747
- logger9.warn("Invalid component structure, skipping");
2737
+ logger8.warn("Invalid component structure, skipping");
2748
2738
  componentBuffer = "";
2749
2739
  continue;
2750
2740
  }
@@ -2757,7 +2747,7 @@ var IncrementalStreamParser = class {
2757
2747
  componentsStreamed++;
2758
2748
  componentBuffer = "";
2759
2749
  } catch (e) {
2760
- logger9.debug({ error: e }, "Failed to parse component, continuing to accumulate");
2750
+ logger8.debug({ error: e }, "Failed to parse component, continuing to accumulate");
2761
2751
  }
2762
2752
  }
2763
2753
  }
@@ -2774,7 +2764,7 @@ var IncrementalStreamParser = class {
2774
2764
  break;
2775
2765
  }
2776
2766
  }
2777
- logger9.debug({ componentsStreamed }, "Finished streaming components");
2767
+ logger8.debug({ componentsStreamed }, "Finished streaming components");
2778
2768
  }
2779
2769
  /**
2780
2770
  * Legacy method for backward compatibility - defaults to text processing
@@ -2916,12 +2906,9 @@ var IncrementalStreamParser = class {
  }
  }
  };
- var logger10 = getLogger("ResponseFormatter");
- var RESPONSE_FORMATTER_SERVICE = "responseFormatter";
- function getResponseFormatterTracer() {
- const tracerProvider = trace.getTracerProvider();
- return tracerProvider.getTracer(RESPONSE_FORMATTER_SERVICE, SERVICE_VERSION);
- }
+
+ // src/utils/response-formatter.ts
+ var logger9 = getLogger("ResponseFormatter");
  var ResponseFormatter = class {
  constructor(tenantId) {
  __publicField(this, "artifactParser");
@@ -2931,8 +2918,7 @@ var ResponseFormatter = class {
2931
2918
  * Process structured object response and replace artifact markers with actual artifacts
2932
2919
  */
2933
2920
  async formatObjectResponse(responseObject, contextId) {
2934
- const tracer3 = getResponseFormatterTracer();
2935
- return tracer3.startActiveSpan("response.formatObject", async (span) => {
2921
+ return tracer.startActiveSpan("response.format_object_response", async (span) => {
2936
2922
  try {
2937
2923
  const artifactMap = await this.artifactParser.getContextArtifacts(contextId);
2938
2924
  span.setAttributes({
@@ -2952,8 +2938,8 @@ var ResponseFormatter = class {
2952
2938
  });
2953
2939
  return { parts };
2954
2940
  } catch (error) {
2955
- span.recordException(error);
2956
- logger10.error({ error, responseObject }, "Error formatting object response");
2941
+ setSpanWithError(span, error);
2942
+ logger9.error({ error, responseObject }, "Error formatting object response");
2957
2943
  return {
2958
2944
  parts: [{ kind: "data", data: responseObject }]
2959
2945
  };
@@ -2966,8 +2952,7 @@ var ResponseFormatter = class {
2966
2952
  * Process agent response and convert artifact markers to data parts
2967
2953
  */
2968
2954
  async formatResponse(responseText, contextId) {
2969
- const tracer3 = getResponseFormatterTracer();
2970
- return tracer3.startActiveSpan("response.format", async (span) => {
2955
+ return tracer.startActiveSpan("response.format_response", async (span) => {
2971
2956
  try {
2972
2957
  span.setAttributes({
2973
2958
  "response.hasArtifactMarkers": this.artifactParser.hasArtifactMarkers(responseText),
@@ -3004,9 +2989,8 @@ var ResponseFormatter = class {
3004
2989
  });
3005
2990
  return { parts };
3006
2991
  } catch (error) {
3007
- span.recordException(error);
3008
- span.setStatus({ code: 2, message: error.message });
3009
- logger10.error({ error, responseText }, "Error formatting response");
2992
+ setSpanWithError(span, error);
2993
+ logger9.error({ error, responseText }, "Error formatting response");
3010
2994
  return { text: responseText };
3011
2995
  } finally {
3012
2996
  span.end();
@@ -3051,7 +3035,7 @@ var ResponseFormatter = class {
3051
3035
  }
3052
3036
  }
3053
3037
  };
3054
- var logger11 = getLogger("ToolSessionManager");
3038
+ var logger10 = getLogger("ToolSessionManager");
3055
3039
  var _ToolSessionManager = class _ToolSessionManager {
3056
3040
  // 5 minutes
3057
3041
  constructor() {
@@ -3080,7 +3064,7 @@ var _ToolSessionManager = class _ToolSessionManager {
3080
3064
  createdAt: Date.now()
3081
3065
  };
3082
3066
  this.sessions.set(sessionId, session);
3083
- logger11.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
3067
+ logger10.debug({ sessionId, tenantId, contextId, taskId }, "Created tool session");
3084
3068
  return sessionId;
3085
3069
  }
3086
3070
  /**
@@ -3089,7 +3073,7 @@ var _ToolSessionManager = class _ToolSessionManager {
3089
3073
  recordToolResult(sessionId, toolResult) {
3090
3074
  const session = this.sessions.get(sessionId);
3091
3075
  if (!session) {
3092
- logger11.warn(
3076
+ logger10.warn(
3093
3077
  { sessionId, toolCallId: toolResult.toolCallId },
3094
3078
  "Tool result recorded for unknown session"
3095
3079
  );
@@ -3103,12 +3087,12 @@ var _ToolSessionManager = class _ToolSessionManager {
3103
3087
  getToolResult(sessionId, toolCallId) {
3104
3088
  const session = this.sessions.get(sessionId);
3105
3089
  if (!session) {
3106
- logger11.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
3090
+ logger10.warn({ sessionId, toolCallId }, "Requested tool result for unknown session");
3107
3091
  return void 0;
3108
3092
  }
3109
3093
  const result = session.toolResults.get(toolCallId);
3110
3094
  if (!result) {
3111
- logger11.warn(
3095
+ logger10.warn(
3112
3096
  {
3113
3097
  sessionId,
3114
3098
  toolCallId,
@@ -3147,10 +3131,10 @@ var _ToolSessionManager = class _ToolSessionManager {
3147
3131
  }
3148
3132
  for (const sessionId of expiredSessions) {
3149
3133
  this.sessions.delete(sessionId);
3150
- logger11.debug({ sessionId }, "Cleaned up expired tool session");
3134
+ logger10.debug({ sessionId }, "Cleaned up expired tool session");
3151
3135
  }
3152
3136
  if (expiredSessions.length > 0) {
3153
- logger11.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
3137
+ logger10.info({ expiredCount: expiredSessions.length }, "Cleaned up expired tool sessions");
3154
3138
  }
3155
3139
  }
3156
3140
  };
@@ -3159,7 +3143,7 @@ var ToolSessionManager = _ToolSessionManager;
3159
3143
  var toolSessionManager = ToolSessionManager.getInstance();
3160
3144
 
3161
3145
  // src/agents/artifactTools.ts
3162
- var logger12 = getLogger("artifactTools");
3146
+ var logger11 = getLogger("artifactTools");
3163
3147
  function buildKeyNestingMap(data, prefix = "", map = /* @__PURE__ */ new Map()) {
3164
3148
  if (typeof data === "object" && data !== null) {
3165
3149
  if (Array.isArray(data)) {
@@ -3380,7 +3364,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3380
3364
  execute: async ({ toolCallId, baseSelector, propSelectors, ...rest }, _context) => {
3381
3365
  const artifactType = "artifactType" in rest ? rest.artifactType : void 0;
3382
3366
  if (!sessionId) {
3383
- logger12.warn({ toolCallId }, "No session ID provided to save_tool_result");
3367
+ logger11.warn({ toolCallId }, "No session ID provided to save_tool_result");
3384
3368
  return {
3385
3369
  saved: false,
3386
3370
  error: `[toolCallId: ${toolCallId}] No session context available`,
@@ -3390,7 +3374,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3390
3374
  }
3391
3375
  const toolResult = toolSessionManager.getToolResult(sessionId, toolCallId);
3392
3376
  if (!toolResult) {
3393
- logger12.warn({ toolCallId, sessionId }, "Tool result not found in session");
3377
+ logger11.warn({ toolCallId, sessionId }, "Tool result not found in session");
3394
3378
  return {
3395
3379
  saved: false,
3396
3380
  error: `[toolCallId: ${toolCallId}] Tool result not found`,
@@ -3403,7 +3387,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3403
3387
  const baseData = jmespath.search(parsedResult, baseSelector);
3404
3388
  if (!baseData || Array.isArray(baseData) && baseData.length === 0) {
3405
3389
  const debugInfo = analyzeSelectorFailure(parsedResult, baseSelector);
3406
- logger12.warn(
3390
+ logger11.warn(
3407
3391
  {
3408
3392
  baseSelector,
3409
3393
  toolCallId,
@@ -3446,7 +3430,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3446
3430
  const fallbackValue = item[propName];
3447
3431
  if (fallbackValue !== null && fallbackValue !== void 0) {
3448
3432
  extractedItem[propName] = fallbackValue;
3449
- logger12.info(
3433
+ logger11.info(
3450
3434
  { propName, propSelector, context },
3451
3435
  `PropSelector failed, used fallback direct property access`
3452
3436
  );
@@ -3458,7 +3442,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3458
3442
  const fallbackValue = item[propName];
3459
3443
  if (fallbackValue !== null && fallbackValue !== void 0) {
3460
3444
  extractedItem[propName] = fallbackValue;
3461
- logger12.warn(
3445
+ logger11.warn(
3462
3446
  { propName, propSelector, context, error: error.message },
3463
3447
  `PropSelector syntax error, used fallback direct property access`
3464
3448
  );
@@ -3571,7 +3555,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3571
3555
  warnings
3572
3556
  };
3573
3557
  } catch (error) {
3574
- logger12.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3558
+ logger11.error({ error, toolCallId, sessionId }, "Error processing save_tool_result");
3575
3559
  return {
3576
3560
  saved: false,
3577
3561
  error: `[toolCallId: ${toolCallId}] ${error instanceof Error ? error.message : "Unknown error"}`,
@@ -3583,7 +3567,7 @@ Remember: Each time you call this tool, you create a separate data component. Ca
3583
3567
  }
3584
3568
 
3585
3569
  // src/a2a/client.ts
3586
- var logger13 = getLogger("a2aClient");
3570
+ var logger12 = getLogger("a2aClient");
3587
3571
  var DEFAULT_BACKOFF = {
3588
3572
  initialInterval: 500,
3589
3573
  maxInterval: 6e4,
@@ -3789,7 +3773,7 @@ var A2AClient = class {
3789
3773
  try {
3790
3774
  const res = await fn();
3791
3775
  if (attempt > 0) {
3792
- logger13.info(
3776
+ logger12.info(
3793
3777
  {
3794
3778
  attempts: attempt + 1,
3795
3779
  elapsedTime: Date.now() - start
@@ -3804,7 +3788,7 @@ var A2AClient = class {
3804
3788
  }
3805
3789
  const elapsed = Date.now() - start;
3806
3790
  if (elapsed > maxElapsedTime) {
3807
- logger13.warn(
3791
+ logger12.warn(
3808
3792
  {
3809
3793
  attempts: attempt + 1,
3810
3794
  elapsedTime: elapsed,
@@ -3825,7 +3809,7 @@ var A2AClient = class {
3825
3809
  retryInterval = initialInterval * attempt ** exponent + Math.random() * 1e3;
3826
3810
  }
3827
3811
  const delayMs = Math.min(retryInterval, maxInterval);
3828
- logger13.info(
3812
+ logger12.info(
3829
3813
  {
3830
3814
  attempt: attempt + 1,
3831
3815
  delayMs,
@@ -3910,7 +3894,7 @@ var A2AClient = class {
3910
3894
  }
3911
3895
  const rpcResponse = await httpResponse.json();
3912
3896
  if (rpcResponse.id !== requestId2) {
3913
- logger13.warn(
3897
+ logger12.warn(
3914
3898
  {
3915
3899
  method,
3916
3900
  expectedId: requestId2,
@@ -4109,7 +4093,7 @@ var A2AClient = class {
4109
4093
  try {
4110
4094
  while (true) {
4111
4095
  const { done, value } = await reader.read();
4112
- logger13.info({ done, value }, "parseA2ASseStream");
4096
+ logger12.info({ done, value }, "parseA2ASseStream");
4113
4097
  if (done) {
4114
4098
  if (eventDataBuffer.trim()) {
4115
4099
  const result = this._processSseEventData(
@@ -4196,7 +4180,7 @@ var A2AClient = class {
4196
4180
  };
4197
4181
 
4198
4182
  // src/agents/relationTools.ts
4199
- var logger14 = getLogger("relationships Tools");
4183
+ var logger13 = getLogger("relationships Tools");
4200
4184
  var generateTransferToolDescription = (config) => {
4201
4185
  return `Hand off the conversation to agent ${config.id}.
4202
4186
 
@@ -4234,7 +4218,7 @@ var createTransferToAgentTool = ({
4234
4218
  "transfer.to_agent_id": transferConfig.id ?? "unknown"
4235
4219
  });
4236
4220
  }
4237
- logger14.info(
4221
+ logger13.info(
4238
4222
  {
4239
4223
  transferTo: transferConfig.id ?? "unknown",
4240
4224
  fromAgent: callingAgentId
@@ -4382,7 +4366,7 @@ function createDelegateToAgentTool({
4382
4366
  ...isInternal ? { fromAgentId: callingAgentId } : { fromExternalAgentId: callingAgentId }
4383
4367
  }
4384
4368
  };
4385
- logger14.info({ messageToSend }, "messageToSend");
4369
+ logger13.info({ messageToSend }, "messageToSend");
4386
4370
  await createMessage(dbClient_default)({
4387
4371
  id: nanoid(),
4388
4372
  tenantId,
@@ -4442,7 +4426,7 @@ function createDelegateToAgentTool({
4442
4426
  }
4443
4427
  });
4444
4428
  }
4445
- var logger15 = getLogger("SystemPromptBuilder");
4429
+ var logger14 = getLogger("SystemPromptBuilder");
4446
4430
  var SystemPromptBuilder = class {
4447
4431
  constructor(version, versionConfig) {
4448
4432
  this.version = version;
@@ -4466,9 +4450,9 @@ var SystemPromptBuilder = class {
4466
4450
  this.templates.set(name, content);
4467
4451
  }
4468
4452
  this.loaded = true;
4469
- logger15.debug(`Loaded ${this.templates.size} templates for version ${this.version}`);
4453
+ logger14.debug(`Loaded ${this.templates.size} templates for version ${this.version}`);
4470
4454
  } catch (error) {
4471
- logger15.error({ error }, `Failed to load templates for version ${this.version}`);
4455
+ logger14.error({ error }, `Failed to load templates for version ${this.version}`);
4472
4456
  throw new Error(`Template loading failed: ${error}`);
4473
4457
  }
4474
4458
  }
@@ -4804,8 +4788,7 @@ function hasToolCallWithPrefix(prefix) {
4804
4788
  return false;
4805
4789
  };
4806
4790
  }
4807
- var logger16 = getLogger("Agent");
4808
- var tracer2 = getGlobalTracer();
4791
+ var logger15 = getLogger("Agent");
4809
4792
  var CONSTANTS = {
4810
4793
  MAX_GENERATION_STEPS: 12,
4811
4794
  PHASE_1_TIMEOUT_MS: 27e4,
@@ -5058,14 +5041,14 @@ var Agent = class {
5058
5041
  for (const toolSet of tools) {
5059
5042
  for (const [toolName, originalTool] of Object.entries(toolSet)) {
5060
5043
  if (!isValidTool(originalTool)) {
5061
- logger16.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5044
+ logger15.error({ toolName }, "Invalid MCP tool structure - missing required properties");
5062
5045
  continue;
5063
5046
  }
5064
5047
  const sessionWrappedTool = tool({
5065
5048
  description: originalTool.description,
5066
5049
  inputSchema: originalTool.inputSchema,
5067
5050
  execute: async (args, { toolCallId }) => {
5068
- logger16.debug({ toolName, toolCallId }, "MCP Tool Called");
5051
+ logger15.debug({ toolName, toolCallId }, "MCP Tool Called");
5069
5052
  try {
5070
5053
  const result = await originalTool.execute(args, { toolCallId });
5071
5054
  toolSessionManager.recordToolResult(sessionId, {
@@ -5077,7 +5060,7 @@ var Agent = class {
5077
5060
  });
5078
5061
  return { result, toolCallId };
5079
5062
  } catch (error) {
5080
- logger16.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5063
+ logger15.error({ toolName, toolCallId, error }, "MCP tool execution failed");
5081
5064
  throw error;
5082
5065
  }
5083
5066
  }
@@ -5110,6 +5093,11 @@
  }
  async getMcpTool(tool4) {
  const credentialReferenceId = tool4.credentialReferenceId;
+ const toolsForAgent = await getToolsForAgent(dbClient_default)({
+ scopes: { tenantId: this.config.tenantId, projectId: this.config.projectId },
+ agentId: this.config.id
+ });
+ const selectedTools = toolsForAgent.data.find((t) => t.toolId === tool4.id)?.selectedTools || void 0;
  let serverConfig;
  if (credentialReferenceId && this.credentialStuffer) {
  const credentialReference = await getCredentialReference(dbClient_default)({
@@ -5134,7 +5122,8 @@ var Agent = class {
  conversationId: this.conversationId || void 0
  },
  this.convertToMCPToolConfig(tool4),
- storeReference
+ storeReference,
+ selectedTools
  );
  } else if (tool4.headers && this.credentialStuffer) {
  serverConfig = await this.credentialStuffer.buildMcpServerConfig(
@@ -5144,16 +5133,19 @@ var Agent = class {
  contextConfigId: this.config.contextConfigId || void 0,
  conversationId: this.conversationId || void 0
  },
- this.convertToMCPToolConfig(tool4)
+ this.convertToMCPToolConfig(tool4),
+ void 0,
+ selectedTools
  );
  } else {
  serverConfig = {
  type: tool4.config.mcp.transport?.type || MCPTransportType.streamableHttp,
  url: tool4.config.mcp.server.url,
- activeTools: tool4.config.mcp.activeTools
+ activeTools: tool4.config.mcp.activeTools,
+ selectedTools
  };
  }
- logger16.info(
+ logger15.info(
  {
  toolName: tool4.name,
  credentialReferenceId,
@@ -5193,7 +5185,7 @@ var Agent = class {
5193
5185
  async getResolvedContext(conversationId, requestContext) {
5194
5186
  try {
5195
5187
  if (!this.config.contextConfigId) {
5196
- logger16.debug({ graphId: this.config.graphId }, "No context config found for graph");
5188
+ logger15.debug({ graphId: this.config.graphId }, "No context config found for graph");
5197
5189
  return null;
5198
5190
  }
5199
5191
  const contextConfig = await getContextConfigById(dbClient_default)({
@@ -5201,7 +5193,7 @@ var Agent = class {
5201
5193
  id: this.config.contextConfigId
5202
5194
  });
5203
5195
  if (!contextConfig) {
5204
- logger16.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5196
+ logger15.warn({ contextConfigId: this.config.contextConfigId }, "Context config not found");
5205
5197
  return null;
5206
5198
  }
5207
5199
  if (!this.contextResolver) {
@@ -5218,7 +5210,7 @@ var Agent = class {
5218
5210
  $now: (/* @__PURE__ */ new Date()).toISOString(),
5219
5211
  $env: process.env
5220
5212
  };
5221
- logger16.debug(
5213
+ logger15.debug(
5222
5214
  {
5223
5215
  conversationId,
5224
5216
  contextConfigId: contextConfig.id,
@@ -5232,7 +5224,7 @@ var Agent = class {
5232
5224
  );
5233
5225
  return contextWithBuiltins;
5234
5226
  } catch (error) {
5235
- logger16.error(
5227
+ logger15.error(
5236
5228
  {
5237
5229
  conversationId,
5238
5230
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5256,7 +5248,7 @@ var Agent = class {
5256
5248
  });
5257
5249
  return graphDefinition?.graphPrompt || void 0;
5258
5250
  } catch (error) {
5259
- logger16.warn(
5251
+ logger15.warn(
5260
5252
  {
5261
5253
  graphId: this.config.graphId,
5262
5254
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5283,7 +5275,7 @@ var Agent = class {
5283
5275
  }
5284
5276
  return !!(graphDefinition.artifactComponents && Object.keys(graphDefinition.artifactComponents).length > 0);
5285
5277
  } catch (error) {
5286
- logger16.warn(
5278
+ logger15.warn(
5287
5279
  {
5288
5280
  graphId: this.config.graphId,
5289
5281
  tenantId: this.config.tenantId,
@@ -5343,7 +5335,7 @@ Key requirements:
5343
5335
  preserveUnresolved: false
5344
5336
  });
5345
5337
  } catch (error) {
5346
- logger16.error(
5338
+ logger15.error(
5347
5339
  {
5348
5340
  conversationId,
5349
5341
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5388,7 +5380,7 @@ Key requirements:
5388
5380
  preserveUnresolved: false
5389
5381
  });
5390
5382
  } catch (error) {
5391
- logger16.error(
5383
+ logger15.error(
5392
5384
  {
5393
5385
  conversationId,
5394
5386
  error: error instanceof Error ? error.message : "Unknown error"
@@ -5416,7 +5408,7 @@ Key requirements:
5416
5408
  artifactId: z.string().describe("The unique identifier of the artifact to get.")
5417
5409
  }),
5418
5410
  execute: async ({ artifactId }) => {
5419
- logger16.info({ artifactId }, "get_artifact executed");
5411
+ logger15.info({ artifactId }, "get_artifact executed");
5420
5412
  const artifact = await getLedgerArtifacts(dbClient_default)({
5421
5413
  scopes: {
5422
5414
  tenantId: this.config.tenantId,
@@ -5483,7 +5475,7 @@ Key requirements:
5483
5475
  graphId: this.config.graphId
5484
5476
  });
5485
5477
  } catch (error) {
5486
- logger16.error(
5478
+ logger15.error(
5487
5479
  { error, graphId: this.config.graphId },
5488
5480
  "Failed to check graph artifact components"
5489
5481
  );
@@ -5491,7 +5483,7 @@ Key requirements:
5491
5483
  }
5492
5484
  }
5493
5485
  async generate(userMessage, runtimeContext) {
5494
- return tracer2.startActiveSpan(createSpanName("agent.generate"), async (span) => {
5486
+ return tracer.startActiveSpan("agent.generate", async (span) => {
5495
5487
  const contextId = runtimeContext?.contextId || "default";
5496
5488
  const taskId = runtimeContext?.metadata?.taskId || "unknown";
5497
5489
  const sessionId = toolSessionManager.createSession(
@@ -5515,8 +5507,8 @@ Key requirements:
5515
5507
  functionTools,
5516
5508
  relationTools,
5517
5509
  defaultTools
5518
- ] = await tracer2.startActiveSpan(
5519
- createSpanName("agent.load_tools"),
5510
+ ] = await tracer.startActiveSpan(
5511
+ "agent.load_tools",
5520
5512
  {
5521
5513
  attributes: {
5522
5514
  "agent.name": this.config.name,
@@ -5538,11 +5530,10 @@ Key requirements:
5538
5530
  childSpan.setStatus({ code: SpanStatusCode.OK });
5539
5531
  return result;
5540
5532
  } catch (err) {
5541
- handleSpanError(childSpan, err);
5533
+ setSpanWithError(childSpan, err);
5542
5534
  throw err;
5543
5535
  } finally {
5544
5536
  childSpan.end();
5545
- await forceFlushTracer();
5546
5537
  }
5547
5538
  }
5548
5539
  );
@@ -5588,7 +5579,7 @@ Key requirements:
5588
5579
  const configuredTimeout = modelSettings.maxDuration ? Math.min(modelSettings.maxDuration * 1e3, MAX_ALLOWED_TIMEOUT_MS) : shouldStreamPhase1 ? CONSTANTS.PHASE_1_TIMEOUT_MS : CONSTANTS.NON_STREAMING_PHASE_1_TIMEOUT_MS;
5589
5580
  const timeoutMs = Math.min(configuredTimeout, MAX_ALLOWED_TIMEOUT_MS);
5590
5581
  if (modelSettings.maxDuration && modelSettings.maxDuration * 1e3 > MAX_ALLOWED_TIMEOUT_MS) {
5591
- logger16.warn(
5582
+ logger15.warn(
5592
5583
  {
5593
5584
  requestedTimeout: modelSettings.maxDuration * 1e3,
5594
5585
  appliedTimeout: timeoutMs,
@@ -5630,7 +5621,7 @@ Key requirements:
5630
5621
  }
5631
5622
  );
5632
5623
  } catch (error) {
5633
- logger16.debug("Failed to track agent reasoning");
5624
+ logger15.debug("Failed to track agent reasoning");
5634
5625
  }
5635
5626
  }
5636
5627
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -5713,7 +5704,7 @@ Key requirements:
5713
5704
  }
5714
5705
  );
5715
5706
  } catch (error) {
5716
- logger16.debug("Failed to track agent reasoning");
5707
+ logger15.debug("Failed to track agent reasoning");
5717
5708
  }
5718
5709
  }
5719
5710
  if (last && "toolCalls" in last && last.toolCalls) {
@@ -5758,7 +5749,7 @@ Key requirements:
5758
5749
  return;
5759
5750
  }
5760
5751
  if (toolName === "save_artifact_tool" || toolName === "save_tool_result") {
5761
- logger16.info({ result }, "save_artifact_tool or save_tool_result");
5752
+ logger15.info({ result }, "save_artifact_tool or save_tool_result");
5762
5753
  if (result.output.artifacts) {
5763
5754
  for (const artifact of result.output.artifacts) {
5764
5755
  const artifactId = artifact?.artifactId || "N/A";
@@ -5864,7 +5855,6 @@ ${output}`;
  }
  span.setStatus({ code: SpanStatusCode.OK });
  span.end();
- await forceFlushTracer();
  let formattedContent = response.formattedContent || null;
  if (!formattedContent) {
  if (response.object) {
@@ -5894,14 +5884,8 @@ ${output}`;
  return formattedResponse;
  } catch (error) {
  toolSessionManager.endSession(sessionId);
- span.recordException(error);
- span.setStatus({
- code: SpanStatusCode.ERROR,
- message: error.message
- });
+ setSpanWithError(span, error);
  span.end();
- await forceFlushTracer();
- getLogger("Agent").error(error, "Agent generate error");
  throw error;
  }
  });
@@ -5919,7 +5903,7 @@ function parseEmbeddedJson(data) {
  }
  });
  }
- var logger17 = getLogger("generateTaskHandler");
+ var logger16 = getLogger("generateTaskHandler");
  var createTaskHandler = (config, credentialStoreRegistry) => {
  return async (task) => {
  try {
@@ -5969,7 +5953,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
  agentId: config.agentId
  })
  ]);
- logger17.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
+ logger16.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
  const agentPrompt = "prompt" in config.agentSchema ? config.agentSchema.prompt : "";
  const models = "models" in config.agentSchema ? config.agentSchema.models : void 0;
  const stopWhen = "stopWhen" in config.agentSchema ? config.agentSchema.stopWhen : void 0;
@@ -6069,7 +6053,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
  const taskIdMatch = task.id.match(/^task_([^-]+-[^-]+-\d+)-/);
  if (taskIdMatch) {
  contextId = taskIdMatch[1];
- logger17.info(
+ logger16.info(
  {
  taskId: task.id,
  extractedContextId: contextId,
@@ -6085,7 +6069,7 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
  const isDelegation = task.context?.metadata?.isDelegation === true;
  agent.setDelegationStatus(isDelegation);
  if (isDelegation) {
- logger17.info(
+ logger16.info(
  { agentId: config.agentId, taskId: task.id },
  "Delegated agent - streaming disabled"
  );
@@ -6365,7 +6349,7 @@ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {

  // src/routes/agents.ts
  var app = new OpenAPIHono();
- var logger18 = getLogger("agents");
+ var logger17 = getLogger("agents");
  app.openapi(
  createRoute({
  method: "get",
@@ -6403,7 +6387,7 @@ app.openapi(
  tracestate: c.req.header("tracestate"),
  baggage: c.req.header("baggage")
  };
- logger18.info(
+ logger17.info(
  {
  otelHeaders,
  path: c.req.path,
@@ -6414,7 +6398,7 @@ app.openapi(
  const executionContext = getRequestExecutionContext(c);
  const { tenantId, projectId, graphId, agentId } = executionContext;
  if (agentId) {
- logger18.info(
+ logger17.info(
  {
  message: "getRegisteredAgent (agent-level)",
  tenantId,
@@ -6426,13 +6410,13 @@ app.openapi(
  );
  const credentialStores = c.get("credentialStores");
  const agent = await getRegisteredAgent(executionContext, credentialStores);
- logger18.info({ agent }, "agent registered: well-known agent.json");
+ logger17.info({ agent }, "agent registered: well-known agent.json");
  if (!agent) {
  return c.json({ error: "Agent not found" }, 404);
  }
  return c.json(agent.agentCard);
  } else {
- logger18.info(
+ logger17.info(
  {
  message: "getRegisteredGraph (graph-level)",
  tenantId,
@@ -6455,7 +6439,7 @@ app.post("/a2a", async (c) => {
  tracestate: c.req.header("tracestate"),
  baggage: c.req.header("baggage")
  };
- logger18.info(
+ logger17.info(
  {
  otelHeaders,
  path: c.req.path,
@@ -6466,7 +6450,7 @@ app.post("/a2a", async (c) => {
  const executionContext = getRequestExecutionContext(c);
  const { tenantId, projectId, graphId, agentId } = executionContext;
  if (agentId) {
- logger18.info(
+ logger17.info(
  {
  message: "a2a (agent-level)",
  tenantId,
@@ -6490,7 +6474,7 @@ app.post("/a2a", async (c) => {
  }
  return a2aHandler(c, agent);
  } else {
- logger18.info(
+ logger17.info(
  {
  message: "a2a (graph-level)",
  tenantId,
@@ -6530,14 +6514,14 @@ app.post("/a2a", async (c) => {
  }
  });
  var agents_default = app;
- var logger19 = getLogger("Transfer");
+ var logger18 = getLogger("Transfer");
  async function executeTransfer({
  tenantId,
  threadId,
  projectId,
  targetAgentId
  }) {
- logger19.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
+ logger18.info({ targetAgent: targetAgentId }, "Executing transfer to agent");
  await setActiveAgentForThread(dbClient_default)({
  scopes: { tenantId, projectId },
  threadId,
@@ -7076,7 +7060,7 @@ var MCPStreamHelper = class {
  function createMCPStreamHelper() {
  return new MCPStreamHelper();
  }
- var logger20 = getLogger("ExecutionHandler");
+ var logger19 = getLogger("ExecutionHandler");
  var ExecutionHandler = class {
  constructor() {
  // Hardcoded error limit - separate from configurable stopWhen
@@ -7101,7 +7085,7 @@ var ExecutionHandler = class {
  const { tenantId, projectId, graphId, apiKey, baseUrl } = executionContext;
  registerStreamHelper(requestId2, sseHelper);
  graphSessionManager.createSession(requestId2, graphId, tenantId, projectId);
- logger20.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
+ logger19.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
  let graphConfig = null;
  try {
  graphConfig = await getFullGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
@@ -7113,7 +7097,7 @@ var ExecutionHandler = class {
  );
  }
  } catch (error) {
- logger20.error(
+ logger19.error(
  {
  error: error instanceof Error ? error.message : "Unknown error",
  stack: error instanceof Error ? error.stack : void 0
@@ -7129,7 +7113,7 @@ var ExecutionHandler = class {
  try {
  await sseHelper.writeOperation(agentInitializingOp(requestId2, graphId));
  const taskId = `task_${conversationId}-${requestId2}`;
- logger20.info(
+ logger19.info(
  { taskId, currentAgentId, conversationId, requestId: requestId2 },
  "Attempting to create or reuse existing task"
  );
@@ -7152,7 +7136,7 @@ var ExecutionHandler = class {
  agent_id: currentAgentId
  }
  });
- logger20.info(
+ logger19.info(
  {
  taskId,
  createdTaskMetadata: Array.isArray(task) ? task[0]?.metadata : task?.metadata
@@ -7161,21 +7145,21 @@ var ExecutionHandler = class {
  );
  } catch (error) {
  if (error?.message?.includes("UNIQUE constraint failed") || error?.message?.includes("PRIMARY KEY constraint failed") || error?.code === "SQLITE_CONSTRAINT_PRIMARYKEY") {
- logger20.info({ taskId, error: error.message }, "Task already exists, fetching existing task");
+ logger19.info({ taskId, error: error.message }, "Task already exists, fetching existing task");
  const existingTask = await getTask(dbClient_default)({ id: taskId });
  if (existingTask) {
  task = existingTask;
- logger20.info({ taskId, existingTask }, "Successfully reused existing task from race condition");
+ logger19.info({ taskId, existingTask }, "Successfully reused existing task from race condition");
  } else {
- logger20.error({ taskId, error }, "Task constraint failed but task not found");
+ logger19.error({ taskId, error }, "Task constraint failed but task not found");
  throw error;
  }
  } else {
- logger20.error({ taskId, error }, "Failed to create task due to non-constraint error");
+ logger19.error({ taskId, error }, "Failed to create task due to non-constraint error");
  throw error;
  }
  }
- logger20.debug(
+ logger19.debug(
  {
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
  executionType: "create_initial_task",
@@ -7193,7 +7177,7 @@ var ExecutionHandler = class {
  const maxTransfers = graphConfig?.stopWhen?.transferCountIs ?? 10;
  while (iterations < maxTransfers) {
  iterations++;
- logger20.info(
+ logger19.info(
  { iterations, currentAgentId, graphId, conversationId, fromAgentId },
  `Execution loop iteration ${iterations} with agent ${currentAgentId}, transfer from: ${fromAgentId || "none"}`
  );
@@ -7201,10 +7185,10 @@ var ExecutionHandler = class {
  scopes: { tenantId, projectId },
  conversationId
  });
- logger20.info({ activeAgent }, "activeAgent");
+ logger19.info({ activeAgent }, "activeAgent");
  if (activeAgent && activeAgent.activeAgentId !== currentAgentId) {
  currentAgentId = activeAgent.activeAgentId;
- logger20.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
+ logger19.info({ currentAgentId }, `Updated current agent to: ${currentAgentId}`);
  }
  const agentBaseUrl = `${baseUrl}/agents`;
  const a2aClient = new A2AClient(agentBaseUrl, {
@@ -7245,13 +7229,13 @@ var ExecutionHandler = class {
  });
  if (!messageResponse?.result) {
  errorCount++;
- logger20.error(
+ logger19.error(
  { currentAgentId, iterations, errorCount },
  `No response from agent ${currentAgentId} on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
  );
  if (errorCount >= this.MAX_ERRORS) {
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
- logger20.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
  await sseHelper.writeError(errorMessage2);
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
  if (task) {
@@ -7277,7 +7261,7 @@ var ExecutionHandler = class {
  const transferResponse = messageResponse.result;
  const targetAgentId = transferResponse.artifacts?.[0]?.parts?.[0]?.data?.targetAgentId;
  const transferReason = transferResponse.artifacts?.[0]?.parts?.[1]?.text;
- logger20.info({ targetAgentId, transferReason }, "transfer response");
+ logger19.info({ targetAgentId, transferReason }, "transfer response");
  currentMessage = `<transfer_context> ${transferReason} </transfer_context>`;
  const { success, targetAgentId: newAgentId } = await executeTransfer({
  projectId,
@@ -7288,7 +7272,7 @@ var ExecutionHandler = class {
  if (success) {
  fromAgentId = currentAgentId;
  currentAgentId = newAgentId;
- logger20.info(
+ logger19.info(
  {
  transferFrom: fromAgentId,
  transferTo: currentAgentId,
@@ -7306,7 +7290,7 @@ var ExecutionHandler = class {
  const graphSessionData = graphSessionManager.getSession(requestId2);
  if (graphSessionData) {
  const sessionSummary = graphSessionData.getSummary();
- logger20.info(sessionSummary, "GraphSession data after completion");
+ logger19.info(sessionSummary, "GraphSession data after completion");
  }
  let textContent = "";
  for (const part of responseParts) {
@@ -7361,32 +7345,32 @@ var ExecutionHandler = class {
  }
  });
  const updateTaskEnd = Date.now();
- logger20.info(
+ logger19.info(
  { duration: updateTaskEnd - updateTaskStart },
  "Completed updateTask operation"
  );
  await sseHelper.writeOperation(completionOp(currentAgentId, iterations));
  await sseHelper.complete();
- logger20.info("Ending GraphSession and cleaning up");
+ logger19.info("Ending GraphSession and cleaning up");
  graphSessionManager.endSession(requestId2);
- logger20.info("Cleaning up streamHelper");
+ logger19.info("Cleaning up streamHelper");
  unregisterStreamHelper(requestId2);
  let response;
  if (sseHelper instanceof MCPStreamHelper) {
  const captured = sseHelper.getCapturedResponse();
  response = captured.text || "No response content";
  }
- logger20.info("ExecutionHandler returning success");
+ logger19.info("ExecutionHandler returning success");
  return { success: true, iterations, response };
  }
  errorCount++;
- logger20.warn(
+ logger19.warn(
  { iterations, errorCount },
  `No valid response or transfer on iteration ${iterations} (error ${errorCount}/${this.MAX_ERRORS})`
  );
  if (errorCount >= this.MAX_ERRORS) {
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
- logger20.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
+ logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
  await sseHelper.writeError(errorMessage2);
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
  if (task) {
@@ -7408,7 +7392,7 @@ var ExecutionHandler = class {
  }
  }
  const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
- logger20.error({ maxTransfers, iterations }, errorMessage);
+ logger19.error({ maxTransfers, iterations }, errorMessage);
  await sseHelper.writeError(errorMessage);
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
  if (task) {
@@ -7428,7 +7412,7 @@ var ExecutionHandler = class {
  unregisterStreamHelper(requestId2);
  return { success: false, error: errorMessage, iterations };
  } catch (error) {
- logger20.error({ error }, "Error in execution handler");
+ logger19.error({ error }, "Error in execution handler");
  const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
  await sseHelper.writeError(`Execution error: ${errorMessage}`);
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
@@ -7454,7 +7438,7 @@ var ExecutionHandler = class {

  // src/routes/chat.ts
  var app2 = new OpenAPIHono();
- var logger21 = getLogger("completionsHandler");
+ var logger20 = getLogger("completionsHandler");
  var chatCompletionsRoute = createRoute({
  method: "post",
  path: "/completions",
@@ -7572,7 +7556,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  tracestate: c.req.header("tracestate"),
  baggage: c.req.header("baggage")
  };
- logger21.info(
+ logger20.info(
  {
  otelHeaders,
  path: c.req.path,
@@ -7658,7 +7642,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  dbClient_default,
  credentialStores
  );
- logger21.info(
+ logger20.info(
  {
  tenantId,
  graphId,
@@ -7704,7 +7688,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  return streamSSE(c, async (stream2) => {
  const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
  await sseHelper.writeRole();
- logger21.info({ agentId }, "Starting execution");
+ logger20.info({ agentId }, "Starting execution");
  const executionHandler = new ExecutionHandler();
  const result = await executionHandler.execute({
  executionContext,
@@ -7714,7 +7698,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  requestId: requestId2,
  sseHelper
  });
- logger21.info(
+ logger20.info(
  { result },
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
  );
@@ -7747,7 +7731,7 @@ var getMessageText = (content) => {
  };
  var chat_default = app2;
  var app3 = new OpenAPIHono();
- var logger22 = getLogger("chatDataStream");
+ var logger21 = getLogger("chatDataStream");
  var chatDataStreamRoute = createRoute({
  method: "post",
  path: "/chat",
@@ -7852,7 +7836,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  );
  const lastUserMessage = body.messages.filter((m) => m.role === "user").slice(-1)[0];
  const userText = typeof lastUserMessage?.content === "string" ? lastUserMessage.content : lastUserMessage?.parts?.map((p) => p.text).join("") || "";
- logger22.info({ userText, lastUserMessage }, "userText");
+ logger21.info({ userText, lastUserMessage }, "userText");
  const messageSpan = trace.getActiveSpan();
  if (messageSpan) {
  messageSpan.setAttributes({
@@ -7894,7 +7878,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  await streamHelper.writeError("Unable to process request");
  }
  } catch (err) {
- logger22.error({ err }, "Streaming error");
+ logger21.error({ err }, "Streaming error");
  await streamHelper.writeError("Internal server error");
  } finally {
  if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
@@ -7915,7 +7899,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  )
  );
  } catch (error) {
- logger22.error({ error }, "chatDataStream error");
+ logger21.error({ error }, "chatDataStream error");
  return c.json({ error: "Failed to process chat completion" }, 500);
  }
  });
@@ -7923,7 +7907,7 @@ var chatDataStream_default = app3;
  function createMCPSchema(schema) {
  return schema;
  }
- var logger23 = getLogger("mcp");
+ var logger22 = getLogger("mcp");
  var _MockResponseSingleton = class _MockResponseSingleton {
  constructor() {
  __publicField(this, "mockRes");
@@ -7978,21 +7962,21 @@ var createSpoofInitMessage = (mcpProtocolVersion) => ({
  id: 0
  });
  var spoofTransportInitialization = async (transport, req, sessionId, mcpProtocolVersion) => {
- logger23.info({ sessionId }, "Spoofing initialization message to set transport state");
+ logger22.info({ sessionId }, "Spoofing initialization message to set transport state");
  const spoofInitMessage = createSpoofInitMessage(mcpProtocolVersion);
  const mockRes = MockResponseSingleton.getInstance().getMockResponse();
  try {
  await transport.handleRequest(req, mockRes, spoofInitMessage);
- logger23.info({ sessionId }, "Successfully spoofed initialization");
+ logger22.info({ sessionId }, "Successfully spoofed initialization");
  } catch (spoofError) {
- logger23.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
+ logger22.warn({ sessionId, error: spoofError }, "Spoof initialization failed, continuing anyway");
  }
  };
  var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
  const sessionId = req.headers["mcp-session-id"];
- logger23.info({ sessionId }, "Received MCP session ID");
+ logger22.info({ sessionId }, "Received MCP session ID");
  if (!sessionId) {
- logger23.info({ body }, "Missing session ID");
+ logger22.info({ body }, "Missing session ID");
  res.writeHead(400).end(
  JSON.stringify({
  jsonrpc: "2.0",
@@ -8018,7 +8002,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
  scopes: { tenantId, projectId },
  conversationId: sessionId
  });
- logger23.info(
+ logger22.info(
  {
  sessionId,
  conversationFound: !!conversation,
@@ -8029,7 +8013,7 @@ var validateSession = async (req, res, body, tenantId, projectId, graphId) => {
  "Conversation lookup result"
  );
  if (!conversation || conversation.metadata?.sessionData?.sessionType !== "mcp" || conversation.metadata?.sessionData?.graphId !== graphId) {
- logger23.info(
+ logger22.info(
  { sessionId, conversationId: conversation?.id },
  "MCP session not found or invalid"
  );
@@ -8090,7 +8074,7 @@ var executeAgentQuery = async (executionContext, conversationId, query, defaultA
  requestId: requestId2,
  sseHelper: mcpStreamHelper
  });
- logger23.info(
+ logger22.info(
  { result },
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
  );
@@ -8164,7 +8148,7 @@ var getServer = async (requestContext, executionContext, conversationId, credent
  dbClient_default,
  credentialStores
  );
- logger23.info(
+ logger22.info(
  {
  tenantId,
  graphId,
@@ -8225,7 +8209,7 @@ var validateRequestParameters = (c) => {
  };
  var handleInitializationRequest = async (body, executionContext, validatedContext, req, res, c, credentialStores) => {
  const { tenantId, projectId, graphId } = executionContext;
- logger23.info({ body }, "Received initialization request");
+ logger22.info({ body }, "Received initialization request");
  const sessionId = nanoid();
  const agentGraph = await getAgentGraphWithDefaultAgent(dbClient_default)({
  scopes: { tenantId, projectId },
@@ -8256,7 +8240,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
  }
  }
  });
- logger23.info(
+ logger22.info(
  { sessionId, conversationId: conversation.id },
  "Created MCP session as conversation"
  );
@@ -8265,9 +8249,9 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
  });
  const server = await getServer(validatedContext, executionContext, sessionId, credentialStores);
  await server.connect(transport);
- logger23.info({ sessionId }, "Server connected for initialization");
+ logger22.info({ sessionId }, "Server connected for initialization");
  res.setHeader("Mcp-Session-Id", sessionId);
- logger23.info(
+ logger22.info(
  {
  sessionId,
  bodyMethod: body?.method,
@@ -8276,7 +8260,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
  "About to handle initialization request"
  );
  await transport.handleRequest(req, res, body);
- logger23.info({ sessionId }, "Successfully handled initialization request");
+ logger22.info({ sessionId }, "Successfully handled initialization request");
  return toFetchResponse(res);
  };
  var handleExistingSessionRequest = async (body, executionContext, validatedContext, req, res, credentialStores) => {
@@ -8304,8 +8288,8 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
  sessionId,
  conversation.metadata?.session_data?.mcpProtocolVersion
  );
- logger23.info({ sessionId }, "Server connected and transport initialized");
- logger23.info(
+ logger22.info({ sessionId }, "Server connected and transport initialized");
+ logger22.info(
  {
  sessionId,
  bodyKeys: Object.keys(body || {}),
@@ -8319,9 +8303,9 @@ var handleExistingSessionRequest = async (body, executionContext, validatedConte
  );
  try {
  await transport.handleRequest(req, res, body);
- logger23.info({ sessionId }, "Successfully handled MCP request");
+ logger22.info({ sessionId }, "Successfully handled MCP request");
  } catch (transportError) {
- logger23.error(
+ logger22.error(
  {
  sessionId,
  error: transportError,
@@ -8372,13 +8356,13 @@ app4.openapi(
  }
  const { executionContext } = paramValidation;
  const body = await c.req.json();
- logger23.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
+ logger22.info({ body, bodyKeys: Object.keys(body || {}) }, "Parsed request body");
  const isInitRequest = body.method === "initialize";
  const { req, res } = toReqRes(c.req.raw);
  const validatedContext = c.get("validatedContext") || {};
  const credentialStores = c.get("credentialStores");
- logger23.info({ validatedContext }, "Validated context");
- logger23.info({ req }, "request");
+ logger22.info({ validatedContext }, "Validated context");
+ logger22.info({ req }, "request");
  if (isInitRequest) {
  return await handleInitializationRequest(
  body,
@@ -8400,7 +8384,7 @@ app4.openapi(
  );
  }
  } catch (e) {
- logger23.error(
+ logger22.error(
  {
  error: e instanceof Error ? e.message : e,
  stack: e instanceof Error ? e.stack : void 0
@@ -8412,7 +8396,7 @@ app4.openapi(
  }
  );
  app4.get("/", async (c) => {
- logger23.info("Received GET MCP request");
+ logger22.info("Received GET MCP request");
  return c.json(
  {
  jsonrpc: "2.0",
@@ -8426,7 +8410,7 @@ app4.get("/", async (c) => {
  );
  });
  app4.delete("/", async (c) => {
- logger23.info("Received DELETE MCP request");
+ logger22.info("Received DELETE MCP request");
  return c.json(
  {
  jsonrpc: "2.0",
@@ -8506,9 +8490,9 @@ function createExecutionHono(serverConfig, credentialStores) {
  if (!isExpectedError) {
  const errorMessage = err instanceof Error ? err.message : String(err);
  const errorStack = err instanceof Error ? err.stack : void 0;
- const logger24 = getLogger();
- if (logger24) {
- logger24.error(
+ const logger23 = getLogger();
+ if (logger23) {
+ logger23.error(
  {
  error: err,
  message: errorMessage,
@@ -8520,9 +8504,9 @@ function createExecutionHono(serverConfig, credentialStores) {
  );
  }
  } else {
- const logger24 = getLogger();
- if (logger24) {
- logger24.error(
+ const logger23 = getLogger();
+ if (logger23) {
+ logger23.error(
  {
  error: err,
  path: c.req.path,
@@ -8539,9 +8523,9 @@ function createExecutionHono(serverConfig, credentialStores) {
  const response = err.getResponse();
  return response;
  } catch (responseError) {
- const logger24 = getLogger();
- if (logger24) {
- logger24.error({ error: responseError }, "Error while handling HTTPException response");
+ const logger23 = getLogger();
+ if (logger23) {
+ logger23.error({ error: responseError }, "Error while handling HTTPException response");
  }
  }
  }