@inkeep/agents-run-api 0.2.2 → 0.5.0

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.
package/dist/index.cjs CHANGED
@@ -5,6 +5,13 @@ Object.defineProperty(exports, '__esModule', { value: true });
  var agentsCore = require('@inkeep/agents-core');
  var z5 = require('zod');
  var nanoid = require('nanoid');
+ var otel = require('@hono/otel');
+ var zodOpenapi = require('@hono/zod-openapi');
+ var api = require('@opentelemetry/api');
+ var hono = require('hono');
+ var cors = require('hono/cors');
+ var httpException = require('hono/http-exception');
+ var requestId = require('hono/request-id');
  var autoInstrumentationsNode = require('@opentelemetry/auto-instrumentations-node');
  var baggageSpanProcessor = require('@opentelemetry/baggage-span-processor');
  var contextAsyncHooks = require('@opentelemetry/context-async-hooks');
@@ -14,12 +21,6 @@ var resources = require('@opentelemetry/resources');
  var sdkNode = require('@opentelemetry/sdk-node');
  var sdkTraceBase = require('@opentelemetry/sdk-trace-base');
  var semanticConventions = require('@opentelemetry/semantic-conventions');
- var zodOpenapi = require('@hono/zod-openapi');
- var api = require('@opentelemetry/api');
- var hono = require('hono');
- var cors = require('hono/cors');
- var httpException = require('hono/http-exception');
- var requestId = require('hono/request-id');
  var factory = require('hono/factory');
  var swaggerUi = require('@hono/swagger-ui');
  var streaming = require('hono/streaming');
@@ -27,14 +28,15 @@ var destr = require('destr');
  var traverse = require('traverse');
  var ai = require('ai');
  var anthropic = require('@ai-sdk/anthropic');
+ var gateway = require('@ai-sdk/gateway');
  var google = require('@ai-sdk/google');
  var openai = require('@ai-sdk/openai');
+ var aiSdkProvider = require('@openrouter/ai-sdk-provider');
  var jmespath = require('jmespath');
  var mcp_js = require('@modelcontextprotocol/sdk/server/mcp.js');
  var streamableHttp_js = require('@modelcontextprotocol/sdk/server/streamableHttp.js');
  var v3 = require('zod/v3');
  var fetchToNode = require('fetch-to-node');
- var otel = require('@hono/otel');
 
  function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
 
@@ -67,8 +69,7 @@ var init_env = __esm({
  NANGO_SECRET_KEY: z5.z.string().optional(),
  OPENAI_API_KEY: z5.z.string().optional(),
  ANTHROPIC_API_KEY: z5.z.string(),
- INKEEP_AGENTS_RUN_API_BYPASS_SECRET: z5.z.string().optional(),
- OTEL_MAX_EXPORT_BATCH_SIZE: z5.z.coerce.number().optional()
+ INKEEP_AGENTS_RUN_API_BYPASS_SECRET: z5.z.string().optional()
  });
  parseEnv = () => {
  try {
@@ -282,48 +283,48 @@ var init_conversations = __esm({
  init_dbClient();
  }
  });
-
- // src/instrumentation.ts
- init_env();
- var maxExportBatchSize = env.OTEL_MAX_EXPORT_BATCH_SIZE ?? (env.ENVIRONMENT === "development" ? 1 : 512);
  var otlpExporter = new exporterTraceOtlpHttp.OTLPTraceExporter();
- var batchProcessor = new sdkTraceBase.BatchSpanProcessor(otlpExporter, {
- maxExportBatchSize
- });
- var resource = resources.resourceFromAttributes({
+ var defaultBatchProcessor = new sdkTraceBase.BatchSpanProcessor(otlpExporter);
+ var defaultResource = resources.resourceFromAttributes({
  [semanticConventions.ATTR_SERVICE_NAME]: "inkeep-agents-run-api"
  });
- var sdk = new sdkNode.NodeSDK({
- resource,
- contextManager: new contextAsyncHooks.AsyncLocalStorageContextManager(),
- textMapPropagator: new core.CompositePropagator({
- propagators: [new core.W3CTraceContextPropagator(), new core.W3CBaggagePropagator()]
- }),
- spanProcessors: [new baggageSpanProcessor.BaggageSpanProcessor(baggageSpanProcessor.ALLOW_ALL_BAGGAGE_KEYS), batchProcessor],
- instrumentations: [
- autoInstrumentationsNode.getNodeAutoInstrumentations({
- "@opentelemetry/instrumentation-http": {
- enabled: true,
- requestHook: (span, request) => {
- const url = request?.url ?? request?.path;
- if (!url) return;
- const u = new URL(url, "http://localhost");
- span.updateName(`${request?.method || "UNKNOWN"} ${u.pathname}`);
- }
- },
- "@opentelemetry/instrumentation-undici": {
- requestHook: (span) => {
- const method = span.attributes?.["http.request.method"];
- const host = span.attributes?.["server.address"];
- const path = span.attributes?.["url.path"];
- if (method && path)
- span.updateName(host ? `${method} ${host}${path}` : `${method} ${path}`);
- }
+ var defaultInstrumentations = [
+ autoInstrumentationsNode.getNodeAutoInstrumentations({
+ "@opentelemetry/instrumentation-http": {
+ enabled: true,
+ requestHook: (span, request) => {
+ const url = request?.url ?? request?.path;
+ if (!url) return;
+ const u = new URL(url, "http://localhost");
+ span.updateName(`${request?.method || "UNKNOWN"} ${u.pathname}`);
  }
- })
- ]
+ },
+ "@opentelemetry/instrumentation-undici": {
+ requestHook: (span) => {
+ const method = span.attributes?.["http.request.method"];
+ const host = span.attributes?.["server.address"];
+ const path = span.attributes?.["url.path"];
+ if (method && path)
+ span.updateName(host ? `${method} ${host}${path}` : `${method} ${path}`);
+ }
+ }
+ })
+ ];
+ var defaultSpanProcessors = [
+ new baggageSpanProcessor.BaggageSpanProcessor(baggageSpanProcessor.ALLOW_ALL_BAGGAGE_KEYS),
+ defaultBatchProcessor
+ ];
+ var defaultContextManager = new contextAsyncHooks.AsyncLocalStorageContextManager();
+ var defaultTextMapPropagator = new core.CompositePropagator({
+ propagators: [new core.W3CTraceContextPropagator(), new core.W3CBaggagePropagator()]
+ });
+ new sdkNode.NodeSDK({
+ resource: defaultResource,
+ contextManager: defaultContextManager,
+ textMapPropagator: defaultTextMapPropagator,
+ spanProcessors: defaultSpanProcessors,
+ instrumentations: defaultInstrumentations
  });
- sdk.start();
  init_dbClient();
  init_env();
 
@@ -1226,7 +1227,7 @@ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
  throw new Error("Agent ID is required");
  }
  const dbAgent = await agentsCore.getAgentById(dbClient_default)({
- scopes: { tenantId, projectId },
+ scopes: { tenantId, projectId, graphId },
  agentId
  });
  if (!dbAgent) {
@@ -1245,6 +1246,41 @@ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
  // src/agents/generateTaskHandler.ts
  init_dbClient();
 
+ // src/utils/model-resolver.ts
+ init_dbClient();
+ async function resolveModelConfig(graphId, agent) {
+ if (agent.models?.base?.model) {
+ return {
+ base: agent.models.base,
+ structuredOutput: agent.models.structuredOutput || agent.models.base,
+ summarizer: agent.models.summarizer || agent.models.base
+ };
+ }
+ const graph = await agentsCore.getAgentGraphById(dbClient_default)({
+ scopes: { tenantId: agent.tenantId, projectId: agent.projectId, graphId }
+ });
+ if (graph?.models?.base?.model) {
+ return {
+ base: graph.models.base,
+ structuredOutput: agent.models?.structuredOutput || graph.models.structuredOutput || graph.models.base,
+ summarizer: agent.models?.summarizer || graph.models.summarizer || graph.models.base
+ };
+ }
+ const project = await agentsCore.getProject(dbClient_default)({
+ scopes: { tenantId: agent.tenantId, projectId: agent.projectId }
+ });
+ if (project?.models?.base?.model) {
+ return {
+ base: project.models.base,
+ structuredOutput: agent.models?.structuredOutput || project.models.structuredOutput || project.models.base,
+ summarizer: agent.models?.summarizer || project.models.summarizer || project.models.base
+ };
+ }
+ throw new Error(
+ "Base model configuration is required. Please configure models at the project level."
+ );
+ }
+
  // src/agents/Agent.ts
  init_conversations();
  init_dbClient();
@@ -1266,24 +1302,19 @@ function completionOp(agentId, iterations) {
  }
  };
  }
- function errorOp(error, agentId) {
+ function errorOp(message, agentId, severity = "error", code) {
  return {
  type: "error",
- ctx: {
- error,
- agent: agentId
- }
+ message,
+ agent: agentId,
+ severity,
+ code,
+ timestamp: Date.now()
  };
  }
  function generateToolId() {
  return `tool_${nanoid.nanoid(8)}`;
  }
- function statusUpdateOp(ctx) {
- return {
- type: "status_update",
- ctx
- };
- }
  var logger4 = agentsCore.getLogger("DataComponentSchema");
  function jsonSchemaToZod(jsonSchema) {
  if (!jsonSchema || typeof jsonSchema !== "object") {
@@ -1368,6 +1399,17 @@ __publicField(_ArtifactReferenceSchema, "ARTIFACT_PROPS_SCHEMA", {
  required: ["artifact_id", "task_id"]
  });
  var ArtifactReferenceSchema = _ArtifactReferenceSchema;
+
+ // src/utils/default-status-schemas.ts
+ var retrieveStatusSchema = {
+ type: "retrieve",
+ description: 'Use this when the system found or retrieved specific information from searches, queries, or lookups. ONLY report ACTUAL findings that appear explicitly in the tool results - never make up data, names, numbers, or details. The label must state the SPECIFIC discovery (e.g., "Found 3 authentication methods", "Database contains 500 records", "API supports JSON format") not the act of searching. Every detail must be traceable to actual tool output. NEVER invent placeholder names, fictional data, or information not present in the activities.'
+ };
+ var actionStatusSchema = {
+ type: "action",
+ description: 'Use this when the system executed a tool or performed an operation that modified state or had side effects. ONLY report ACTUAL tool executions and their results as they appear in the tool outputs - never make up tool names, parameters, or outcomes. The label must describe what specific action was performed and its concrete result based on actual tool execution data. DO NOT make up examples like "Ran test suite with X passes" unless a test suite was ACTUALLY run and reported X passes. DO NOT say "Executed database query" unless a database query was ACTUALLY executed. Only report what literally happened. NEVER invent tool names, execution results, or details not explicitly present in the tool execution activities. If a tool failed, report the actual failure, not imagined success.'
+ };
+ var defaultStatusSchemas = [retrieveStatusSchema, actionStatusSchema];
  var logger5 = agentsCore.getLogger("ModelFactory");
  var _ModelFactory = class _ModelFactory {
  /**
@@ -1381,6 +1423,18 @@ var _ModelFactory = class _ModelFactory {
  return openai.createOpenAI(config);
  case "google":
  return google.createGoogleGenerativeAI(config);
+ case "openrouter":
+ return {
+ ...aiSdkProvider.createOpenRouter(config),
+ textEmbeddingModel: () => {
+ throw new Error("OpenRouter does not support text embeddings");
+ },
+ imageModel: () => {
+ throw new Error("OpenRouter does not support image generation");
+ }
+ };
+ case "gateway":
+ return gateway.createGateway(config);
  default:
  throw new Error(`Unsupported provider: ${provider}`);
  }
@@ -1413,6 +1467,9 @@ var _ModelFactory = class _ModelFactory {
  );
  }
  const modelSettings = config;
+ if (!modelSettings.model) {
+ throw new Error("Model configuration is required");
+ }
  const modelString = modelSettings.model.trim();
  const { provider, modelName } = _ModelFactory.parseModelString(modelString);
  logger5.debug(
@@ -1437,26 +1494,29 @@ var _ModelFactory = class _ModelFactory {
  return openai.openai(modelName);
  case "google":
  return google.google(modelName);
+ case "openrouter":
+ return aiSdkProvider.openrouter(modelName);
+ case "gateway":
+ return gateway.gateway(modelName);
  default:
- throw new Error(`Unsupported provider: ${provider}`);
+ throw new Error(
+ `Unsupported provider: ${provider}. Supported providers are: ${_ModelFactory.BUILT_IN_PROVIDERS.join(", ")}. To access other models, use OpenRouter (openrouter/model-id) or Vercel AI Gateway (gateway/model-id).`
+ );
  }
  }
  /**
  * Parse model string to extract provider and model name
  * Examples: "anthropic/claude-sonnet-4" -> { provider: "anthropic", modelName: "claude-sonnet-4" }
+ * "openrouter/anthropic/claude-sonnet-4" -> { provider: "openrouter", modelName: "anthropic/claude-sonnet-4" }
  * "claude-sonnet-4" -> { provider: "anthropic", modelName: "claude-sonnet-4" } (default to anthropic)
  */
  static parseModelString(modelString) {
  if (modelString.includes("/")) {
  const [provider, ...modelParts] = modelString.split("/");
  const normalizedProvider = provider.toLowerCase();
- if (!_ModelFactory.SUPPORTED_PROVIDERS.includes(normalizedProvider)) {
- logger5.error(
- { provider: normalizedProvider, modelName: modelParts.join("/") },
- "Unsupported provider detected, falling back to anthropic"
- );
+ if (!_ModelFactory.BUILT_IN_PROVIDERS.includes(normalizedProvider)) {
  throw new Error(
- `Unsupported provider: ${normalizedProvider}. Please provide a model in the format of provider/model-name.`
+ `Unsupported provider: ${normalizedProvider}. Supported providers are: ${_ModelFactory.BUILT_IN_PROVIDERS.join(", ")}. To access other models, use OpenRouter (openrouter/model-id) or Vercel AI Gateway (gateway/model-id).`
  );
  }
  return {
@@ -1465,9 +1525,7 @@ var _ModelFactory = class _ModelFactory {
  // In case model name has slashes
  };
  }
- throw new Error(
- `Invalid model provided: ${modelString}. Please provide a model in the format of provider/model-name.`
- );
+ throw new Error(`No provider specified in model string: ${modelString}`);
  }
  /**
  * Get generation parameters from provider options
@@ -1531,15 +1589,20 @@ var _ModelFactory = class _ModelFactory {
  }
  };
  /**
- * Supported providers for security validation
+ * Built-in providers that have special handling
  */
- __publicField(_ModelFactory, "SUPPORTED_PROVIDERS", ["anthropic", "openai", "google"]);
+ __publicField(_ModelFactory, "BUILT_IN_PROVIDERS", [
+ "anthropic",
+ "openai",
+ "google",
+ "openrouter",
+ "gateway"
+ ]);
  var ModelFactory = _ModelFactory;
 
  // src/utils/graph-session.ts
  init_conversations();
  init_dbClient();
- var tracer = agentsCore.getTracer("agents-run-api");
 
  // src/utils/stream-registry.ts
  var streamHelperRegistry = /* @__PURE__ */ new Map();
@@ -1555,6 +1618,7 @@ function getStreamHelper(requestId2) {
  function unregisterStreamHelper(requestId2) {
  streamHelperRegistry.delete(requestId2);
  }
+ var tracer = agentsCore.getTracer("agents-run-api");
 
  // src/utils/graph-session.ts
  var logger6 = agentsCore.getLogger("GraphSession");
@@ -1863,7 +1927,6 @@ var GraphSession = class {
  }
  this.isGeneratingUpdate = true;
  const statusUpdateState = this.statusUpdateState;
- const graphId = this.graphId;
  try {
  const streamHelper = getStreamHelper(this.sessionId);
  if (!streamHelper) {
@@ -1876,81 +1939,49 @@ var GraphSession = class {
  }
  const now = Date.now();
  const elapsedTime = now - statusUpdateState.startTime;
- let operation;
- if (statusUpdateState.config.statusComponents && statusUpdateState.config.statusComponents.length > 0) {
- const result = await this.generateStructuredStatusUpdate(
- this.events.slice(statusUpdateState.lastEventCount),
- elapsedTime,
- statusUpdateState.config.statusComponents,
- statusUpdateState.summarizerModel,
- this.previousSummaries
- );
- if (result.operations && result.operations.length > 0) {
- for (const op of result.operations) {
- if (!op || !op.type || !op.data || Object.keys(op.data).length === 0) {
- logger6.warn(
- {
- sessionId: this.sessionId,
- operation: op
- },
- "Skipping empty or invalid structured operation"
- );
- continue;
- }
- const operationToSend = {
- type: "status_update",
- ctx: {
- operationType: op.type,
- label: op.data.label,
- data: Object.fromEntries(
- Object.entries(op.data).filter(([key]) => !["label", "type"].includes(key))
- )
- }
- };
- await streamHelper.writeOperation(operationToSend);
- }
- const summaryTexts = result.operations.map(
- (op) => JSON.stringify({ type: op.type, data: op.data })
- );
- this.previousSummaries.push(...summaryTexts);
- if (this.statusUpdateState) {
- this.statusUpdateState.lastUpdateTime = now;
- this.statusUpdateState.lastEventCount = this.events.length;
+ const statusComponents = statusUpdateState.config.statusComponents && statusUpdateState.config.statusComponents.length > 0 ? statusUpdateState.config.statusComponents : defaultStatusSchemas;
+ const result = await this.generateStructuredStatusUpdate(
+ this.events.slice(statusUpdateState.lastEventCount),
+ elapsedTime,
+ statusComponents,
+ statusUpdateState.summarizerModel,
+ this.previousSummaries
+ );
+ if (result.summaries && result.summaries.length > 0) {
+ for (const summary of result.summaries) {
+ if (!summary || !summary.type || !summary.data || !summary.data.label || Object.keys(summary.data).length === 0) {
+ logger6.warn(
+ {
+ sessionId: this.sessionId,
+ summary
+ },
+ "Skipping empty or invalid structured operation"
+ );
+ continue;
  }
- return;
+ const summaryToSend = {
+ type: summary.data.type || summary.type,
+ // Preserve the actual custom type from LLM
+ label: summary.data.label,
+ details: Object.fromEntries(
+ Object.entries(summary.data).filter(([key]) => !["label", "type"].includes(key))
+ )
+ };
+ await streamHelper.writeSummary(summaryToSend);
  }
- } else {
- const summary = await this.generateProgressSummary(
- this.events.slice(statusUpdateState.lastEventCount),
- elapsedTime,
- statusUpdateState.summarizerModel,
- this.previousSummaries
+ const summaryTexts = result.summaries.map(
+ (summary) => JSON.stringify({ type: summary.type, data: summary.data })
  );
- this.previousSummaries.push(summary);
- operation = statusUpdateOp({
- summary,
- eventCount: this.events.length,
- elapsedTime,
- currentPhase: "processing",
- activeAgent: "system",
- graphId,
- sessionId: this.sessionId
- });
+ this.previousSummaries.push(...summaryTexts);
+ if (this.statusUpdateState) {
+ this.statusUpdateState.lastUpdateTime = now;
+ this.statusUpdateState.lastEventCount = this.events.length;
+ }
+ return;
  }
  if (this.previousSummaries.length > 3) {
  this.previousSummaries.shift();
  }
- if (!operation || !operation.type || !operation.ctx) {
- logger6.warn(
- {
- sessionId: this.sessionId,
- operation
- },
- "Skipping empty or invalid status update operation"
- );
- return;
- }
- await streamHelper.writeOperation(operation);
  if (this.statusUpdateState) {
  this.statusUpdateState.lastUpdateTime = now;
  this.statusUpdateState.lastEventCount = this.events.length;
@@ -2034,103 +2065,6 @@ var GraphSession = class {
  this.statusUpdateState.updateLock = false;
  }
  }
- /**
- * Generate user-focused progress summary hiding internal operations
- */
- async generateProgressSummary(newEvents, elapsedTime, summarizerModel, previousSummaries = []) {
- return tracer.startActiveSpan(
- "graph_session.generate_progress_summary",
- {
- attributes: {
- "graph_session.id": this.sessionId,
- "events.count": newEvents.length,
- "elapsed_time.seconds": Math.round(elapsedTime / 1e3),
- "llm.model": summarizerModel?.model,
- "previous_summaries.count": previousSummaries.length
- }
- },
- async (span) => {
- try {
- const userVisibleActivities = this.extractUserVisibleActivities(newEvents);
- let conversationContext = "";
- if (this.tenantId && this.projectId) {
- try {
- const conversationHistory = await getFormattedConversationHistory({
- tenantId: this.tenantId,
- projectId: this.projectId,
- conversationId: this.sessionId,
- options: {
- limit: 10,
- // Get recent conversation context
- maxOutputTokens: 2e3
- },
- filters: {}
- });
- conversationContext = conversationHistory.trim() ? `
- User's Question/Context:
- ${conversationHistory}
- ` : "";
- } catch (error) {
- logger6.warn(
- { sessionId: this.sessionId, error },
- "Failed to fetch conversation history for status update"
- );
- }
- }
- const previousSummaryContext = previousSummaries.length > 0 ? `
- Previous updates provided to user:
- ${previousSummaries.map((s, i) => `${i + 1}. ${s}`).join("\n")}
- ` : "";
- const basePrompt = `Generate a meaningful status update that tells the user what specific information or result was just found/achieved.${conversationContext}${previousSummaries.length > 0 ? `
- ${previousSummaryContext}` : ""}
-
- Activities:
- ${userVisibleActivities.join("\n") || "No New Activities"}
-
- Describe the ACTUAL finding, result, or specific information discovered (e.g., "Found Slack bot requires admin permissions", "Identified 3 channel types for ingestion", "Configuration requires OAuth token").
-
- ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
- const prompt = basePrompt;
- let modelToUse = summarizerModel;
- if (!summarizerModel?.model?.trim()) {
- if (!this.statusUpdateState?.baseModel?.model?.trim()) {
- throw new Error(
- "Either summarizer or base model is required for progress summary generation. Please configure models at the project level."
- );
- }
- modelToUse = this.statusUpdateState.baseModel;
- }
- const model = ModelFactory.createModel(modelToUse);
- const { text } = await ai.generateText({
- model,
- prompt,
- experimental_telemetry: {
- isEnabled: true,
- functionId: `status_update_${this.sessionId}`,
- recordInputs: true,
- recordOutputs: true,
- metadata: {
- operation: "progress_summary_generation",
- sessionId: this.sessionId
- }
- }
- });
- span.setAttributes({
- "summary.length": text.trim().length,
- "user_activities.count": userVisibleActivities.length
- });
- span.setStatus({ code: api.SpanStatusCode.OK });
- return text.trim();
- } catch (error) {
- agentsCore.setSpanWithError(span, error);
- logger6.error({ error }, "Failed to generate summary, using fallback");
- return this.generateFallbackSummary(newEvents, elapsedTime);
- } finally {
- span.end();
- }
- }
- );
- }
  /**
  * Generate structured status update using configured data components
  */
@@ -2209,15 +2143,45 @@ Rules:
  - Fill in data for relevant components only
  - Use 'no_relevant_updates' if nothing substantially new to report. DO NOT WRITE LABELS OR USE OTHER COMPONENTS IF YOU USE THIS COMPONENT.
  - Never repeat previous values, make every update EXTREMELY unique. If you cannot do that the update is not worth mentioning.
- - Labels MUST contain the ACTUAL information discovered ("Found X", "Learned Y", "Discovered Z requires A")
+ - Labels MUST be short 3-7 word phrases with ACTUAL information discovered. NEVER MAKE UP SOMETHING WITHOUT BACKING IT UP WITH ACTUAL INFORMATION.
+ - Use sentence case: only capitalize the first word and proper nouns (e.g., "Admin permissions required", not "Admin Permissions Required"). ALWAYS capitalize the first word of the label.
  - DO NOT use action words like "Searching", "Processing", "Analyzing" - state what was FOUND
  - Include specific details, numbers, requirements, or insights discovered
- - You are ONE unified AI system - NEVER mention agents, transfers, delegations, or routing
- - CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", or any internal system terminology in labels
- - Present all operations as seamless actions by a single system
- - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE ALL INTERNAL OPERATIONS!
- - Bad examples: "Transferring to search agent", "Delegating task", "Routing request", "Processing request", or not using the no_relevant_updates
- - Good examples: "Slack bot needs admin privileges", "Found 3-step OAuth flow required", "Channel limit is 500 per workspace", or use the no_relevant_updates component if nothing new to report.
+ - Examples: "Admin permissions required", "Three OAuth steps found", "Token expires daily"
+
+ CRITICAL - HIDE ALL INTERNAL SYSTEM OPERATIONS:
+ - You are ONE unified AI system presenting results to the user
+ - ABSOLUTELY FORBIDDEN WORDS/PHRASES: "transfer", "transferring", "delegation", "delegating", "delegate", "agent", "routing", "route", "artifact", "saving artifact", "stored artifact", "artifact saved", "continuing", "passing to", "handing off", "switching to"
+ - NEVER reveal internal architecture: No mentions of different agents, components, systems, or modules working together
+ - NEVER mention artifact operations: Users don't need to know about data being saved, stored, or organized internally
+ - NEVER describe handoffs or transitions: Present everything as one seamless operation
+ - If you see "transfer", "delegation_sent", "delegation_returned", or "artifact_saved" events - IGNORE THEM or translate to user-facing information only
+ - Focus ONLY on actual discoveries, findings, and results that matter to the user
+
+ - Bad examples:
+ * "Transferring to search agent"
+ * "Delegating research task"
+ * "Routing to QA specialist"
+ * "Artifact saved successfully"
+ * "Storing results for later"
+ * "Passing request to tool handler"
+ * "Continuing with analysis"
+ * "Handing off to processor"
+ - Good examples:
+ * "Slack bot needs admin privileges"
+ * "Found 3-step OAuth flow required"
+ * "Channel limit is 500 per workspace"
+ * Use no_relevant_updates if nothing new to report
+
+ CRITICAL ANTI-HALLUCINATION RULES:
+ - NEVER MAKE UP SOMETHING WITHOUT BACKING IT UP WITH ACTUAL INFORMATION. EVERY SINGLE UPDATE MUST BE BACKED UP WITH ACTUAL INFORMATION.
+ - DO NOT MAKE UP PEOPLE, NAMES, PLACES, THINGS, ORGANIZATIONS, OR INFORMATION. IT IS OBVIOUS WHEN A PERSON/ENTITY DOES NOT EXIST.
+ - Only report facts that are EXPLICITLY mentioned in the activities or tool results
+ - If you don't have concrete information about something, DO NOT mention it
+ - Never invent names like "John Doe", "Alice", "Bob", or any other placeholder names
+ - Never create fictional companies, products, or services
+ - If a tool returned no results or an error, DO NOT pretend it found something
+ - Every detail in your status update must be traceable back to the actual activities provided
 
  REMEMBER YOU CAN ONLY USE 'no_relevant_updates' ALONE! IT CANNOT BE CONCATENATED WITH OTHER STATUS UPDATES!
 
@@ -2232,6 +2196,9 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  }
  modelToUse = this.statusUpdateState.baseModel;
  }
+ if (!modelToUse) {
+ throw new Error("No model configuration available");
+ }
  const model = ModelFactory.createModel(modelToUse);
  const { object } = await ai.generateObject({
  model,
@@ -2249,29 +2216,29 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  }
  });
  const result = object;
- const operations = [];
+ const summaries = [];
  for (const [componentId, data] of Object.entries(result)) {
  if (componentId === "no_relevant_updates") {
  continue;
  }
  if (data && typeof data === "object" && Object.keys(data).length > 0) {
- operations.push({
+ summaries.push({
  type: componentId,
  data
  });
  }
  }
  span.setAttributes({
- "operations.count": operations.length,
+ "summaries.count": summaries.length,
  "user_activities.count": userVisibleActivities.length,
  "result_keys.count": Object.keys(result).length
  });
  span.setStatus({ code: api.SpanStatusCode.OK });
- return { operations };
+ return { summaries };
  } catch (error) {
  agentsCore.setSpanWithError(span, error);
  logger6.error({ error }, "Failed to generate structured update, using fallback");
- return { operations: [] };
+ return { summaries: [] };
  } finally {
  span.end();
  }
@@ -2287,7 +2254,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  }
  return z5.z.object({
  label: z5.z.string().describe(
- 'A short 3-5 word phrase, that is a descriptive label for the update component. This Label must be EXTREMELY unique to represent the UNIQUE update we are providing. The ACTUAL finding or result, not the action. What specific information was discovered? (e.g., "Slack requires OAuth 2.0 setup", "Found 5 integration methods", "API rate limit is 100/minute"). Include the actual detail or insight, not just that you searched or processed.'
+ 'A short 3-5 word phrase, that is a descriptive label for the update component. This Label must be EXTREMELY unique to represent the UNIQUE update we are providing. The ACTUAL finding or result, not the action. What specific information was discovered? (e.g., "Slack requires OAuth 2.0 setup", "Found 5 integration methods", "API rate limit is 100/minute"). Include the actual detail or insight, not just that you searched or processed. CRITICAL: Only use facts explicitly found in the activities - NEVER invent names, people, organizations, or details that are not present in the actual tool results.'
  )
  });
  }
@@ -2297,7 +2264,7 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  buildZodSchemaFromJson(jsonSchema) {
  const properties = {};
  properties["label"] = z5.z.string().describe(
- 'A short 3-5 word phrase, that is a descriptive label for the update component. This Label must be EXTREMELY unique to represent the UNIQUE update we are providing. The SPECIFIC finding, result, or insight discovered (e.g., "Slack bot needs workspace admin role", "Found ingestion requires 3 steps", "Channel history limited to 10k messages"). State the ACTUAL information found, not that you searched. What did you LEARN or DISCOVER? What specific detail is now known?'
+ 'A short 3-5 word phrase, that is a descriptive label for the update component. This Label must be EXTREMELY unique to represent the UNIQUE update we are providing. The SPECIFIC finding, result, or insight discovered (e.g., "Slack bot needs workspace admin role", "Found ingestion requires 3 steps", "Channel history limited to 10k messages"). State the ACTUAL information found, not that you searched. What did you LEARN or DISCOVER? What specific detail is now known? CRITICAL: Only use facts explicitly found in the activities - NEVER invent names, people, organizations, or details that are not present in the actual tool results.'
  );
  for (const [key, value] of Object.entries(jsonSchema.properties)) {
  let zodType;
@@ -2374,41 +2341,12 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  );
  break;
  }
- case "transfer": {
- const data = event.data;
- activities.push(
- `\u{1F504} **Continuing**: ${data.reason || "Processing request"}
- ${data.context ? `Context: ${JSON.stringify(data.context, null, 2)}` : ""}`
- );
- break;
- }
- case "delegation_sent": {
- const data = event.data;
- activities.push(
- `\u{1F4E4} **Processing**: ${data.taskDescription}
- ${data.context ? `Context: ${JSON.stringify(data.context, null, 2)}` : ""}`
- );
- break;
- }
- case "delegation_returned": {
- const data = event.data;
- activities.push(
- `\u{1F4E5} **Completed subtask**
- Result: ${JSON.stringify(data.result, null, 2)}`
- );
- break;
- }
- case "artifact_saved": {
- const data = event.data;
- activities.push(
- `\u{1F4BE} **Artifact Saved**: ${data.artifactType}
- ID: ${data.artifactId}
- Task: ${data.taskId}
- ${data.summaryData ? `Summary: ${data.summaryData}` : ""}
- ${data.fullData ? `Full Data: ${data.fullData}` : ""}`
- );
+ // INTERNAL OPERATIONS - DO NOT EXPOSE TO STATUS UPDATES
+ case "transfer":
+ case "delegation_sent":
+ case "delegation_returned":
+ case "artifact_saved":
  break;
- }
  case "agent_reasoning": {
  const data = event.data;
  activities.push(
@@ -2433,21 +2371,6 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
  }
  return activities;
  }
- /**
- * Generate fallback summary when LLM fails
- */
- generateFallbackSummary(events, elapsedTime) {
- const timeStr = Math.round(elapsedTime / 1e3);
- const toolCalls = events.filter((e) => e.eventType === "tool_execution").length;
- const artifacts = events.filter((e) => e.eventType === "artifact_saved").length;
- if (artifacts > 0) {
- return `Generated ${artifacts} result${artifacts > 1 ? "s" : ""} so far (${timeStr}s elapsed)`;
- } else if (toolCalls > 0) {
- return `Used ${toolCalls} tool${toolCalls > 1 ? "s" : ""} to gather information (${timeStr}s elapsed)`;
- } else {
- return `Processing your request... (${timeStr}s elapsed)`;
- }
- }
  /**
  * Process a single artifact to generate name and description using conversation context
  */
@@ -2522,6 +2445,9 @@ Make it specific and relevant.`;
  }
  modelToUse = this.statusUpdateState.baseModel;
  }
+ if (!modelToUse) {
+ throw new Error("No model configuration available");
+ }
  const model = ModelFactory.createModel(modelToUse);
  const schema = z5.z.object({
  name: z5.z.string().max(50).describe("Concise, descriptive name for the artifact"),
@@ -2803,6 +2729,7 @@ var _ArtifactParser = class _ArtifactParser {
  }
  for (let i = matches.length - 1; i >= 0; i--) {
  const match = matches[i];
+ if (match.index === void 0) continue;
  const startIdx = match.index;
  const textAfterMatch = text.slice(startIdx);
  if (!textAfterMatch.includes("/>")) {
@@ -2852,7 +2779,8 @@ var _ArtifactParser = class _ArtifactParser {
  taskId,
  name: artifact.name || "Processing...",
  description: artifact.description || "Name and description being generated...",
- artifactType: artifact.metadata?.artifactType,
+ type: artifact.metadata?.artifactType || artifact.artifactType,
+ // Map artifactType to type for consistency
  artifactSummary: artifact.parts?.[0]?.data?.summary || {}
  };
  }
@@ -2869,10 +2797,11 @@ var _ArtifactParser = class _ArtifactParser {
  let lastIndex = 0;
  for (const match of matches) {
  const [fullMatch, artifactId, taskId] = match;
+ if (match.index === void 0) continue;
  const matchStart = match.index;
  if (matchStart > lastIndex) {
  const textBefore = text.slice(lastIndex, matchStart);
- if (textBefore.trim()) {
+ if (textBefore) {
  parts.push({ kind: "text", text: textBefore });
  }
  }
@@ -2884,7 +2813,7 @@ var _ArtifactParser = class _ArtifactParser {
  }
  if (lastIndex < text.length) {
  const remainingText = text.slice(lastIndex);
- if (remainingText.trim()) {
+ if (remainingText) {
  parts.push({ kind: "text", text: remainingText });
  }
  }
@@ -2994,8 +2923,9 @@ __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:
  var ArtifactParser = _ArtifactParser;
 
  // src/utils/incremental-stream-parser.ts
- var logger8 = agentsCore.getLogger("IncrementalStreamParser");
- var IncrementalStreamParser = class {
+ agentsCore.getLogger("IncrementalStreamParser");
+ var _IncrementalStreamParser = class _IncrementalStreamParser {
+ // Max number of streamed component IDs to track
  constructor(streamHelper, tenantId, contextId) {
  __publicField(this, "buffer", "");
  __publicField(this, "pendingTextBuffer", "");
@@ -3005,6 +2935,9 @@ var IncrementalStreamParser = class {
  __publicField(this, "collectedParts", []);
  __publicField(this, "contextId");
  __publicField(this, "lastChunkWasToolResult", false);
+ __publicField(this, "componentAccumulator", {});
+ __publicField(this, "lastStreamedComponents", /* @__PURE__ */ new Map());
+ __publicField(this, "componentSnapshots", /* @__PURE__ */ new Map());
  this.streamHelper = streamHelper;
  this.contextId = contextId;
  this.artifactParser = new ArtifactParser(tenantId);
@@ -3019,7 +2952,7 @@ var IncrementalStreamParser = class {
  * Process a new text chunk for text streaming (handles artifact markers)
  */
  async processTextChunk(chunk) {
- if (this.lastChunkWasToolResult && this.buffer === "" && chunk.trim()) {
+ if (this.lastChunkWasToolResult && this.buffer === "" && chunk) {
  chunk = "\n\n" + chunk;
  this.lastChunkWasToolResult = false;
  }
@@ -3031,120 +2964,167 @@ var IncrementalStreamParser = class {
  this.buffer = parseResult.remainingBuffer;
  }
  /**
- * Process a new object chunk for object streaming (handles JSON objects with artifact references)
+ * Process object deltas directly from Vercel AI SDK's fullStream
+ * Accumulates components and streams them when they're stable (unchanged between deltas)
  */
- async processObjectChunk(chunk) {
- this.buffer += chunk;
- const parseResult = await this.parseObjectBuffer();
- for (const part of parseResult.completeParts) {
- await this.streamPart(part);
+ async processObjectDelta(delta) {
+ if (!delta || typeof delta !== "object") {
+ return;
  }
- this.buffer = parseResult.remainingBuffer;
- }
- /**
- * Process tool call stream for structured output, streaming components as they complete
- */
- async processToolCallStream(stream2, targetToolName) {
- let jsonBuffer = "";
- let componentBuffer = "";
- let depth = 0;
- let componentsStreamed = 0;
- const MAX_BUFFER_SIZE = 5 * 1024 * 1024;
- for await (const part of stream2) {
- if (part.type === "tool-call-delta" && part.toolName === targetToolName) {
- const delta = part.argsTextDelta || "";
- if (jsonBuffer.length + delta.length > MAX_BUFFER_SIZE) {
- logger8.warn(
- { bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE },
- "JSON buffer exceeded maximum size, truncating"
- );
- jsonBuffer = jsonBuffer.slice(-MAX_BUFFER_SIZE / 2);
+ this.componentAccumulator = this.deepMerge(this.componentAccumulator, delta);
+ if (this.componentAccumulator.dataComponents && Array.isArray(this.componentAccumulator.dataComponents)) {
+ const components = this.componentAccumulator.dataComponents;
+ const currentComponentIds = new Set(components.filter((c) => c?.id).map((c) => c.id));
+ for (const [componentId, snapshot] of this.componentSnapshots.entries()) {
+ if (!currentComponentIds.has(componentId) && !this.lastStreamedComponents.has(componentId)) {
+ try {
+ const component = JSON.parse(snapshot);
+ if (this.isComponentComplete(component)) {
+ await this.streamComponent(component);
+ }
+ } catch (e) {
+ }
  }
- jsonBuffer += delta;
- for (const char of delta) {
- if (componentBuffer.length > MAX_BUFFER_SIZE) {
- logger8.warn(
- { bufferSize: componentBuffer.length, maxSize: MAX_BUFFER_SIZE },
- "Component buffer exceeded maximum size, resetting"
- );
- componentBuffer = "";
- depth = 0;
- continue;
+ }
+ for (let i = 0; i < components.length; i++) {
+ const component = components[i];
+ if (!component?.id) continue;
+ const componentKey = component.id;
+ const hasBeenStreamed = this.lastStreamedComponents.has(componentKey);
+ if (hasBeenStreamed) continue;
+ const currentSnapshot = JSON.stringify(component);
+ const previousSnapshot = this.componentSnapshots.get(componentKey);
+ this.componentSnapshots.set(componentKey, currentSnapshot);
+ if (this.componentSnapshots.size > _IncrementalStreamParser.MAX_SNAPSHOT_SIZE) {
+ const firstKey = this.componentSnapshots.keys().next().value;
+ if (firstKey) {
+ this.componentSnapshots.delete(firstKey);
  }
- componentBuffer += char;
- if (char === "{") {
- depth++;
- } else if (char === "}") {
- depth--;
- if (depth === 2 && componentBuffer.includes('"id"')) {
- const componentMatch = componentBuffer.match(/\{[^{}]*(?:\{[^{}]*\}[^{}]*)*\}/);
- if (componentMatch) {
- const MAX_COMPONENT_SIZE = 1024 * 1024;
- if (componentMatch[0].length > MAX_COMPONENT_SIZE) {
- logger8.warn(
- {
- size: componentMatch[0].length,
- maxSize: MAX_COMPONENT_SIZE
- },
- "Component exceeds size limit, skipping"
- );
- componentBuffer = "";
- continue;
- }
- try {
- const component = JSON.parse(componentMatch[0]);
- if (typeof component !== "object" || !component.id) {
- logger8.warn({ component }, "Invalid component structure, skipping");
- componentBuffer = "";
- continue;
- }
- const parts = await this.artifactParser.parseObject({
- dataComponents: [component]
- });
- for (const part2 of parts) {
- await this.streamPart(part2);
- }
- componentsStreamed++;
- componentBuffer = "";
- } catch (e) {
- logger8.debug({ error: e }, "Failed to parse component, continuing to accumulate");
- }
- }
+ }
+ if (component.name === "Text" && component.props?.text) {
+ const previousTextContent = previousSnapshot ? JSON.parse(previousSnapshot).props?.text || "" : "";
+ const currentTextContent = component.props.text || "";
+ if (currentTextContent.length > previousTextContent.length) {
+ const newText = currentTextContent.slice(previousTextContent.length);
+ if (!this.hasStartedRole) {
+ await this.streamHelper.writeRole("assistant");
+ this.hasStartedRole = true;
  }
+ await this.streamHelper.streamText(newText, 50);
+ this.collectedParts.push({
+ kind: "text",
+ text: newText
+ });
  }
- if (componentBuffer.includes('"dataComponents"') && componentBuffer.includes("[")) ;
+ continue;
  }
- } else if (part.type === "tool-call" && part.toolName === targetToolName) {
- if (part.args?.dataComponents) {
- const parts = await this.artifactParser.parseObject(part.args);
- for (const part2 of parts) {
- await this.streamPart(part2);
+ if (this.isComponentComplete(component)) {
+ const currentPropsSnapshot = JSON.stringify(component.props);
+ const previousPropsSnapshot = previousSnapshot ? JSON.stringify(JSON.parse(previousSnapshot).props) : null;
+ if (previousPropsSnapshot === currentPropsSnapshot) {
+ await this.streamComponent(component);
  }
  }
- break;
  }
  }
- logger8.debug({ componentsStreamed }, "Finished streaming components");
  }
  /**
- * Legacy method for backward compatibility - defaults to text processing
+ * Stream a component and mark it as streamed
+ * Note: Text components are handled separately with incremental streaming
  */
- async processChunk(chunk) {
- await this.processTextChunk(chunk);
+ async streamComponent(component) {
+ const parts = await this.artifactParser.parseObject({
+ dataComponents: [component]
+ });
+ for (const part of parts) {
+ await this.streamPart(part);
+ }
+ this.lastStreamedComponents.set(component.id, true);
+ if (this.lastStreamedComponents.size > _IncrementalStreamParser.MAX_STREAMED_SIZE) {
+ const firstKey = this.lastStreamedComponents.keys().next().value;
+ if (firstKey) {
+ this.lastStreamedComponents.delete(firstKey);
+ }
+ }
+ this.componentSnapshots.delete(component.id);
+ }
+ /**
+ * Check if a component has the basic structure required for streaming
+ * Requires id, name, and props object with content
+ */
+ isComponentComplete(component) {
+ if (!component || !component.id || !component.name) {
+ return false;
+ }
+ if (!component.props || typeof component.props !== "object") {
+ return false;
+ }
+ const isArtifact = component.name === "Artifact" || component.props.artifact_id && component.props.task_id;
+ if (isArtifact) {
+ return Boolean(component.props.artifact_id && component.props.task_id);
+ }
+ return true;
+ }
+ /**
+ * Deep merge helper for object deltas
+ */
+ deepMerge(target, source) {
+ if (!source) return target;
+ if (!target) return source;
+ const result = { ...target };
+ for (const key in source) {
+ if (source[key] && typeof source[key] === "object" && !Array.isArray(source[key])) {
+ result[key] = this.deepMerge(target[key], source[key]);
+ } else {
+ result[key] = source[key];
+ }
+ }
+ return result;
+ }
+ /**
+ * Legacy method for backward compatibility - defaults to text processing
+ */
+ async processChunk(chunk) {
+ await this.processTextChunk(chunk);
  }
  /**
  * Process any remaining buffer content at the end of stream
  */
  async finalize() {
- if (this.buffer.trim()) {
+ if (this.componentAccumulator.dataComponents && Array.isArray(this.componentAccumulator.dataComponents)) {
+ const components = this.componentAccumulator.dataComponents;
+ for (let i = 0; i < components.length; i++) {
+ const component = components[i];
+ if (!component?.id) continue;
+ const componentKey = component.id;
+ const hasBeenStreamed = this.lastStreamedComponents.has(componentKey);
+ if (!hasBeenStreamed && this.isComponentComplete(component) && component.name !== "Text") {
+ const parts = await this.artifactParser.parseObject({
+ dataComponents: [component]
+ });
+ for (const part of parts) {
+ await this.streamPart(part);
+ }
+ this.lastStreamedComponents.set(componentKey, true);
+ if (this.lastStreamedComponents.size > _IncrementalStreamParser.MAX_STREAMED_SIZE) {
+ const firstKey = this.lastStreamedComponents.keys().next().value;
+ if (firstKey) {
+ this.lastStreamedComponents.delete(firstKey);
+ }
+ }
+ this.componentSnapshots.delete(componentKey);
+ }
+ }
+ }
+ if (this.buffer) {
  const part = {
  kind: "text",
- text: this.buffer.trim()
+ text: this.buffer
  };
  await this.streamPart(part);
  }
- if (this.pendingTextBuffer.trim()) {
- const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "").trim();
+ if (this.pendingTextBuffer) {
+ const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/artifact:ref>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
  if (cleanedText) {
  this.collectedParts.push({
  kind: "text",
@@ -3154,6 +3134,9 @@ var IncrementalStreamParser = class {
  }
  this.pendingTextBuffer = "";
  }
+ this.componentSnapshots.clear();
+ this.lastStreamedComponents.clear();
+ this.componentAccumulator = {};
  }
  /**
  * Get all collected parts for building the final response
@@ -3200,30 +3183,6 @@ var IncrementalStreamParser = class {
  remainingBuffer: ""
  };
  }
- /**
- * Parse buffer for complete JSON objects with artifact references (for object streaming)
- */
- async parseObjectBuffer() {
- const completeParts = [];
- try {
- const parsed = JSON.parse(this.buffer);
- const parts = await this.artifactParser.parseObject(parsed);
- return {
- completeParts: parts,
- remainingBuffer: ""
- };
- } catch {
- const { complete, remaining } = this.artifactParser.parsePartialJSON(this.buffer);
- for (const obj of complete) {
- const parts = await this.artifactParser.parseObject(obj);
- completeParts.push(...parts);
- }
- return {
- completeParts,
- remainingBuffer: remaining
- };
- }
- }
  /**
  * Check if text might be the start of an artifact marker
  */
@@ -3243,16 +3202,16 @@ var IncrementalStreamParser = class {
  if (part.kind === "text" && part.text) {
  this.pendingTextBuffer += part.text;
  if (!this.artifactParser.hasIncompleteArtifact(this.pendingTextBuffer)) {
- const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
- if (cleanedText.trim()) {
+ const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/artifact:ref>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
+ if (cleanedText) {
  await this.streamHelper.streamText(cleanedText, 50);
  }
  this.pendingTextBuffer = "";
  }
  } else if (part.kind === "data" && part.data) {
  if (this.pendingTextBuffer) {
- const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
- if (cleanedText.trim()) {
+ const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/artifact:ref>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
+ if (cleanedText) {
  await this.streamHelper.streamText(cleanedText, 50);
  }
  this.pendingTextBuffer = "";
@@ -3266,6 +3225,11 @@ var IncrementalStreamParser = class {
  }
  }
  };
+ // Memory management constants
+ __publicField(_IncrementalStreamParser, "MAX_SNAPSHOT_SIZE", 100);
+ // Max number of snapshots to keep
+ __publicField(_IncrementalStreamParser, "MAX_STREAMED_SIZE", 1e3);
+ var IncrementalStreamParser = _IncrementalStreamParser;
 
  // src/utils/response-formatter.ts
  var logger9 = agentsCore.getLogger("ResponseFormatter");
@@ -4654,7 +4618,8 @@ function createDelegateToAgentTool({
  const externalAgent = await agentsCore.getExternalAgent(dbClient_default)({
  scopes: {
  tenantId,
- projectId
+ projectId,
+ graphId
  },
  agentId: delegateConfig.config.id
  });
@@ -5256,6 +5221,23 @@ var Agent = class {
  __publicField(this, "credentialStoreRegistry");
  this.artifactComponents = config.artifactComponents || [];
  let processedDataComponents = config.dataComponents || [];
+ if (processedDataComponents.length > 0) {
+ processedDataComponents.push({
+ id: "text-content",
+ name: "Text",
+ description: "Natural conversational text for the user - write naturally without mentioning technical details. Avoid redundancy and repetition with data components.",
+ props: {
+ type: "object",
+ properties: {
+ text: {
+ type: "string",
+ description: "Natural conversational text - respond as if having a normal conversation, never mention JSON, components, schemas, or technical implementation. Avoid redundancy and repetition with data components."
+ }
+ },
+ required: ["text"]
+ }
+ });
+ }
  if (this.artifactComponents.length > 0 && config.dataComponents && config.dataComponents.length > 0) {
  processedDataComponents = [
  ArtifactReferenceSchema.getDataComponent(config.tenantId, config.projectId),
@@ -5526,8 +5508,12 @@ var Agent = class {
  async getMcpTool(tool4) {
  const credentialReferenceId = tool4.credentialReferenceId;
  const toolsForAgent = await agentsCore.getToolsForAgent(dbClient_default)({
- scopes: { tenantId: this.config.tenantId, projectId: this.config.projectId },
- agentId: this.config.id
+ scopes: {
+ tenantId: this.config.tenantId,
+ projectId: this.config.projectId,
+ graphId: this.config.graphId,
+ agentId: this.config.id
+ }
  });
  const selectedTools = toolsForAgent.data.find((t) => t.toolId === tool4.id)?.selectedTools || void 0;
  let serverConfig;
@@ -5674,9 +5660,9 @@ var Agent = class {
  const graphDefinition = await agentsCore.getFullGraphDefinition(dbClient_default)({
  scopes: {
  tenantId: this.config.tenantId,
- projectId: this.config.projectId
- },
- graphId: this.config.graphId
+ projectId: this.config.projectId,
+ graphId: this.config.graphId
+ }
  });
  return graphDefinition?.graphPrompt || void 0;
  } catch (error) {
@@ -5698,14 +5684,16 @@ var Agent = class {
5698
5684
  const graphDefinition = await agentsCore.getFullGraphDefinition(dbClient_default)({
5699
5685
  scopes: {
5700
5686
  tenantId: this.config.tenantId,
5701
- projectId: this.config.projectId
5702
- },
5703
- graphId: this.config.graphId
5687
+ projectId: this.config.projectId,
5688
+ graphId: this.config.graphId
5689
+ }
5704
5690
  });
5705
5691
  if (!graphDefinition) {
5706
5692
  return false;
5707
5693
  }
5708
- return !!(graphDefinition.artifactComponents && Object.keys(graphDefinition.artifactComponents).length > 0);
5694
+ return Object.values(graphDefinition.agents).some(
5695
+ (agent) => "artifactComponents" in agent && agent.artifactComponents && agent.artifactComponents.length > 0
5696
+ );
5709
5697
  } catch (error) {
5710
5698
  logger15.warn(
5711
5699
  {
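Note on the hunk above: the check now asks whether any agent in the full graph definition carries artifact components, instead of looking for a graph-level artifactComponents map. Reduced to its essentials, with the agent shape simplified for illustration:

type GraphAgent = { artifactComponents?: unknown[] };

function graphHasArtifacts(agents: Record<string, GraphAgent>): boolean {
  // True if at least one agent declares a non-empty artifactComponents array.
  return Object.values(agents).some(
    (agent) => Array.isArray(agent.artifactComponents) && agent.artifactComponents.length > 0
  );
}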
@@ -5733,7 +5721,8 @@ Key requirements:
5733
5721
  - Mix artifact references throughout your dataComponents array
5734
5722
  - Each artifact reference must use EXACT IDs from tool outputs
5735
5723
  - Reference artifacts that directly support the adjacent information
5736
- - Follow the pattern: Data \u2192 Supporting Artifact \u2192 Next Data \u2192 Next Artifact`;
5724
+ - Follow the pattern: Data \u2192 Supporting Artifact \u2192 Next Data \u2192 Next Artifact
5725
+ - IMPORTANT: In Text components, write naturally as if having a conversation - do NOT mention components, schemas, JSON, structured data, or any technical implementation details`;
5737
5726
  }
5738
5727
  if (hasDataComponents && !hasArtifactComponents) {
5739
5728
  return `Generate the final structured JSON response using the configured data components. Organize the information from the research above into the appropriate structured format based on the available component schemas.
@@ -5741,7 +5730,8 @@ Key requirements:
5741
5730
  Key requirements:
5742
5731
  - Use the exact component structure and property names
5743
5732
  - Fill in all relevant data from the research
5744
- - Ensure data is organized logically and completely`;
5733
+ - Ensure data is organized logically and completely
5734
+ - IMPORTANT: In Text components, write naturally as if having a conversation - do NOT mention components, schemas, JSON, structured data, or any technical implementation details`;
5745
5735
  }
5746
5736
  if (!hasDataComponents && hasArtifactComponents) {
5747
5737
  return `Generate the final structured response with artifact references based on the research above. Use the artifact reference component to cite relevant information with exact artifact_id and task_id values from the tool outputs.
@@ -5751,7 +5741,7 @@ Key requirements:
5751
5741
  - Reference artifacts that support your response
5752
5742
  - Never make up or modify artifact IDs`;
5753
5743
  }
5754
- return `Generate the final response based on the research above.`;
5744
+ return `Generate the final response based on the research above. Write naturally as if having a conversation.`;
5755
5745
  }
5756
5746
  async buildSystemPrompt(runtimeContext, excludeDataComponents = false) {
5757
5747
  const conversationId = runtimeContext?.metadata?.conversationId || runtimeContext?.contextId;
@@ -5902,9 +5892,9 @@ Key requirements:
5902
5892
  return await agentsCore.graphHasArtifactComponents(dbClient_default)({
5903
5893
  scopes: {
5904
5894
  tenantId: this.config.tenantId,
5905
- projectId: this.config.projectId
5906
- },
5907
- graphId: this.config.graphId
5895
+ projectId: this.config.projectId,
5896
+ graphId: this.config.graphId
5897
+ }
5908
5898
  });
5909
5899
  } catch (error) {
5910
5900
  logger15.error(
@@ -6250,35 +6240,94 @@ ${output}`;
6250
6240
  this.getStructuredOutputModel()
6251
6241
  );
6252
6242
  const phase2TimeoutMs = structuredModelSettings.maxDuration ? structuredModelSettings.maxDuration * 1e3 : CONSTANTS.PHASE_2_TIMEOUT_MS;
6253
- const structuredResponse = await ai.generateObject({
6254
- ...structuredModelSettings,
6255
- messages: [
6256
- { role: "user", content: userMessage },
6257
- ...reasoningFlow,
6258
- {
6259
- role: "user",
6260
- content: await this.buildPhase2SystemPrompt()
6261
- }
6262
- ],
6263
- schema: z5.z.object({
6264
- dataComponents: z5.z.array(dataComponentsSchema)
6265
- }),
6266
- experimental_telemetry: {
6267
- isEnabled: true,
6268
- functionId: this.config.id,
6269
- recordInputs: true,
6270
- recordOutputs: true,
6271
- metadata: {
6272
- phase: "structured_generation"
6243
+ const shouldStreamPhase2 = this.getStreamingHelper();
6244
+ if (shouldStreamPhase2) {
6245
+ const streamResult = ai.streamObject({
6246
+ ...structuredModelSettings,
6247
+ messages: [
6248
+ { role: "user", content: userMessage },
6249
+ ...reasoningFlow,
6250
+ {
6251
+ role: "user",
6252
+ content: await this.buildPhase2SystemPrompt()
6253
+ }
6254
+ ],
6255
+ schema: z5.z.object({
6256
+ dataComponents: z5.z.array(dataComponentsSchema)
6257
+ }),
6258
+ experimental_telemetry: {
6259
+ isEnabled: true,
6260
+ functionId: this.config.id,
6261
+ recordInputs: true,
6262
+ recordOutputs: true,
6263
+ metadata: {
6264
+ phase: "structured_generation"
6265
+ }
6266
+ },
6267
+ abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6268
+ });
6269
+ const streamHelper = this.getStreamingHelper();
6270
+ if (!streamHelper) {
6271
+ throw new Error("Stream helper is unexpectedly undefined in streaming context");
6272
+ }
6273
+ const parser = new IncrementalStreamParser(
6274
+ streamHelper,
6275
+ this.config.tenantId,
6276
+ contextId
6277
+ );
6278
+ for await (const delta of streamResult.partialObjectStream) {
6279
+ if (delta) {
6280
+ await parser.processObjectDelta(delta);
6273
6281
  }
6274
- },
6275
- abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6276
- });
6277
- response = {
6278
- ...response,
6279
- object: structuredResponse.object
6280
- };
6281
- textResponse = JSON.stringify(structuredResponse.object, null, 2);
6282
+ }
6283
+ await parser.finalize();
6284
+ const structuredResponse = await streamResult;
6285
+ const collectedParts = parser.getCollectedParts();
6286
+ if (collectedParts.length > 0) {
6287
+ response.formattedContent = {
6288
+ parts: collectedParts.map((part) => ({
6289
+ kind: part.kind,
6290
+ ...part.kind === "text" && { text: part.text },
6291
+ ...part.kind === "data" && { data: part.data }
6292
+ }))
6293
+ };
6294
+ }
6295
+ response = {
6296
+ ...response,
6297
+ object: structuredResponse.object
6298
+ };
6299
+ textResponse = JSON.stringify(structuredResponse.object, null, 2);
6300
+ } else {
6301
+ const structuredResponse = await ai.generateObject({
6302
+ ...structuredModelSettings,
6303
+ messages: [
6304
+ { role: "user", content: userMessage },
6305
+ ...reasoningFlow,
6306
+ {
6307
+ role: "user",
6308
+ content: await this.buildPhase2SystemPrompt()
6309
+ }
6310
+ ],
6311
+ schema: z5.z.object({
6312
+ dataComponents: z5.z.array(dataComponentsSchema)
6313
+ }),
6314
+ experimental_telemetry: {
6315
+ isEnabled: true,
6316
+ functionId: this.config.id,
6317
+ recordInputs: true,
6318
+ recordOutputs: true,
6319
+ metadata: {
6320
+ phase: "structured_generation"
6321
+ }
6322
+ },
6323
+ abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6324
+ });
6325
+ response = {
6326
+ ...response,
6327
+ object: structuredResponse.object
6328
+ };
6329
+ textResponse = JSON.stringify(structuredResponse.object, null, 2);
6330
+ }
6282
6331
  } else {
6283
6332
  textResponse = response.text || "";
6284
6333
  }
@@ -6324,42 +6373,6 @@ ${output}`;
6324
6373
  }
6325
6374
  };
6326
6375
 
6327
- // src/utils/model-resolver.ts
6328
- init_dbClient();
6329
- async function resolveModelConfig(graphId, agent) {
6330
- if (agent.models?.base?.model) {
6331
- return {
6332
- base: agent.models.base,
6333
- structuredOutput: agent.models.structuredOutput || agent.models.base,
6334
- summarizer: agent.models.summarizer || agent.models.base
6335
- };
6336
- }
6337
- const graph = await agentsCore.getAgentGraph(dbClient_default)({
6338
- scopes: { tenantId: agent.tenantId, projectId: agent.projectId },
6339
- graphId
6340
- });
6341
- if (graph?.models?.base?.model) {
6342
- return {
6343
- base: graph.models.base,
6344
- structuredOutput: agent.models?.structuredOutput || graph.models.structuredOutput || graph.models.base,
6345
- summarizer: agent.models?.summarizer || graph.models.summarizer || graph.models.base
6346
- };
6347
- }
6348
- const project = await agentsCore.getProject(dbClient_default)({
6349
- scopes: { tenantId: agent.tenantId, projectId: agent.projectId }
6350
- });
6351
- if (project?.models?.base?.model) {
6352
- return {
6353
- base: project.models.base,
6354
- structuredOutput: agent.models?.structuredOutput || project.models.structuredOutput || project.models.base,
6355
- summarizer: agent.models?.summarizer || project.models.summarizer || project.models.base
6356
- };
6357
- }
6358
- throw new Error(
6359
- "Base model configuration is required. Please configure models at the project level."
6360
- );
6361
- }
6362
-
6363
6376
  // src/agents/generateTaskHandler.ts
6364
6377
  function parseEmbeddedJson(data) {
6365
6378
  return traverse__default.default(data).map(function(x) {
@@ -6394,31 +6407,34 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6394
6407
  agentsCore.getRelatedAgentsForGraph(dbClient_default)({
6395
6408
  scopes: {
6396
6409
  tenantId: config.tenantId,
6397
- projectId: config.projectId
6410
+ projectId: config.projectId,
6411
+ graphId: config.graphId
6398
6412
  },
6399
- graphId: config.graphId,
6400
6413
  agentId: config.agentId
6401
6414
  }),
6402
6415
  agentsCore.getToolsForAgent(dbClient_default)({
6403
6416
  scopes: {
6404
6417
  tenantId: config.tenantId,
6405
- projectId: config.projectId
6406
- },
6407
- agentId: config.agentId
6418
+ projectId: config.projectId,
6419
+ graphId: config.graphId,
6420
+ agentId: config.agentId
6421
+ }
6408
6422
  }),
6409
6423
  agentsCore.getDataComponentsForAgent(dbClient_default)({
6410
6424
  scopes: {
6411
6425
  tenantId: config.tenantId,
6412
- projectId: config.projectId
6413
- },
6414
- agentId: config.agentId
6426
+ projectId: config.projectId,
6427
+ graphId: config.graphId,
6428
+ agentId: config.agentId
6429
+ }
6415
6430
  }),
6416
6431
  agentsCore.getArtifactComponentsForAgent(dbClient_default)({
6417
6432
  scopes: {
6418
6433
  tenantId: config.tenantId,
6419
- projectId: config.projectId
6420
- },
6421
- agentId: config.agentId
6434
+ projectId: config.projectId,
6435
+ graphId: config.graphId,
6436
+ agentId: config.agentId
6437
+ }
6422
6438
  })
6423
6439
  ]);
6424
6440
  logger16.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
@@ -6426,13 +6442,16 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6426
6442
  internalRelations.map(async (relation) => {
6427
6443
  try {
6428
6444
  const relatedAgent = await agentsCore.getAgentById(dbClient_default)({
6429
- scopes: { tenantId: config.tenantId, projectId: config.projectId },
6445
+ scopes: {
6446
+ tenantId: config.tenantId,
6447
+ projectId: config.projectId,
6448
+ graphId: config.graphId
6449
+ },
6430
6450
  agentId: relation.id
6431
6451
  });
6432
6452
  if (relatedAgent) {
6433
6453
  const relatedAgentRelations = await agentsCore.getRelatedAgentsForGraph(dbClient_default)({
6434
- scopes: { tenantId: config.tenantId, projectId: config.projectId },
6435
- graphId: config.graphId,
6454
+ scopes: { tenantId: config.tenantId, projectId: config.projectId, graphId: config.graphId },
6436
6455
  agentId: relation.id
6437
6456
  });
6438
6457
  const enhancedDescription = generateDescriptionWithTransfers(
@@ -6655,16 +6674,17 @@ var createTaskHandlerConfig = async (params) => {
6655
6674
  const agent = await agentsCore.getAgentById(dbClient_default)({
6656
6675
  scopes: {
6657
6676
  tenantId: params.tenantId,
6658
- projectId: params.projectId
6677
+ projectId: params.projectId,
6678
+ graphId: params.graphId
6659
6679
  },
6660
6680
  agentId: params.agentId
6661
6681
  });
6662
- const agentGraph = await agentsCore.getAgentGraph(dbClient_default)({
6682
+ const agentGraph = await agentsCore.getAgentGraphById(dbClient_default)({
6663
6683
  scopes: {
6664
6684
  tenantId: params.tenantId,
6665
- projectId: params.projectId
6666
- },
6667
- graphId: params.graphId
6685
+ projectId: params.projectId,
6686
+ graphId: params.graphId
6687
+ }
6668
6688
  });
6669
6689
  if (!agent) {
6670
6690
  throw new Error(`Agent not found: ${params.agentId}`);
@@ -6704,10 +6724,14 @@ async function hydrateGraph({
6704
6724
  apiKey
6705
6725
  }) {
6706
6726
  try {
6727
+ if (!dbGraph.defaultAgentId) {
6728
+ throw new Error(`Graph ${dbGraph.id} does not have a default agent configured`);
6729
+ }
6707
6730
  const defaultAgent = await agentsCore.getAgentById(dbClient_default)({
6708
6731
  scopes: {
6709
6732
  tenantId: dbGraph.tenantId,
6710
- projectId: dbGraph.projectId
6733
+ projectId: dbGraph.projectId,
6734
+ graphId: dbGraph.id
6711
6735
  },
6712
6736
  agentId: dbGraph.defaultAgentId
6713
6737
  });
@@ -6762,7 +6786,7 @@ async function hydrateGraph({
6762
6786
  }
6763
6787
  async function getRegisteredGraph(executionContext) {
6764
6788
  const { tenantId, projectId, graphId, baseUrl, apiKey } = executionContext;
6765
- const dbGraph = await agentsCore.getAgentGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
6789
+ const dbGraph = await agentsCore.getAgentGraphById(dbClient_default)({ scopes: { tenantId, projectId, graphId } });
6766
6790
  if (!dbGraph) {
6767
6791
  return null;
6768
6792
  }
@@ -6821,6 +6845,7 @@ app.openapi(
6821
6845
  );
6822
6846
  const executionContext = agentsCore.getRequestExecutionContext(c);
6823
6847
  const { tenantId, projectId, graphId, agentId } = executionContext;
6848
+ console.dir("executionContext", executionContext);
6824
6849
  if (agentId) {
6825
6850
  logger17.info(
6826
6851
  {
@@ -6836,7 +6861,10 @@ app.openapi(
6836
6861
  const agent = await getRegisteredAgent(executionContext, credentialStores);
6837
6862
  logger17.info({ agent }, "agent registered: well-known agent.json");
6838
6863
  if (!agent) {
6839
- return c.json({ error: "Agent not found" }, 404);
6864
+ throw agentsCore.createApiError({
6865
+ code: "not_found",
6866
+ message: "Agent not found"
6867
+ });
6840
6868
  }
6841
6869
  return c.json(agent.agentCard);
6842
6870
  } else {
@@ -6851,7 +6879,10 @@ app.openapi(
6851
6879
  );
6852
6880
  const graph = await getRegisteredGraph(executionContext);
6853
6881
  if (!graph) {
6854
- return c.json({ error: "Graph not found" }, 404);
6882
+ throw agentsCore.createApiError({
6883
+ code: "not_found",
6884
+ message: "Graph not found"
6885
+ });
6855
6886
  }
6856
6887
  return c.json(graph.agentCard);
6857
6888
  }
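Note on the hunk above: both branches now throw agentsCore.createApiError instead of returning ad-hoc 404 JSON, deferring response shaping to the app-level error handler. A small caller-side sketch, assuming createApiError is also available as a named export; the guard helper itself is illustrative.

import { createApiError } from "@inkeep/agents-core";

// Illustrative guard: throw a typed API error when a lookup comes back empty.
function requireFound<T>(value: T | null | undefined, message: string): T {
  if (value == null) {
    throw createApiError({ code: "not_found", message });
  }
  return value;
}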
@@ -6908,8 +6939,7 @@ app.post("/a2a", async (c) => {
6908
6939
  "graph-level a2a endpoint"
6909
6940
  );
6910
6941
  const graph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
6911
- scopes: { tenantId, projectId },
6912
- graphId
6942
+ scopes: { tenantId, projectId, graphId }
6913
6943
  });
6914
6944
  if (!graph) {
6915
6945
  return c.json(
@@ -6921,6 +6951,16 @@ app.post("/a2a", async (c) => {
6921
6951
  404
6922
6952
  );
6923
6953
  }
6954
+ if (!graph.defaultAgentId) {
6955
+ return c.json(
6956
+ {
6957
+ jsonrpc: "2.0",
6958
+ error: { code: -32004, message: "Graph does not have a default agent configured" },
6959
+ id: null
6960
+ },
6961
+ 400
6962
+ );
6963
+ }
6924
6964
  executionContext.agentId = graph.defaultAgentId;
6925
6965
  const credentialStores = c.get("credentialStores");
6926
6966
  const defaultAgent = await getRegisteredAgent(executionContext, credentialStores);
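Note on the hunk above: with the new guard, an a2a caller hitting a graph that has no default agent now receives a JSON-RPC error with HTTP status 400 instead of failing further down. The payload, as returned by the handler above:

// Shape taken directly from the handler above.
const missingDefaultAgentError = {
  jsonrpc: "2.0" as const,
  error: { code: -32004, message: "Graph does not have a default agent configured" },
  id: null,
};
console.log(JSON.stringify(missingDefaultAgentError));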
@@ -6964,6 +7004,9 @@ function isTransferResponse(result) {
6964
7004
  (artifact) => artifact.parts.some((part) => part.kind === "data" && part.data?.type === "transfer")
6965
7005
  );
6966
7006
  }
7007
+
7008
+ // src/handlers/executionHandler.ts
7009
+ init_dbClient();
6967
7010
  var SSEStreamHelper = class {
6968
7011
  constructor(stream2, requestId2, timestamp) {
6969
7012
  this.stream = stream2;
@@ -6971,7 +7014,7 @@ var SSEStreamHelper = class {
6971
7014
  this.timestamp = timestamp;
6972
7015
  // Stream queuing for proper event ordering
6973
7016
  __publicField(this, "isTextStreaming", false);
6974
- __publicField(this, "queuedOperations", []);
7017
+ __publicField(this, "queuedEvents", []);
6975
7018
  }
6976
7019
  /**
6977
7020
  * Write the initial role message
@@ -7036,9 +7079,10 @@ var SSEStreamHelper = class {
7036
7079
  await this.writeContent(JSON.stringify(data));
7037
7080
  }
7038
7081
  /**
7039
- * Write error message
7082
+ * Write error message or error event
7040
7083
  */
7041
- async writeError(errorMessage) {
7084
+ async writeError(error) {
7085
+ const errorMessage = typeof error === "string" ? error : error.message;
7042
7086
  await this.writeContent(`
7043
7087
 
7044
7088
  ${errorMessage}`);
@@ -7062,22 +7106,6 @@ ${errorMessage}`);
7062
7106
  })
7063
7107
  });
7064
7108
  }
7065
- /**
7066
- * Write the final [DONE] message
7067
- */
7068
- async writeDone() {
7069
- await this.stream.writeSSE({
7070
- data: "[DONE]"
7071
- });
7072
- }
7073
- /**
7074
- * Complete the stream with finish reason and done message
7075
- */
7076
- async complete(finishReason = "stop") {
7077
- await this.flushQueuedOperations();
7078
- await this.writeCompletion(finishReason);
7079
- await this.writeDone();
7080
- }
7081
7109
  async writeData(type, data) {
7082
7110
  await this.stream.writeSSE({
7083
7111
  data: JSON.stringify({
@@ -7096,16 +7124,23 @@ ${errorMessage}`);
7096
7124
  })
7097
7125
  });
7098
7126
  }
7099
- async writeOperation(operation) {
7100
- if (operation.type === "status_update" && operation.ctx.label) {
7101
- operation = {
7102
- type: operation.type,
7103
- label: operation.ctx.label,
7104
- ctx: operation.ctx.data
7105
- };
7127
+ async writeSummary(summary) {
7128
+ if (this.isTextStreaming) {
7129
+ this.queuedEvents.push({
7130
+ type: "data-summary",
7131
+ event: summary
7132
+ });
7133
+ return;
7106
7134
  }
7135
+ await this.flushQueuedOperations();
7136
+ await this.writeData("data-summary", summary);
7137
+ }
7138
+ async writeOperation(operation) {
7107
7139
  if (this.isTextStreaming) {
7108
- this.queuedOperations.push(operation);
7140
+ this.queuedEvents.push({
7141
+ type: "data-operation",
7142
+ event: operation
7143
+ });
7109
7144
  return;
7110
7145
  }
7111
7146
  await this.flushQueuedOperations();
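Note on the hunk above: SSEStreamHelper's queue now holds typed events (data-summary and data-operation) rather than bare operations, so both kinds are held back while text is streaming and flushed in arrival order afterwards. A stripped-down sketch of that ordering rule; the class and method names are illustrative.

type QueuedEvent = { type: "data-summary" | "data-operation"; event: unknown };

class OrderedEventQueue {
  private isTextStreaming = false;
  private queued: QueuedEvent[] = [];

  setTextStreaming(active: boolean): void {
    this.isTextStreaming = active;
  }

  async write(ev: QueuedEvent, emit: (e: QueuedEvent) => Promise<void>): Promise<void> {
    if (this.isTextStreaming) {
      // Hold the event until the current text burst finishes.
      this.queued.push(ev);
      return;
    }
    await this.flush(emit);
    await emit(ev);
  }

  async flush(emit: (e: QueuedEvent) => Promise<void>): Promise<void> {
    const toFlush = [...this.queued];
    this.queued = [];
    for (const ev of toFlush) {
      await emit(ev);
    }
  }
}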
@@ -7115,15 +7150,31 @@ ${errorMessage}`);
7115
7150
  * Flush all queued operations in order after text streaming completes
7116
7151
  */
7117
7152
  async flushQueuedOperations() {
7118
- if (this.queuedOperations.length === 0) {
7153
+ if (this.queuedEvents.length === 0) {
7119
7154
  return;
7120
7155
  }
7121
- const operationsToFlush = [...this.queuedOperations];
7122
- this.queuedOperations = [];
7123
- for (const operation of operationsToFlush) {
7124
- await this.writeData("data-operation", operation);
7156
+ const eventsToFlush = [...this.queuedEvents];
7157
+ this.queuedEvents = [];
7158
+ for (const event of eventsToFlush) {
7159
+ await this.writeData(event.type, event.event);
7125
7160
  }
7126
7161
  }
7162
+ /**
7163
+ * Write the final [DONE] message
7164
+ */
7165
+ async writeDone() {
7166
+ await this.stream.writeSSE({
7167
+ data: "[DONE]"
7168
+ });
7169
+ }
7170
+ /**
7171
+ * Complete the stream with finish reason and done message
7172
+ */
7173
+ async complete(finishReason = "stop") {
7174
+ await this.flushQueuedOperations();
7175
+ await this.writeCompletion(finishReason);
7176
+ await this.writeDone();
7177
+ }
7127
7178
  };
7128
7179
  function createSSEStreamHelper(stream2, requestId2, timestamp) {
7129
7180
  return new SSEStreamHelper(stream2, requestId2, timestamp);
@@ -7143,10 +7194,10 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7143
7194
  __publicField(this, "isCompleted", false);
7144
7195
  // Stream queuing for proper event ordering
7145
7196
  __publicField(this, "isTextStreaming", false);
7146
- __publicField(this, "queuedOperations", []);
7197
+ __publicField(this, "queuedEvents", []);
7147
7198
  // Timing tracking for text sequences (text-end to text-start gap)
7148
7199
  __publicField(this, "lastTextEndTimestamp", 0);
7149
- __publicField(this, "TEXT_GAP_THRESHOLD", 50);
7200
+ __publicField(this, "TEXT_GAP_THRESHOLD", 2e3);
7150
7201
  // milliseconds - if gap between text sequences is less than this, queue operations
7151
7202
  // Connection management and forced cleanup
7152
7203
  __publicField(this, "connectionDropTimer");
@@ -7255,15 +7306,24 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7255
7306
  data
7256
7307
  });
7257
7308
  }
7258
- async writeError(errorMessage) {
7309
+ async writeError(error) {
7259
7310
  if (this.isCompleted) {
7260
7311
  console.warn("Attempted to write error to completed stream");
7261
7312
  return;
7262
7313
  }
7263
- this.writer.write({
7264
- type: "error",
7265
- errorText: errorMessage
7266
- });
7314
+ if (typeof error === "string") {
7315
+ this.writer.write({
7316
+ type: "error",
7317
+ message: error,
7318
+ severity: "error",
7319
+ timestamp: Date.now()
7320
+ });
7321
+ } else {
7322
+ this.writer.write({
7323
+ ...error,
7324
+ type: "error"
7325
+ });
7326
+ }
7267
7327
  }
7268
7328
  async streamData(data) {
7269
7329
  await this.writeContent(JSON.stringify(data));
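Note on the hunk above: writeError now accepts either a plain string or a structured error part; strings are wrapped with a severity and timestamp, while structured errors pass through with type: "error". Roughly, with field names mirroring the code above:

type StreamError = string | { message: string; severity?: string; timestamp?: number };

function toErrorPart(error: StreamError) {
  return typeof error === "string"
    ? { type: "error" as const, message: error, severity: "error", timestamp: Date.now() }
    : { ...error, type: "error" as const };
}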
@@ -7275,20 +7335,6 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7275
7335
  }
7276
7336
  this.writer.merge(stream2);
7277
7337
  }
7278
- async writeCompletion(_finishReason = "stop") {
7279
- }
7280
- async writeDone() {
7281
- }
7282
- /**
7283
- * Complete the stream and clean up all memory
7284
- * This is the primary cleanup point to prevent memory leaks between requests
7285
- */
7286
- async complete() {
7287
- if (this.isCompleted) return;
7288
- await this.flushQueuedOperations();
7289
- this.isCompleted = true;
7290
- this.cleanup();
7291
- }
7292
7338
  /**
7293
7339
  * Clean up all memory allocations
7294
7340
  * Should be called when the stream helper is no longer needed
@@ -7302,7 +7348,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7302
7348
  this.sentItems.clear();
7303
7349
  this.completedItems.clear();
7304
7350
  this.textId = null;
7305
- this.queuedOperations = [];
7351
+ this.queuedEvents = [];
7306
7352
  this.isTextStreaming = false;
7307
7353
  }
7308
7354
  /**
@@ -7368,7 +7414,9 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7368
7414
  if (this.writer && !this.isCompleted) {
7369
7415
  this.writer.write({
7370
7416
  type: "error",
7371
- errorText: `Stream terminated: ${reason}`
7417
+ message: `Stream terminated: ${reason}`,
7418
+ severity: "error",
7419
+ timestamp: Date.now()
7372
7420
  });
7373
7421
  }
7374
7422
  } catch (e) {
@@ -7391,23 +7439,33 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7391
7439
  isCompleted: this.isCompleted
7392
7440
  };
7393
7441
  }
7442
+ async writeSummary(summary) {
7443
+ if (this.isCompleted) {
7444
+ console.warn("Attempted to write summary to completed stream");
7445
+ return;
7446
+ }
7447
+ const now = Date.now();
7448
+ const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
7449
+ if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
7450
+ this.queuedEvents.push({ type: "data-summary", event: summary });
7451
+ return;
7452
+ }
7453
+ await this.flushQueuedOperations();
7454
+ await this.writer.write({
7455
+ id: "id" in summary ? summary.id : void 0,
7456
+ type: "data-summary",
7457
+ data: summary
7458
+ });
7459
+ }
7394
7460
  async writeOperation(operation) {
7395
7461
  if (this.isCompleted) {
7396
7462
  console.warn("Attempted to write operation to completed stream");
7397
7463
  return;
7398
7464
  }
7399
- if (operation.type === "status_update" && operation.ctx.label) {
7400
- operation = {
7401
- type: operation.type,
7402
- label: operation.ctx.label,
7403
- // Preserve the label for the UI
7404
- ctx: operation.ctx.data
7405
- };
7406
- }
7407
7465
  const now = Date.now();
7408
7466
  const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
7409
7467
  if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
7410
- this.queuedOperations.push(operation);
7468
+ this.queuedEvents.push({ type: "data-operation", event: operation });
7411
7469
  return;
7412
7470
  }
7413
7471
  await this.flushQueuedOperations();
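Note on the hunk above: in the Vercel data-stream helper the queueing now also covers data-summary events, and the TEXT_GAP_THRESHOLD it keys off was raised from 50 ms to 2000 ms, so events landing shortly after a text-end are still held back. The decision in isolation:

const TEXT_GAP_THRESHOLD_MS = 2_000; // mirrors the new threshold above

function shouldQueueEvent(isTextStreaming: boolean, lastTextEndTimestamp: number, now = Date.now()): boolean {
  const gapFromLastTextEnd =
    lastTextEndTimestamp > 0 ? now - lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
  return isTextStreaming || gapFromLastTextEnd < TEXT_GAP_THRESHOLD_MS;
}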
@@ -7421,19 +7479,33 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7421
7479
  * Flush all queued operations in order after text streaming completes
7422
7480
  */
7423
7481
  async flushQueuedOperations() {
7424
- if (this.queuedOperations.length === 0) {
7482
+ if (this.queuedEvents.length === 0) {
7425
7483
  return;
7426
7484
  }
7427
- const operationsToFlush = [...this.queuedOperations];
7428
- this.queuedOperations = [];
7429
- for (const operation of operationsToFlush) {
7485
+ const eventsToFlush = [...this.queuedEvents];
7486
+ this.queuedEvents = [];
7487
+ for (const event of eventsToFlush) {
7430
7488
  this.writer.write({
7431
- id: "id" in operation ? operation.id : void 0,
7432
- type: "data-operation",
7433
- data: operation
7489
+ id: "id" in event.event ? event.event.id : void 0,
7490
+ type: event.type,
7491
+ data: event.event
7434
7492
  });
7435
7493
  }
7436
7494
  }
7495
+ async writeCompletion(_finishReason = "stop") {
7496
+ }
7497
+ async writeDone() {
7498
+ }
7499
+ /**
7500
+ * Complete the stream and clean up all memory
7501
+ * This is the primary cleanup point to prevent memory leaks between requests
7502
+ */
7503
+ async complete() {
7504
+ if (this.isCompleted) return;
7505
+ await this.flushQueuedOperations();
7506
+ this.isCompleted = true;
7507
+ this.cleanup();
7508
+ }
7437
7509
  };
7438
7510
  // Memory management - focused on connection completion cleanup
7439
7511
  __publicField(_VercelDataStreamHelper, "MAX_BUFFER_SIZE", 5 * 1024 * 1024);
@@ -7446,6 +7518,7 @@ var MCPStreamHelper = class {
7446
7518
  __publicField(this, "capturedText", "");
7447
7519
  __publicField(this, "capturedData", []);
7448
7520
  __publicField(this, "capturedOperations", []);
7521
+ __publicField(this, "capturedSummaries", []);
7449
7522
  __publicField(this, "hasError", false);
7450
7523
  __publicField(this, "errorMessage", "");
7451
7524
  __publicField(this, "sessionId");
@@ -7464,18 +7537,27 @@ var MCPStreamHelper = class {
7464
7537
  async streamData(data) {
7465
7538
  this.capturedData.push(data);
7466
7539
  }
7540
+ async streamSummary(summary) {
7541
+ this.capturedSummaries.push(summary);
7542
+ }
7543
+ async streamOperation(operation) {
7544
+ this.capturedOperations.push(operation);
7545
+ }
7467
7546
  async writeData(_type, data) {
7468
7547
  this.capturedData.push(data);
7469
7548
  }
7470
- async writeError(errorMessage) {
7471
- this.hasError = true;
7472
- this.errorMessage = errorMessage;
7473
- }
7474
- async complete() {
7549
+ async writeSummary(summary) {
7550
+ this.capturedSummaries.push(summary);
7475
7551
  }
7476
7552
  async writeOperation(operation) {
7477
7553
  this.capturedOperations.push(operation);
7478
7554
  }
7555
+ async writeError(error) {
7556
+ this.hasError = true;
7557
+ this.errorMessage = typeof error === "string" ? error : error.message;
7558
+ }
7559
+ async complete() {
7560
+ }
7479
7561
  /**
7480
7562
  * Get the captured response for MCP tool result
7481
7563
  */
@@ -7494,7 +7576,6 @@ function createMCPStreamHelper() {
7494
7576
  }
7495
7577
 
7496
7578
  // src/handlers/executionHandler.ts
7497
- init_dbClient();
7498
7579
  var logger19 = agentsCore.getLogger("ExecutionHandler");
7499
7580
  var ExecutionHandler = class {
7500
7581
  constructor() {
@@ -7523,7 +7604,7 @@ var ExecutionHandler = class {
7523
7604
  logger19.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
7524
7605
  let graphConfig = null;
7525
7606
  try {
7526
- graphConfig = await agentsCore.getFullGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
7607
+ graphConfig = await agentsCore.getFullGraph(dbClient_default)({ scopes: { tenantId, projectId, graphId } });
7527
7608
  if (graphConfig?.statusUpdates && graphConfig.statusUpdates.enabled !== false) {
7528
7609
  graphSessionManager.initializeStatusUpdates(
7529
7610
  requestId2,
@@ -7677,7 +7758,6 @@ var ExecutionHandler = class {
7677
7758
  if (errorCount >= this.MAX_ERRORS) {
7678
7759
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7679
7760
  logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7680
- await sseHelper.writeError(errorMessage2);
7681
7761
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7682
7762
  if (task) {
7683
7763
  await agentsCore.updateTask(dbClient_default)({
@@ -7818,7 +7898,6 @@ var ExecutionHandler = class {
7818
7898
  if (errorCount >= this.MAX_ERRORS) {
7819
7899
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7820
7900
  logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7821
- await sseHelper.writeError(errorMessage2);
7822
7901
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7823
7902
  if (task) {
7824
7903
  await agentsCore.updateTask(dbClient_default)({
@@ -7840,7 +7919,6 @@ var ExecutionHandler = class {
7840
7919
  }
7841
7920
  const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
7842
7921
  logger19.error({ maxTransfers, iterations }, errorMessage);
7843
- await sseHelper.writeError(errorMessage);
7844
7922
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
7845
7923
  if (task) {
7846
7924
  await agentsCore.updateTask(dbClient_default)({
@@ -7861,8 +7939,7 @@ var ExecutionHandler = class {
7861
7939
  } catch (error) {
7862
7940
  logger19.error({ error }, "Error in execution handler");
7863
7941
  const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
7864
- await sseHelper.writeError(`Execution error: ${errorMessage}`);
7865
- await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
7942
+ await sseHelper.writeOperation(errorOp(`Execution error: ${errorMessage}`, currentAgentId || "system"));
7866
7943
  if (task) {
7867
7944
  await agentsCore.updateTask(dbClient_default)({
7868
7945
  taskId: task.id,
@@ -8024,8 +8101,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8024
8101
  const body = c.get("requestBody") || {};
8025
8102
  const conversationId = body.conversationId || nanoid.nanoid();
8026
8103
  const fullGraph = await agentsCore.getFullGraph(dbClient_default)({
8027
- scopes: { tenantId, projectId },
8028
- graphId
8104
+ scopes: { tenantId, projectId, graphId }
8029
8105
  });
8030
8106
  let agentGraph;
8031
8107
  let defaultAgentId;
@@ -8042,16 +8118,21 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8042
8118
  defaultAgentId = fullGraph.defaultAgentId || firstAgentId;
8043
8119
  } else {
8044
8120
  agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
8045
- scopes: { tenantId, projectId },
8046
- graphId
8121
+ scopes: { tenantId, projectId, graphId }
8047
8122
  });
8048
8123
  if (!agentGraph) {
8049
- return c.json({ error: "Agent graph not found" }, 404);
8124
+ throw agentsCore.createApiError({
8125
+ code: "not_found",
8126
+ message: "Agent graph not found"
8127
+ });
8050
8128
  }
8051
8129
  defaultAgentId = agentGraph.defaultAgentId || "";
8052
8130
  }
8053
8131
  if (!defaultAgentId) {
8054
- return c.json({ error: "No default agent found in graph" }, 404);
8132
+ throw agentsCore.createApiError({
8133
+ code: "not_found",
8134
+ message: "No default agent found in graph"
8135
+ });
8055
8136
  }
8056
8137
  await agentsCore.createOrGetConversation(dbClient_default)({
8057
8138
  tenantId,
@@ -8072,26 +8153,30 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8072
8153
  }
8073
8154
  const agentId = activeAgent?.activeAgentId || defaultAgentId;
8074
8155
  const agentInfo = await agentsCore.getAgentById(dbClient_default)({
8075
- scopes: { tenantId, projectId },
8156
+ scopes: { tenantId, projectId, graphId },
8076
8157
  agentId
8077
8158
  });
8078
8159
  if (!agentInfo) {
8079
- return c.json({ error: "Agent not found" }, 404);
8160
+ throw agentsCore.createApiError({
8161
+ code: "not_found",
8162
+ message: "Agent not found"
8163
+ });
8080
8164
  }
8081
8165
  const validatedContext = c.get("validatedContext") || body.requestContext || {};
8082
8166
  const credentialStores = c.get("credentialStores");
8083
- await agentsCore.handleContextResolution(
8167
+ await agentsCore.handleContextResolution({
8084
8168
  tenantId,
8085
8169
  projectId,
8086
- conversationId,
8087
8170
  graphId,
8088
- validatedContext,
8089
- dbClient_default,
8171
+ conversationId,
8172
+ requestContext: validatedContext,
8173
+ dbClient: dbClient_default,
8090
8174
  credentialStores
8091
- );
8175
+ });
8092
8176
  logger20.info(
8093
8177
  {
8094
8178
  tenantId,
8179
+ projectId,
8095
8180
  graphId,
8096
8181
  conversationId,
8097
8182
  defaultAgentId,
@@ -8133,41 +8218,69 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8133
8218
  });
8134
8219
  }
8135
8220
  return streaming.streamSSE(c, async (stream2) => {
8136
- const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
8137
- await sseHelper.writeRole();
8138
- logger20.info({ agentId }, "Starting execution");
8139
- const executionHandler = new ExecutionHandler();
8140
- const result = await executionHandler.execute({
8141
- executionContext,
8142
- conversationId,
8143
- userMessage,
8144
- initialAgentId: agentId,
8145
- requestId: requestId2,
8146
- sseHelper
8147
- });
8148
- logger20.info(
8149
- { result },
8150
- `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
8151
- );
8152
- if (!result.success) {
8153
- await sseHelper.writeError(
8154
- "Sorry, I was unable to process your request at this time. Please try again."
8221
+ try {
8222
+ const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
8223
+ await sseHelper.writeRole();
8224
+ logger20.info({ agentId }, "Starting execution");
8225
+ const executionHandler = new ExecutionHandler();
8226
+ const result = await executionHandler.execute({
8227
+ executionContext,
8228
+ conversationId,
8229
+ userMessage,
8230
+ initialAgentId: agentId,
8231
+ requestId: requestId2,
8232
+ sseHelper
8233
+ });
8234
+ logger20.info(
8235
+ { result },
8236
+ `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
8155
8237
  );
8238
+ if (!result.success) {
8239
+ await sseHelper.writeOperation(
8240
+ errorOp(
8241
+ "Sorry, I was unable to process your request at this time. Please try again.",
8242
+ "system"
8243
+ )
8244
+ );
8245
+ }
8246
+ await sseHelper.complete();
8247
+ } catch (error) {
8248
+ logger20.error(
8249
+ {
8250
+ error: error instanceof Error ? error.message : error,
8251
+ stack: error instanceof Error ? error.stack : void 0
8252
+ },
8253
+ "Error during streaming execution"
8254
+ );
8255
+ try {
8256
+ const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
8257
+ await sseHelper.writeOperation(
8258
+ errorOp(
8259
+ "Sorry, I was unable to process your request at this time. Please try again.",
8260
+ "system"
8261
+ )
8262
+ );
8263
+ await sseHelper.complete();
8264
+ } catch (streamError) {
8265
+ logger20.error({ streamError }, "Failed to write error to stream");
8266
+ }
8156
8267
  }
8157
- await sseHelper.complete();
8158
8268
  });
8159
8269
  } catch (error) {
8160
- console.error("\u274C Error in chat completions endpoint:", {
8161
- error: error instanceof Error ? error.message : error,
8162
- stack: error instanceof Error ? error.stack : void 0
8163
- });
8164
- return c.json(
8270
+ logger20.error(
8165
8271
  {
8166
- error: "Failed to process chat completion",
8167
- message: error instanceof Error ? error.message : "Unknown error"
8272
+ error: error instanceof Error ? error.message : error,
8273
+ stack: error instanceof Error ? error.stack : void 0
8168
8274
  },
8169
- 500
8275
+ "Error in chat completions endpoint before streaming"
8170
8276
  );
8277
+ if (error && typeof error === "object" && "status" in error) {
8278
+ throw error;
8279
+ }
8280
+ throw agentsCore.createApiError({
8281
+ code: "internal_server_error",
8282
+ message: error instanceof Error ? error.message : "Failed to process chat completion"
8283
+ });
8171
8284
  }
8172
8285
  });
8173
8286
  var getMessageText = (content) => {
@@ -8234,6 +8347,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8234
8347
  try {
8235
8348
  const executionContext = agentsCore.getRequestExecutionContext(c);
8236
8349
  const { tenantId, projectId, graphId } = executionContext;
8350
+ agentsCore.loggerFactory.getLogger("chatDataStream").debug({ tenantId, projectId, graphId }, "Extracted chatDataStream parameters");
8237
8351
  const body = c.get("requestBody") || {};
8238
8352
  const conversationId = body.conversationId || nanoid.nanoid();
8239
8353
  const activeSpan = api.trace.getActiveSpan();
@@ -8246,14 +8360,22 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8246
8360
  });
8247
8361
  }
8248
8362
  const agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
8249
- scopes: { tenantId, projectId },
8250
- graphId
8363
+ scopes: { tenantId, projectId, graphId }
8251
8364
  });
8252
8365
  if (!agentGraph) {
8253
- return c.json({ error: "Agent graph not found" }, 404);
8366
+ throw agentsCore.createApiError({
8367
+ code: "not_found",
8368
+ message: "Agent graph not found"
8369
+ });
8254
8370
  }
8255
8371
  const defaultAgentId = agentGraph.defaultAgentId;
8256
8372
  const graphName = agentGraph.name;
8373
+ if (!defaultAgentId) {
8374
+ throw agentsCore.createApiError({
8375
+ code: "bad_request",
8376
+ message: "Graph does not have a default agent configured"
8377
+ });
8378
+ }
8257
8379
  const activeAgent = await agentsCore.getActiveAgentForConversation(dbClient_default)({
8258
8380
  scopes: { tenantId, projectId },
8259
8381
  conversationId
@@ -8267,23 +8389,26 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8267
8389
  }
8268
8390
  const agentId = activeAgent?.activeAgentId || defaultAgentId;
8269
8391
  const agentInfo = await agentsCore.getAgentById(dbClient_default)({
8270
- scopes: { tenantId, projectId },
8392
+ scopes: { tenantId, projectId, graphId },
8271
8393
  agentId
8272
8394
  });
8273
8395
  if (!agentInfo) {
8274
- return c.json({ error: "Agent not found" }, 404);
8396
+ throw agentsCore.createApiError({
8397
+ code: "not_found",
8398
+ message: "Agent not found"
8399
+ });
8275
8400
  }
8276
8401
  const validatedContext = c.get("validatedContext") || body.requestContext || {};
8277
8402
  const credentialStores = c.get("credentialStores");
8278
- await agentsCore.handleContextResolution(
8403
+ await agentsCore.handleContextResolution({
8279
8404
  tenantId,
8280
8405
  projectId,
8281
- conversationId,
8282
8406
  graphId,
8283
- validatedContext,
8284
- dbClient_default,
8407
+ conversationId,
8408
+ requestContext: validatedContext,
8409
+ dbClient: dbClient_default,
8285
8410
  credentialStores
8286
- );
8411
+ });
8287
8412
  const lastUserMessage = body.messages.filter((m) => m.role === "user").slice(-1)[0];
8288
8413
  const userText = typeof lastUserMessage?.content === "string" ? lastUserMessage.content : lastUserMessage?.parts?.map((p) => p.text).join("") || "";
8289
8414
  logger21.info({ userText, lastUserMessage }, "userText");
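Note on the hunk above: handleContextResolution is now called with a single options object (requestContext and dbClient become named fields) instead of positional arguments. A sketch of a call site under the new signature; parameter types are simplified placeholders.

import * as agentsCore from "@inkeep/agents-core";

// Placeholders standing in for request-scoped values.
declare const dbClient: any;
declare const credentialStores: any;

async function resolveContext(
  tenantId: string,
  projectId: string,
  graphId: string,
  conversationId: string,
  validatedContext: Record<string, unknown>
) {
  return agentsCore.handleContextResolution({
    tenantId,
    projectId,
    graphId,
    conversationId,
    requestContext: validatedContext,
    dbClient,
    credentialStores,
  });
}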
@@ -8325,11 +8450,11 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8325
8450
  sseHelper: streamHelper
8326
8451
  });
8327
8452
  if (!result.success) {
8328
- await streamHelper.writeError("Unable to process request");
8453
+ await streamHelper.writeOperation(errorOp("Unable to process request", "system"));
8329
8454
  }
8330
8455
  } catch (err) {
8331
8456
  logger21.error({ err }, "Streaming error");
8332
- await streamHelper.writeError("Internal server error");
8457
+ await streamHelper.writeOperation(errorOp("Internal server error", "system"));
8333
8458
  } finally {
8334
8459
  if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
8335
8460
  streamHelper.cleanup();
@@ -8350,7 +8475,10 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8350
8475
  );
8351
8476
  } catch (error) {
8352
8477
  logger21.error({ error }, "chatDataStream error");
8353
- return c.json({ error: "Failed to process chat completion" }, 500);
8478
+ throw agentsCore.createApiError({
8479
+ code: "internal_server_error",
8480
+ message: "Failed to process chat completion"
8481
+ });
8354
8482
  }
8355
8483
  });
8356
8484
  var chatDataStream_default = app3;
@@ -8555,8 +8683,7 @@ var getServer = async (requestContext, executionContext, conversationId, credent
8555
8683
  const { tenantId, projectId, graphId } = executionContext;
8556
8684
  setupTracing(conversationId, tenantId, graphId);
8557
8685
  const agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
8558
- scopes: { tenantId, projectId },
8559
- graphId
8686
+ scopes: { tenantId, projectId, graphId }
8560
8687
  });
8561
8688
  if (!agentGraph) {
8562
8689
  throw new Error("Agent graph not found");
@@ -8576,9 +8703,20 @@ var getServer = async (requestContext, executionContext, conversationId, credent
8576
8703
  },
8577
8704
  async ({ query }) => {
8578
8705
  try {
8706
+ if (!agentGraph.defaultAgentId) {
8707
+ return {
8708
+ content: [
8709
+ {
8710
+ type: "text",
8711
+ text: `Graph does not have a default agent configured`
8712
+ }
8713
+ ],
8714
+ isError: true
8715
+ };
8716
+ }
8579
8717
  const defaultAgentId = agentGraph.defaultAgentId;
8580
8718
  const agentInfo = await agentsCore.getAgentById(dbClient_default)({
8581
- scopes: { tenantId, projectId },
8719
+ scopes: { tenantId, projectId, graphId },
8582
8720
  agentId: defaultAgentId
8583
8721
  });
8584
8722
  if (!agentInfo) {
@@ -8592,18 +8730,19 @@ var getServer = async (requestContext, executionContext, conversationId, credent
8592
8730
  isError: true
8593
8731
  };
8594
8732
  }
8595
- const resolvedContext = await agentsCore.handleContextResolution(
8733
+ const resolvedContext = await agentsCore.handleContextResolution({
8596
8734
  tenantId,
8597
8735
  projectId,
8598
- conversationId,
8599
8736
  graphId,
8737
+ conversationId,
8600
8738
  requestContext,
8601
- dbClient_default,
8739
+ dbClient: dbClient_default,
8602
8740
  credentialStores
8603
- );
8741
+ });
8604
8742
  logger22.info(
8605
8743
  {
8606
8744
  tenantId,
8745
+ projectId,
8607
8746
  graphId,
8608
8747
  conversationId,
8609
8748
  hasContextConfig: !!agentGraph.contextConfigId,
@@ -8665,8 +8804,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8665
8804
  logger22.info({ body }, "Received initialization request");
8666
8805
  const sessionId = nanoid.nanoid();
8667
8806
  const agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
8668
- scopes: { tenantId, projectId },
8669
- graphId
8807
+ scopes: { tenantId, projectId, graphId }
8670
8808
  });
8671
8809
  if (!agentGraph) {
8672
8810
  return c.json(
@@ -8678,6 +8816,16 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8678
8816
  { status: 404 }
8679
8817
  );
8680
8818
  }
8819
+ if (!agentGraph.defaultAgentId) {
8820
+ return c.json(
8821
+ {
8822
+ jsonrpc: "2.0",
8823
+ error: { code: -32001, message: "Graph does not have a default agent configured" },
8824
+ id: body.id || null
8825
+ },
8826
+ { status: 400 }
8827
+ );
8828
+ }
8681
8829
  const conversation = await agentsCore.createOrGetConversation(dbClient_default)({
8682
8830
  id: sessionId,
8683
8831
  tenantId,
@@ -8874,6 +9022,8 @@ app4.delete("/", async (c) => {
8874
9022
  );
8875
9023
  });
8876
9024
  var mcp_default = app4;
9025
+
9026
+ // src/app.ts
8877
9027
  var logger23 = agentsCore.getLogger("agents-run-api");
8878
9028
  function createExecutionHono(serverConfig, credentialStores) {
8879
9029
  const app6 = new zodOpenapi.OpenAPIHono();
@@ -9063,21 +9213,21 @@ function createExecutionHono(serverConfig, credentialStores) {
9063
9213
  app6.route("/v1/mcp", mcp_default);
9064
9214
  app6.route("/agents", agents_default);
9065
9215
  setupOpenAPIRoutes(app6);
9066
- app6.use("/tenants/*", async (c, next) => {
9216
+ app6.use("/tenants/*", async (_c, next) => {
9067
9217
  await next();
9068
- await batchProcessor.forceFlush();
9218
+ await defaultBatchProcessor.forceFlush();
9069
9219
  });
9070
- app6.use("/agents/*", async (c, next) => {
9220
+ app6.use("/agents/*", async (_c, next) => {
9071
9221
  await next();
9072
- await batchProcessor.forceFlush();
9222
+ await defaultBatchProcessor.forceFlush();
9073
9223
  });
9074
- app6.use("/v1/*", async (c, next) => {
9224
+ app6.use("/v1/*", async (_c, next) => {
9075
9225
  await next();
9076
- await batchProcessor.forceFlush();
9226
+ await defaultBatchProcessor.forceFlush();
9077
9227
  });
9078
- app6.use("/api/*", async (c, next) => {
9228
+ app6.use("/api/*", async (_c, next) => {
9079
9229
  await next();
9080
- await batchProcessor.forceFlush();
9230
+ await defaultBatchProcessor.forceFlush();
9081
9231
  });
9082
9232
  const baseApp = new hono.Hono();
9083
9233
  baseApp.route("/", app6);