@inkeep/agents-run-api 0.2.2 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/dist/index.cjs +677 -426
  2. package/dist/index.js +675 -424
  3. package/package.json +2 -2
package/dist/index.cjs CHANGED
@@ -14,6 +14,7 @@ var resources = require('@opentelemetry/resources');
  var sdkNode = require('@opentelemetry/sdk-node');
  var sdkTraceBase = require('@opentelemetry/sdk-trace-base');
  var semanticConventions = require('@opentelemetry/semantic-conventions');
+ var otel = require('@hono/otel');
  var zodOpenapi = require('@hono/zod-openapi');
  var api = require('@opentelemetry/api');
  var hono = require('hono');
@@ -34,7 +35,6 @@ var mcp_js = require('@modelcontextprotocol/sdk/server/mcp.js');
  var streamableHttp_js = require('@modelcontextprotocol/sdk/server/streamableHttp.js');
  var v3 = require('zod/v3');
  var fetchToNode = require('fetch-to-node');
- var otel = require('@hono/otel');

  function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }

@@ -1226,7 +1226,7 @@ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
  throw new Error("Agent ID is required");
  }
  const dbAgent = await agentsCore.getAgentById(dbClient_default)({
- scopes: { tenantId, projectId },
+ scopes: { tenantId, projectId, graphId },
  agentId
  });
  if (!dbAgent) {
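Note on the change above: agent lookups are now scoped to a graph as well as a tenant and project. A minimal before/after sketch of the call shape (dbClient and the id values are illustrative, not taken from the package):

    // 0.2.2 — agent resolved per tenant/project
    await agentsCore.getAgentById(dbClient)({
      scopes: { tenantId: "t1", projectId: "p1" },
      agentId: "qa-agent",
    });

    // 0.3.0 — graphId joins the scope, so the same agentId can resolve
    // differently per graph
    await agentsCore.getAgentById(dbClient)({
      scopes: { tenantId: "t1", projectId: "p1", graphId: "support-graph" },
      agentId: "qa-agent",
    });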
@@ -1245,6 +1245,41 @@
  // src/agents/generateTaskHandler.ts
  init_dbClient();

+ // src/utils/model-resolver.ts
+ init_dbClient();
+ async function resolveModelConfig(graphId, agent) {
+ if (agent.models?.base?.model) {
+ return {
+ base: agent.models.base,
+ structuredOutput: agent.models.structuredOutput || agent.models.base,
+ summarizer: agent.models.summarizer || agent.models.base
+ };
+ }
+ const graph = await agentsCore.getAgentGraphById(dbClient_default)({
+ scopes: { tenantId: agent.tenantId, projectId: agent.projectId, graphId }
+ });
+ if (graph?.models?.base?.model) {
+ return {
+ base: graph.models.base,
+ structuredOutput: agent.models?.structuredOutput || graph.models.structuredOutput || graph.models.base,
+ summarizer: agent.models?.summarizer || graph.models.summarizer || graph.models.base
+ };
+ }
+ const project = await agentsCore.getProject(dbClient_default)({
+ scopes: { tenantId: agent.tenantId, projectId: agent.projectId }
+ });
+ if (project?.models?.base?.model) {
+ return {
+ base: project.models.base,
+ structuredOutput: agent.models?.structuredOutput || project.models.structuredOutput || project.models.base,
+ summarizer: agent.models?.summarizer || project.models.summarizer || project.models.base
+ };
+ }
+ throw new Error(
+ "Base model configuration is required. Please configure models at the project level."
+ );
+ }
+
  // src/agents/Agent.ts
  init_conversations();
  init_dbClient();
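The new resolveModelConfig walks agent → graph → project and falls back per slot (base, structuredOutput, summarizer). A minimal sketch of the behavior, with hypothetical model names and a graph that defines only a base model:

    const agent = {
      tenantId: "t1",
      projectId: "p1",
      // no base model on the agent, only a structured-output override
      models: { structuredOutput: { model: "openai/gpt-4o-mini" } },
    };
    // assuming the graph row carries models.base = { model: "anthropic/claude-sonnet-4" }
    const resolved = await resolveModelConfig("support-graph", agent);
    // resolved.base             -> the graph's base model
    // resolved.structuredOutput -> the agent override (gpt-4o-mini)
    // resolved.summarizer       -> the graph summarizer if set, else the graph base
    // with no base model at agent, graph, or project level the call throws
    // "Base model configuration is required. Please configure models at the project level."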
@@ -1266,24 +1301,19 @@ function completionOp(agentId, iterations) {
  }
  };
  }
- function errorOp(error, agentId) {
+ function errorOp(message, agentId, severity = "error", code) {
  return {
  type: "error",
- ctx: {
- error,
- agent: agentId
- }
+ message,
+ agent: agentId,
+ severity,
+ code,
+ timestamp: Date.now()
  };
  }
  function generateToolId() {
  return `tool_${nanoid.nanoid(8)}`;
  }
- function statusUpdateOp(ctx) {
- return {
- type: "status_update",
- ctx
- };
- }
  var logger4 = agentsCore.getLogger("DataComponentSchema");
  function jsonSchemaToZod(jsonSchema) {
  if (!jsonSchema || typeof jsonSchema !== "object") {
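errorOp now emits a flat error event instead of nesting details under ctx, and statusUpdateOp is removed (status updates now travel as data-summary events through the stream helpers further down). Approximate before/after payloads, with an illustrative message:

    // 0.2.2
    errorOp("Tool call failed", "qa-agent");
    // -> { type: "error", ctx: { error: "Tool call failed", agent: "qa-agent" } }

    // 0.3.0 — severity defaults to "error", code is optional, timestamp is added
    errorOp("Tool call failed", "qa-agent");
    // -> { type: "error", message: "Tool call failed", agent: "qa-agent",
    //      severity: "error", code: undefined, timestamp: 1730000000000 }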
@@ -1413,6 +1443,9 @@ var _ModelFactory = class _ModelFactory {
  );
  }
  const modelSettings = config;
+ if (!modelSettings.model) {
+ throw new Error("Model configuration is required");
+ }
  const modelString = modelSettings.model.trim();
  const { provider, modelName } = _ModelFactory.parseModelString(modelString);
  logger5.debug(
@@ -1539,7 +1572,6 @@ var ModelFactory = _ModelFactory;
  // src/utils/graph-session.ts
  init_conversations();
  init_dbClient();
- var tracer = agentsCore.getTracer("agents-run-api");

  // src/utils/stream-registry.ts
  var streamHelperRegistry = /* @__PURE__ */ new Map();
@@ -1555,6 +1587,7 @@ function getStreamHelper(requestId2) {
  function unregisterStreamHelper(requestId2) {
  streamHelperRegistry.delete(requestId2);
  }
+ var tracer = agentsCore.getTracer("agents-run-api");

  // src/utils/graph-session.ts
  var logger6 = agentsCore.getLogger("GraphSession");
@@ -1863,7 +1896,6 @@ var GraphSession = class {
1863
1896
  }
1864
1897
  this.isGeneratingUpdate = true;
1865
1898
  const statusUpdateState = this.statusUpdateState;
1866
- const graphId = this.graphId;
1867
1899
  try {
1868
1900
  const streamHelper = getStreamHelper(this.sessionId);
1869
1901
  if (!streamHelper) {
@@ -1876,7 +1908,7 @@ var GraphSession = class {
1876
1908
  }
1877
1909
  const now = Date.now();
1878
1910
  const elapsedTime = now - statusUpdateState.startTime;
1879
- let operation;
1911
+ let summaryToSend;
1880
1912
  if (statusUpdateState.config.statusComponents && statusUpdateState.config.statusComponents.length > 0) {
1881
1913
  const result = await this.generateStructuredStatusUpdate(
1882
1914
  this.events.slice(statusUpdateState.lastEventCount),
@@ -1885,32 +1917,30 @@ var GraphSession = class {
1885
1917
  statusUpdateState.summarizerModel,
1886
1918
  this.previousSummaries
1887
1919
  );
1888
- if (result.operations && result.operations.length > 0) {
1889
- for (const op of result.operations) {
1890
- if (!op || !op.type || !op.data || Object.keys(op.data).length === 0) {
1920
+ if (result.summaries && result.summaries.length > 0) {
1921
+ for (const summary of result.summaries) {
1922
+ if (!summary || !summary.type || !summary.data || !summary.data.label || Object.keys(summary.data).length === 0) {
1891
1923
  logger6.warn(
1892
1924
  {
1893
1925
  sessionId: this.sessionId,
1894
- operation: op
1926
+ summary
1895
1927
  },
1896
1928
  "Skipping empty or invalid structured operation"
1897
1929
  );
1898
1930
  continue;
1899
1931
  }
1900
- const operationToSend = {
1901
- type: "status_update",
1902
- ctx: {
1903
- operationType: op.type,
1904
- label: op.data.label,
1905
- data: Object.fromEntries(
1906
- Object.entries(op.data).filter(([key]) => !["label", "type"].includes(key))
1907
- )
1908
- }
1932
+ const summaryToSend2 = {
1933
+ type: summary.data.type || summary.type,
1934
+ // Preserve the actual custom type from LLM
1935
+ label: summary.data.label,
1936
+ details: Object.fromEntries(
1937
+ Object.entries(summary.data).filter(([key]) => !["label", "type"].includes(key))
1938
+ )
1909
1939
  };
1910
- await streamHelper.writeOperation(operationToSend);
1940
+ await streamHelper.writeSummary(summaryToSend2);
1911
1941
  }
1912
- const summaryTexts = result.operations.map(
1913
- (op) => JSON.stringify({ type: op.type, data: op.data })
1942
+ const summaryTexts = result.summaries.map(
1943
+ (summary) => JSON.stringify({ type: summary.type, data: summary.data })
1914
1944
  );
1915
1945
  this.previousSummaries.push(...summaryTexts);
1916
1946
  if (this.statusUpdateState) {
@@ -1927,34 +1957,20 @@ var GraphSession = class {
1927
1957
  this.previousSummaries
1928
1958
  );
1929
1959
  this.previousSummaries.push(summary);
1930
- operation = statusUpdateOp({
1931
- summary,
1932
- eventCount: this.events.length,
1933
- elapsedTime,
1934
- currentPhase: "processing",
1935
- activeAgent: "system",
1936
- graphId,
1937
- sessionId: this.sessionId
1938
- });
1939
1960
  }
1940
1961
  if (this.previousSummaries.length > 3) {
1941
1962
  this.previousSummaries.shift();
1942
1963
  }
1943
- if (!operation || !operation.type || !operation.ctx) {
1964
+ {
1944
1965
  logger6.warn(
1945
1966
  {
1946
1967
  sessionId: this.sessionId,
1947
- operation
1968
+ summaryToSend
1948
1969
  },
1949
1970
  "Skipping empty or invalid status update operation"
1950
1971
  );
1951
1972
  return;
1952
1973
  }
1953
- await streamHelper.writeOperation(operation);
1954
- if (this.statusUpdateState) {
1955
- this.statusUpdateState.lastUpdateTime = now;
1956
- this.statusUpdateState.lastEventCount = this.events.length;
1957
- }
1958
1974
  } catch (error) {
1959
1975
  logger6.error(
1960
1976
  {
@@ -2087,7 +2103,7 @@ ${previousSummaryContext}` : ""}
2087
2103
  Activities:
2088
2104
  ${userVisibleActivities.join("\n") || "No New Activities"}
2089
2105
 
2090
- Describe the ACTUAL finding, result, or specific information discovered (e.g., "Found Slack bot requires admin permissions", "Identified 3 channel types for ingestion", "Configuration requires OAuth token").
2106
+ Create a short 3-5 word label describing the ACTUAL finding. Use sentence case (only capitalize the first word and proper nouns). Examples: "Found admin permissions needed", "Identified three channel types", "OAuth token required".
2091
2107
 
2092
2108
  ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2093
2109
  const prompt = basePrompt;
@@ -2100,6 +2116,9 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2100
2116
  }
2101
2117
  modelToUse = this.statusUpdateState.baseModel;
2102
2118
  }
2119
+ if (!modelToUse) {
2120
+ throw new Error("No model configuration available");
2121
+ }
2103
2122
  const model = ModelFactory.createModel(modelToUse);
2104
2123
  const { text } = await ai.generateText({
2105
2124
  model,
@@ -2209,14 +2228,16 @@ Rules:
2209
2228
  - Fill in data for relevant components only
2210
2229
  - Use 'no_relevant_updates' if nothing substantially new to report. DO NOT WRITE LABELS OR USE OTHER COMPONENTS IF YOU USE THIS COMPONENT.
2211
2230
  - Never repeat previous values, make every update EXTREMELY unique. If you cannot do that the update is not worth mentioning.
2212
- - Labels MUST contain the ACTUAL information discovered ("Found X", "Learned Y", "Discovered Z requires A")
2231
+ - Labels MUST be short 3-5 word phrases with ACTUAL information discovered. NEVER MAKE UP SOMETHING WITHOUT BACKING IT UP WITH ACTUAL INFORMATION.
2232
+ - Use sentence case: only capitalize the first word and proper nouns (e.g., "Admin permissions required", not "Admin Permissions Required"). ALWAYS capitalize the first word of the label.
2213
2233
  - DO NOT use action words like "Searching", "Processing", "Analyzing" - state what was FOUND
2214
2234
  - Include specific details, numbers, requirements, or insights discovered
2235
+ - Examples: "Admin permissions required", "Three OAuth steps found", "Token expires daily"
2215
2236
  - You are ONE unified AI system - NEVER mention agents, transfers, delegations, or routing
2216
- - CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", or any internal system terminology in labels
2237
+ - CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", "artifact", or any internal system terminology in labels or any names of agents, tools, or systems.
2217
2238
  - Present all operations as seamless actions by a single system
2218
2239
  - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE ALL INTERNAL OPERATIONS!
2219
- - Bad examples: "Transferring to search agent", "Delegating task", "Routing request", "Processing request", or not using the no_relevant_updates
2240
+ - Bad examples: "Transferring to search agent", "continuing transfer to qa agent", "Delegating task", "Routing request", "Processing request", "Artifact found", "Artifact saved", or not using the no_relevant_updates
2220
2241
  - Good examples: "Slack bot needs admin privileges", "Found 3-step OAuth flow required", "Channel limit is 500 per workspace", or use the no_relevant_updates component if nothing new to report.
2221
2242
 
2222
2243
  REMEMBER YOU CAN ONLY USE 'no_relevant_updates' ALONE! IT CANNOT BE CONCATENATED WITH OTHER STATUS UPDATES!
@@ -2232,6 +2253,9 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2232
2253
  }
2233
2254
  modelToUse = this.statusUpdateState.baseModel;
2234
2255
  }
2256
+ if (!modelToUse) {
2257
+ throw new Error("No model configuration available");
2258
+ }
2235
2259
  const model = ModelFactory.createModel(modelToUse);
2236
2260
  const { object } = await ai.generateObject({
2237
2261
  model,
@@ -2249,29 +2273,29 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2249
2273
  }
2250
2274
  });
2251
2275
  const result = object;
2252
- const operations = [];
2276
+ const summaries = [];
2253
2277
  for (const [componentId, data] of Object.entries(result)) {
2254
2278
  if (componentId === "no_relevant_updates") {
2255
2279
  continue;
2256
2280
  }
2257
2281
  if (data && typeof data === "object" && Object.keys(data).length > 0) {
2258
- operations.push({
2282
+ summaries.push({
2259
2283
  type: componentId,
2260
2284
  data
2261
2285
  });
2262
2286
  }
2263
2287
  }
2264
2288
  span.setAttributes({
2265
- "operations.count": operations.length,
2289
+ "summaries.count": summaries.length,
2266
2290
  "user_activities.count": userVisibleActivities.length,
2267
2291
  "result_keys.count": Object.keys(result).length
2268
2292
  });
2269
2293
  span.setStatus({ code: api.SpanStatusCode.OK });
2270
- return { operations };
2294
+ return { summaries };
2271
2295
  } catch (error) {
2272
2296
  agentsCore.setSpanWithError(span, error);
2273
2297
  logger6.error({ error }, "Failed to generate structured update, using fallback");
2274
- return { operations: [] };
2298
+ return { summaries: [] };
2275
2299
  } finally {
2276
2300
  span.end();
2277
2301
  }
@@ -2522,6 +2546,9 @@ Make it specific and relevant.`;
2522
2546
  }
2523
2547
  modelToUse = this.statusUpdateState.baseModel;
2524
2548
  }
2549
+ if (!modelToUse) {
2550
+ throw new Error("No model configuration available");
2551
+ }
2525
2552
  const model = ModelFactory.createModel(modelToUse);
2526
2553
  const schema = z5.z.object({
2527
2554
  name: z5.z.string().max(50).describe("Concise, descriptive name for the artifact"),
@@ -2803,6 +2830,7 @@ var _ArtifactParser = class _ArtifactParser {
2803
2830
  }
2804
2831
  for (let i = matches.length - 1; i >= 0; i--) {
2805
2832
  const match = matches[i];
2833
+ if (match.index === void 0) continue;
2806
2834
  const startIdx = match.index;
2807
2835
  const textAfterMatch = text.slice(startIdx);
2808
2836
  if (!textAfterMatch.includes("/>")) {
@@ -2852,7 +2880,8 @@ var _ArtifactParser = class _ArtifactParser {
2852
2880
  taskId,
2853
2881
  name: artifact.name || "Processing...",
2854
2882
  description: artifact.description || "Name and description being generated...",
2855
- artifactType: artifact.metadata?.artifactType,
2883
+ type: artifact.metadata?.artifactType || artifact.artifactType,
2884
+ // Map artifactType to type for consistency
2856
2885
  artifactSummary: artifact.parts?.[0]?.data?.summary || {}
2857
2886
  };
2858
2887
  }
@@ -2869,10 +2898,11 @@ var _ArtifactParser = class _ArtifactParser {
2869
2898
  let lastIndex = 0;
2870
2899
  for (const match of matches) {
2871
2900
  const [fullMatch, artifactId, taskId] = match;
2901
+ if (match.index === void 0) continue;
2872
2902
  const matchStart = match.index;
2873
2903
  if (matchStart > lastIndex) {
2874
2904
  const textBefore = text.slice(lastIndex, matchStart);
2875
- if (textBefore.trim()) {
2905
+ if (textBefore) {
2876
2906
  parts.push({ kind: "text", text: textBefore });
2877
2907
  }
2878
2908
  }
@@ -2884,7 +2914,7 @@ var _ArtifactParser = class _ArtifactParser {
2884
2914
  }
2885
2915
  if (lastIndex < text.length) {
2886
2916
  const remainingText = text.slice(lastIndex);
2887
- if (remainingText.trim()) {
2917
+ if (remainingText) {
2888
2918
  parts.push({ kind: "text", text: remainingText });
2889
2919
  }
2890
2920
  }
@@ -2994,8 +3024,9 @@ __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:
2994
3024
  var ArtifactParser = _ArtifactParser;
2995
3025
 
2996
3026
  // src/utils/incremental-stream-parser.ts
2997
- var logger8 = agentsCore.getLogger("IncrementalStreamParser");
2998
- var IncrementalStreamParser = class {
3027
+ agentsCore.getLogger("IncrementalStreamParser");
3028
+ var _IncrementalStreamParser = class _IncrementalStreamParser {
3029
+ // Max number of streamed component IDs to track
2999
3030
  constructor(streamHelper, tenantId, contextId) {
3000
3031
  __publicField(this, "buffer", "");
3001
3032
  __publicField(this, "pendingTextBuffer", "");
@@ -3005,6 +3036,9 @@ var IncrementalStreamParser = class {
3005
3036
  __publicField(this, "collectedParts", []);
3006
3037
  __publicField(this, "contextId");
3007
3038
  __publicField(this, "lastChunkWasToolResult", false);
3039
+ __publicField(this, "componentAccumulator", {});
3040
+ __publicField(this, "lastStreamedComponents", /* @__PURE__ */ new Map());
3041
+ __publicField(this, "componentSnapshots", /* @__PURE__ */ new Map());
3008
3042
  this.streamHelper = streamHelper;
3009
3043
  this.contextId = contextId;
3010
3044
  this.artifactParser = new ArtifactParser(tenantId);
@@ -3019,7 +3053,7 @@ var IncrementalStreamParser = class {
3019
3053
  * Process a new text chunk for text streaming (handles artifact markers)
3020
3054
  */
3021
3055
  async processTextChunk(chunk) {
3022
- if (this.lastChunkWasToolResult && this.buffer === "" && chunk.trim()) {
3056
+ if (this.lastChunkWasToolResult && this.buffer === "" && chunk) {
3023
3057
  chunk = "\n\n" + chunk;
3024
3058
  this.lastChunkWasToolResult = false;
3025
3059
  }
@@ -3031,100 +3065,122 @@ var IncrementalStreamParser = class {
3031
3065
  this.buffer = parseResult.remainingBuffer;
3032
3066
  }
3033
3067
  /**
3034
- * Process a new object chunk for object streaming (handles JSON objects with artifact references)
3068
+ * Process object deltas directly from Vercel AI SDK's fullStream
3069
+ * Accumulates components and streams them when they're stable (unchanged between deltas)
3035
3070
  */
3036
- async processObjectChunk(chunk) {
3037
- this.buffer += chunk;
3038
- const parseResult = await this.parseObjectBuffer();
3039
- for (const part of parseResult.completeParts) {
3040
- await this.streamPart(part);
3071
+ async processObjectDelta(delta) {
3072
+ if (!delta || typeof delta !== "object") {
3073
+ return;
3041
3074
  }
3042
- this.buffer = parseResult.remainingBuffer;
3043
- }
3044
- /**
3045
- * Process tool call stream for structured output, streaming components as they complete
3046
- */
3047
- async processToolCallStream(stream2, targetToolName) {
3048
- let jsonBuffer = "";
3049
- let componentBuffer = "";
3050
- let depth = 0;
3051
- let componentsStreamed = 0;
3052
- const MAX_BUFFER_SIZE = 5 * 1024 * 1024;
3053
- for await (const part of stream2) {
3054
- if (part.type === "tool-call-delta" && part.toolName === targetToolName) {
3055
- const delta = part.argsTextDelta || "";
3056
- if (jsonBuffer.length + delta.length > MAX_BUFFER_SIZE) {
3057
- logger8.warn(
3058
- { bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE },
3059
- "JSON buffer exceeded maximum size, truncating"
3060
- );
3061
- jsonBuffer = jsonBuffer.slice(-MAX_BUFFER_SIZE / 2);
3075
+ this.componentAccumulator = this.deepMerge(this.componentAccumulator, delta);
3076
+ if (this.componentAccumulator.dataComponents && Array.isArray(this.componentAccumulator.dataComponents)) {
3077
+ const components = this.componentAccumulator.dataComponents;
3078
+ const currentComponentIds = new Set(components.filter((c) => c?.id).map((c) => c.id));
3079
+ for (const [componentId, snapshot] of this.componentSnapshots.entries()) {
3080
+ if (!currentComponentIds.has(componentId) && !this.lastStreamedComponents.has(componentId)) {
3081
+ try {
3082
+ const component = JSON.parse(snapshot);
3083
+ if (this.isComponentComplete(component)) {
3084
+ await this.streamComponent(component);
3085
+ }
3086
+ } catch (e) {
3087
+ }
3062
3088
  }
3063
- jsonBuffer += delta;
3064
- for (const char of delta) {
3065
- if (componentBuffer.length > MAX_BUFFER_SIZE) {
3066
- logger8.warn(
3067
- { bufferSize: componentBuffer.length, maxSize: MAX_BUFFER_SIZE },
3068
- "Component buffer exceeded maximum size, resetting"
3069
- );
3070
- componentBuffer = "";
3071
- depth = 0;
3072
- continue;
3089
+ }
3090
+ for (let i = 0; i < components.length; i++) {
3091
+ const component = components[i];
3092
+ if (!component?.id) continue;
3093
+ const componentKey = component.id;
3094
+ const hasBeenStreamed = this.lastStreamedComponents.has(componentKey);
3095
+ if (hasBeenStreamed) continue;
3096
+ const currentSnapshot = JSON.stringify(component);
3097
+ const previousSnapshot = this.componentSnapshots.get(componentKey);
3098
+ this.componentSnapshots.set(componentKey, currentSnapshot);
3099
+ if (this.componentSnapshots.size > _IncrementalStreamParser.MAX_SNAPSHOT_SIZE) {
3100
+ const firstKey = this.componentSnapshots.keys().next().value;
3101
+ if (firstKey) {
3102
+ this.componentSnapshots.delete(firstKey);
3073
3103
  }
3074
- componentBuffer += char;
3075
- if (char === "{") {
3076
- depth++;
3077
- } else if (char === "}") {
3078
- depth--;
3079
- if (depth === 2 && componentBuffer.includes('"id"')) {
3080
- const componentMatch = componentBuffer.match(/\{[^{}]*(?:\{[^{}]*\}[^{}]*)*\}/);
3081
- if (componentMatch) {
3082
- const MAX_COMPONENT_SIZE = 1024 * 1024;
3083
- if (componentMatch[0].length > MAX_COMPONENT_SIZE) {
3084
- logger8.warn(
3085
- {
3086
- size: componentMatch[0].length,
3087
- maxSize: MAX_COMPONENT_SIZE
3088
- },
3089
- "Component exceeds size limit, skipping"
3090
- );
3091
- componentBuffer = "";
3092
- continue;
3093
- }
3094
- try {
3095
- const component = JSON.parse(componentMatch[0]);
3096
- if (typeof component !== "object" || !component.id) {
3097
- logger8.warn({ component }, "Invalid component structure, skipping");
3098
- componentBuffer = "";
3099
- continue;
3100
- }
3101
- const parts = await this.artifactParser.parseObject({
3102
- dataComponents: [component]
3103
- });
3104
- for (const part2 of parts) {
3105
- await this.streamPart(part2);
3106
- }
3107
- componentsStreamed++;
3108
- componentBuffer = "";
3109
- } catch (e) {
3110
- logger8.debug({ error: e }, "Failed to parse component, continuing to accumulate");
3111
- }
3112
- }
3104
+ }
3105
+ if (component.name === "Text" && component.props?.text) {
3106
+ const previousTextContent = previousSnapshot ? JSON.parse(previousSnapshot).props?.text || "" : "";
3107
+ const currentTextContent = component.props.text || "";
3108
+ if (currentTextContent.length > previousTextContent.length) {
3109
+ const newText = currentTextContent.slice(previousTextContent.length);
3110
+ if (!this.hasStartedRole) {
3111
+ await this.streamHelper.writeRole("assistant");
3112
+ this.hasStartedRole = true;
3113
3113
  }
3114
+ await this.streamHelper.streamText(newText, 50);
3115
+ this.collectedParts.push({
3116
+ kind: "text",
3117
+ text: newText
3118
+ });
3114
3119
  }
3115
- if (componentBuffer.includes('"dataComponents"') && componentBuffer.includes("[")) ;
3120
+ continue;
3116
3121
  }
3117
- } else if (part.type === "tool-call" && part.toolName === targetToolName) {
3118
- if (part.args?.dataComponents) {
3119
- const parts = await this.artifactParser.parseObject(part.args);
3120
- for (const part2 of parts) {
3121
- await this.streamPart(part2);
3122
+ if (this.isComponentComplete(component)) {
3123
+ const currentPropsSnapshot = JSON.stringify(component.props);
3124
+ const previousPropsSnapshot = previousSnapshot ? JSON.stringify(JSON.parse(previousSnapshot).props) : null;
3125
+ if (previousPropsSnapshot === currentPropsSnapshot) {
3126
+ await this.streamComponent(component);
3122
3127
  }
3123
3128
  }
3124
- break;
3125
3129
  }
3126
3130
  }
3127
- logger8.debug({ componentsStreamed }, "Finished streaming components");
3131
+ }
3132
+ /**
3133
+ * Stream a component and mark it as streamed
3134
+ * Note: Text components are handled separately with incremental streaming
3135
+ */
3136
+ async streamComponent(component) {
3137
+ const parts = await this.artifactParser.parseObject({
3138
+ dataComponents: [component]
3139
+ });
3140
+ for (const part of parts) {
3141
+ await this.streamPart(part);
3142
+ }
3143
+ this.lastStreamedComponents.set(component.id, true);
3144
+ if (this.lastStreamedComponents.size > _IncrementalStreamParser.MAX_STREAMED_SIZE) {
3145
+ const firstKey = this.lastStreamedComponents.keys().next().value;
3146
+ if (firstKey) {
3147
+ this.lastStreamedComponents.delete(firstKey);
3148
+ }
3149
+ }
3150
+ this.componentSnapshots.delete(component.id);
3151
+ }
3152
+ /**
3153
+ * Check if a component has the basic structure required for streaming
3154
+ * Requires id, name, and props object with content
3155
+ */
3156
+ isComponentComplete(component) {
3157
+ if (!component || !component.id || !component.name) {
3158
+ return false;
3159
+ }
3160
+ if (!component.props || typeof component.props !== "object") {
3161
+ return false;
3162
+ }
3163
+ const isArtifact = component.name === "Artifact" || component.props.artifact_id && component.props.task_id;
3164
+ if (isArtifact) {
3165
+ return Boolean(component.props.artifact_id && component.props.task_id);
3166
+ }
3167
+ return true;
3168
+ }
3169
+ /**
3170
+ * Deep merge helper for object deltas
3171
+ */
3172
+ deepMerge(target, source) {
3173
+ if (!source) return target;
3174
+ if (!target) return source;
3175
+ const result = { ...target };
3176
+ for (const key in source) {
3177
+ if (source[key] && typeof source[key] === "object" && !Array.isArray(source[key])) {
3178
+ result[key] = this.deepMerge(target[key], source[key]);
3179
+ } else {
3180
+ result[key] = source[key];
3181
+ }
3182
+ }
3183
+ return result;
3128
3184
  }
3129
3185
  /**
3130
3186
  * Legacy method for backward compatibility - defaults to text processing
@@ -3136,15 +3192,40 @@ var IncrementalStreamParser = class {
3136
3192
  * Process any remaining buffer content at the end of stream
3137
3193
  */
3138
3194
  async finalize() {
3139
- if (this.buffer.trim()) {
3195
+ if (this.componentAccumulator.dataComponents && Array.isArray(this.componentAccumulator.dataComponents)) {
3196
+ const components = this.componentAccumulator.dataComponents;
3197
+ for (let i = 0; i < components.length; i++) {
3198
+ const component = components[i];
3199
+ if (!component?.id) continue;
3200
+ const componentKey = component.id;
3201
+ const hasBeenStreamed = this.lastStreamedComponents.has(componentKey);
3202
+ if (!hasBeenStreamed && this.isComponentComplete(component) && component.name !== "Text") {
3203
+ const parts = await this.artifactParser.parseObject({
3204
+ dataComponents: [component]
3205
+ });
3206
+ for (const part of parts) {
3207
+ await this.streamPart(part);
3208
+ }
3209
+ this.lastStreamedComponents.set(componentKey, true);
3210
+ if (this.lastStreamedComponents.size > _IncrementalStreamParser.MAX_STREAMED_SIZE) {
3211
+ const firstKey = this.lastStreamedComponents.keys().next().value;
3212
+ if (firstKey) {
3213
+ this.lastStreamedComponents.delete(firstKey);
3214
+ }
3215
+ }
3216
+ this.componentSnapshots.delete(componentKey);
3217
+ }
3218
+ }
3219
+ }
3220
+ if (this.buffer) {
3140
3221
  const part = {
3141
3222
  kind: "text",
3142
- text: this.buffer.trim()
3223
+ text: this.buffer
3143
3224
  };
3144
3225
  await this.streamPart(part);
3145
3226
  }
3146
- if (this.pendingTextBuffer.trim()) {
3147
- const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "").trim();
3227
+ if (this.pendingTextBuffer) {
3228
+ const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
3148
3229
  if (cleanedText) {
3149
3230
  this.collectedParts.push({
3150
3231
  kind: "text",
@@ -3154,6 +3235,9 @@ var IncrementalStreamParser = class {
3154
3235
  }
3155
3236
  this.pendingTextBuffer = "";
3156
3237
  }
3238
+ this.componentSnapshots.clear();
3239
+ this.lastStreamedComponents.clear();
3240
+ this.componentAccumulator = {};
3157
3241
  }
3158
3242
  /**
3159
3243
  * Get all collected parts for building the final response
@@ -3200,30 +3284,6 @@ var IncrementalStreamParser = class {
3200
3284
  remainingBuffer: ""
3201
3285
  };
3202
3286
  }
3203
- /**
3204
- * Parse buffer for complete JSON objects with artifact references (for object streaming)
3205
- */
3206
- async parseObjectBuffer() {
3207
- const completeParts = [];
3208
- try {
3209
- const parsed = JSON.parse(this.buffer);
3210
- const parts = await this.artifactParser.parseObject(parsed);
3211
- return {
3212
- completeParts: parts,
3213
- remainingBuffer: ""
3214
- };
3215
- } catch {
3216
- const { complete, remaining } = this.artifactParser.parsePartialJSON(this.buffer);
3217
- for (const obj of complete) {
3218
- const parts = await this.artifactParser.parseObject(obj);
3219
- completeParts.push(...parts);
3220
- }
3221
- return {
3222
- completeParts,
3223
- remainingBuffer: remaining
3224
- };
3225
- }
3226
- }
3227
3287
  /**
3228
3288
  * Check if text might be the start of an artifact marker
3229
3289
  */
@@ -3244,7 +3304,7 @@ var IncrementalStreamParser = class {
3244
3304
  this.pendingTextBuffer += part.text;
3245
3305
  if (!this.artifactParser.hasIncompleteArtifact(this.pendingTextBuffer)) {
3246
3306
  const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
3247
- if (cleanedText.trim()) {
3307
+ if (cleanedText) {
3248
3308
  await this.streamHelper.streamText(cleanedText, 50);
3249
3309
  }
3250
3310
  this.pendingTextBuffer = "";
@@ -3252,7 +3312,7 @@ var IncrementalStreamParser = class {
3252
3312
  } else if (part.kind === "data" && part.data) {
3253
3313
  if (this.pendingTextBuffer) {
3254
3314
  const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
3255
- if (cleanedText.trim()) {
3315
+ if (cleanedText) {
3256
3316
  await this.streamHelper.streamText(cleanedText, 50);
3257
3317
  }
3258
3318
  this.pendingTextBuffer = "";
@@ -3266,6 +3326,11 @@ var IncrementalStreamParser = class {
3266
3326
  }
3267
3327
  }
3268
3328
  };
3329
+ // Memory management constants
3330
+ __publicField(_IncrementalStreamParser, "MAX_SNAPSHOT_SIZE", 100);
3331
+ // Max number of snapshots to keep
3332
+ __publicField(_IncrementalStreamParser, "MAX_STREAMED_SIZE", 1e3);
3333
+ var IncrementalStreamParser = _IncrementalStreamParser;
3269
3334
 
3270
3335
  // src/utils/response-formatter.ts
3271
3336
  var logger9 = agentsCore.getLogger("ResponseFormatter");
@@ -4654,7 +4719,8 @@ function createDelegateToAgentTool({
4654
4719
  const externalAgent = await agentsCore.getExternalAgent(dbClient_default)({
4655
4720
  scopes: {
4656
4721
  tenantId,
4657
- projectId
4722
+ projectId,
4723
+ graphId
4658
4724
  },
4659
4725
  agentId: delegateConfig.config.id
4660
4726
  });
@@ -5256,6 +5322,23 @@ var Agent = class {
5256
5322
  __publicField(this, "credentialStoreRegistry");
5257
5323
  this.artifactComponents = config.artifactComponents || [];
5258
5324
  let processedDataComponents = config.dataComponents || [];
5325
+ if (processedDataComponents.length > 0) {
5326
+ processedDataComponents.push({
5327
+ id: "text-content",
5328
+ name: "Text",
5329
+ description: "Natural conversational text for the user - write naturally without mentioning technical details. Avoid redundancy and repetition with data components.",
5330
+ props: {
5331
+ type: "object",
5332
+ properties: {
5333
+ text: {
5334
+ type: "string",
5335
+ description: "Natural conversational text - respond as if having a normal conversation, never mention JSON, components, schemas, or technical implementation. Avoid redundancy and repetition with data components."
5336
+ }
5337
+ },
5338
+ required: ["text"]
5339
+ }
5340
+ });
5341
+ }
5259
5342
  if (this.artifactComponents.length > 0 && config.dataComponents && config.dataComponents.length > 0) {
5260
5343
  processedDataComponents = [
5261
5344
  ArtifactReferenceSchema.getDataComponent(config.tenantId, config.projectId),
@@ -5526,8 +5609,12 @@ var Agent = class {
5526
5609
  async getMcpTool(tool4) {
5527
5610
  const credentialReferenceId = tool4.credentialReferenceId;
5528
5611
  const toolsForAgent = await agentsCore.getToolsForAgent(dbClient_default)({
5529
- scopes: { tenantId: this.config.tenantId, projectId: this.config.projectId },
5530
- agentId: this.config.id
5612
+ scopes: {
5613
+ tenantId: this.config.tenantId,
5614
+ projectId: this.config.projectId,
5615
+ graphId: this.config.graphId,
5616
+ agentId: this.config.id
5617
+ }
5531
5618
  });
5532
5619
  const selectedTools = toolsForAgent.data.find((t) => t.toolId === tool4.id)?.selectedTools || void 0;
5533
5620
  let serverConfig;
@@ -5674,9 +5761,9 @@ var Agent = class {
5674
5761
  const graphDefinition = await agentsCore.getFullGraphDefinition(dbClient_default)({
5675
5762
  scopes: {
5676
5763
  tenantId: this.config.tenantId,
5677
- projectId: this.config.projectId
5678
- },
5679
- graphId: this.config.graphId
5764
+ projectId: this.config.projectId,
5765
+ graphId: this.config.graphId
5766
+ }
5680
5767
  });
5681
5768
  return graphDefinition?.graphPrompt || void 0;
5682
5769
  } catch (error) {
@@ -5698,14 +5785,16 @@ var Agent = class {
5698
5785
  const graphDefinition = await agentsCore.getFullGraphDefinition(dbClient_default)({
5699
5786
  scopes: {
5700
5787
  tenantId: this.config.tenantId,
5701
- projectId: this.config.projectId
5702
- },
5703
- graphId: this.config.graphId
5788
+ projectId: this.config.projectId,
5789
+ graphId: this.config.graphId
5790
+ }
5704
5791
  });
5705
5792
  if (!graphDefinition) {
5706
5793
  return false;
5707
5794
  }
5708
- return !!(graphDefinition.artifactComponents && Object.keys(graphDefinition.artifactComponents).length > 0);
5795
+ return Object.values(graphDefinition.agents).some(
5796
+ (agent) => "artifactComponents" in agent && agent.artifactComponents && agent.artifactComponents.length > 0
5797
+ );
5709
5798
  } catch (error) {
5710
5799
  logger15.warn(
5711
5800
  {
@@ -5733,7 +5822,8 @@ Key requirements:
5733
5822
  - Mix artifact references throughout your dataComponents array
5734
5823
  - Each artifact reference must use EXACT IDs from tool outputs
5735
5824
  - Reference artifacts that directly support the adjacent information
5736
- - Follow the pattern: Data \u2192 Supporting Artifact \u2192 Next Data \u2192 Next Artifact`;
5825
+ - Follow the pattern: Data \u2192 Supporting Artifact \u2192 Next Data \u2192 Next Artifact
5826
+ - IMPORTANT: In Text components, write naturally as if having a conversation - do NOT mention components, schemas, JSON, structured data, or any technical implementation details`;
5737
5827
  }
5738
5828
  if (hasDataComponents && !hasArtifactComponents) {
5739
5829
  return `Generate the final structured JSON response using the configured data components. Organize the information from the research above into the appropriate structured format based on the available component schemas.
@@ -5741,7 +5831,8 @@ Key requirements:
5741
5831
  Key requirements:
5742
5832
  - Use the exact component structure and property names
5743
5833
  - Fill in all relevant data from the research
5744
- - Ensure data is organized logically and completely`;
5834
+ - Ensure data is organized logically and completely
5835
+ - IMPORTANT: In Text components, write naturally as if having a conversation - do NOT mention components, schemas, JSON, structured data, or any technical implementation details`;
5745
5836
  }
5746
5837
  if (!hasDataComponents && hasArtifactComponents) {
5747
5838
  return `Generate the final structured response with artifact references based on the research above. Use the artifact reference component to cite relevant information with exact artifact_id and task_id values from the tool outputs.
@@ -5751,7 +5842,7 @@ Key requirements:
5751
5842
  - Reference artifacts that support your response
5752
5843
  - Never make up or modify artifact IDs`;
5753
5844
  }
5754
- return `Generate the final response based on the research above.`;
5845
+ return `Generate the final response based on the research above. Write naturally as if having a conversation.`;
5755
5846
  }
5756
5847
  async buildSystemPrompt(runtimeContext, excludeDataComponents = false) {
5757
5848
  const conversationId = runtimeContext?.metadata?.conversationId || runtimeContext?.contextId;
@@ -5902,9 +5993,9 @@ Key requirements:
5902
5993
  return await agentsCore.graphHasArtifactComponents(dbClient_default)({
5903
5994
  scopes: {
5904
5995
  tenantId: this.config.tenantId,
5905
- projectId: this.config.projectId
5906
- },
5907
- graphId: this.config.graphId
5996
+ projectId: this.config.projectId,
5997
+ graphId: this.config.graphId
5998
+ }
5908
5999
  });
5909
6000
  } catch (error) {
5910
6001
  logger15.error(
@@ -6250,35 +6341,94 @@ ${output}`;
6250
6341
  this.getStructuredOutputModel()
6251
6342
  );
6252
6343
  const phase2TimeoutMs = structuredModelSettings.maxDuration ? structuredModelSettings.maxDuration * 1e3 : CONSTANTS.PHASE_2_TIMEOUT_MS;
6253
- const structuredResponse = await ai.generateObject({
6254
- ...structuredModelSettings,
6255
- messages: [
6256
- { role: "user", content: userMessage },
6257
- ...reasoningFlow,
6258
- {
6259
- role: "user",
6260
- content: await this.buildPhase2SystemPrompt()
6261
- }
6262
- ],
6263
- schema: z5.z.object({
6264
- dataComponents: z5.z.array(dataComponentsSchema)
6265
- }),
6266
- experimental_telemetry: {
6267
- isEnabled: true,
6268
- functionId: this.config.id,
6269
- recordInputs: true,
6270
- recordOutputs: true,
6271
- metadata: {
6272
- phase: "structured_generation"
6344
+ const shouldStreamPhase2 = this.getStreamingHelper();
6345
+ if (shouldStreamPhase2) {
6346
+ const streamResult = ai.streamObject({
6347
+ ...structuredModelSettings,
6348
+ messages: [
6349
+ { role: "user", content: userMessage },
6350
+ ...reasoningFlow,
6351
+ {
6352
+ role: "user",
6353
+ content: await this.buildPhase2SystemPrompt()
6354
+ }
6355
+ ],
6356
+ schema: z5.z.object({
6357
+ dataComponents: z5.z.array(dataComponentsSchema)
6358
+ }),
6359
+ experimental_telemetry: {
6360
+ isEnabled: true,
6361
+ functionId: this.config.id,
6362
+ recordInputs: true,
6363
+ recordOutputs: true,
6364
+ metadata: {
6365
+ phase: "structured_generation"
6366
+ }
6367
+ },
6368
+ abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6369
+ });
6370
+ const streamHelper = this.getStreamingHelper();
6371
+ if (!streamHelper) {
6372
+ throw new Error("Stream helper is unexpectedly undefined in streaming context");
6373
+ }
6374
+ const parser = new IncrementalStreamParser(
6375
+ streamHelper,
6376
+ this.config.tenantId,
6377
+ contextId
6378
+ );
6379
+ for await (const delta of streamResult.partialObjectStream) {
6380
+ if (delta) {
6381
+ await parser.processObjectDelta(delta);
6273
6382
  }
6274
- },
6275
- abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6276
- });
6277
- response = {
6278
- ...response,
6279
- object: structuredResponse.object
6280
- };
6281
- textResponse = JSON.stringify(structuredResponse.object, null, 2);
6383
+ }
6384
+ await parser.finalize();
6385
+ const structuredResponse = await streamResult;
6386
+ const collectedParts = parser.getCollectedParts();
6387
+ if (collectedParts.length > 0) {
6388
+ response.formattedContent = {
6389
+ parts: collectedParts.map((part) => ({
6390
+ kind: part.kind,
6391
+ ...part.kind === "text" && { text: part.text },
6392
+ ...part.kind === "data" && { data: part.data }
6393
+ }))
6394
+ };
6395
+ }
6396
+ response = {
6397
+ ...response,
6398
+ object: structuredResponse.object
6399
+ };
6400
+ textResponse = JSON.stringify(structuredResponse.object, null, 2);
6401
+ } else {
6402
+ const structuredResponse = await ai.generateObject({
6403
+ ...structuredModelSettings,
6404
+ messages: [
6405
+ { role: "user", content: userMessage },
6406
+ ...reasoningFlow,
6407
+ {
6408
+ role: "user",
6409
+ content: await this.buildPhase2SystemPrompt()
6410
+ }
6411
+ ],
6412
+ schema: z5.z.object({
6413
+ dataComponents: z5.z.array(dataComponentsSchema)
6414
+ }),
6415
+ experimental_telemetry: {
6416
+ isEnabled: true,
6417
+ functionId: this.config.id,
6418
+ recordInputs: true,
6419
+ recordOutputs: true,
6420
+ metadata: {
6421
+ phase: "structured_generation"
6422
+ }
6423
+ },
6424
+ abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6425
+ });
6426
+ response = {
6427
+ ...response,
6428
+ object: structuredResponse.object
6429
+ };
6430
+ textResponse = JSON.stringify(structuredResponse.object, null, 2);
6431
+ }
6282
6432
  } else {
6283
6433
  textResponse = response.text || "";
6284
6434
  }
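When a stream helper is registered, phase 2 now runs ai.streamObject and feeds partialObjectStream deltas through the incremental parser instead of making a single generateObject call; the non-streaming branch keeps the old behavior. A condensed sketch that mirrors the published code (messages, telemetry options, and the parser construction elided):

    const streamResult = ai.streamObject({
      ...structuredModelSettings,
      schema: z5.z.object({ dataComponents: z5.z.array(dataComponentsSchema) }),
      messages,
      abortSignal: AbortSignal.timeout(phase2TimeoutMs),
    });
    for await (const delta of streamResult.partialObjectStream) {
      if (delta) await parser.processObjectDelta(delta); // components stream as they stabilize
    }
    await parser.finalize(); // flush anything complete but not yet streamed
    const structuredResponse = await streamResult;
    response = { ...response, object: structuredResponse.object };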
@@ -6324,42 +6474,6 @@ ${output}`;
6324
6474
  }
6325
6475
  };
6326
6476
 
6327
- // src/utils/model-resolver.ts
6328
- init_dbClient();
6329
- async function resolveModelConfig(graphId, agent) {
6330
- if (agent.models?.base?.model) {
6331
- return {
6332
- base: agent.models.base,
6333
- structuredOutput: agent.models.structuredOutput || agent.models.base,
6334
- summarizer: agent.models.summarizer || agent.models.base
6335
- };
6336
- }
6337
- const graph = await agentsCore.getAgentGraph(dbClient_default)({
6338
- scopes: { tenantId: agent.tenantId, projectId: agent.projectId },
6339
- graphId
6340
- });
6341
- if (graph?.models?.base?.model) {
6342
- return {
6343
- base: graph.models.base,
6344
- structuredOutput: agent.models?.structuredOutput || graph.models.structuredOutput || graph.models.base,
6345
- summarizer: agent.models?.summarizer || graph.models.summarizer || graph.models.base
6346
- };
6347
- }
6348
- const project = await agentsCore.getProject(dbClient_default)({
6349
- scopes: { tenantId: agent.tenantId, projectId: agent.projectId }
6350
- });
6351
- if (project?.models?.base?.model) {
6352
- return {
6353
- base: project.models.base,
6354
- structuredOutput: agent.models?.structuredOutput || project.models.structuredOutput || project.models.base,
6355
- summarizer: agent.models?.summarizer || project.models.summarizer || project.models.base
6356
- };
6357
- }
6358
- throw new Error(
6359
- "Base model configuration is required. Please configure models at the project level."
6360
- );
6361
- }
6362
-
6363
6477
  // src/agents/generateTaskHandler.ts
6364
6478
  function parseEmbeddedJson(data) {
6365
6479
  return traverse__default.default(data).map(function(x) {
@@ -6394,31 +6508,34 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6394
6508
  agentsCore.getRelatedAgentsForGraph(dbClient_default)({
6395
6509
  scopes: {
6396
6510
  tenantId: config.tenantId,
6397
- projectId: config.projectId
6511
+ projectId: config.projectId,
6512
+ graphId: config.graphId
6398
6513
  },
6399
- graphId: config.graphId,
6400
6514
  agentId: config.agentId
6401
6515
  }),
6402
6516
  agentsCore.getToolsForAgent(dbClient_default)({
6403
6517
  scopes: {
6404
6518
  tenantId: config.tenantId,
6405
- projectId: config.projectId
6406
- },
6407
- agentId: config.agentId
6519
+ projectId: config.projectId,
6520
+ graphId: config.graphId,
6521
+ agentId: config.agentId
6522
+ }
6408
6523
  }),
6409
6524
  agentsCore.getDataComponentsForAgent(dbClient_default)({
6410
6525
  scopes: {
6411
6526
  tenantId: config.tenantId,
6412
- projectId: config.projectId
6413
- },
6414
- agentId: config.agentId
6527
+ projectId: config.projectId,
6528
+ graphId: config.graphId,
6529
+ agentId: config.agentId
6530
+ }
6415
6531
  }),
6416
6532
  agentsCore.getArtifactComponentsForAgent(dbClient_default)({
6417
6533
  scopes: {
6418
6534
  tenantId: config.tenantId,
6419
- projectId: config.projectId
6420
- },
6421
- agentId: config.agentId
6535
+ projectId: config.projectId,
6536
+ graphId: config.graphId,
6537
+ agentId: config.agentId
6538
+ }
6422
6539
  })
6423
6540
  ]);
6424
6541
  logger16.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
@@ -6426,13 +6543,16 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6426
6543
  internalRelations.map(async (relation) => {
6427
6544
  try {
6428
6545
  const relatedAgent = await agentsCore.getAgentById(dbClient_default)({
6429
- scopes: { tenantId: config.tenantId, projectId: config.projectId },
6546
+ scopes: {
6547
+ tenantId: config.tenantId,
6548
+ projectId: config.projectId,
6549
+ graphId: config.graphId
6550
+ },
6430
6551
  agentId: relation.id
6431
6552
  });
6432
6553
  if (relatedAgent) {
6433
6554
  const relatedAgentRelations = await agentsCore.getRelatedAgentsForGraph(dbClient_default)({
6434
- scopes: { tenantId: config.tenantId, projectId: config.projectId },
6435
- graphId: config.graphId,
6555
+ scopes: { tenantId: config.tenantId, projectId: config.projectId, graphId: config.graphId },
6436
6556
  agentId: relation.id
6437
6557
  });
6438
6558
  const enhancedDescription = generateDescriptionWithTransfers(
@@ -6655,16 +6775,17 @@ var createTaskHandlerConfig = async (params) => {
6655
6775
  const agent = await agentsCore.getAgentById(dbClient_default)({
6656
6776
  scopes: {
6657
6777
  tenantId: params.tenantId,
6658
- projectId: params.projectId
6778
+ projectId: params.projectId,
6779
+ graphId: params.graphId
6659
6780
  },
6660
6781
  agentId: params.agentId
6661
6782
  });
6662
- const agentGraph = await agentsCore.getAgentGraph(dbClient_default)({
6783
+ const agentGraph = await agentsCore.getAgentGraphById(dbClient_default)({
6663
6784
  scopes: {
6664
6785
  tenantId: params.tenantId,
6665
- projectId: params.projectId
6666
- },
6667
- graphId: params.graphId
6786
+ projectId: params.projectId,
6787
+ graphId: params.graphId
6788
+ }
6668
6789
  });
6669
6790
  if (!agent) {
6670
6791
  throw new Error(`Agent not found: ${params.agentId}`);
@@ -6704,10 +6825,14 @@ async function hydrateGraph({
6704
6825
  apiKey
6705
6826
  }) {
6706
6827
  try {
6828
+ if (!dbGraph.defaultAgentId) {
6829
+ throw new Error(`Graph ${dbGraph.id} does not have a default agent configured`);
6830
+ }
6707
6831
  const defaultAgent = await agentsCore.getAgentById(dbClient_default)({
6708
6832
  scopes: {
6709
6833
  tenantId: dbGraph.tenantId,
6710
- projectId: dbGraph.projectId
6834
+ projectId: dbGraph.projectId,
6835
+ graphId: dbGraph.id
6711
6836
  },
6712
6837
  agentId: dbGraph.defaultAgentId
6713
6838
  });
@@ -6762,7 +6887,7 @@ async function hydrateGraph({
6762
6887
  }
6763
6888
  async function getRegisteredGraph(executionContext) {
6764
6889
  const { tenantId, projectId, graphId, baseUrl, apiKey } = executionContext;
6765
- const dbGraph = await agentsCore.getAgentGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
6890
+ const dbGraph = await agentsCore.getAgentGraphById(dbClient_default)({ scopes: { tenantId, projectId, graphId } });
6766
6891
  if (!dbGraph) {
6767
6892
  return null;
6768
6893
  }
@@ -6821,6 +6946,7 @@ app.openapi(
6821
6946
  );
6822
6947
  const executionContext = agentsCore.getRequestExecutionContext(c);
6823
6948
  const { tenantId, projectId, graphId, agentId } = executionContext;
6949
+ console.dir("executionContext", executionContext);
6824
6950
  if (agentId) {
6825
6951
  logger17.info(
6826
6952
  {
@@ -6836,7 +6962,10 @@ app.openapi(
6836
6962
  const agent = await getRegisteredAgent(executionContext, credentialStores);
6837
6963
  logger17.info({ agent }, "agent registered: well-known agent.json");
6838
6964
  if (!agent) {
6839
- return c.json({ error: "Agent not found" }, 404);
6965
+ throw agentsCore.createApiError({
6966
+ code: "not_found",
6967
+ message: "Agent not found"
6968
+ });
6840
6969
  }
6841
6970
  return c.json(agent.agentCard);
6842
6971
  } else {
@@ -6851,7 +6980,10 @@ app.openapi(
6851
6980
  );
6852
6981
  const graph = await getRegisteredGraph(executionContext);
6853
6982
  if (!graph) {
6854
- return c.json({ error: "Graph not found" }, 404);
6983
+ throw agentsCore.createApiError({
6984
+ code: "not_found",
6985
+ message: "Graph not found"
6986
+ });
6855
6987
  }
6856
6988
  return c.json(graph.agentCard);
6857
6989
  }
@@ -6908,8 +7040,7 @@ app.post("/a2a", async (c) => {
6908
7040
  "graph-level a2a endpoint"
6909
7041
  );
6910
7042
  const graph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
6911
- scopes: { tenantId, projectId },
6912
- graphId
7043
+ scopes: { tenantId, projectId, graphId }
6913
7044
  });
6914
7045
  if (!graph) {
6915
7046
  return c.json(
@@ -6921,6 +7052,16 @@ app.post("/a2a", async (c) => {
6921
7052
  404
6922
7053
  );
6923
7054
  }
7055
+ if (!graph.defaultAgentId) {
7056
+ return c.json(
7057
+ {
7058
+ jsonrpc: "2.0",
7059
+ error: { code: -32004, message: "Graph does not have a default agent configured" },
7060
+ id: null
7061
+ },
7062
+ 400
7063
+ );
7064
+ }
6924
7065
  executionContext.agentId = graph.defaultAgentId;
6925
7066
  const credentialStores = c.get("credentialStores");
6926
7067
  const defaultAgent = await getRegisteredAgent(executionContext, credentialStores);
@@ -6964,6 +7105,9 @@ function isTransferResponse(result) {
6964
7105
  (artifact) => artifact.parts.some((part) => part.kind === "data" && part.data?.type === "transfer")
6965
7106
  );
6966
7107
  }
7108
+
7109
+ // src/handlers/executionHandler.ts
7110
+ init_dbClient();
6967
7111
  var SSEStreamHelper = class {
6968
7112
  constructor(stream2, requestId2, timestamp) {
6969
7113
  this.stream = stream2;
@@ -6971,7 +7115,7 @@ var SSEStreamHelper = class {
6971
7115
  this.timestamp = timestamp;
6972
7116
  // Stream queuing for proper event ordering
6973
7117
  __publicField(this, "isTextStreaming", false);
6974
- __publicField(this, "queuedOperations", []);
7118
+ __publicField(this, "queuedEvents", []);
6975
7119
  }
6976
7120
  /**
6977
7121
  * Write the initial role message
@@ -7036,9 +7180,10 @@ var SSEStreamHelper = class {
7036
7180
  await this.writeContent(JSON.stringify(data));
7037
7181
  }
7038
7182
  /**
7039
- * Write error message
7183
+ * Write error message or error event
7040
7184
  */
7041
- async writeError(errorMessage) {
7185
+ async writeError(error) {
7186
+ const errorMessage = typeof error === "string" ? error : error.message;
7042
7187
  await this.writeContent(`
7043
7188
 
7044
7189
  ${errorMessage}`);
@@ -7062,22 +7207,6 @@ ${errorMessage}`);
7062
7207
  })
7063
7208
  });
7064
7209
  }
7065
- /**
7066
- * Write the final [DONE] message
7067
- */
7068
- async writeDone() {
7069
- await this.stream.writeSSE({
7070
- data: "[DONE]"
7071
- });
7072
- }
7073
- /**
7074
- * Complete the stream with finish reason and done message
7075
- */
7076
- async complete(finishReason = "stop") {
7077
- await this.flushQueuedOperations();
7078
- await this.writeCompletion(finishReason);
7079
- await this.writeDone();
7080
- }
7081
7210
  async writeData(type, data) {
7082
7211
  await this.stream.writeSSE({
7083
7212
  data: JSON.stringify({
@@ -7096,16 +7225,23 @@
  })
  });
  }
- async writeOperation(operation) {
- if (operation.type === "status_update" && operation.ctx.label) {
- operation = {
- type: operation.type,
- label: operation.ctx.label,
- ctx: operation.ctx.data
- };
+ async writeSummary(summary) {
+ if (this.isTextStreaming) {
+ this.queuedEvents.push({
+ type: "data-summary",
+ event: summary
+ });
+ return;
  }
+ await this.flushQueuedOperations();
+ await this.writeData("data-summary", summary);
+ }
+ async writeOperation(operation) {
  if (this.isTextStreaming) {
- this.queuedOperations.push(operation);
+ this.queuedEvents.push({
+ type: "data-operation",
+ event: operation
+ });
  return;
  }
  await this.flushQueuedOperations();
@@ -7115,15 +7251,31 @@
  * Flush all queued operations in order after text streaming completes
  */
  async flushQueuedOperations() {
- if (this.queuedOperations.length === 0) {
+ if (this.queuedEvents.length === 0) {
  return;
  }
- const operationsToFlush = [...this.queuedOperations];
- this.queuedOperations = [];
- for (const operation of operationsToFlush) {
- await this.writeData("data-operation", operation);
+ const eventsToFlush = [...this.queuedEvents];
+ this.queuedEvents = [];
+ for (const event of eventsToFlush) {
+ await this.writeData(event.type, event.event);
  }
  }
+ /**
+ * Write the final [DONE] message
+ */
+ async writeDone() {
+ await this.stream.writeSSE({
+ data: "[DONE]"
+ });
+ }
+ /**
+ * Complete the stream with finish reason and done message
+ */
+ async complete(finishReason = "stop") {
+ await this.flushQueuedOperations();
+ await this.writeCompletion(finishReason);
+ await this.writeDone();
+ }
  };
  function createSSEStreamHelper(stream2, requestId2, timestamp) {
  return new SSEStreamHelper(stream2, requestId2, timestamp);
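Both stream helpers now queue data-summary and data-operation events while text is actively streaming and replay them afterwards, so text is never interleaved with data frames. Roughly, where helper stands in for an SSEStreamHelper or VercelDataStreamHelper instance and the event values are illustrative:

    // during an active text segment
    await helper.writeSummary({ type: "progress", label: "Admin permissions required", details: {} });
    // -> buffered as { type: "data-summary", event: ... } on queuedEvents
    await helper.writeOperation(completionOp("qa-agent", 2));
    // -> buffered as { type: "data-operation", event: ... }

    // once the text segment ends, flushQueuedOperations() replays the queue in
    // arrival order, and complete() finishes with writeCompletion(...) plus [DONE]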
@@ -7143,7 +7295,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  __publicField(this, "isCompleted", false);
  // Stream queuing for proper event ordering
  __publicField(this, "isTextStreaming", false);
- __publicField(this, "queuedOperations", []);
+ __publicField(this, "queuedEvents", []);
  // Timing tracking for text sequences (text-end to text-start gap)
  __publicField(this, "lastTextEndTimestamp", 0);
  __publicField(this, "TEXT_GAP_THRESHOLD", 50);
@@ -7255,15 +7407,24 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  data
  });
  }
- async writeError(errorMessage) {
+ async writeError(error) {
  if (this.isCompleted) {
  console.warn("Attempted to write error to completed stream");
  return;
  }
- this.writer.write({
- type: "error",
- errorText: errorMessage
- });
+ if (typeof error === "string") {
+ this.writer.write({
+ type: "error",
+ message: error,
+ severity: "error",
+ timestamp: Date.now()
+ });
+ } else {
+ this.writer.write({
+ ...error,
+ type: "error"
+ });
+ }
  }
  async streamData(data) {
  await this.writeContent(JSON.stringify(data));
@@ -7275,20 +7436,6 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  }
  this.writer.merge(stream2);
  }
- async writeCompletion(_finishReason = "stop") {
- }
- async writeDone() {
- }
- /**
- * Complete the stream and clean up all memory
- * This is the primary cleanup point to prevent memory leaks between requests
- */
- async complete() {
- if (this.isCompleted) return;
- await this.flushQueuedOperations();
- this.isCompleted = true;
- this.cleanup();
- }
  /**
  * Clean up all memory allocations
  * Should be called when the stream helper is no longer needed
@@ -7302,7 +7449,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  this.sentItems.clear();
  this.completedItems.clear();
  this.textId = null;
- this.queuedOperations = [];
+ this.queuedEvents = [];
  this.isTextStreaming = false;
  }
  /**
@@ -7368,7 +7515,9 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  if (this.writer && !this.isCompleted) {
  this.writer.write({
  type: "error",
- errorText: `Stream terminated: ${reason}`
+ message: `Stream terminated: ${reason}`,
+ severity: "error",
+ timestamp: Date.now()
  });
  }
  } catch (e) {
@@ -7391,23 +7540,33 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  isCompleted: this.isCompleted
  };
  }
+ async writeSummary(summary) {
+ if (this.isCompleted) {
+ console.warn("Attempted to write summary to completed stream");
+ return;
+ }
+ const now = Date.now();
+ const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
+ if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
+ this.queuedEvents.push({ type: "data-summary", event: summary });
+ return;
+ }
+ await this.flushQueuedOperations();
+ await this.writer.write({
+ id: "id" in summary ? summary.id : void 0,
+ type: "data-summary",
+ data: summary
+ });
+ }
  async writeOperation(operation) {
  if (this.isCompleted) {
  console.warn("Attempted to write operation to completed stream");
  return;
  }
- if (operation.type === "status_update" && operation.ctx.label) {
- operation = {
- type: operation.type,
- label: operation.ctx.label,
- // Preserve the label for the UI
- ctx: operation.ctx.data
- };
- }
  const now = Date.now();
  const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
  if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
- this.queuedOperations.push(operation);
+ this.queuedEvents.push({ type: "data-operation", event: operation });
  return;
  }
  await this.flushQueuedOperations();
@@ -7421,19 +7580,33 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
  * Flush all queued operations in order after text streaming completes
  */
  async flushQueuedOperations() {
- if (this.queuedOperations.length === 0) {
+ if (this.queuedEvents.length === 0) {
  return;
  }
- const operationsToFlush = [...this.queuedOperations];
- this.queuedOperations = [];
- for (const operation of operationsToFlush) {
+ const eventsToFlush = [...this.queuedEvents];
+ this.queuedEvents = [];
+ for (const event of eventsToFlush) {
  this.writer.write({
- id: "id" in operation ? operation.id : void 0,
- type: "data-operation",
- data: operation
+ id: "id" in event.event ? event.event.id : void 0,
+ type: event.type,
+ data: event.event
  });
  }
  }
+ async writeCompletion(_finishReason = "stop") {
+ }
+ async writeDone() {
+ }
+ /**
+ * Complete the stream and clean up all memory
+ * This is the primary cleanup point to prevent memory leaks between requests
+ */
+ async complete() {
+ if (this.isCompleted) return;
+ await this.flushQueuedOperations();
+ this.isCompleted = true;
+ this.cleanup();
+ }
  };
  // Memory management - focused on connection completion cleanup
  __publicField(_VercelDataStreamHelper, "MAX_BUFFER_SIZE", 5 * 1024 * 1024);
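Two behavioural changes run through the Vercel data-stream helper above: queued entries now record which UI part type they belong to (data-operation or data-summary) so they replay in order once a text part closes, and writeError accepts either a plain string or an already-structured error object. A rough sketch of both, assuming only that writer.write takes a single part object:

// Sketch under assumptions: `writer` is anything with a write(part) method.
type QueuedEvent = { type: "data-operation" | "data-summary"; event: Record<string, unknown> };

interface PartWriter {
  write(part: Record<string, unknown>): void;
}

function flushQueuedEvents(writer: PartWriter, queuedEvents: QueuedEvent[]): void {
  // Replay in arrival order, preserving the original part type per entry.
  for (const { type, event } of queuedEvents.splice(0, queuedEvents.length)) {
    writer.write({ id: "id" in event ? event.id : undefined, type, data: event });
  }
}

function writeError(writer: PartWriter, error: string | { message: string }): void {
  // Strings get wrapped into the structured shape used by the new error parts.
  const payload =
    typeof error === "string"
      ? { message: error, severity: "error", timestamp: Date.now() }
      : error;
  writer.write({ ...payload, type: "error" });
}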
@@ -7446,6 +7619,7 @@ var MCPStreamHelper = class {
  __publicField(this, "capturedText", "");
  __publicField(this, "capturedData", []);
  __publicField(this, "capturedOperations", []);
+ __publicField(this, "capturedSummaries", []);
  __publicField(this, "hasError", false);
  __publicField(this, "errorMessage", "");
  __publicField(this, "sessionId");
@@ -7464,18 +7638,27 @@ var MCPStreamHelper = class {
  async streamData(data) {
  this.capturedData.push(data);
  }
+ async streamSummary(summary) {
+ this.capturedSummaries.push(summary);
+ }
+ async streamOperation(operation) {
+ this.capturedOperations.push(operation);
+ }
  async writeData(_type, data) {
  this.capturedData.push(data);
  }
- async writeError(errorMessage) {
- this.hasError = true;
- this.errorMessage = errorMessage;
- }
- async complete() {
+ async writeSummary(summary) {
+ this.capturedSummaries.push(summary);
  }
  async writeOperation(operation) {
  this.capturedOperations.push(operation);
  }
+ async writeError(error) {
+ this.hasError = true;
+ this.errorMessage = typeof error === "string" ? error : error.message;
+ }
+ async complete() {
+ }
  /**
  * Get the captured response for MCP tool result
  */
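Unlike the streaming helpers, MCPStreamHelper buffers everything and hands it back as a single tool result; 0.3.0 adds a dedicated buffer for summaries and gives writeError the same string-or-object union. A compact sketch of the capture pattern: field names follow the diff, while the result bundle returned by getResult is an assumption, since that method's body sits outside this hunk:

// Sketch: capture-only stream helper for MCP tool responses.
class MiniMCPCapture {
  capturedText = "";
  capturedData: unknown[] = [];
  capturedOperations: unknown[] = [];
  capturedSummaries: unknown[] = [];
  hasError = false;
  errorMessage = "";

  async writeSummary(summary: unknown): Promise<void> {
    this.capturedSummaries.push(summary);
  }

  async writeOperation(operation: unknown): Promise<void> {
    this.capturedOperations.push(operation);
  }

  async writeError(error: string | { message: string }): Promise<void> {
    this.hasError = true;
    this.errorMessage = typeof error === "string" ? error : error.message;
  }

  // Assumed result shape: bundles the buffers for the MCP tool reply.
  getResult() {
    return {
      text: this.capturedText,
      data: this.capturedData,
      operations: this.capturedOperations,
      summaries: this.capturedSummaries,
      error: this.hasError ? this.errorMessage : undefined,
    };
  }
}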
@@ -7494,7 +7677,6 @@ function createMCPStreamHelper() {
  }
 
  // src/handlers/executionHandler.ts
- init_dbClient();
  var logger19 = agentsCore.getLogger("ExecutionHandler");
  var ExecutionHandler = class {
  constructor() {
@@ -7523,7 +7705,7 @@ var ExecutionHandler = class {
  logger19.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
  let graphConfig = null;
  try {
- graphConfig = await agentsCore.getFullGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
+ graphConfig = await agentsCore.getFullGraph(dbClient_default)({ scopes: { tenantId, projectId, graphId } });
  if (graphConfig?.statusUpdates && graphConfig.statusUpdates.enabled !== false) {
  graphSessionManager.initializeStatusUpdates(
  requestId2,
@@ -7677,7 +7859,6 @@ var ExecutionHandler = class {
  if (errorCount >= this.MAX_ERRORS) {
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
  logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
- await sseHelper.writeError(errorMessage2);
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
  if (task) {
  await agentsCore.updateTask(dbClient_default)({
@@ -7818,7 +7999,6 @@ var ExecutionHandler = class {
  if (errorCount >= this.MAX_ERRORS) {
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
  logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
- await sseHelper.writeError(errorMessage2);
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
  if (task) {
  await agentsCore.updateTask(dbClient_default)({
@@ -7840,7 +8020,6 @@ var ExecutionHandler = class {
  }
  const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
  logger19.error({ maxTransfers, iterations }, errorMessage);
- await sseHelper.writeError(errorMessage);
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
  if (task) {
  await agentsCore.updateTask(dbClient_default)({
@@ -7861,8 +8040,7 @@ var ExecutionHandler = class {
  } catch (error) {
  logger19.error({ error }, "Error in execution handler");
  const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
- await sseHelper.writeError(`Execution error: ${errorMessage}`);
- await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
+ await sseHelper.writeOperation(errorOp(`Execution error: ${errorMessage}`, currentAgentId || "system"));
  if (task) {
  await agentsCore.updateTask(dbClient_default)({
  taskId: task.id,
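Across ExecutionHandler, the separate writeError calls are dropped; every failure path now emits a single error operation via writeOperation(errorOp(...)), so errors reach clients as ordinary data-operation events rather than through a parallel error channel. errorOp itself is defined elsewhere in the bundle; a plausible reading of its shape, inferred only from the call sites in this diff, is:

// Assumed shape: inferred from calls like errorOp(errorMessage, currentAgentId || "system").
// The real factory may carry additional fields; treat this as illustrative only.
function errorOp(message: string, agentId: string) {
  return {
    type: "error" as const,
    message,
    agentId,
    timestamp: Date.now(), // assumption: operations are timestamped like other stream events
  };
}

// Usage mirroring the diff:
// await sseHelper.writeOperation(errorOp("Maximum error limit (3) reached", "system"));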
@@ -8024,8 +8202,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  const body = c.get("requestBody") || {};
  const conversationId = body.conversationId || nanoid.nanoid();
  const fullGraph = await agentsCore.getFullGraph(dbClient_default)({
- scopes: { tenantId, projectId },
- graphId
+ scopes: { tenantId, projectId, graphId }
  });
  let agentGraph;
  let defaultAgentId;
@@ -8042,16 +8219,21 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  defaultAgentId = fullGraph.defaultAgentId || firstAgentId;
  } else {
  agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
- scopes: { tenantId, projectId },
- graphId
+ scopes: { tenantId, projectId, graphId }
  });
  if (!agentGraph) {
- return c.json({ error: "Agent graph not found" }, 404);
+ throw agentsCore.createApiError({
+ code: "not_found",
+ message: "Agent graph not found"
+ });
  }
  defaultAgentId = agentGraph.defaultAgentId || "";
  }
  if (!defaultAgentId) {
- return c.json({ error: "No default agent found in graph" }, 404);
+ throw agentsCore.createApiError({
+ code: "not_found",
+ message: "No default agent found in graph"
+ });
  }
  await agentsCore.createOrGetConversation(dbClient_default)({
  tenantId,
@@ -8072,26 +8254,30 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  }
  const agentId = activeAgent?.activeAgentId || defaultAgentId;
  const agentInfo = await agentsCore.getAgentById(dbClient_default)({
- scopes: { tenantId, projectId },
+ scopes: { tenantId, projectId, graphId },
  agentId
  });
  if (!agentInfo) {
- return c.json({ error: "Agent not found" }, 404);
+ throw agentsCore.createApiError({
+ code: "not_found",
+ message: "Agent not found"
+ });
  }
  const validatedContext = c.get("validatedContext") || body.requestContext || {};
  const credentialStores = c.get("credentialStores");
- await agentsCore.handleContextResolution(
+ await agentsCore.handleContextResolution({
  tenantId,
  projectId,
- conversationId,
  graphId,
- validatedContext,
- dbClient_default,
+ conversationId,
+ requestContext: validatedContext,
+ dbClient: dbClient_default,
  credentialStores
- );
+ });
  logger20.info(
  {
  tenantId,
+ projectId,
  graphId,
  conversationId,
  defaultAgentId,
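Two call-convention changes repeat through the chat-completions route, and again in the data-stream and MCP routes below: graphId now travels inside the scopes object passed to the data-access helpers, and handleContextResolution takes a single options object instead of positional arguments. A sketch of the new call shapes, with agentsCore and dbClient typed loosely since their real types are not part of this diff:

// Sketch of the 0.3.0 call shapes; `any` stands in for the real agents-core types.
async function resolveRequestContext(
  agentsCore: any,
  dbClient: any,
  args: {
    tenantId: string;
    projectId: string;
    graphId: string;
    conversationId: string;
    validatedContext: Record<string, unknown>;
    credentialStores: unknown;
  }
) {
  const { tenantId, projectId, graphId, conversationId } = args;

  // graphId folded into scopes (0.2.2 passed it as a sibling of scopes):
  const fullGraph = await agentsCore.getFullGraph(dbClient)({
    scopes: { tenantId, projectId, graphId },
  });

  // Single options object instead of positional arguments:
  await agentsCore.handleContextResolution({
    tenantId,
    projectId,
    graphId,
    conversationId,
    requestContext: args.validatedContext,
    dbClient,
    credentialStores: args.credentialStores,
  });

  return fullGraph;
}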
@@ -8133,41 +8319,69 @@ app2.openapi(chatCompletionsRoute, async (c) => {
  });
  }
  return streaming.streamSSE(c, async (stream2) => {
- const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
- await sseHelper.writeRole();
- logger20.info({ agentId }, "Starting execution");
- const executionHandler = new ExecutionHandler();
- const result = await executionHandler.execute({
- executionContext,
- conversationId,
- userMessage,
- initialAgentId: agentId,
- requestId: requestId2,
- sseHelper
- });
- logger20.info(
- { result },
- `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
- );
- if (!result.success) {
- await sseHelper.writeError(
- "Sorry, I was unable to process your request at this time. Please try again."
+ try {
+ const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
+ await sseHelper.writeRole();
+ logger20.info({ agentId }, "Starting execution");
+ const executionHandler = new ExecutionHandler();
+ const result = await executionHandler.execute({
+ executionContext,
+ conversationId,
+ userMessage,
+ initialAgentId: agentId,
+ requestId: requestId2,
+ sseHelper
+ });
+ logger20.info(
+ { result },
+ `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
+ );
+ if (!result.success) {
+ await sseHelper.writeOperation(
+ errorOp(
+ "Sorry, I was unable to process your request at this time. Please try again.",
+ "system"
+ )
+ );
+ }
+ await sseHelper.complete();
+ } catch (error) {
+ logger20.error(
+ {
+ error: error instanceof Error ? error.message : error,
+ stack: error instanceof Error ? error.stack : void 0
+ },
+ "Error during streaming execution"
  );
+ try {
+ const sseHelper = createSSEStreamHelper(stream2, requestId2, timestamp);
+ await sseHelper.writeOperation(
+ errorOp(
+ "Sorry, I was unable to process your request at this time. Please try again.",
+ "system"
+ )
+ );
+ await sseHelper.complete();
+ } catch (streamError) {
+ logger20.error({ streamError }, "Failed to write error to stream");
+ }
  }
- await sseHelper.complete();
  });
  } catch (error) {
- console.error("\u274C Error in chat completions endpoint:", {
- error: error instanceof Error ? error.message : error,
- stack: error instanceof Error ? error.stack : void 0
- });
- return c.json(
+ logger20.error(
  {
- error: "Failed to process chat completion",
- message: error instanceof Error ? error.message : "Unknown error"
+ error: error instanceof Error ? error.message : error,
+ stack: error instanceof Error ? error.stack : void 0
  },
- 500
+ "Error in chat completions endpoint before streaming"
  );
+ if (error && typeof error === "object" && "status" in error) {
+ throw error;
+ }
+ throw agentsCore.createApiError({
+ code: "internal_server_error",
+ message: error instanceof Error ? error.message : "Failed to process chat completion"
+ });
  }
  });
  var getMessageText = (content) => {
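The route's error handling also moves from returning ad-hoc JSON bodies to throwing structured API errors: known conditions throw createApiError({ code, message }), anything already carrying a status is re-thrown, and only the streaming path writes errors into the SSE stream itself. A minimal guard illustrating the pattern; assertFound is a hypothetical helper, not part of the package, and only createApiError's option names are taken from the diff:

// Hypothetical guard: throws the structured error instead of returning c.json(..., 404).
type CreateApiError = (opts: { code: string; message: string }) => Error;

function assertFound<T>(
  value: T | null | undefined,
  message: string,
  createApiError: CreateApiError
): T {
  if (value === null || value === undefined) {
    throw createApiError({ code: "not_found", message });
  }
  return value;
}

// e.g. const graph = assertFound(agentGraph, "Agent graph not found", agentsCore.createApiError);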
@@ -8234,6 +8448,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  try {
  const executionContext = agentsCore.getRequestExecutionContext(c);
  const { tenantId, projectId, graphId } = executionContext;
+ agentsCore.loggerFactory.getLogger("chatDataStream").debug({ tenantId, projectId, graphId }, "Extracted chatDataStream parameters");
  const body = c.get("requestBody") || {};
  const conversationId = body.conversationId || nanoid.nanoid();
  const activeSpan = api.trace.getActiveSpan();
@@ -8246,14 +8461,22 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  });
  }
  const agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
- scopes: { tenantId, projectId },
- graphId
+ scopes: { tenantId, projectId, graphId }
  });
  if (!agentGraph) {
- return c.json({ error: "Agent graph not found" }, 404);
+ throw agentsCore.createApiError({
+ code: "not_found",
+ message: "Agent graph not found"
+ });
  }
  const defaultAgentId = agentGraph.defaultAgentId;
  const graphName = agentGraph.name;
+ if (!defaultAgentId) {
+ throw agentsCore.createApiError({
+ code: "bad_request",
+ message: "Graph does not have a default agent configured"
+ });
+ }
  const activeAgent = await agentsCore.getActiveAgentForConversation(dbClient_default)({
  scopes: { tenantId, projectId },
  conversationId
@@ -8267,23 +8490,26 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  }
  const agentId = activeAgent?.activeAgentId || defaultAgentId;
  const agentInfo = await agentsCore.getAgentById(dbClient_default)({
- scopes: { tenantId, projectId },
+ scopes: { tenantId, projectId, graphId },
  agentId
  });
  if (!agentInfo) {
- return c.json({ error: "Agent not found" }, 404);
+ throw agentsCore.createApiError({
+ code: "not_found",
+ message: "Agent not found"
+ });
  }
  const validatedContext = c.get("validatedContext") || body.requestContext || {};
  const credentialStores = c.get("credentialStores");
- await agentsCore.handleContextResolution(
+ await agentsCore.handleContextResolution({
  tenantId,
  projectId,
- conversationId,
  graphId,
- validatedContext,
- dbClient_default,
+ conversationId,
+ requestContext: validatedContext,
+ dbClient: dbClient_default,
  credentialStores
- );
+ });
  const lastUserMessage = body.messages.filter((m) => m.role === "user").slice(-1)[0];
  const userText = typeof lastUserMessage?.content === "string" ? lastUserMessage.content : lastUserMessage?.parts?.map((p) => p.text).join("") || "";
  logger21.info({ userText, lastUserMessage }, "userText");
@@ -8325,11 +8551,11 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  sseHelper: streamHelper
  });
  if (!result.success) {
- await streamHelper.writeError("Unable to process request");
+ await streamHelper.writeOperation(errorOp("Unable to process request", "system"));
  }
  } catch (err) {
  logger21.error({ err }, "Streaming error");
- await streamHelper.writeError("Internal server error");
+ await streamHelper.writeOperation(errorOp("Internal server error", "system"));
  } finally {
  if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
  streamHelper.cleanup();
@@ -8350,7 +8576,10 @@ app3.openapi(chatDataStreamRoute, async (c) => {
  );
  } catch (error) {
  logger21.error({ error }, "chatDataStream error");
- return c.json({ error: "Failed to process chat completion" }, 500);
+ throw agentsCore.createApiError({
+ code: "internal_server_error",
+ message: "Failed to process chat completion"
+ });
  }
  });
  var chatDataStream_default = app3;
@@ -8555,8 +8784,7 @@ var getServer = async (requestContext, executionContext, conversationId, credent
  const { tenantId, projectId, graphId } = executionContext;
  setupTracing(conversationId, tenantId, graphId);
  const agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
- scopes: { tenantId, projectId },
- graphId
+ scopes: { tenantId, projectId, graphId }
  });
  if (!agentGraph) {
  throw new Error("Agent graph not found");
@@ -8576,9 +8804,20 @@ var getServer = async (requestContext, executionContext, conversationId, credent
  },
  async ({ query }) => {
  try {
+ if (!agentGraph.defaultAgentId) {
+ return {
+ content: [
+ {
+ type: "text",
+ text: `Graph does not have a default agent configured`
+ }
+ ],
+ isError: true
+ };
+ }
  const defaultAgentId = agentGraph.defaultAgentId;
  const agentInfo = await agentsCore.getAgentById(dbClient_default)({
- scopes: { tenantId, projectId },
+ scopes: { tenantId, projectId, graphId },
  agentId: defaultAgentId
  });
  if (!agentInfo) {
@@ -8592,18 +8831,19 @@ var getServer = async (requestContext, executionContext, conversationId, credent
  isError: true
  };
  }
- const resolvedContext = await agentsCore.handleContextResolution(
+ const resolvedContext = await agentsCore.handleContextResolution({
  tenantId,
  projectId,
- conversationId,
  graphId,
+ conversationId,
  requestContext,
- dbClient_default,
+ dbClient: dbClient_default,
  credentialStores
- );
+ });
  logger22.info(
  {
  tenantId,
+ projectId,
  graphId,
  conversationId,
  hasContextConfig: !!agentGraph.contextConfigId,
@@ -8665,8 +8905,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
  logger22.info({ body }, "Received initialization request");
  const sessionId = nanoid.nanoid();
  const agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
- scopes: { tenantId, projectId },
- graphId
+ scopes: { tenantId, projectId, graphId }
  });
  if (!agentGraph) {
  return c.json(
@@ -8678,6 +8917,16 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
  { status: 404 }
  );
  }
+ if (!agentGraph.defaultAgentId) {
+ return c.json(
+ {
+ jsonrpc: "2.0",
+ error: { code: -32001, message: "Graph does not have a default agent configured" },
+ id: body.id || null
+ },
+ { status: 400 }
+ );
+ }
  const conversation = await agentsCore.createOrGetConversation(dbClient_default)({
  id: sessionId,
  tenantId,
@@ -8874,6 +9123,8 @@ app4.delete("/", async (c) => {
  );
  });
  var mcp_default = app4;
+
+ // src/app.ts
  var logger23 = agentsCore.getLogger("agents-run-api");
  function createExecutionHono(serverConfig, credentialStores) {
  const app6 = new zodOpenapi.OpenAPIHono();