@inkeep/agents-run-api 0.0.0-dev-20250917222639 → 0.0.0-dev-20250919052931

This diff shows the published contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
Files changed (3)
  1. package/dist/index.cjs +585 -391
  2. package/dist/index.js +583 -389
  3. package/package.json +2 -2
package/dist/index.cjs CHANGED
@@ -1226,7 +1226,7 @@ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
1226
1226
  throw new Error("Agent ID is required");
1227
1227
  }
1228
1228
  const dbAgent = await agentsCore.getAgentById(dbClient_default)({
1229
- scopes: { tenantId, projectId },
1229
+ scopes: { tenantId, projectId, graphId },
1230
1230
  agentId
1231
1231
  });
1232
1232
  if (!dbAgent) {
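Note: across this release, entity lookups move from project-scoped to graph-scoped — graphId joins tenantId and projectId inside the scopes object instead of being passed separately. A minimal sketch of the shape change (illustrative types only; the real definitions live in @inkeep/agents-core):

interface ProjectScopes {
  tenantId: string;
  projectId: string;
}

interface GraphScopes extends ProjectScopes {
  graphId: string; // now included for agent, graph, and tool lookups in this build
}

// before: getAgentById(db)({ scopes: { tenantId, projectId }, agentId })
// after:  getAgentById(db)({ scopes: { tenantId, projectId, graphId }, agentId })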
@@ -1245,6 +1245,41 @@ async function getRegisteredAgent(executionContext, credentialStoreRegistry) {
1245
1245
  // src/agents/generateTaskHandler.ts
1246
1246
  init_dbClient();
1247
1247
 
1248
+ // src/utils/model-resolver.ts
1249
+ init_dbClient();
1250
+ async function resolveModelConfig(graphId, agent) {
1251
+ if (agent.models?.base?.model) {
1252
+ return {
1253
+ base: agent.models.base,
1254
+ structuredOutput: agent.models.structuredOutput || agent.models.base,
1255
+ summarizer: agent.models.summarizer || agent.models.base
1256
+ };
1257
+ }
1258
+ const graph = await agentsCore.getAgentGraphById(dbClient_default)({
1259
+ scopes: { tenantId: agent.tenantId, projectId: agent.projectId, graphId }
1260
+ });
1261
+ if (graph?.models?.base?.model) {
1262
+ return {
1263
+ base: graph.models.base,
1264
+ structuredOutput: agent.models?.structuredOutput || graph.models.structuredOutput || graph.models.base,
1265
+ summarizer: agent.models?.summarizer || graph.models.summarizer || graph.models.base
1266
+ };
1267
+ }
1268
+ const project = await agentsCore.getProject(dbClient_default)({
1269
+ scopes: { tenantId: agent.tenantId, projectId: agent.projectId }
1270
+ });
1271
+ if (project?.models?.base?.model) {
1272
+ return {
1273
+ base: project.models.base,
1274
+ structuredOutput: agent.models?.structuredOutput || project.models.structuredOutput || project.models.base,
1275
+ summarizer: agent.models?.summarizer || project.models.summarizer || project.models.base
1276
+ };
1277
+ }
1278
+ throw new Error(
1279
+ "Base model configuration is required. Please configure models at the project level."
1280
+ );
1281
+ }
1282
+
1248
1283
  // src/agents/Agent.ts
1249
1284
  init_conversations();
1250
1285
  init_dbClient();
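Note: the new resolveModelConfig helper resolves model settings agent-first, then graph, then project, and agent-level structuredOutput/summarizer overrides still win even when the base comes from a lower level. A simplified, synchronous sketch of that precedence (the real implementation loads graph and project records through agentsCore with the scoped db client):

interface ModelSettings { model: string; [key: string]: unknown }
interface ModelConfig {
  base?: ModelSettings;
  structuredOutput?: ModelSettings;
  summarizer?: ModelSettings;
}

function pickModels(agent?: ModelConfig, graph?: ModelConfig, project?: ModelConfig) {
  for (const source of [agent, graph, project]) {
    if (source?.base?.model) {
      return {
        base: source.base,
        // agent-level overrides apply even when the base comes from the graph or project
        structuredOutput: agent?.structuredOutput || source.structuredOutput || source.base,
        summarizer: agent?.summarizer || source.summarizer || source.base,
      };
    }
  }
  throw new Error("Base model configuration is required. Please configure models at the project level.");
}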
@@ -1266,24 +1301,19 @@ function completionOp(agentId, iterations) {
1266
1301
  }
1267
1302
  };
1268
1303
  }
1269
- function errorOp(error, agentId) {
1304
+ function errorOp(message, agentId, severity = "error", code) {
1270
1305
  return {
1271
1306
  type: "error",
1272
- ctx: {
1273
- error,
1274
- agent: agentId
1275
- }
1307
+ message,
1308
+ agent: agentId,
1309
+ severity,
1310
+ code,
1311
+ timestamp: Date.now()
1276
1312
  };
1277
1313
  }
1278
1314
  function generateToolId() {
1279
1315
  return `tool_${nanoid.nanoid(8)}`;
1280
1316
  }
1281
- function statusUpdateOp(ctx) {
1282
- return {
1283
- type: "status_update",
1284
- ctx
1285
- };
1286
- }
1287
1317
  var logger4 = agentsCore.getLogger("DataComponentSchema");
1288
1318
  function jsonSchemaToZod(jsonSchema) {
1289
1319
  if (!jsonSchema || typeof jsonSchema !== "object") {
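Note: errorOp now emits a flat error event instead of wrapping the error in a ctx object, adding a severity (defaulting to "error"), an optional code, and a timestamp; the statusUpdateOp helper is removed in favor of the summary path shown later in this diff. The resulting event roughly has this shape (the interface name is illustrative):

interface ErrorEvent {
  type: "error";
  message: string;
  agent?: string;
  severity: string; // defaults to "error"
  code?: string;
  timestamp: number; // Date.now() at emission
}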
@@ -1413,6 +1443,9 @@ var _ModelFactory = class _ModelFactory {
1413
1443
  );
1414
1444
  }
1415
1445
  const modelSettings = config;
1446
+ if (!modelSettings.model) {
1447
+ throw new Error("Model configuration is required");
1448
+ }
1416
1449
  const modelString = modelSettings.model.trim();
1417
1450
  const { provider, modelName } = _ModelFactory.parseModelString(modelString);
1418
1451
  logger5.debug(
@@ -1539,7 +1572,6 @@ var ModelFactory = _ModelFactory;
1539
1572
  // src/utils/graph-session.ts
1540
1573
  init_conversations();
1541
1574
  init_dbClient();
1542
- var tracer = agentsCore.getTracer("agents-run-api");
1543
1575
 
1544
1576
  // src/utils/stream-registry.ts
1545
1577
  var streamHelperRegistry = /* @__PURE__ */ new Map();
@@ -1555,6 +1587,7 @@ function getStreamHelper(requestId2) {
1555
1587
  function unregisterStreamHelper(requestId2) {
1556
1588
  streamHelperRegistry.delete(requestId2);
1557
1589
  }
1590
+ var tracer = agentsCore.getTracer("agents-run-api");
1558
1591
 
1559
1592
  // src/utils/graph-session.ts
1560
1593
  var logger6 = agentsCore.getLogger("GraphSession");
@@ -1863,7 +1896,6 @@ var GraphSession = class {
1863
1896
  }
1864
1897
  this.isGeneratingUpdate = true;
1865
1898
  const statusUpdateState = this.statusUpdateState;
1866
- const graphId = this.graphId;
1867
1899
  try {
1868
1900
  const streamHelper = getStreamHelper(this.sessionId);
1869
1901
  if (!streamHelper) {
@@ -1876,7 +1908,7 @@ var GraphSession = class {
1876
1908
  }
1877
1909
  const now = Date.now();
1878
1910
  const elapsedTime = now - statusUpdateState.startTime;
1879
- let operation;
1911
+ let summaryToSend;
1880
1912
  if (statusUpdateState.config.statusComponents && statusUpdateState.config.statusComponents.length > 0) {
1881
1913
  const result = await this.generateStructuredStatusUpdate(
1882
1914
  this.events.slice(statusUpdateState.lastEventCount),
@@ -1885,32 +1917,30 @@ var GraphSession = class {
1885
1917
  statusUpdateState.summarizerModel,
1886
1918
  this.previousSummaries
1887
1919
  );
1888
- if (result.operations && result.operations.length > 0) {
1889
- for (const op of result.operations) {
1890
- if (!op || !op.type || !op.data || Object.keys(op.data).length === 0) {
1920
+ if (result.summaries && result.summaries.length > 0) {
1921
+ for (const summary of result.summaries) {
1922
+ if (!summary || !summary.type || !summary.data || !summary.data.label || Object.keys(summary.data).length === 0) {
1891
1923
  logger6.warn(
1892
1924
  {
1893
1925
  sessionId: this.sessionId,
1894
- operation: op
1926
+ summary
1895
1927
  },
1896
1928
  "Skipping empty or invalid structured operation"
1897
1929
  );
1898
1930
  continue;
1899
1931
  }
1900
- const operationToSend = {
1901
- type: "status_update",
1902
- ctx: {
1903
- operationType: op.type,
1904
- label: op.data.label,
1905
- data: Object.fromEntries(
1906
- Object.entries(op.data).filter(([key]) => !["label", "type"].includes(key))
1907
- )
1908
- }
1932
+ const summaryToSend2 = {
1933
+ type: summary.data.type || summary.type,
1934
+ // Preserve the actual custom type from LLM
1935
+ label: summary.data.label,
1936
+ details: Object.fromEntries(
1937
+ Object.entries(summary.data).filter(([key]) => !["label", "type"].includes(key))
1938
+ )
1909
1939
  };
1910
- await streamHelper.writeOperation(operationToSend);
1940
+ await streamHelper.writeSummary(summaryToSend2);
1911
1941
  }
1912
- const summaryTexts = result.operations.map(
1913
- (op) => JSON.stringify({ type: op.type, data: op.data })
1942
+ const summaryTexts = result.summaries.map(
1943
+ (summary) => JSON.stringify({ type: summary.type, data: summary.data })
1914
1944
  );
1915
1945
  this.previousSummaries.push(...summaryTexts);
1916
1946
  if (this.statusUpdateState) {
@@ -1927,34 +1957,20 @@ var GraphSession = class {
1927
1957
  this.previousSummaries
1928
1958
  );
1929
1959
  this.previousSummaries.push(summary);
1930
- operation = statusUpdateOp({
1931
- summary,
1932
- eventCount: this.events.length,
1933
- elapsedTime,
1934
- currentPhase: "processing",
1935
- activeAgent: "system",
1936
- graphId,
1937
- sessionId: this.sessionId
1938
- });
1939
1960
  }
1940
1961
  if (this.previousSummaries.length > 3) {
1941
1962
  this.previousSummaries.shift();
1942
1963
  }
1943
- if (!operation || !operation.type || !operation.ctx) {
1964
+ {
1944
1965
  logger6.warn(
1945
1966
  {
1946
1967
  sessionId: this.sessionId,
1947
- operation
1968
+ summaryToSend
1948
1969
  },
1949
1970
  "Skipping empty or invalid status update operation"
1950
1971
  );
1951
1972
  return;
1952
1973
  }
1953
- await streamHelper.writeOperation(operation);
1954
- if (this.statusUpdateState) {
1955
- this.statusUpdateState.lastUpdateTime = now;
1956
- this.statusUpdateState.lastEventCount = this.events.length;
1957
- }
1958
1974
  } catch (error) {
1959
1975
  logger6.error(
1960
1976
  {
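Note: structured status updates are no longer wrapped in a status_update operation; each generated summary is written through streamHelper.writeSummary with the component's type, its label, and the remaining fields under details. An illustrative sketch of the payload built above (the type is not exported; field names mirror the code, values are hypothetical):

interface StatusSummary {
  type: string;                      // the status component type chosen by the model
  label: string;                     // short finding, e.g. "Admin permissions required"
  details: Record<string, unknown>;  // every data field except label and type
}

const example: StatusSummary = {
  type: "configuration_finding",     // hypothetical component id
  label: "OAuth token required",
  details: { scope: "channels:read" },
};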
@@ -2087,7 +2103,7 @@ ${previousSummaryContext}` : ""}
2087
2103
  Activities:
2088
2104
  ${userVisibleActivities.join("\n") || "No New Activities"}
2089
2105
 
2090
- Describe the ACTUAL finding, result, or specific information discovered (e.g., "Found Slack bot requires admin permissions", "Identified 3 channel types for ingestion", "Configuration requires OAuth token").
2106
+ Create a short 3-5 word label describing the ACTUAL finding. Use sentence case (only capitalize the first word and proper nouns). Examples: "Found admin permissions needed", "Identified three channel types", "OAuth token required".
2091
2107
 
2092
2108
  ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2093
2109
  const prompt = basePrompt;
@@ -2100,6 +2116,9 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2100
2116
  }
2101
2117
  modelToUse = this.statusUpdateState.baseModel;
2102
2118
  }
2119
+ if (!modelToUse) {
2120
+ throw new Error("No model configuration available");
2121
+ }
2103
2122
  const model = ModelFactory.createModel(modelToUse);
2104
2123
  const { text } = await ai.generateText({
2105
2124
  model,
@@ -2209,14 +2228,16 @@ Rules:
2209
2228
  - Fill in data for relevant components only
2210
2229
  - Use 'no_relevant_updates' if nothing substantially new to report. DO NOT WRITE LABELS OR USE OTHER COMPONENTS IF YOU USE THIS COMPONENT.
2211
2230
  - Never repeat previous values, make every update EXTREMELY unique. If you cannot do that the update is not worth mentioning.
2212
- - Labels MUST contain the ACTUAL information discovered ("Found X", "Learned Y", "Discovered Z requires A")
2231
+ - Labels MUST be short 3-5 word phrases with ACTUAL information discovered. NEVER MAKE UP SOMETHING WITHOUT BACKING IT UP WITH ACTUAL INFORMATION.
2232
+ - Use sentence case: only capitalize the first word and proper nouns (e.g., "Admin permissions required", not "Admin Permissions Required"). ALWAYS capitalize the first word of the label.
2213
2233
  - DO NOT use action words like "Searching", "Processing", "Analyzing" - state what was FOUND
2214
2234
  - Include specific details, numbers, requirements, or insights discovered
2235
+ - Examples: "Admin permissions required", "Three OAuth steps found", "Token expires daily"
2215
2236
  - You are ONE unified AI system - NEVER mention agents, transfers, delegations, or routing
2216
- - CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", or any internal system terminology in labels
2237
+ - CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", "artifact", or any internal system terminology in labels or any names of agents, tools, or systems.
2217
2238
  - Present all operations as seamless actions by a single system
2218
2239
  - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE ALL INTERNAL OPERATIONS!
2219
- - Bad examples: "Transferring to search agent", "Delegating task", "Routing request", "Processing request", or not using the no_relevant_updates
2240
+ - Bad examples: "Transferring to search agent", "continuing transfer to qa agent", "Delegating task", "Routing request", "Processing request", "Artifact found", "Artifact saved", or not using the no_relevant_updates
2220
2241
  - Good examples: "Slack bot needs admin privileges", "Found 3-step OAuth flow required", "Channel limit is 500 per workspace", or use the no_relevant_updates component if nothing new to report.
2221
2242
 
2222
2243
  REMEMBER YOU CAN ONLY USE 'no_relevant_updates' ALONE! IT CANNOT BE CONCATENATED WITH OTHER STATUS UPDATES!
@@ -2232,6 +2253,9 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2232
2253
  }
2233
2254
  modelToUse = this.statusUpdateState.baseModel;
2234
2255
  }
2256
+ if (!modelToUse) {
2257
+ throw new Error("No model configuration available");
2258
+ }
2235
2259
  const model = ModelFactory.createModel(modelToUse);
2236
2260
  const { object } = await ai.generateObject({
2237
2261
  model,
@@ -2249,29 +2273,29 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2249
2273
  }
2250
2274
  });
2251
2275
  const result = object;
2252
- const operations = [];
2276
+ const summaries = [];
2253
2277
  for (const [componentId, data] of Object.entries(result)) {
2254
2278
  if (componentId === "no_relevant_updates") {
2255
2279
  continue;
2256
2280
  }
2257
2281
  if (data && typeof data === "object" && Object.keys(data).length > 0) {
2258
- operations.push({
2282
+ summaries.push({
2259
2283
  type: componentId,
2260
2284
  data
2261
2285
  });
2262
2286
  }
2263
2287
  }
2264
2288
  span.setAttributes({
2265
- "operations.count": operations.length,
2289
+ "summaries.count": summaries.length,
2266
2290
  "user_activities.count": userVisibleActivities.length,
2267
2291
  "result_keys.count": Object.keys(result).length
2268
2292
  });
2269
2293
  span.setStatus({ code: api.SpanStatusCode.OK });
2270
- return { operations };
2294
+ return { summaries };
2271
2295
  } catch (error) {
2272
2296
  agentsCore.setSpanWithError(span, error);
2273
2297
  logger6.error({ error }, "Failed to generate structured update, using fallback");
2274
- return { operations: [] };
2298
+ return { summaries: [] };
2275
2299
  } finally {
2276
2300
  span.end();
2277
2301
  }
@@ -2522,6 +2546,9 @@ Make it specific and relevant.`;
2522
2546
  }
2523
2547
  modelToUse = this.statusUpdateState.baseModel;
2524
2548
  }
2549
+ if (!modelToUse) {
2550
+ throw new Error("No model configuration available");
2551
+ }
2525
2552
  const model = ModelFactory.createModel(modelToUse);
2526
2553
  const schema = z5.z.object({
2527
2554
  name: z5.z.string().max(50).describe("Concise, descriptive name for the artifact"),
@@ -2803,6 +2830,7 @@ var _ArtifactParser = class _ArtifactParser {
2803
2830
  }
2804
2831
  for (let i = matches.length - 1; i >= 0; i--) {
2805
2832
  const match = matches[i];
2833
+ if (match.index === void 0) continue;
2806
2834
  const startIdx = match.index;
2807
2835
  const textAfterMatch = text.slice(startIdx);
2808
2836
  if (!textAfterMatch.includes("/>")) {
@@ -2852,7 +2880,8 @@ var _ArtifactParser = class _ArtifactParser {
2852
2880
  taskId,
2853
2881
  name: artifact.name || "Processing...",
2854
2882
  description: artifact.description || "Name and description being generated...",
2855
- artifactType: artifact.metadata?.artifactType,
2883
+ type: artifact.metadata?.artifactType || artifact.artifactType,
2884
+ // Map artifactType to type for consistency
2856
2885
  artifactSummary: artifact.parts?.[0]?.data?.summary || {}
2857
2886
  };
2858
2887
  }
@@ -2869,10 +2898,11 @@ var _ArtifactParser = class _ArtifactParser {
2869
2898
  let lastIndex = 0;
2870
2899
  for (const match of matches) {
2871
2900
  const [fullMatch, artifactId, taskId] = match;
2901
+ if (match.index === void 0) continue;
2872
2902
  const matchStart = match.index;
2873
2903
  if (matchStart > lastIndex) {
2874
2904
  const textBefore = text.slice(lastIndex, matchStart);
2875
- if (textBefore.trim()) {
2905
+ if (textBefore) {
2876
2906
  parts.push({ kind: "text", text: textBefore });
2877
2907
  }
2878
2908
  }
@@ -2884,7 +2914,7 @@ var _ArtifactParser = class _ArtifactParser {
2884
2914
  }
2885
2915
  if (lastIndex < text.length) {
2886
2916
  const remainingText = text.slice(lastIndex);
2887
- if (remainingText.trim()) {
2917
+ if (remainingText) {
2888
2918
  parts.push({ kind: "text", text: remainingText });
2889
2919
  }
2890
2920
  }
@@ -2994,8 +3024,9 @@ __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:
2994
3024
  var ArtifactParser = _ArtifactParser;
2995
3025
 
2996
3026
  // src/utils/incremental-stream-parser.ts
2997
- var logger8 = agentsCore.getLogger("IncrementalStreamParser");
2998
- var IncrementalStreamParser = class {
3027
+ agentsCore.getLogger("IncrementalStreamParser");
3028
+ var _IncrementalStreamParser = class _IncrementalStreamParser {
3029
+ // Max number of streamed component IDs to track
2999
3030
  constructor(streamHelper, tenantId, contextId) {
3000
3031
  __publicField(this, "buffer", "");
3001
3032
  __publicField(this, "pendingTextBuffer", "");
@@ -3005,6 +3036,9 @@ var IncrementalStreamParser = class {
3005
3036
  __publicField(this, "collectedParts", []);
3006
3037
  __publicField(this, "contextId");
3007
3038
  __publicField(this, "lastChunkWasToolResult", false);
3039
+ __publicField(this, "componentAccumulator", {});
3040
+ __publicField(this, "lastStreamedComponents", /* @__PURE__ */ new Map());
3041
+ __publicField(this, "componentSnapshots", /* @__PURE__ */ new Map());
3008
3042
  this.streamHelper = streamHelper;
3009
3043
  this.contextId = contextId;
3010
3044
  this.artifactParser = new ArtifactParser(tenantId);
@@ -3019,7 +3053,7 @@ var IncrementalStreamParser = class {
3019
3053
  * Process a new text chunk for text streaming (handles artifact markers)
3020
3054
  */
3021
3055
  async processTextChunk(chunk) {
3022
- if (this.lastChunkWasToolResult && this.buffer === "" && chunk.trim()) {
3056
+ if (this.lastChunkWasToolResult && this.buffer === "" && chunk) {
3023
3057
  chunk = "\n\n" + chunk;
3024
3058
  this.lastChunkWasToolResult = false;
3025
3059
  }
@@ -3031,100 +3065,122 @@ var IncrementalStreamParser = class {
3031
3065
  this.buffer = parseResult.remainingBuffer;
3032
3066
  }
3033
3067
  /**
3034
- * Process a new object chunk for object streaming (handles JSON objects with artifact references)
3068
+ * Process object deltas directly from Vercel AI SDK's fullStream
3069
+ * Accumulates components and streams them when they're stable (unchanged between deltas)
3035
3070
  */
3036
- async processObjectChunk(chunk) {
3037
- this.buffer += chunk;
3038
- const parseResult = await this.parseObjectBuffer();
3039
- for (const part of parseResult.completeParts) {
3040
- await this.streamPart(part);
3071
+ async processObjectDelta(delta) {
3072
+ if (!delta || typeof delta !== "object") {
3073
+ return;
3041
3074
  }
3042
- this.buffer = parseResult.remainingBuffer;
3043
- }
3044
- /**
3045
- * Process tool call stream for structured output, streaming components as they complete
3046
- */
3047
- async processToolCallStream(stream2, targetToolName) {
3048
- let jsonBuffer = "";
3049
- let componentBuffer = "";
3050
- let depth = 0;
3051
- let componentsStreamed = 0;
3052
- const MAX_BUFFER_SIZE = 5 * 1024 * 1024;
3053
- for await (const part of stream2) {
3054
- if (part.type === "tool-call-delta" && part.toolName === targetToolName) {
3055
- const delta = part.argsTextDelta || "";
3056
- if (jsonBuffer.length + delta.length > MAX_BUFFER_SIZE) {
3057
- logger8.warn(
3058
- { bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE },
3059
- "JSON buffer exceeded maximum size, truncating"
3060
- );
3061
- jsonBuffer = jsonBuffer.slice(-MAX_BUFFER_SIZE / 2);
3075
+ this.componentAccumulator = this.deepMerge(this.componentAccumulator, delta);
3076
+ if (this.componentAccumulator.dataComponents && Array.isArray(this.componentAccumulator.dataComponents)) {
3077
+ const components = this.componentAccumulator.dataComponents;
3078
+ const currentComponentIds = new Set(components.filter((c) => c?.id).map((c) => c.id));
3079
+ for (const [componentId, snapshot] of this.componentSnapshots.entries()) {
3080
+ if (!currentComponentIds.has(componentId) && !this.lastStreamedComponents.has(componentId)) {
3081
+ try {
3082
+ const component = JSON.parse(snapshot);
3083
+ if (this.isComponentComplete(component)) {
3084
+ await this.streamComponent(component);
3085
+ }
3086
+ } catch (e) {
3087
+ }
3062
3088
  }
3063
- jsonBuffer += delta;
3064
- for (const char of delta) {
3065
- if (componentBuffer.length > MAX_BUFFER_SIZE) {
3066
- logger8.warn(
3067
- { bufferSize: componentBuffer.length, maxSize: MAX_BUFFER_SIZE },
3068
- "Component buffer exceeded maximum size, resetting"
3069
- );
3070
- componentBuffer = "";
3071
- depth = 0;
3072
- continue;
3089
+ }
3090
+ for (let i = 0; i < components.length; i++) {
3091
+ const component = components[i];
3092
+ if (!component?.id) continue;
3093
+ const componentKey = component.id;
3094
+ const hasBeenStreamed = this.lastStreamedComponents.has(componentKey);
3095
+ if (hasBeenStreamed) continue;
3096
+ const currentSnapshot = JSON.stringify(component);
3097
+ const previousSnapshot = this.componentSnapshots.get(componentKey);
3098
+ this.componentSnapshots.set(componentKey, currentSnapshot);
3099
+ if (this.componentSnapshots.size > _IncrementalStreamParser.MAX_SNAPSHOT_SIZE) {
3100
+ const firstKey = this.componentSnapshots.keys().next().value;
3101
+ if (firstKey) {
3102
+ this.componentSnapshots.delete(firstKey);
3073
3103
  }
3074
- componentBuffer += char;
3075
- if (char === "{") {
3076
- depth++;
3077
- } else if (char === "}") {
3078
- depth--;
3079
- if (depth === 2 && componentBuffer.includes('"id"')) {
3080
- const componentMatch = componentBuffer.match(/\{[^{}]*(?:\{[^{}]*\}[^{}]*)*\}/);
3081
- if (componentMatch) {
3082
- const MAX_COMPONENT_SIZE = 1024 * 1024;
3083
- if (componentMatch[0].length > MAX_COMPONENT_SIZE) {
3084
- logger8.warn(
3085
- {
3086
- size: componentMatch[0].length,
3087
- maxSize: MAX_COMPONENT_SIZE
3088
- },
3089
- "Component exceeds size limit, skipping"
3090
- );
3091
- componentBuffer = "";
3092
- continue;
3093
- }
3094
- try {
3095
- const component = JSON.parse(componentMatch[0]);
3096
- if (typeof component !== "object" || !component.id) {
3097
- logger8.warn({ component }, "Invalid component structure, skipping");
3098
- componentBuffer = "";
3099
- continue;
3100
- }
3101
- const parts = await this.artifactParser.parseObject({
3102
- dataComponents: [component]
3103
- });
3104
- for (const part2 of parts) {
3105
- await this.streamPart(part2);
3106
- }
3107
- componentsStreamed++;
3108
- componentBuffer = "";
3109
- } catch (e) {
3110
- logger8.debug({ error: e }, "Failed to parse component, continuing to accumulate");
3111
- }
3112
- }
3104
+ }
3105
+ if (component.name === "Text" && component.props?.text) {
3106
+ const previousTextContent = previousSnapshot ? JSON.parse(previousSnapshot).props?.text || "" : "";
3107
+ const currentTextContent = component.props.text || "";
3108
+ if (currentTextContent.length > previousTextContent.length) {
3109
+ const newText = currentTextContent.slice(previousTextContent.length);
3110
+ if (!this.hasStartedRole) {
3111
+ await this.streamHelper.writeRole("assistant");
3112
+ this.hasStartedRole = true;
3113
3113
  }
3114
+ await this.streamHelper.streamText(newText, 50);
3115
+ this.collectedParts.push({
3116
+ kind: "text",
3117
+ text: newText
3118
+ });
3114
3119
  }
3115
- if (componentBuffer.includes('"dataComponents"') && componentBuffer.includes("[")) ;
3120
+ continue;
3116
3121
  }
3117
- } else if (part.type === "tool-call" && part.toolName === targetToolName) {
3118
- if (part.args?.dataComponents) {
3119
- const parts = await this.artifactParser.parseObject(part.args);
3120
- for (const part2 of parts) {
3121
- await this.streamPart(part2);
3122
+ if (this.isComponentComplete(component)) {
3123
+ const currentPropsSnapshot = JSON.stringify(component.props);
3124
+ const previousPropsSnapshot = previousSnapshot ? JSON.stringify(JSON.parse(previousSnapshot).props) : null;
3125
+ if (previousPropsSnapshot === currentPropsSnapshot) {
3126
+ await this.streamComponent(component);
3122
3127
  }
3123
3128
  }
3124
- break;
3125
3129
  }
3126
3130
  }
3127
- logger8.debug({ componentsStreamed }, "Finished streaming components");
3131
+ }
3132
+ /**
3133
+ * Stream a component and mark it as streamed
3134
+ * Note: Text components are handled separately with incremental streaming
3135
+ */
3136
+ async streamComponent(component) {
3137
+ const parts = await this.artifactParser.parseObject({
3138
+ dataComponents: [component]
3139
+ });
3140
+ for (const part of parts) {
3141
+ await this.streamPart(part);
3142
+ }
3143
+ this.lastStreamedComponents.set(component.id, true);
3144
+ if (this.lastStreamedComponents.size > _IncrementalStreamParser.MAX_STREAMED_SIZE) {
3145
+ const firstKey = this.lastStreamedComponents.keys().next().value;
3146
+ if (firstKey) {
3147
+ this.lastStreamedComponents.delete(firstKey);
3148
+ }
3149
+ }
3150
+ this.componentSnapshots.delete(component.id);
3151
+ }
3152
+ /**
3153
+ * Check if a component has the basic structure required for streaming
3154
+ * Requires id, name, and props object with content
3155
+ */
3156
+ isComponentComplete(component) {
3157
+ if (!component || !component.id || !component.name) {
3158
+ return false;
3159
+ }
3160
+ if (!component.props || typeof component.props !== "object") {
3161
+ return false;
3162
+ }
3163
+ const isArtifact = component.name === "Artifact" || component.props.artifact_id && component.props.task_id;
3164
+ if (isArtifact) {
3165
+ return Boolean(component.props.artifact_id && component.props.task_id);
3166
+ }
3167
+ return true;
3168
+ }
3169
+ /**
3170
+ * Deep merge helper for object deltas
3171
+ */
3172
+ deepMerge(target, source) {
3173
+ if (!source) return target;
3174
+ if (!target) return source;
3175
+ const result = { ...target };
3176
+ for (const key in source) {
3177
+ if (source[key] && typeof source[key] === "object" && !Array.isArray(source[key])) {
3178
+ result[key] = this.deepMerge(target[key], source[key]);
3179
+ } else {
3180
+ result[key] = source[key];
3181
+ }
3182
+ }
3183
+ return result;
3128
3184
  }
3129
3185
  /**
3130
3186
  * Legacy method for backward compatibility - defaults to text processing
@@ -3136,15 +3192,40 @@ var IncrementalStreamParser = class {
3136
3192
  * Process any remaining buffer content at the end of stream
3137
3193
  */
3138
3194
  async finalize() {
3139
- if (this.buffer.trim()) {
3195
+ if (this.componentAccumulator.dataComponents && Array.isArray(this.componentAccumulator.dataComponents)) {
3196
+ const components = this.componentAccumulator.dataComponents;
3197
+ for (let i = 0; i < components.length; i++) {
3198
+ const component = components[i];
3199
+ if (!component?.id) continue;
3200
+ const componentKey = component.id;
3201
+ const hasBeenStreamed = this.lastStreamedComponents.has(componentKey);
3202
+ if (!hasBeenStreamed && this.isComponentComplete(component) && component.name !== "Text") {
3203
+ const parts = await this.artifactParser.parseObject({
3204
+ dataComponents: [component]
3205
+ });
3206
+ for (const part of parts) {
3207
+ await this.streamPart(part);
3208
+ }
3209
+ this.lastStreamedComponents.set(componentKey, true);
3210
+ if (this.lastStreamedComponents.size > _IncrementalStreamParser.MAX_STREAMED_SIZE) {
3211
+ const firstKey = this.lastStreamedComponents.keys().next().value;
3212
+ if (firstKey) {
3213
+ this.lastStreamedComponents.delete(firstKey);
3214
+ }
3215
+ }
3216
+ this.componentSnapshots.delete(componentKey);
3217
+ }
3218
+ }
3219
+ }
3220
+ if (this.buffer) {
3140
3221
  const part = {
3141
3222
  kind: "text",
3142
- text: this.buffer.trim()
3223
+ text: this.buffer
3143
3224
  };
3144
3225
  await this.streamPart(part);
3145
3226
  }
3146
- if (this.pendingTextBuffer.trim()) {
3147
- const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "").trim();
3227
+ if (this.pendingTextBuffer) {
3228
+ const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
3148
3229
  if (cleanedText) {
3149
3230
  this.collectedParts.push({
3150
3231
  kind: "text",
@@ -3154,6 +3235,9 @@ var IncrementalStreamParser = class {
3154
3235
  }
3155
3236
  this.pendingTextBuffer = "";
3156
3237
  }
3238
+ this.componentSnapshots.clear();
3239
+ this.lastStreamedComponents.clear();
3240
+ this.componentAccumulator = {};
3157
3241
  }
3158
3242
  /**
3159
3243
  * Get all collected parts for building the final response
@@ -3200,30 +3284,6 @@ var IncrementalStreamParser = class {
3200
3284
  remainingBuffer: ""
3201
3285
  };
3202
3286
  }
3203
- /**
3204
- * Parse buffer for complete JSON objects with artifact references (for object streaming)
3205
- */
3206
- async parseObjectBuffer() {
3207
- const completeParts = [];
3208
- try {
3209
- const parsed = JSON.parse(this.buffer);
3210
- const parts = await this.artifactParser.parseObject(parsed);
3211
- return {
3212
- completeParts: parts,
3213
- remainingBuffer: ""
3214
- };
3215
- } catch {
3216
- const { complete, remaining } = this.artifactParser.parsePartialJSON(this.buffer);
3217
- for (const obj of complete) {
3218
- const parts = await this.artifactParser.parseObject(obj);
3219
- completeParts.push(...parts);
3220
- }
3221
- return {
3222
- completeParts,
3223
- remainingBuffer: remaining
3224
- };
3225
- }
3226
- }
3227
3287
  /**
3228
3288
  * Check if text might be the start of an artifact marker
3229
3289
  */
@@ -3244,7 +3304,7 @@ var IncrementalStreamParser = class {
3244
3304
  this.pendingTextBuffer += part.text;
3245
3305
  if (!this.artifactParser.hasIncompleteArtifact(this.pendingTextBuffer)) {
3246
3306
  const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
3247
- if (cleanedText.trim()) {
3307
+ if (cleanedText) {
3248
3308
  await this.streamHelper.streamText(cleanedText, 50);
3249
3309
  }
3250
3310
  this.pendingTextBuffer = "";
@@ -3252,7 +3312,7 @@ var IncrementalStreamParser = class {
3252
3312
  } else if (part.kind === "data" && part.data) {
3253
3313
  if (this.pendingTextBuffer) {
3254
3314
  const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
3255
- if (cleanedText.trim()) {
3315
+ if (cleanedText) {
3256
3316
  await this.streamHelper.streamText(cleanedText, 50);
3257
3317
  }
3258
3318
  this.pendingTextBuffer = "";
@@ -3266,6 +3326,11 @@ var IncrementalStreamParser = class {
3266
3326
  }
3267
3327
  }
3268
3328
  };
3329
+ // Memory management constants
3330
+ __publicField(_IncrementalStreamParser, "MAX_SNAPSHOT_SIZE", 100);
3331
+ // Max number of snapshots to keep
3332
+ __publicField(_IncrementalStreamParser, "MAX_STREAMED_SIZE", 1e3);
3333
+ var IncrementalStreamParser = _IncrementalStreamParser;
3269
3334
 
3270
3335
  // src/utils/response-formatter.ts
3271
3336
  var logger9 = agentsCore.getLogger("ResponseFormatter");
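Note: the rewritten IncrementalStreamParser consumes partial-object deltas instead of scanning raw tool-call text. Each delta is deep-merged into an accumulator, Text components stream their text incrementally, and any other component is emitted once its props are unchanged between two consecutive deltas (or when it drops out of the delta, or at finalize). A condensed sketch of that stability check, assuming the same { dataComponents: [...] } delta shape:

type Component = { id: string; name: string; props?: Record<string, unknown> };

const snapshots = new Map<string, string>(); // component id -> JSON of last-seen props
const streamed = new Set<string>();

function onMergedDelta(dataComponents: Component[], emit: (c: Component) => void): void {
  for (const component of dataComponents) {
    if (!component?.id || streamed.has(component.id)) continue;
    const current = JSON.stringify(component.props ?? {});
    const previous = snapshots.get(component.id);
    snapshots.set(component.id, current);
    // Unchanged props across two consecutive deltas are treated as complete enough to stream.
    if (previous !== undefined && previous === current && component.name && component.props) {
      emit(component);
      streamed.add(component.id);
      snapshots.delete(component.id);
    }
  }
}

The bounded snapshot and streamed-id maps (MAX_SNAPSHOT_SIZE / MAX_STREAMED_SIZE above) keep this bookkeeping from growing without limit on long streams.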
@@ -4654,7 +4719,8 @@ function createDelegateToAgentTool({
4654
4719
  const externalAgent = await agentsCore.getExternalAgent(dbClient_default)({
4655
4720
  scopes: {
4656
4721
  tenantId,
4657
- projectId
4722
+ projectId,
4723
+ graphId
4658
4724
  },
4659
4725
  agentId: delegateConfig.config.id
4660
4726
  });
@@ -5256,6 +5322,23 @@ var Agent = class {
5256
5322
  __publicField(this, "credentialStoreRegistry");
5257
5323
  this.artifactComponents = config.artifactComponents || [];
5258
5324
  let processedDataComponents = config.dataComponents || [];
5325
+ if (processedDataComponents.length > 0) {
5326
+ processedDataComponents.push({
5327
+ id: "text-content",
5328
+ name: "Text",
5329
+ description: "Natural conversational text for the user - write naturally without mentioning technical details. Avoid redundancy and repetition with data components.",
5330
+ props: {
5331
+ type: "object",
5332
+ properties: {
5333
+ text: {
5334
+ type: "string",
5335
+ description: "Natural conversational text - respond as if having a normal conversation, never mention JSON, components, schemas, or technical implementation. Avoid redundancy and repetition with data components."
5336
+ }
5337
+ },
5338
+ required: ["text"]
5339
+ }
5340
+ });
5341
+ }
5259
5342
  if (this.artifactComponents.length > 0 && config.dataComponents && config.dataComponents.length > 0) {
5260
5343
  processedDataComponents = [
5261
5344
  ArtifactReferenceSchema.getDataComponent(config.tenantId, config.projectId),
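Note: whenever an agent has data components configured, the runtime now appends a built-in Text component so conversational prose can be interleaved with structured components. A trimmed example of what a structured response can then contain (component ids and prop values are illustrative):

const exampleStructuredOutput = {
  dataComponents: [
    { id: "text-content", name: "Text", props: { text: "Here is what I found about your Slack setup." } },
    // a user-defined component, shown here only for illustration
    { id: "channel-summary", name: "ChannelSummary", props: { channels: 3, visibility: "private" } },
  ],
};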
@@ -5526,8 +5609,12 @@ var Agent = class {
5526
5609
  async getMcpTool(tool4) {
5527
5610
  const credentialReferenceId = tool4.credentialReferenceId;
5528
5611
  const toolsForAgent = await agentsCore.getToolsForAgent(dbClient_default)({
5529
- scopes: { tenantId: this.config.tenantId, projectId: this.config.projectId },
5530
- agentId: this.config.id
5612
+ scopes: {
5613
+ tenantId: this.config.tenantId,
5614
+ projectId: this.config.projectId,
5615
+ graphId: this.config.graphId,
5616
+ agentId: this.config.id
5617
+ }
5531
5618
  });
5532
5619
  const selectedTools = toolsForAgent.data.find((t) => t.toolId === tool4.id)?.selectedTools || void 0;
5533
5620
  let serverConfig;
@@ -5674,9 +5761,9 @@ var Agent = class {
5674
5761
  const graphDefinition = await agentsCore.getFullGraphDefinition(dbClient_default)({
5675
5762
  scopes: {
5676
5763
  tenantId: this.config.tenantId,
5677
- projectId: this.config.projectId
5678
- },
5679
- graphId: this.config.graphId
5764
+ projectId: this.config.projectId,
5765
+ graphId: this.config.graphId
5766
+ }
5680
5767
  });
5681
5768
  return graphDefinition?.graphPrompt || void 0;
5682
5769
  } catch (error) {
@@ -5698,14 +5785,16 @@ var Agent = class {
5698
5785
  const graphDefinition = await agentsCore.getFullGraphDefinition(dbClient_default)({
5699
5786
  scopes: {
5700
5787
  tenantId: this.config.tenantId,
5701
- projectId: this.config.projectId
5702
- },
5703
- graphId: this.config.graphId
5788
+ projectId: this.config.projectId,
5789
+ graphId: this.config.graphId
5790
+ }
5704
5791
  });
5705
5792
  if (!graphDefinition) {
5706
5793
  return false;
5707
5794
  }
5708
- return !!(graphDefinition.artifactComponents && Object.keys(graphDefinition.artifactComponents).length > 0);
5795
+ return Object.values(graphDefinition.agents).some(
5796
+ (agent) => "artifactComponents" in agent && agent.artifactComponents && agent.artifactComponents.length > 0
5797
+ );
5709
5798
  } catch (error) {
5710
5799
  logger15.warn(
5711
5800
  {
@@ -5733,7 +5822,8 @@ Key requirements:
5733
5822
  - Mix artifact references throughout your dataComponents array
5734
5823
  - Each artifact reference must use EXACT IDs from tool outputs
5735
5824
  - Reference artifacts that directly support the adjacent information
5736
- - Follow the pattern: Data \u2192 Supporting Artifact \u2192 Next Data \u2192 Next Artifact`;
5825
+ - Follow the pattern: Data \u2192 Supporting Artifact \u2192 Next Data \u2192 Next Artifact
5826
+ - IMPORTANT: In Text components, write naturally as if having a conversation - do NOT mention components, schemas, JSON, structured data, or any technical implementation details`;
5737
5827
  }
5738
5828
  if (hasDataComponents && !hasArtifactComponents) {
5739
5829
  return `Generate the final structured JSON response using the configured data components. Organize the information from the research above into the appropriate structured format based on the available component schemas.
@@ -5741,7 +5831,8 @@ Key requirements:
5741
5831
  Key requirements:
5742
5832
  - Use the exact component structure and property names
5743
5833
  - Fill in all relevant data from the research
5744
- - Ensure data is organized logically and completely`;
5834
+ - Ensure data is organized logically and completely
5835
+ - IMPORTANT: In Text components, write naturally as if having a conversation - do NOT mention components, schemas, JSON, structured data, or any technical implementation details`;
5745
5836
  }
5746
5837
  if (!hasDataComponents && hasArtifactComponents) {
5747
5838
  return `Generate the final structured response with artifact references based on the research above. Use the artifact reference component to cite relevant information with exact artifact_id and task_id values from the tool outputs.
@@ -5751,7 +5842,7 @@ Key requirements:
5751
5842
  - Reference artifacts that support your response
5752
5843
  - Never make up or modify artifact IDs`;
5753
5844
  }
5754
- return `Generate the final response based on the research above.`;
5845
+ return `Generate the final response based on the research above. Write naturally as if having a conversation.`;
5755
5846
  }
5756
5847
  async buildSystemPrompt(runtimeContext, excludeDataComponents = false) {
5757
5848
  const conversationId = runtimeContext?.metadata?.conversationId || runtimeContext?.contextId;
@@ -5902,9 +5993,9 @@ Key requirements:
5902
5993
  return await agentsCore.graphHasArtifactComponents(dbClient_default)({
5903
5994
  scopes: {
5904
5995
  tenantId: this.config.tenantId,
5905
- projectId: this.config.projectId
5906
- },
5907
- graphId: this.config.graphId
5996
+ projectId: this.config.projectId,
5997
+ graphId: this.config.graphId
5998
+ }
5908
5999
  });
5909
6000
  } catch (error) {
5910
6001
  logger15.error(
@@ -6250,35 +6341,94 @@ ${output}`;
6250
6341
  this.getStructuredOutputModel()
6251
6342
  );
6252
6343
  const phase2TimeoutMs = structuredModelSettings.maxDuration ? structuredModelSettings.maxDuration * 1e3 : CONSTANTS.PHASE_2_TIMEOUT_MS;
6253
- const structuredResponse = await ai.generateObject({
6254
- ...structuredModelSettings,
6255
- messages: [
6256
- { role: "user", content: userMessage },
6257
- ...reasoningFlow,
6258
- {
6259
- role: "user",
6260
- content: await this.buildPhase2SystemPrompt()
6261
- }
6262
- ],
6263
- schema: z5.z.object({
6264
- dataComponents: z5.z.array(dataComponentsSchema)
6265
- }),
6266
- experimental_telemetry: {
6267
- isEnabled: true,
6268
- functionId: this.config.id,
6269
- recordInputs: true,
6270
- recordOutputs: true,
6271
- metadata: {
6272
- phase: "structured_generation"
6344
+ const shouldStreamPhase2 = this.getStreamingHelper();
6345
+ if (shouldStreamPhase2) {
6346
+ const streamResult = ai.streamObject({
6347
+ ...structuredModelSettings,
6348
+ messages: [
6349
+ { role: "user", content: userMessage },
6350
+ ...reasoningFlow,
6351
+ {
6352
+ role: "user",
6353
+ content: await this.buildPhase2SystemPrompt()
6354
+ }
6355
+ ],
6356
+ schema: z5.z.object({
6357
+ dataComponents: z5.z.array(dataComponentsSchema)
6358
+ }),
6359
+ experimental_telemetry: {
6360
+ isEnabled: true,
6361
+ functionId: this.config.id,
6362
+ recordInputs: true,
6363
+ recordOutputs: true,
6364
+ metadata: {
6365
+ phase: "structured_generation"
6366
+ }
6367
+ },
6368
+ abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6369
+ });
6370
+ const streamHelper = this.getStreamingHelper();
6371
+ if (!streamHelper) {
6372
+ throw new Error("Stream helper is unexpectedly undefined in streaming context");
6373
+ }
6374
+ const parser = new IncrementalStreamParser(
6375
+ streamHelper,
6376
+ this.config.tenantId,
6377
+ contextId
6378
+ );
6379
+ for await (const delta of streamResult.partialObjectStream) {
6380
+ if (delta) {
6381
+ await parser.processObjectDelta(delta);
6273
6382
  }
6274
- },
6275
- abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6276
- });
6277
- response = {
6278
- ...response,
6279
- object: structuredResponse.object
6280
- };
6281
- textResponse = JSON.stringify(structuredResponse.object, null, 2);
6383
+ }
6384
+ await parser.finalize();
6385
+ const structuredResponse = await streamResult;
6386
+ const collectedParts = parser.getCollectedParts();
6387
+ if (collectedParts.length > 0) {
6388
+ response.formattedContent = {
6389
+ parts: collectedParts.map((part) => ({
6390
+ kind: part.kind,
6391
+ ...part.kind === "text" && { text: part.text },
6392
+ ...part.kind === "data" && { data: part.data }
6393
+ }))
6394
+ };
6395
+ }
6396
+ response = {
6397
+ ...response,
6398
+ object: structuredResponse.object
6399
+ };
6400
+ textResponse = JSON.stringify(structuredResponse.object, null, 2);
6401
+ } else {
6402
+ const structuredResponse = await ai.generateObject({
6403
+ ...structuredModelSettings,
6404
+ messages: [
6405
+ { role: "user", content: userMessage },
6406
+ ...reasoningFlow,
6407
+ {
6408
+ role: "user",
6409
+ content: await this.buildPhase2SystemPrompt()
6410
+ }
6411
+ ],
6412
+ schema: z5.z.object({
6413
+ dataComponents: z5.z.array(dataComponentsSchema)
6414
+ }),
6415
+ experimental_telemetry: {
6416
+ isEnabled: true,
6417
+ functionId: this.config.id,
6418
+ recordInputs: true,
6419
+ recordOutputs: true,
6420
+ metadata: {
6421
+ phase: "structured_generation"
6422
+ }
6423
+ },
6424
+ abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6425
+ });
6426
+ response = {
6427
+ ...response,
6428
+ object: structuredResponse.object
6429
+ };
6430
+ textResponse = JSON.stringify(structuredResponse.object, null, 2);
6431
+ }
6282
6432
  } else {
6283
6433
  textResponse = response.text || "";
6284
6434
  }
@@ -6324,42 +6474,6 @@ ${output}`;
6324
6474
  }
6325
6475
  };
6326
6476
 
6327
- // src/utils/model-resolver.ts
6328
- init_dbClient();
6329
- async function resolveModelConfig(graphId, agent) {
6330
- if (agent.models?.base?.model) {
6331
- return {
6332
- base: agent.models.base,
6333
- structuredOutput: agent.models.structuredOutput || agent.models.base,
6334
- summarizer: agent.models.summarizer || agent.models.base
6335
- };
6336
- }
6337
- const graph = await agentsCore.getAgentGraph(dbClient_default)({
6338
- scopes: { tenantId: agent.tenantId, projectId: agent.projectId },
6339
- graphId
6340
- });
6341
- if (graph?.models?.base?.model) {
6342
- return {
6343
- base: graph.models.base,
6344
- structuredOutput: agent.models?.structuredOutput || graph.models.structuredOutput || graph.models.base,
6345
- summarizer: agent.models?.summarizer || graph.models.summarizer || graph.models.base
6346
- };
6347
- }
6348
- const project = await agentsCore.getProject(dbClient_default)({
6349
- scopes: { tenantId: agent.tenantId, projectId: agent.projectId }
6350
- });
6351
- if (project?.models?.base?.model) {
6352
- return {
6353
- base: project.models.base,
6354
- structuredOutput: agent.models?.structuredOutput || project.models.structuredOutput || project.models.base,
6355
- summarizer: agent.models?.summarizer || project.models.summarizer || project.models.base
6356
- };
6357
- }
6358
- throw new Error(
6359
- "Base model configuration is required. Please configure models at the project level."
6360
- );
6361
- }
6362
-
6363
6477
  // src/agents/generateTaskHandler.ts
6364
6478
  function parseEmbeddedJson(data) {
6365
6479
  return traverse__default.default(data).map(function(x) {
@@ -6394,31 +6508,34 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6394
6508
  agentsCore.getRelatedAgentsForGraph(dbClient_default)({
6395
6509
  scopes: {
6396
6510
  tenantId: config.tenantId,
6397
- projectId: config.projectId
6511
+ projectId: config.projectId,
6512
+ graphId: config.graphId
6398
6513
  },
6399
- graphId: config.graphId,
6400
6514
  agentId: config.agentId
6401
6515
  }),
6402
6516
  agentsCore.getToolsForAgent(dbClient_default)({
6403
6517
  scopes: {
6404
6518
  tenantId: config.tenantId,
6405
- projectId: config.projectId
6406
- },
6407
- agentId: config.agentId
6519
+ projectId: config.projectId,
6520
+ graphId: config.graphId,
6521
+ agentId: config.agentId
6522
+ }
6408
6523
  }),
6409
6524
  agentsCore.getDataComponentsForAgent(dbClient_default)({
6410
6525
  scopes: {
6411
6526
  tenantId: config.tenantId,
6412
- projectId: config.projectId
6413
- },
6414
- agentId: config.agentId
6527
+ projectId: config.projectId,
6528
+ graphId: config.graphId,
6529
+ agentId: config.agentId
6530
+ }
6415
6531
  }),
6416
6532
  agentsCore.getArtifactComponentsForAgent(dbClient_default)({
6417
6533
  scopes: {
6418
6534
  tenantId: config.tenantId,
6419
- projectId: config.projectId
6420
- },
6421
- agentId: config.agentId
6535
+ projectId: config.projectId,
6536
+ graphId: config.graphId,
6537
+ agentId: config.agentId
6538
+ }
6422
6539
  })
6423
6540
  ]);
6424
6541
  logger16.info({ toolsForAgent, internalRelations, externalRelations }, "agent stuff");
@@ -6426,13 +6543,16 @@ var createTaskHandler = (config, credentialStoreRegistry) => {
6426
6543
  internalRelations.map(async (relation) => {
6427
6544
  try {
6428
6545
  const relatedAgent = await agentsCore.getAgentById(dbClient_default)({
6429
- scopes: { tenantId: config.tenantId, projectId: config.projectId },
6546
+ scopes: {
6547
+ tenantId: config.tenantId,
6548
+ projectId: config.projectId,
6549
+ graphId: config.graphId
6550
+ },
6430
6551
  agentId: relation.id
6431
6552
  });
6432
6553
  if (relatedAgent) {
6433
6554
  const relatedAgentRelations = await agentsCore.getRelatedAgentsForGraph(dbClient_default)({
6434
- scopes: { tenantId: config.tenantId, projectId: config.projectId },
6435
- graphId: config.graphId,
6555
+ scopes: { tenantId: config.tenantId, projectId: config.projectId, graphId: config.graphId },
6436
6556
  agentId: relation.id
6437
6557
  });
6438
6558
  const enhancedDescription = generateDescriptionWithTransfers(
@@ -6655,16 +6775,17 @@ var createTaskHandlerConfig = async (params) => {
6655
6775
  const agent = await agentsCore.getAgentById(dbClient_default)({
6656
6776
  scopes: {
6657
6777
  tenantId: params.tenantId,
6658
- projectId: params.projectId
6778
+ projectId: params.projectId,
6779
+ graphId: params.graphId
6659
6780
  },
6660
6781
  agentId: params.agentId
6661
6782
  });
6662
- const agentGraph = await agentsCore.getAgentGraph(dbClient_default)({
6783
+ const agentGraph = await agentsCore.getAgentGraphById(dbClient_default)({
6663
6784
  scopes: {
6664
6785
  tenantId: params.tenantId,
6665
- projectId: params.projectId
6666
- },
6667
- graphId: params.graphId
6786
+ projectId: params.projectId,
6787
+ graphId: params.graphId
6788
+ }
6668
6789
  });
6669
6790
  if (!agent) {
6670
6791
  throw new Error(`Agent not found: ${params.agentId}`);
@@ -6704,10 +6825,14 @@ async function hydrateGraph({
6704
6825
  apiKey
6705
6826
  }) {
6706
6827
  try {
6828
+ if (!dbGraph.defaultAgentId) {
6829
+ throw new Error(`Graph ${dbGraph.id} does not have a default agent configured`);
6830
+ }
6707
6831
  const defaultAgent = await agentsCore.getAgentById(dbClient_default)({
6708
6832
  scopes: {
6709
6833
  tenantId: dbGraph.tenantId,
6710
- projectId: dbGraph.projectId
6834
+ projectId: dbGraph.projectId,
6835
+ graphId: dbGraph.id
6711
6836
  },
6712
6837
  agentId: dbGraph.defaultAgentId
6713
6838
  });
@@ -6762,7 +6887,7 @@ async function hydrateGraph({
6762
6887
  }
6763
6888
  async function getRegisteredGraph(executionContext) {
6764
6889
  const { tenantId, projectId, graphId, baseUrl, apiKey } = executionContext;
6765
- const dbGraph = await agentsCore.getAgentGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
6890
+ const dbGraph = await agentsCore.getAgentGraphById(dbClient_default)({ scopes: { tenantId, projectId, graphId } });
6766
6891
  if (!dbGraph) {
6767
6892
  return null;
6768
6893
  }
@@ -6821,6 +6946,7 @@ app.openapi(
6821
6946
  );
6822
6947
  const executionContext = agentsCore.getRequestExecutionContext(c);
6823
6948
  const { tenantId, projectId, graphId, agentId } = executionContext;
6949
+ console.dir("executionContext", executionContext);
6824
6950
  if (agentId) {
6825
6951
  logger17.info(
6826
6952
  {
@@ -6908,8 +7034,7 @@ app.post("/a2a", async (c) => {
6908
7034
  "graph-level a2a endpoint"
6909
7035
  );
6910
7036
  const graph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
6911
- scopes: { tenantId, projectId },
6912
- graphId
7037
+ scopes: { tenantId, projectId, graphId }
6913
7038
  });
6914
7039
  if (!graph) {
6915
7040
  return c.json(
@@ -6921,6 +7046,16 @@ app.post("/a2a", async (c) => {
6921
7046
  404
6922
7047
  );
6923
7048
  }
7049
+ if (!graph.defaultAgentId) {
7050
+ return c.json(
7051
+ {
7052
+ jsonrpc: "2.0",
7053
+ error: { code: -32004, message: "Graph does not have a default agent configured" },
7054
+ id: null
7055
+ },
7056
+ 400
7057
+ );
7058
+ }
6924
7059
  executionContext.agentId = graph.defaultAgentId;
6925
7060
  const credentialStores = c.get("credentialStores");
6926
7061
  const defaultAgent = await getRegisteredAgent(executionContext, credentialStores);
@@ -6964,6 +7099,9 @@ function isTransferResponse(result) {
6964
7099
  (artifact) => artifact.parts.some((part) => part.kind === "data" && part.data?.type === "transfer")
6965
7100
  );
6966
7101
  }
7102
+
7103
+ // src/handlers/executionHandler.ts
7104
+ init_dbClient();
6967
7105
  var SSEStreamHelper = class {
6968
7106
  constructor(stream2, requestId2, timestamp) {
6969
7107
  this.stream = stream2;
@@ -6971,7 +7109,7 @@ var SSEStreamHelper = class {
6971
7109
  this.timestamp = timestamp;
6972
7110
  // Stream queuing for proper event ordering
6973
7111
  __publicField(this, "isTextStreaming", false);
6974
- __publicField(this, "queuedOperations", []);
7112
+ __publicField(this, "queuedEvents", []);
6975
7113
  }
6976
7114
  /**
6977
7115
  * Write the initial role message
@@ -7036,9 +7174,10 @@ var SSEStreamHelper = class {
7036
7174
  await this.writeContent(JSON.stringify(data));
7037
7175
  }
7038
7176
  /**
7039
- * Write error message
7177
+ * Write error message or error event
7040
7178
  */
7041
- async writeError(errorMessage) {
7179
+ async writeError(error) {
7180
+ const errorMessage = typeof error === "string" ? error : error.message;
7042
7181
  await this.writeContent(`
7043
7182
 
7044
7183
  ${errorMessage}`);
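Note: writeError on the stream helpers now accepts either a plain string or the structured error event produced by errorOp, so severity and code survive to the transport layer. A small sketch of the widened signature (the type name is illustrative):

type StreamError =
  | string
  | { message: string; agent?: string; severity?: string; code?: string; timestamp?: number };

function toErrorMessage(error: StreamError): string {
  // Same normalization as above: strings pass through, structured events contribute their message.
  return typeof error === "string" ? error : error.message;
}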
@@ -7062,22 +7201,6 @@ ${errorMessage}`);
7062
7201
  })
7063
7202
  });
7064
7203
  }
7065
- /**
7066
- * Write the final [DONE] message
7067
- */
7068
- async writeDone() {
7069
- await this.stream.writeSSE({
7070
- data: "[DONE]"
7071
- });
7072
- }
7073
- /**
7074
- * Complete the stream with finish reason and done message
7075
- */
7076
- async complete(finishReason = "stop") {
7077
- await this.flushQueuedOperations();
7078
- await this.writeCompletion(finishReason);
7079
- await this.writeDone();
7080
- }
7081
7204
  async writeData(type, data) {
7082
7205
  await this.stream.writeSSE({
7083
7206
  data: JSON.stringify({
@@ -7096,16 +7219,23 @@ ${errorMessage}`);
7096
7219
  })
7097
7220
  });
7098
7221
  }
7099
- async writeOperation(operation) {
7100
- if (operation.type === "status_update" && operation.ctx.label) {
7101
- operation = {
7102
- type: operation.type,
7103
- label: operation.ctx.label,
7104
- ctx: operation.ctx.data
7105
- };
7222
+ async writeSummary(summary) {
7223
+ if (this.isTextStreaming) {
7224
+ this.queuedEvents.push({
7225
+ type: "data-summary",
7226
+ event: summary
7227
+ });
7228
+ return;
7106
7229
  }
7230
+ await this.flushQueuedOperations();
7231
+ await this.writeData("data-summary", summary);
7232
+ }
7233
+ async writeOperation(operation) {
7107
7234
  if (this.isTextStreaming) {
7108
- this.queuedOperations.push(operation);
7235
+ this.queuedEvents.push({
7236
+ type: "data-operation",
7237
+ event: operation
7238
+ });
7109
7239
  return;
7110
7240
  }
7111
7241
  await this.flushQueuedOperations();
@@ -7115,15 +7245,31 @@ ${errorMessage}`);
7115
7245
  * Flush all queued operations in order after text streaming completes
7116
7246
  */
7117
7247
  async flushQueuedOperations() {
7118
- if (this.queuedOperations.length === 0) {
7248
+ if (this.queuedEvents.length === 0) {
7119
7249
  return;
7120
7250
  }
7121
- const operationsToFlush = [...this.queuedOperations];
7122
- this.queuedOperations = [];
7123
- for (const operation of operationsToFlush) {
7124
- await this.writeData("data-operation", operation);
7251
+ const eventsToFlush = [...this.queuedEvents];
7252
+ this.queuedEvents = [];
7253
+ for (const event of eventsToFlush) {
7254
+ await this.writeData(event.type, event.event);
7125
7255
  }
7126
7256
  }
7257
+ /**
7258
+ * Write the final [DONE] message
7259
+ */
7260
+ async writeDone() {
7261
+ await this.stream.writeSSE({
7262
+ data: "[DONE]"
7263
+ });
7264
+ }
7265
+ /**
7266
+ * Complete the stream with finish reason and done message
7267
+ */
7268
+ async complete(finishReason = "stop") {
7269
+ await this.flushQueuedOperations();
7270
+ await this.writeCompletion(finishReason);
7271
+ await this.writeDone();
7272
+ }
7127
7273
  };
7128
7274
  function createSSEStreamHelper(stream2, requestId2, timestamp) {
7129
7275
  return new SSEStreamHelper(stream2, requestId2, timestamp);
@@ -7143,7 +7289,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7143
7289
  __publicField(this, "isCompleted", false);
7144
7290
  // Stream queuing for proper event ordering
7145
7291
  __publicField(this, "isTextStreaming", false);
7146
- __publicField(this, "queuedOperations", []);
7292
+ __publicField(this, "queuedEvents", []);
7147
7293
  // Timing tracking for text sequences (text-end to text-start gap)
7148
7294
  __publicField(this, "lastTextEndTimestamp", 0);
7149
7295
  __publicField(this, "TEXT_GAP_THRESHOLD", 50);
@@ -7255,15 +7401,24 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7255
7401
  data
7256
7402
  });
7257
7403
  }
7258
- async writeError(errorMessage) {
7404
+ async writeError(error) {
7259
7405
  if (this.isCompleted) {
7260
7406
  console.warn("Attempted to write error to completed stream");
7261
7407
  return;
7262
7408
  }
7263
- this.writer.write({
7264
- type: "error",
7265
- errorText: errorMessage
7266
- });
7409
+ if (typeof error === "string") {
7410
+ this.writer.write({
7411
+ type: "error",
7412
+ message: error,
7413
+ severity: "error",
7414
+ timestamp: Date.now()
7415
+ });
7416
+ } else {
7417
+ this.writer.write({
7418
+ ...error,
7419
+ type: "error"
7420
+ });
7421
+ }
7267
7422
  }
7268
7423
  async streamData(data) {
7269
7424
  await this.writeContent(JSON.stringify(data));
@@ -7275,20 +7430,6 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7275
7430
  }
7276
7431
  this.writer.merge(stream2);
7277
7432
  }
7278
- async writeCompletion(_finishReason = "stop") {
7279
- }
7280
- async writeDone() {
7281
- }
7282
- /**
7283
- * Complete the stream and clean up all memory
7284
- * This is the primary cleanup point to prevent memory leaks between requests
7285
- */
7286
- async complete() {
7287
- if (this.isCompleted) return;
7288
- await this.flushQueuedOperations();
7289
- this.isCompleted = true;
7290
- this.cleanup();
7291
- }
7292
7433
  /**
7293
7434
  * Clean up all memory allocations
7294
7435
  * Should be called when the stream helper is no longer needed
@@ -7302,7 +7443,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7302
7443
  this.sentItems.clear();
7303
7444
  this.completedItems.clear();
7304
7445
  this.textId = null;
7305
- this.queuedOperations = [];
7446
+ this.queuedEvents = [];
7306
7447
  this.isTextStreaming = false;
7307
7448
  }
7308
7449
  /**
@@ -7368,7 +7509,9 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7368
7509
  if (this.writer && !this.isCompleted) {
7369
7510
  this.writer.write({
7370
7511
  type: "error",
7371
- errorText: `Stream terminated: ${reason}`
7512
+ message: `Stream terminated: ${reason}`,
7513
+ severity: "error",
7514
+ timestamp: Date.now()
7372
7515
  });
7373
7516
  }
7374
7517
  } catch (e) {
@@ -7391,23 +7534,33 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7391
7534
  isCompleted: this.isCompleted
7392
7535
  };
7393
7536
  }
7537
+ async writeSummary(summary) {
7538
+ if (this.isCompleted) {
7539
+ console.warn("Attempted to write summary to completed stream");
7540
+ return;
7541
+ }
7542
+ const now = Date.now();
7543
+ const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
7544
+ if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
7545
+ this.queuedEvents.push({ type: "data-summary", event: summary });
7546
+ return;
7547
+ }
7548
+ await this.flushQueuedOperations();
7549
+ await this.writer.write({
7550
+ id: "id" in summary ? summary.id : void 0,
7551
+ type: "data-summary",
7552
+ data: summary
7553
+ });
7554
+ }
7394
7555
  async writeOperation(operation) {
7395
7556
  if (this.isCompleted) {
7396
7557
  console.warn("Attempted to write operation to completed stream");
7397
7558
  return;
7398
7559
  }
7399
- if (operation.type === "status_update" && operation.ctx.label) {
7400
- operation = {
7401
- type: operation.type,
7402
- label: operation.ctx.label,
7403
- // Preserve the label for the UI
7404
- ctx: operation.ctx.data
7405
- };
7406
- }
7407
7560
  const now = Date.now();
7408
7561
  const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
7409
7562
  if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
7410
- this.queuedOperations.push(operation);
7563
+ this.queuedEvents.push({ type: "data-operation", event: operation });
7411
7564
  return;
7412
7565
  }
7413
7566
  await this.flushQueuedOperations();
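The new writeSummary above follows the same back-pressure rules as writeOperation: while text is streaming (or within TEXT_GAP_THRESHOLD of the last text end) the summary is queued, otherwise it is written immediately as a data-summary part. A rough sketch, assuming the same `helper` instance; the summary fields are illustrative, since the payload shape is not part of this hunk:

    // Outside an active text stream:
    await helper.writeSummary({ id: "sum_1", label: "Searched the docs" });
    // -> writer.write({ id: "sum_1", type: "data-summary", data: { id: "sum_1", label: "Searched the docs" } })
    // During text streaming the same call is queued as { type: "data-summary", event: summary }
    // and emitted later by flushQueuedOperations().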
@@ -7421,19 +7574,33 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7421
7574
  * Flush all queued operations in order after text streaming completes
7422
7575
  */
7423
7576
  async flushQueuedOperations() {
7424
- if (this.queuedOperations.length === 0) {
7577
+ if (this.queuedEvents.length === 0) {
7425
7578
  return;
7426
7579
  }
7427
- const operationsToFlush = [...this.queuedOperations];
7428
- this.queuedOperations = [];
7429
- for (const operation of operationsToFlush) {
7580
+ const eventsToFlush = [...this.queuedEvents];
7581
+ this.queuedEvents = [];
7582
+ for (const event of eventsToFlush) {
7430
7583
  this.writer.write({
7431
- id: "id" in operation ? operation.id : void 0,
7432
- type: "data-operation",
7433
- data: operation
7584
+ id: "id" in event.event ? event.event.id : void 0,
7585
+ type: event.type,
7586
+ data: event.event
7434
7587
  });
7435
7588
  }
7436
7589
  }
7590
+ async writeCompletion(_finishReason = "stop") {
7591
+ }
7592
+ async writeDone() {
7593
+ }
7594
+ /**
7595
+ * Complete the stream and clean up all memory
7596
+ * This is the primary cleanup point to prevent memory leaks between requests
7597
+ */
7598
+ async complete() {
7599
+ if (this.isCompleted) return;
7600
+ await this.flushQueuedOperations();
7601
+ this.isCompleted = true;
7602
+ this.cleanup();
7603
+ }
7437
7604
  };
7438
7605
  // Memory management - focused on connection completion cleanup
7439
7606
  __publicField(_VercelDataStreamHelper, "MAX_BUFFER_SIZE", 5 * 1024 * 1024);
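Because each queued entry now records its stream part type alongside the event, a flush preserves arrival order across operations and summaries, and the relocated complete() still drains the queue before cleanup. An illustration of one flush, with placeholder events (the id field is undefined when an event carries none):

    // queuedEvents, in arrival order:
    //   { type: "data-operation", event: errorOp("transfer failed", "my-agent") }
    //   { type: "data-summary",   event: { id: "sum_1", label: "Searched the docs" } }
    // flushQueuedOperations() then writes, in order:
    //   { id: undefined, type: "data-operation", data: <the error event> }
    //   { id: "sum_1",   type: "data-summary",   data: <the summary> }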
@@ -7446,6 +7613,7 @@ var MCPStreamHelper = class {
7446
7613
  __publicField(this, "capturedText", "");
7447
7614
  __publicField(this, "capturedData", []);
7448
7615
  __publicField(this, "capturedOperations", []);
7616
+ __publicField(this, "capturedSummaries", []);
7449
7617
  __publicField(this, "hasError", false);
7450
7618
  __publicField(this, "errorMessage", "");
7451
7619
  __publicField(this, "sessionId");
@@ -7464,18 +7632,27 @@ var MCPStreamHelper = class {
7464
7632
  async streamData(data) {
7465
7633
  this.capturedData.push(data);
7466
7634
  }
7635
+ async streamSummary(summary) {
7636
+ this.capturedSummaries.push(summary);
7637
+ }
7638
+ async streamOperation(operation) {
7639
+ this.capturedOperations.push(operation);
7640
+ }
7467
7641
  async writeData(_type, data) {
7468
7642
  this.capturedData.push(data);
7469
7643
  }
7470
- async writeError(errorMessage) {
7471
- this.hasError = true;
7472
- this.errorMessage = errorMessage;
7473
- }
7474
- async complete() {
7644
+ async writeSummary(summary) {
7645
+ this.capturedSummaries.push(summary);
7475
7646
  }
7476
7647
  async writeOperation(operation) {
7477
7648
  this.capturedOperations.push(operation);
7478
7649
  }
7650
+ async writeError(error) {
7651
+ this.hasError = true;
7652
+ this.errorMessage = typeof error === "string" ? error : error.message;
7653
+ }
7654
+ async complete() {
7655
+ }
7479
7656
  /**
7480
7657
  * Get the captured response for MCP tool result
7481
7658
  */
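MCPStreamHelper gains the same surface but only buffers: summaries land in capturedSummaries, and a structured error is reduced to its message. A short sketch, assuming `mcpHelper` is an instance obtained from createMCPStreamHelper() below; the label and agent id are placeholders:

    await mcpHelper.writeSummary({ label: "Fetched 3 pages" });           // pushed onto capturedSummaries
    await mcpHelper.writeError(errorOp("tool call failed", "qa-agent"));
    // mcpHelper.hasError === true; mcpHelper.errorMessage === "tool call failed"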
@@ -7494,7 +7671,6 @@ function createMCPStreamHelper() {
7494
7671
  }
7495
7672
 
7496
7673
  // src/handlers/executionHandler.ts
7497
- init_dbClient();
7498
7674
  var logger19 = agentsCore.getLogger("ExecutionHandler");
7499
7675
  var ExecutionHandler = class {
7500
7676
  constructor() {
@@ -7523,7 +7699,7 @@ var ExecutionHandler = class {
7523
7699
  logger19.info({ sessionId: requestId2, graphId }, "Created GraphSession for message execution");
7524
7700
  let graphConfig = null;
7525
7701
  try {
7526
- graphConfig = await agentsCore.getFullGraph(dbClient_default)({ scopes: { tenantId, projectId }, graphId });
7702
+ graphConfig = await agentsCore.getFullGraph(dbClient_default)({ scopes: { tenantId, projectId, graphId } });
7527
7703
  if (graphConfig?.statusUpdates && graphConfig.statusUpdates.enabled !== false) {
7528
7704
  graphSessionManager.initializeStatusUpdates(
7529
7705
  requestId2,
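This hunk is the first of several in this diff that move graphId from a top-level argument into scopes. The new call shape, with placeholder ids:

    const graphConfig = await agentsCore.getFullGraph(dbClient_default)({
      scopes: { tenantId: "tenant_1", projectId: "proj_1", graphId: "graph_1" }
    });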
@@ -7677,7 +7853,6 @@ var ExecutionHandler = class {
7677
7853
  if (errorCount >= this.MAX_ERRORS) {
7678
7854
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7679
7855
  logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7680
- await sseHelper.writeError(errorMessage2);
7681
7856
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7682
7857
  if (task) {
7683
7858
  await agentsCore.updateTask(dbClient_default)({
@@ -7818,7 +7993,6 @@ var ExecutionHandler = class {
7818
7993
  if (errorCount >= this.MAX_ERRORS) {
7819
7994
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7820
7995
  logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7821
- await sseHelper.writeError(errorMessage2);
7822
7996
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7823
7997
  if (task) {
7824
7998
  await agentsCore.updateTask(dbClient_default)({
@@ -7840,7 +8014,6 @@ var ExecutionHandler = class {
7840
8014
  }
7841
8015
  const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
7842
8016
  logger19.error({ maxTransfers, iterations }, errorMessage);
7843
- await sseHelper.writeError(errorMessage);
7844
8017
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
7845
8018
  if (task) {
7846
8019
  await agentsCore.updateTask(dbClient_default)({
@@ -7861,8 +8034,7 @@ var ExecutionHandler = class {
7861
8034
  } catch (error) {
7862
8035
  logger19.error({ error }, "Error in execution handler");
7863
8036
  const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
7864
- await sseHelper.writeError(`Execution error: ${errorMessage}`);
7865
- await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
8037
+ await sseHelper.writeOperation(errorOp(`Execution error: ${errorMessage}`, currentAgentId || "system"));
7866
8038
  if (task) {
7867
8039
  await agentsCore.updateTask(dbClient_default)({
7868
8040
  taskId: task.id,
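The error-limit, transfer-limit, and catch-all hunks above drop the separate writeError frame and report every failure through a single error operation. What a data-stream consumer sees for the catch-all case, inferred from the flush path earlier in this diff (field values are illustrative):

    await sseHelper.writeOperation(errorOp(`Execution error: ${errorMessage}`, currentAgentId || "system"));
    // -> { type: "data-operation", data: { type: "error", message: "Execution error: ...", ... } }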
@@ -8024,8 +8196,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8024
8196
  const body = c.get("requestBody") || {};
8025
8197
  const conversationId = body.conversationId || nanoid.nanoid();
8026
8198
  const fullGraph = await agentsCore.getFullGraph(dbClient_default)({
8027
- scopes: { tenantId, projectId },
8028
- graphId
8199
+ scopes: { tenantId, projectId, graphId }
8029
8200
  });
8030
8201
  let agentGraph;
8031
8202
  let defaultAgentId;
@@ -8042,8 +8213,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8042
8213
  defaultAgentId = fullGraph.defaultAgentId || firstAgentId;
8043
8214
  } else {
8044
8215
  agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
8045
- scopes: { tenantId, projectId },
8046
- graphId
8216
+ scopes: { tenantId, projectId, graphId }
8047
8217
  });
8048
8218
  if (!agentGraph) {
8049
8219
  return c.json({ error: "Agent graph not found" }, 404);
@@ -8072,7 +8242,7 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8072
8242
  }
8073
8243
  const agentId = activeAgent?.activeAgentId || defaultAgentId;
8074
8244
  const agentInfo = await agentsCore.getAgentById(dbClient_default)({
8075
- scopes: { tenantId, projectId },
8245
+ scopes: { tenantId, projectId, graphId },
8076
8246
  agentId
8077
8247
  });
8078
8248
  if (!agentInfo) {
@@ -8080,18 +8250,19 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8080
8250
  }
8081
8251
  const validatedContext = c.get("validatedContext") || body.requestContext || {};
8082
8252
  const credentialStores = c.get("credentialStores");
8083
- await agentsCore.handleContextResolution(
8253
+ await agentsCore.handleContextResolution({
8084
8254
  tenantId,
8085
8255
  projectId,
8086
- conversationId,
8087
8256
  graphId,
8088
- validatedContext,
8089
- dbClient_default,
8257
+ conversationId,
8258
+ requestContext: validatedContext,
8259
+ dbClient: dbClient_default,
8090
8260
  credentialStores
8091
- );
8261
+ });
8092
8262
  logger20.info(
8093
8263
  {
8094
8264
  tenantId,
8265
+ projectId,
8095
8266
  graphId,
8096
8267
  conversationId,
8097
8268
  defaultAgentId,
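handleContextResolution now takes a single options object instead of positional arguments; the same shape is repeated in the data-stream route and the MCP server further down. The new call, using the field names from this hunk:

    await agentsCore.handleContextResolution({
      tenantId,
      projectId,
      graphId,
      conversationId,
      requestContext: validatedContext,
      dbClient: dbClient_default,
      credentialStores
    });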
@@ -8150,8 +8321,8 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8150
8321
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
8151
8322
  );
8152
8323
  if (!result.success) {
8153
- await sseHelper.writeError(
8154
- "Sorry, I was unable to process your request at this time. Please try again."
8324
+ await sseHelper.writeOperation(
8325
+ errorOp("Sorry, I was unable to process your request at this time. Please try again.", "system")
8155
8326
  );
8156
8327
  }
8157
8328
  await sseHelper.complete();
@@ -8234,6 +8405,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8234
8405
  try {
8235
8406
  const executionContext = agentsCore.getRequestExecutionContext(c);
8236
8407
  const { tenantId, projectId, graphId } = executionContext;
8408
+ agentsCore.loggerFactory.getLogger("chatDataStream").debug({ tenantId, projectId, graphId }, "Extracted chatDataStream parameters");
8237
8409
  const body = c.get("requestBody") || {};
8238
8410
  const conversationId = body.conversationId || nanoid.nanoid();
8239
8411
  const activeSpan = api.trace.getActiveSpan();
@@ -8246,14 +8418,16 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8246
8418
  });
8247
8419
  }
8248
8420
  const agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
8249
- scopes: { tenantId, projectId },
8250
- graphId
8421
+ scopes: { tenantId, projectId, graphId }
8251
8422
  });
8252
8423
  if (!agentGraph) {
8253
8424
  return c.json({ error: "Agent graph not found" }, 404);
8254
8425
  }
8255
8426
  const defaultAgentId = agentGraph.defaultAgentId;
8256
8427
  const graphName = agentGraph.name;
8428
+ if (!defaultAgentId) {
8429
+ return c.json({ error: "Graph does not have a default agent configured" }, 400);
8430
+ }
8257
8431
  const activeAgent = await agentsCore.getActiveAgentForConversation(dbClient_default)({
8258
8432
  scopes: { tenantId, projectId },
8259
8433
  conversationId
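The new guard above rejects graphs without a default agent before any stream is opened. The client-visible response in that case:

    // HTTP 400
    // { "error": "Graph does not have a default agent configured" }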
@@ -8267,7 +8441,7 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8267
8441
  }
8268
8442
  const agentId = activeAgent?.activeAgentId || defaultAgentId;
8269
8443
  const agentInfo = await agentsCore.getAgentById(dbClient_default)({
8270
- scopes: { tenantId, projectId },
8444
+ scopes: { tenantId, projectId, graphId },
8271
8445
  agentId
8272
8446
  });
8273
8447
  if (!agentInfo) {
@@ -8275,15 +8449,15 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8275
8449
  }
8276
8450
  const validatedContext = c.get("validatedContext") || body.requestContext || {};
8277
8451
  const credentialStores = c.get("credentialStores");
8278
- await agentsCore.handleContextResolution(
8452
+ await agentsCore.handleContextResolution({
8279
8453
  tenantId,
8280
8454
  projectId,
8281
- conversationId,
8282
8455
  graphId,
8283
- validatedContext,
8284
- dbClient_default,
8456
+ conversationId,
8457
+ requestContext: validatedContext,
8458
+ dbClient: dbClient_default,
8285
8459
  credentialStores
8286
- );
8460
+ });
8287
8461
  const lastUserMessage = body.messages.filter((m) => m.role === "user").slice(-1)[0];
8288
8462
  const userText = typeof lastUserMessage?.content === "string" ? lastUserMessage.content : lastUserMessage?.parts?.map((p) => p.text).join("") || "";
8289
8463
  logger21.info({ userText, lastUserMessage }, "userText");
@@ -8325,11 +8499,11 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8325
8499
  sseHelper: streamHelper
8326
8500
  });
8327
8501
  if (!result.success) {
8328
- await streamHelper.writeError("Unable to process request");
8502
+ await streamHelper.writeOperation(errorOp("Unable to process request", "system"));
8329
8503
  }
8330
8504
  } catch (err) {
8331
8505
  logger21.error({ err }, "Streaming error");
8332
- await streamHelper.writeError("Internal server error");
8506
+ await streamHelper.writeOperation(errorOp("Internal server error", "system"));
8333
8507
  } finally {
8334
8508
  if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
8335
8509
  streamHelper.cleanup();
@@ -8555,8 +8729,7 @@ var getServer = async (requestContext, executionContext, conversationId, credent
8555
8729
  const { tenantId, projectId, graphId } = executionContext;
8556
8730
  setupTracing(conversationId, tenantId, graphId);
8557
8731
  const agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
8558
- scopes: { tenantId, projectId },
8559
- graphId
8732
+ scopes: { tenantId, projectId, graphId }
8560
8733
  });
8561
8734
  if (!agentGraph) {
8562
8735
  throw new Error("Agent graph not found");
@@ -8576,9 +8749,20 @@ var getServer = async (requestContext, executionContext, conversationId, credent
8576
8749
  },
8577
8750
  async ({ query }) => {
8578
8751
  try {
8752
+ if (!agentGraph.defaultAgentId) {
8753
+ return {
8754
+ content: [
8755
+ {
8756
+ type: "text",
8757
+ text: `Graph does not have a default agent configured`
8758
+ }
8759
+ ],
8760
+ isError: true
8761
+ };
8762
+ }
8579
8763
  const defaultAgentId = agentGraph.defaultAgentId;
8580
8764
  const agentInfo = await agentsCore.getAgentById(dbClient_default)({
8581
- scopes: { tenantId, projectId },
8765
+ scopes: { tenantId, projectId, graphId },
8582
8766
  agentId: defaultAgentId
8583
8767
  });
8584
8768
  if (!agentInfo) {
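Inside the MCP tool handler the same missing-default-agent condition cannot map to an HTTP status, so it resolves to an error-flagged tool result instead:

    // {
    //   content: [{ type: "text", text: "Graph does not have a default agent configured" }],
    //   isError: true
    // }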
@@ -8592,18 +8776,19 @@ var getServer = async (requestContext, executionContext, conversationId, credent
8592
8776
  isError: true
8593
8777
  };
8594
8778
  }
8595
- const resolvedContext = await agentsCore.handleContextResolution(
8779
+ const resolvedContext = await agentsCore.handleContextResolution({
8596
8780
  tenantId,
8597
8781
  projectId,
8598
- conversationId,
8599
8782
  graphId,
8783
+ conversationId,
8600
8784
  requestContext,
8601
- dbClient_default,
8785
+ dbClient: dbClient_default,
8602
8786
  credentialStores
8603
- );
8787
+ });
8604
8788
  logger22.info(
8605
8789
  {
8606
8790
  tenantId,
8791
+ projectId,
8607
8792
  graphId,
8608
8793
  conversationId,
8609
8794
  hasContextConfig: !!agentGraph.contextConfigId,
@@ -8665,8 +8850,7 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8665
8850
  logger22.info({ body }, "Received initialization request");
8666
8851
  const sessionId = nanoid.nanoid();
8667
8852
  const agentGraph = await agentsCore.getAgentGraphWithDefaultAgent(dbClient_default)({
8668
- scopes: { tenantId, projectId },
8669
- graphId
8853
+ scopes: { tenantId, projectId, graphId }
8670
8854
  });
8671
8855
  if (!agentGraph) {
8672
8856
  return c.json(
@@ -8678,6 +8862,16 @@ var handleInitializationRequest = async (body, executionContext, validatedContex
8678
8862
  { status: 404 }
8679
8863
  );
8680
8864
  }
8865
+ if (!agentGraph.defaultAgentId) {
8866
+ return c.json(
8867
+ {
8868
+ jsonrpc: "2.0",
8869
+ error: { code: -32001, message: "Graph does not have a default agent configured" },
8870
+ id: body.id || null
8871
+ },
8872
+ { status: 400 }
8873
+ );
8874
+ }
8681
8875
  const conversation = await agentsCore.createOrGetConversation(dbClient_default)({
8682
8876
  id: sessionId,
8683
8877
  tenantId,