@inkeep/agents-run-api 0.0.0-dev-20250917205552 → 0.0.0-dev-20250919020857

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/dist/index.cjs +425 -293
  2. package/dist/index.js +426 -294
  3. package/package.json +2 -2
package/dist/index.cjs CHANGED
@@ -1266,24 +1266,19 @@ function completionOp(agentId, iterations) {
1266
1266
  }
1267
1267
  };
1268
1268
  }
1269
- function errorOp(error, agentId) {
1269
+ function errorOp(message, agentId, severity = "error", code) {
1270
1270
  return {
1271
1271
  type: "error",
1272
- ctx: {
1273
- error,
1274
- agent: agentId
1275
- }
1272
+ message,
1273
+ agent: agentId,
1274
+ severity,
1275
+ code,
1276
+ timestamp: Date.now()
1276
1277
  };
1277
1278
  }
1278
1279
  function generateToolId() {
1279
1280
  return `tool_${nanoid.nanoid(8)}`;
1280
1281
  }
1281
- function statusUpdateOp(ctx) {
1282
- return {
1283
- type: "status_update",
1284
- ctx
1285
- };
1286
- }
1287
1282
  var logger4 = agentsCore.getLogger("DataComponentSchema");
1288
1283
  function jsonSchemaToZod(jsonSchema) {
1289
1284
  if (!jsonSchema || typeof jsonSchema !== "object") {
@@ -1539,7 +1534,6 @@ var ModelFactory = _ModelFactory;
1539
1534
  // src/utils/graph-session.ts
1540
1535
  init_conversations();
1541
1536
  init_dbClient();
1542
- var tracer = agentsCore.getTracer("agents-run-api");
1543
1537
 
1544
1538
  // src/utils/stream-registry.ts
1545
1539
  var streamHelperRegistry = /* @__PURE__ */ new Map();
@@ -1555,6 +1549,7 @@ function getStreamHelper(requestId2) {
1555
1549
  function unregisterStreamHelper(requestId2) {
1556
1550
  streamHelperRegistry.delete(requestId2);
1557
1551
  }
1552
+ var tracer = agentsCore.getTracer("agents-run-api");
1558
1553
 
1559
1554
  // src/utils/graph-session.ts
1560
1555
  var logger6 = agentsCore.getLogger("GraphSession");
@@ -1863,7 +1858,6 @@ var GraphSession = class {
1863
1858
  }
1864
1859
  this.isGeneratingUpdate = true;
1865
1860
  const statusUpdateState = this.statusUpdateState;
1866
- const graphId = this.graphId;
1867
1861
  try {
1868
1862
  const streamHelper = getStreamHelper(this.sessionId);
1869
1863
  if (!streamHelper) {
@@ -1876,7 +1870,7 @@ var GraphSession = class {
1876
1870
  }
1877
1871
  const now = Date.now();
1878
1872
  const elapsedTime = now - statusUpdateState.startTime;
1879
- let operation;
1873
+ let summaryToSend;
1880
1874
  if (statusUpdateState.config.statusComponents && statusUpdateState.config.statusComponents.length > 0) {
1881
1875
  const result = await this.generateStructuredStatusUpdate(
1882
1876
  this.events.slice(statusUpdateState.lastEventCount),
@@ -1885,32 +1879,30 @@ var GraphSession = class {
1885
1879
  statusUpdateState.summarizerModel,
1886
1880
  this.previousSummaries
1887
1881
  );
1888
- if (result.operations && result.operations.length > 0) {
1889
- for (const op of result.operations) {
1890
- if (!op || !op.type || !op.data || Object.keys(op.data).length === 0) {
1882
+ if (result.summaries && result.summaries.length > 0) {
1883
+ for (const summary of result.summaries) {
1884
+ if (!summary || !summary.type || !summary.data || !summary.data.label || Object.keys(summary.data).length === 0) {
1891
1885
  logger6.warn(
1892
1886
  {
1893
1887
  sessionId: this.sessionId,
1894
- operation: op
1888
+ summary
1895
1889
  },
1896
1890
  "Skipping empty or invalid structured operation"
1897
1891
  );
1898
1892
  continue;
1899
1893
  }
1900
- const operationToSend = {
1901
- type: "status_update",
1902
- ctx: {
1903
- operationType: op.type,
1904
- label: op.data.label,
1905
- data: Object.fromEntries(
1906
- Object.entries(op.data).filter(([key]) => !["label", "type"].includes(key))
1907
- )
1908
- }
1894
+ const summaryToSend2 = {
1895
+ type: summary.data.type || summary.type,
1896
+ // Preserve the actual custom type from LLM
1897
+ label: summary.data.label,
1898
+ details: Object.fromEntries(
1899
+ Object.entries(summary.data).filter(([key]) => !["label", "type"].includes(key))
1900
+ )
1909
1901
  };
1910
- await streamHelper.writeOperation(operationToSend);
1902
+ await streamHelper.writeSummary(summaryToSend2);
1911
1903
  }
1912
- const summaryTexts = result.operations.map(
1913
- (op) => JSON.stringify({ type: op.type, data: op.data })
1904
+ const summaryTexts = result.summaries.map(
1905
+ (summary) => JSON.stringify({ type: summary.type, data: summary.data })
1914
1906
  );
1915
1907
  this.previousSummaries.push(...summaryTexts);
1916
1908
  if (this.statusUpdateState) {
@@ -1927,34 +1919,20 @@ var GraphSession = class {
1927
1919
  this.previousSummaries
1928
1920
  );
1929
1921
  this.previousSummaries.push(summary);
1930
- operation = statusUpdateOp({
1931
- summary,
1932
- eventCount: this.events.length,
1933
- elapsedTime,
1934
- currentPhase: "processing",
1935
- activeAgent: "system",
1936
- graphId,
1937
- sessionId: this.sessionId
1938
- });
1939
1922
  }
1940
1923
  if (this.previousSummaries.length > 3) {
1941
1924
  this.previousSummaries.shift();
1942
1925
  }
1943
- if (!operation || !operation.type || !operation.ctx) {
1926
+ {
1944
1927
  logger6.warn(
1945
1928
  {
1946
1929
  sessionId: this.sessionId,
1947
- operation
1930
+ summaryToSend
1948
1931
  },
1949
1932
  "Skipping empty or invalid status update operation"
1950
1933
  );
1951
1934
  return;
1952
1935
  }
1953
- await streamHelper.writeOperation(operation);
1954
- if (this.statusUpdateState) {
1955
- this.statusUpdateState.lastUpdateTime = now;
1956
- this.statusUpdateState.lastEventCount = this.events.length;
1957
- }
1958
1936
  } catch (error) {
1959
1937
  logger6.error(
1960
1938
  {
@@ -2087,7 +2065,7 @@ ${previousSummaryContext}` : ""}
2087
2065
  Activities:
2088
2066
  ${userVisibleActivities.join("\n") || "No New Activities"}
2089
2067
 
2090
- Describe the ACTUAL finding, result, or specific information discovered (e.g., "Found Slack bot requires admin permissions", "Identified 3 channel types for ingestion", "Configuration requires OAuth token").
2068
+ Create a short 3-5 word label describing the ACTUAL finding. Use sentence case (only capitalize the first word and proper nouns). Examples: "Found admin permissions needed", "Identified three channel types", "OAuth token required".
2091
2069
 
2092
2070
  ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2093
2071
  const prompt = basePrompt;
@@ -2209,14 +2187,16 @@ Rules:
2209
2187
  - Fill in data for relevant components only
2210
2188
  - Use 'no_relevant_updates' if nothing substantially new to report. DO NOT WRITE LABELS OR USE OTHER COMPONENTS IF YOU USE THIS COMPONENT.
2211
2189
  - Never repeat previous values, make every update EXTREMELY unique. If you cannot do that the update is not worth mentioning.
2212
- - Labels MUST contain the ACTUAL information discovered ("Found X", "Learned Y", "Discovered Z requires A")
2190
+ - Labels MUST be short 3-5 word phrases with ACTUAL information discovered. NEVER MAKE UP SOMETHING WITHOUT BACKING IT UP WITH ACTUAL INFORMATION.
2191
+ - Use sentence case: only capitalize the first word and proper nouns (e.g., "Admin permissions required", not "Admin Permissions Required"). ALWAYS capitalize the first word of the label.
2213
2192
  - DO NOT use action words like "Searching", "Processing", "Analyzing" - state what was FOUND
2214
2193
  - Include specific details, numbers, requirements, or insights discovered
2194
+ - Examples: "Admin permissions required", "Three OAuth steps found", "Token expires daily"
2215
2195
  - You are ONE unified AI system - NEVER mention agents, transfers, delegations, or routing
2216
- - CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", or any internal system terminology in labels
2196
+ - CRITICAL: NEVER use the words "transfer", "delegation", "agent", "routing", "artifact", or any internal system terminology in labels or any names of agents, tools, or systems.
2217
2197
  - Present all operations as seamless actions by a single system
2218
2198
  - Anonymize all internal operations so that the information appears descriptive and USER FRIENDLY. HIDE ALL INTERNAL OPERATIONS!
2219
- - Bad examples: "Transferring to search agent", "Delegating task", "Routing request", "Processing request", or not using the no_relevant_updates
2199
+ - Bad examples: "Transferring to search agent", "continuing transfer to qa agent", "Delegating task", "Routing request", "Processing request", "Artifact found", "Artifact saved", or not using the no_relevant_updates
2220
2200
  - Good examples: "Slack bot needs admin privileges", "Found 3-step OAuth flow required", "Channel limit is 500 per workspace", or use the no_relevant_updates component if nothing new to report.
2221
2201
 
2222
2202
  REMEMBER YOU CAN ONLY USE 'no_relevant_updates' ALONE! IT CANNOT BE CONCATENATED WITH OTHER STATUS UPDATES!
@@ -2249,29 +2229,29 @@ ${this.statusUpdateState?.config.prompt?.trim() || ""}`;
2249
2229
  }
2250
2230
  });
2251
2231
  const result = object;
2252
- const operations = [];
2232
+ const summaries = [];
2253
2233
  for (const [componentId, data] of Object.entries(result)) {
2254
2234
  if (componentId === "no_relevant_updates") {
2255
2235
  continue;
2256
2236
  }
2257
2237
  if (data && typeof data === "object" && Object.keys(data).length > 0) {
2258
- operations.push({
2238
+ summaries.push({
2259
2239
  type: componentId,
2260
2240
  data
2261
2241
  });
2262
2242
  }
2263
2243
  }
2264
2244
  span.setAttributes({
2265
- "operations.count": operations.length,
2245
+ "summaries.count": summaries.length,
2266
2246
  "user_activities.count": userVisibleActivities.length,
2267
2247
  "result_keys.count": Object.keys(result).length
2268
2248
  });
2269
2249
  span.setStatus({ code: api.SpanStatusCode.OK });
2270
- return { operations };
2250
+ return { summaries };
2271
2251
  } catch (error) {
2272
2252
  agentsCore.setSpanWithError(span, error);
2273
2253
  logger6.error({ error }, "Failed to generate structured update, using fallback");
2274
- return { operations: [] };
2254
+ return { summaries: [] };
2275
2255
  } finally {
2276
2256
  span.end();
2277
2257
  }
@@ -2852,7 +2832,8 @@ var _ArtifactParser = class _ArtifactParser {
2852
2832
  taskId,
2853
2833
  name: artifact.name || "Processing...",
2854
2834
  description: artifact.description || "Name and description being generated...",
2855
- artifactType: artifact.metadata?.artifactType,
2835
+ type: artifact.metadata?.artifactType || artifact.artifactType,
2836
+ // Map artifactType to type for consistency
2856
2837
  artifactSummary: artifact.parts?.[0]?.data?.summary || {}
2857
2838
  };
2858
2839
  }
@@ -2872,7 +2853,7 @@ var _ArtifactParser = class _ArtifactParser {
2872
2853
  const matchStart = match.index;
2873
2854
  if (matchStart > lastIndex) {
2874
2855
  const textBefore = text.slice(lastIndex, matchStart);
2875
- if (textBefore.trim()) {
2856
+ if (textBefore) {
2876
2857
  parts.push({ kind: "text", text: textBefore });
2877
2858
  }
2878
2859
  }
@@ -2884,7 +2865,7 @@ var _ArtifactParser = class _ArtifactParser {
2884
2865
  }
2885
2866
  if (lastIndex < text.length) {
2886
2867
  const remainingText = text.slice(lastIndex);
2887
- if (remainingText.trim()) {
2868
+ if (remainingText) {
2888
2869
  parts.push({ kind: "text", text: remainingText });
2889
2870
  }
2890
2871
  }
@@ -2994,8 +2975,9 @@ __publicField(_ArtifactParser, "INCOMPLETE_ARTIFACT_REGEX", /<(a(r(t(i(f(a(c(t(:
2994
2975
  var ArtifactParser = _ArtifactParser;
2995
2976
 
2996
2977
  // src/utils/incremental-stream-parser.ts
2997
- var logger8 = agentsCore.getLogger("IncrementalStreamParser");
2998
- var IncrementalStreamParser = class {
2978
+ agentsCore.getLogger("IncrementalStreamParser");
2979
+ var _IncrementalStreamParser = class _IncrementalStreamParser {
2980
+ // Max number of streamed component IDs to track
2999
2981
  constructor(streamHelper, tenantId, contextId) {
3000
2982
  __publicField(this, "buffer", "");
3001
2983
  __publicField(this, "pendingTextBuffer", "");
@@ -3005,6 +2987,9 @@ var IncrementalStreamParser = class {
3005
2987
  __publicField(this, "collectedParts", []);
3006
2988
  __publicField(this, "contextId");
3007
2989
  __publicField(this, "lastChunkWasToolResult", false);
2990
+ __publicField(this, "componentAccumulator", {});
2991
+ __publicField(this, "lastStreamedComponents", /* @__PURE__ */ new Map());
2992
+ __publicField(this, "componentSnapshots", /* @__PURE__ */ new Map());
3008
2993
  this.streamHelper = streamHelper;
3009
2994
  this.contextId = contextId;
3010
2995
  this.artifactParser = new ArtifactParser(tenantId);
@@ -3019,7 +3004,7 @@ var IncrementalStreamParser = class {
3019
3004
  * Process a new text chunk for text streaming (handles artifact markers)
3020
3005
  */
3021
3006
  async processTextChunk(chunk) {
3022
- if (this.lastChunkWasToolResult && this.buffer === "" && chunk.trim()) {
3007
+ if (this.lastChunkWasToolResult && this.buffer === "" && chunk) {
3023
3008
  chunk = "\n\n" + chunk;
3024
3009
  this.lastChunkWasToolResult = false;
3025
3010
  }
@@ -3031,100 +3016,122 @@ var IncrementalStreamParser = class {
3031
3016
  this.buffer = parseResult.remainingBuffer;
3032
3017
  }
3033
3018
  /**
3034
- * Process a new object chunk for object streaming (handles JSON objects with artifact references)
3019
+ * Process object deltas directly from Vercel AI SDK's fullStream
3020
+ * Accumulates components and streams them when they're stable (unchanged between deltas)
3035
3021
  */
3036
- async processObjectChunk(chunk) {
3037
- this.buffer += chunk;
3038
- const parseResult = await this.parseObjectBuffer();
3039
- for (const part of parseResult.completeParts) {
3040
- await this.streamPart(part);
3022
+ async processObjectDelta(delta) {
3023
+ if (!delta || typeof delta !== "object") {
3024
+ return;
3041
3025
  }
3042
- this.buffer = parseResult.remainingBuffer;
3043
- }
3044
- /**
3045
- * Process tool call stream for structured output, streaming components as they complete
3046
- */
3047
- async processToolCallStream(stream2, targetToolName) {
3048
- let jsonBuffer = "";
3049
- let componentBuffer = "";
3050
- let depth = 0;
3051
- let componentsStreamed = 0;
3052
- const MAX_BUFFER_SIZE = 5 * 1024 * 1024;
3053
- for await (const part of stream2) {
3054
- if (part.type === "tool-call-delta" && part.toolName === targetToolName) {
3055
- const delta = part.argsTextDelta || "";
3056
- if (jsonBuffer.length + delta.length > MAX_BUFFER_SIZE) {
3057
- logger8.warn(
3058
- { bufferSize: jsonBuffer.length + delta.length, maxSize: MAX_BUFFER_SIZE },
3059
- "JSON buffer exceeded maximum size, truncating"
3060
- );
3061
- jsonBuffer = jsonBuffer.slice(-MAX_BUFFER_SIZE / 2);
3026
+ this.componentAccumulator = this.deepMerge(this.componentAccumulator, delta);
3027
+ if (this.componentAccumulator.dataComponents && Array.isArray(this.componentAccumulator.dataComponents)) {
3028
+ const components = this.componentAccumulator.dataComponents;
3029
+ const currentComponentIds = new Set(components.filter((c) => c?.id).map((c) => c.id));
3030
+ for (const [componentId, snapshot] of this.componentSnapshots.entries()) {
3031
+ if (!currentComponentIds.has(componentId) && !this.lastStreamedComponents.has(componentId)) {
3032
+ try {
3033
+ const component = JSON.parse(snapshot);
3034
+ if (this.isComponentComplete(component)) {
3035
+ await this.streamComponent(component);
3036
+ }
3037
+ } catch (e) {
3038
+ }
3062
3039
  }
3063
- jsonBuffer += delta;
3064
- for (const char of delta) {
3065
- if (componentBuffer.length > MAX_BUFFER_SIZE) {
3066
- logger8.warn(
3067
- { bufferSize: componentBuffer.length, maxSize: MAX_BUFFER_SIZE },
3068
- "Component buffer exceeded maximum size, resetting"
3069
- );
3070
- componentBuffer = "";
3071
- depth = 0;
3072
- continue;
3040
+ }
3041
+ for (let i = 0; i < components.length; i++) {
3042
+ const component = components[i];
3043
+ if (!component?.id) continue;
3044
+ const componentKey = component.id;
3045
+ const hasBeenStreamed = this.lastStreamedComponents.has(componentKey);
3046
+ if (hasBeenStreamed) continue;
3047
+ const currentSnapshot = JSON.stringify(component);
3048
+ const previousSnapshot = this.componentSnapshots.get(componentKey);
3049
+ this.componentSnapshots.set(componentKey, currentSnapshot);
3050
+ if (this.componentSnapshots.size > _IncrementalStreamParser.MAX_SNAPSHOT_SIZE) {
3051
+ const firstKey = this.componentSnapshots.keys().next().value;
3052
+ if (firstKey) {
3053
+ this.componentSnapshots.delete(firstKey);
3073
3054
  }
3074
- componentBuffer += char;
3075
- if (char === "{") {
3076
- depth++;
3077
- } else if (char === "}") {
3078
- depth--;
3079
- if (depth === 2 && componentBuffer.includes('"id"')) {
3080
- const componentMatch = componentBuffer.match(/\{[^{}]*(?:\{[^{}]*\}[^{}]*)*\}/);
3081
- if (componentMatch) {
3082
- const MAX_COMPONENT_SIZE = 1024 * 1024;
3083
- if (componentMatch[0].length > MAX_COMPONENT_SIZE) {
3084
- logger8.warn(
3085
- {
3086
- size: componentMatch[0].length,
3087
- maxSize: MAX_COMPONENT_SIZE
3088
- },
3089
- "Component exceeds size limit, skipping"
3090
- );
3091
- componentBuffer = "";
3092
- continue;
3093
- }
3094
- try {
3095
- const component = JSON.parse(componentMatch[0]);
3096
- if (typeof component !== "object" || !component.id) {
3097
- logger8.warn({ component }, "Invalid component structure, skipping");
3098
- componentBuffer = "";
3099
- continue;
3100
- }
3101
- const parts = await this.artifactParser.parseObject({
3102
- dataComponents: [component]
3103
- });
3104
- for (const part2 of parts) {
3105
- await this.streamPart(part2);
3106
- }
3107
- componentsStreamed++;
3108
- componentBuffer = "";
3109
- } catch (e) {
3110
- logger8.debug({ error: e }, "Failed to parse component, continuing to accumulate");
3111
- }
3112
- }
3055
+ }
3056
+ if (component.name === "Text" && component.props?.text) {
3057
+ const previousTextContent = previousSnapshot ? JSON.parse(previousSnapshot).props?.text || "" : "";
3058
+ const currentTextContent = component.props.text || "";
3059
+ if (currentTextContent.length > previousTextContent.length) {
3060
+ const newText = currentTextContent.slice(previousTextContent.length);
3061
+ if (!this.hasStartedRole) {
3062
+ await this.streamHelper.writeRole("assistant");
3063
+ this.hasStartedRole = true;
3113
3064
  }
3065
+ await this.streamHelper.streamText(newText, 50);
3066
+ this.collectedParts.push({
3067
+ kind: "text",
3068
+ text: newText
3069
+ });
3114
3070
  }
3115
- if (componentBuffer.includes('"dataComponents"') && componentBuffer.includes("[")) ;
3071
+ continue;
3116
3072
  }
3117
- } else if (part.type === "tool-call" && part.toolName === targetToolName) {
3118
- if (part.args?.dataComponents) {
3119
- const parts = await this.artifactParser.parseObject(part.args);
3120
- for (const part2 of parts) {
3121
- await this.streamPart(part2);
3073
+ if (this.isComponentComplete(component)) {
3074
+ const currentPropsSnapshot = JSON.stringify(component.props);
3075
+ const previousPropsSnapshot = previousSnapshot ? JSON.stringify(JSON.parse(previousSnapshot).props) : null;
3076
+ if (previousPropsSnapshot === currentPropsSnapshot) {
3077
+ await this.streamComponent(component);
3122
3078
  }
3123
3079
  }
3124
- break;
3125
3080
  }
3126
3081
  }
3127
- logger8.debug({ componentsStreamed }, "Finished streaming components");
3082
+ }
3083
+ /**
3084
+ * Stream a component and mark it as streamed
3085
+ * Note: Text components are handled separately with incremental streaming
3086
+ */
3087
+ async streamComponent(component) {
3088
+ const parts = await this.artifactParser.parseObject({
3089
+ dataComponents: [component]
3090
+ });
3091
+ for (const part of parts) {
3092
+ await this.streamPart(part);
3093
+ }
3094
+ this.lastStreamedComponents.set(component.id, true);
3095
+ if (this.lastStreamedComponents.size > _IncrementalStreamParser.MAX_STREAMED_SIZE) {
3096
+ const firstKey = this.lastStreamedComponents.keys().next().value;
3097
+ if (firstKey) {
3098
+ this.lastStreamedComponents.delete(firstKey);
3099
+ }
3100
+ }
3101
+ this.componentSnapshots.delete(component.id);
3102
+ }
3103
+ /**
3104
+ * Check if a component has the basic structure required for streaming
3105
+ * Requires id, name, and props object with content
3106
+ */
3107
+ isComponentComplete(component) {
3108
+ if (!component || !component.id || !component.name) {
3109
+ return false;
3110
+ }
3111
+ if (!component.props || typeof component.props !== "object") {
3112
+ return false;
3113
+ }
3114
+ const isArtifact = component.name === "Artifact" || component.props.artifact_id && component.props.task_id;
3115
+ if (isArtifact) {
3116
+ return Boolean(component.props.artifact_id && component.props.task_id);
3117
+ }
3118
+ return true;
3119
+ }
3120
+ /**
3121
+ * Deep merge helper for object deltas
3122
+ */
3123
+ deepMerge(target, source) {
3124
+ if (!source) return target;
3125
+ if (!target) return source;
3126
+ const result = { ...target };
3127
+ for (const key in source) {
3128
+ if (source[key] && typeof source[key] === "object" && !Array.isArray(source[key])) {
3129
+ result[key] = this.deepMerge(target[key], source[key]);
3130
+ } else {
3131
+ result[key] = source[key];
3132
+ }
3133
+ }
3134
+ return result;
3128
3135
  }
3129
3136
  /**
3130
3137
  * Legacy method for backward compatibility - defaults to text processing
@@ -3136,15 +3143,40 @@ var IncrementalStreamParser = class {
3136
3143
  * Process any remaining buffer content at the end of stream
3137
3144
  */
3138
3145
  async finalize() {
3139
- if (this.buffer.trim()) {
3146
+ if (this.componentAccumulator.dataComponents && Array.isArray(this.componentAccumulator.dataComponents)) {
3147
+ const components = this.componentAccumulator.dataComponents;
3148
+ for (let i = 0; i < components.length; i++) {
3149
+ const component = components[i];
3150
+ if (!component?.id) continue;
3151
+ const componentKey = component.id;
3152
+ const hasBeenStreamed = this.lastStreamedComponents.has(componentKey);
3153
+ if (!hasBeenStreamed && this.isComponentComplete(component) && component.name !== "Text") {
3154
+ const parts = await this.artifactParser.parseObject({
3155
+ dataComponents: [component]
3156
+ });
3157
+ for (const part of parts) {
3158
+ await this.streamPart(part);
3159
+ }
3160
+ this.lastStreamedComponents.set(componentKey, true);
3161
+ if (this.lastStreamedComponents.size > _IncrementalStreamParser.MAX_STREAMED_SIZE) {
3162
+ const firstKey = this.lastStreamedComponents.keys().next().value;
3163
+ if (firstKey) {
3164
+ this.lastStreamedComponents.delete(firstKey);
3165
+ }
3166
+ }
3167
+ this.componentSnapshots.delete(componentKey);
3168
+ }
3169
+ }
3170
+ }
3171
+ if (this.buffer) {
3140
3172
  const part = {
3141
3173
  kind: "text",
3142
- text: this.buffer.trim()
3174
+ text: this.buffer
3143
3175
  };
3144
3176
  await this.streamPart(part);
3145
3177
  }
3146
- if (this.pendingTextBuffer.trim()) {
3147
- const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "").trim();
3178
+ if (this.pendingTextBuffer) {
3179
+ const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
3148
3180
  if (cleanedText) {
3149
3181
  this.collectedParts.push({
3150
3182
  kind: "text",
@@ -3154,6 +3186,9 @@ var IncrementalStreamParser = class {
3154
3186
  }
3155
3187
  this.pendingTextBuffer = "";
3156
3188
  }
3189
+ this.componentSnapshots.clear();
3190
+ this.lastStreamedComponents.clear();
3191
+ this.componentAccumulator = {};
3157
3192
  }
3158
3193
  /**
3159
3194
  * Get all collected parts for building the final response
@@ -3200,30 +3235,6 @@ var IncrementalStreamParser = class {
3200
3235
  remainingBuffer: ""
3201
3236
  };
3202
3237
  }
3203
- /**
3204
- * Parse buffer for complete JSON objects with artifact references (for object streaming)
3205
- */
3206
- async parseObjectBuffer() {
3207
- const completeParts = [];
3208
- try {
3209
- const parsed = JSON.parse(this.buffer);
3210
- const parts = await this.artifactParser.parseObject(parsed);
3211
- return {
3212
- completeParts: parts,
3213
- remainingBuffer: ""
3214
- };
3215
- } catch {
3216
- const { complete, remaining } = this.artifactParser.parsePartialJSON(this.buffer);
3217
- for (const obj of complete) {
3218
- const parts = await this.artifactParser.parseObject(obj);
3219
- completeParts.push(...parts);
3220
- }
3221
- return {
3222
- completeParts,
3223
- remainingBuffer: remaining
3224
- };
3225
- }
3226
- }
3227
3238
  /**
3228
3239
  * Check if text might be the start of an artifact marker
3229
3240
  */
@@ -3244,7 +3255,7 @@ var IncrementalStreamParser = class {
3244
3255
  this.pendingTextBuffer += part.text;
3245
3256
  if (!this.artifactParser.hasIncompleteArtifact(this.pendingTextBuffer)) {
3246
3257
  const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
3247
- if (cleanedText.trim()) {
3258
+ if (cleanedText) {
3248
3259
  await this.streamHelper.streamText(cleanedText, 50);
3249
3260
  }
3250
3261
  this.pendingTextBuffer = "";
@@ -3252,7 +3263,7 @@ var IncrementalStreamParser = class {
3252
3263
  } else if (part.kind === "data" && part.data) {
3253
3264
  if (this.pendingTextBuffer) {
3254
3265
  const cleanedText = this.pendingTextBuffer.replace(/<\/?artifact:ref(?:\s[^>]*)?>\/?>/g, "").replace(/<\/?artifact(?:\s[^>]*)?>\/?>/g, "").replace(/<\/(?:\w+:)?artifact>/g, "");
3255
- if (cleanedText.trim()) {
3266
+ if (cleanedText) {
3256
3267
  await this.streamHelper.streamText(cleanedText, 50);
3257
3268
  }
3258
3269
  this.pendingTextBuffer = "";
@@ -3266,6 +3277,11 @@ var IncrementalStreamParser = class {
3266
3277
  }
3267
3278
  }
3268
3279
  };
3280
+ // Memory management constants
3281
+ __publicField(_IncrementalStreamParser, "MAX_SNAPSHOT_SIZE", 100);
3282
+ // Max number of snapshots to keep
3283
+ __publicField(_IncrementalStreamParser, "MAX_STREAMED_SIZE", 1e3);
3284
+ var IncrementalStreamParser = _IncrementalStreamParser;
3269
3285
 
3270
3286
  // src/utils/response-formatter.ts
3271
3287
  var logger9 = agentsCore.getLogger("ResponseFormatter");
@@ -5256,6 +5272,23 @@ var Agent = class {
5256
5272
  __publicField(this, "credentialStoreRegistry");
5257
5273
  this.artifactComponents = config.artifactComponents || [];
5258
5274
  let processedDataComponents = config.dataComponents || [];
5275
+ if (processedDataComponents.length > 0) {
5276
+ processedDataComponents.push({
5277
+ id: "text-content",
5278
+ name: "Text",
5279
+ description: "Natural conversational text for the user - write naturally without mentioning technical details. Avoid redundancy and repetition with data components.",
5280
+ props: {
5281
+ type: "object",
5282
+ properties: {
5283
+ text: {
5284
+ type: "string",
5285
+ description: "Natural conversational text - respond as if having a normal conversation, never mention JSON, components, schemas, or technical implementation. Avoid redundancy and repetition with data components."
5286
+ }
5287
+ },
5288
+ required: ["text"]
5289
+ }
5290
+ });
5291
+ }
5259
5292
  if (this.artifactComponents.length > 0 && config.dataComponents && config.dataComponents.length > 0) {
5260
5293
  processedDataComponents = [
5261
5294
  ArtifactReferenceSchema.getDataComponent(config.tenantId, config.projectId),
@@ -5733,7 +5766,8 @@ Key requirements:
5733
5766
  - Mix artifact references throughout your dataComponents array
5734
5767
  - Each artifact reference must use EXACT IDs from tool outputs
5735
5768
  - Reference artifacts that directly support the adjacent information
5736
- - Follow the pattern: Data \u2192 Supporting Artifact \u2192 Next Data \u2192 Next Artifact`;
5769
+ - Follow the pattern: Data \u2192 Supporting Artifact \u2192 Next Data \u2192 Next Artifact
5770
+ - IMPORTANT: In Text components, write naturally as if having a conversation - do NOT mention components, schemas, JSON, structured data, or any technical implementation details`;
5737
5771
  }
5738
5772
  if (hasDataComponents && !hasArtifactComponents) {
5739
5773
  return `Generate the final structured JSON response using the configured data components. Organize the information from the research above into the appropriate structured format based on the available component schemas.
@@ -5741,7 +5775,8 @@ Key requirements:
5741
5775
  Key requirements:
5742
5776
  - Use the exact component structure and property names
5743
5777
  - Fill in all relevant data from the research
5744
- - Ensure data is organized logically and completely`;
5778
+ - Ensure data is organized logically and completely
5779
+ - IMPORTANT: In Text components, write naturally as if having a conversation - do NOT mention components, schemas, JSON, structured data, or any technical implementation details`;
5745
5780
  }
5746
5781
  if (!hasDataComponents && hasArtifactComponents) {
5747
5782
  return `Generate the final structured response with artifact references based on the research above. Use the artifact reference component to cite relevant information with exact artifact_id and task_id values from the tool outputs.
@@ -5751,7 +5786,7 @@ Key requirements:
5751
5786
  - Reference artifacts that support your response
5752
5787
  - Never make up or modify artifact IDs`;
5753
5788
  }
5754
- return `Generate the final response based on the research above.`;
5789
+ return `Generate the final response based on the research above. Write naturally as if having a conversation.`;
5755
5790
  }
5756
5791
  async buildSystemPrompt(runtimeContext, excludeDataComponents = false) {
5757
5792
  const conversationId = runtimeContext?.metadata?.conversationId || runtimeContext?.contextId;
@@ -6250,35 +6285,94 @@ ${output}`;
6250
6285
  this.getStructuredOutputModel()
6251
6286
  );
6252
6287
  const phase2TimeoutMs = structuredModelSettings.maxDuration ? structuredModelSettings.maxDuration * 1e3 : CONSTANTS.PHASE_2_TIMEOUT_MS;
6253
- const structuredResponse = await ai.generateObject({
6254
- ...structuredModelSettings,
6255
- messages: [
6256
- { role: "user", content: userMessage },
6257
- ...reasoningFlow,
6258
- {
6259
- role: "user",
6260
- content: await this.buildPhase2SystemPrompt()
6261
- }
6262
- ],
6263
- schema: z5.z.object({
6264
- dataComponents: z5.z.array(dataComponentsSchema)
6265
- }),
6266
- experimental_telemetry: {
6267
- isEnabled: true,
6268
- functionId: this.config.id,
6269
- recordInputs: true,
6270
- recordOutputs: true,
6271
- metadata: {
6272
- phase: "structured_generation"
6288
+ const shouldStreamPhase2 = this.getStreamingHelper();
6289
+ if (shouldStreamPhase2) {
6290
+ const streamResult = ai.streamObject({
6291
+ ...structuredModelSettings,
6292
+ messages: [
6293
+ { role: "user", content: userMessage },
6294
+ ...reasoningFlow,
6295
+ {
6296
+ role: "user",
6297
+ content: await this.buildPhase2SystemPrompt()
6298
+ }
6299
+ ],
6300
+ schema: z5.z.object({
6301
+ dataComponents: z5.z.array(dataComponentsSchema)
6302
+ }),
6303
+ experimental_telemetry: {
6304
+ isEnabled: true,
6305
+ functionId: this.config.id,
6306
+ recordInputs: true,
6307
+ recordOutputs: true,
6308
+ metadata: {
6309
+ phase: "structured_generation"
6310
+ }
6311
+ },
6312
+ abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6313
+ });
6314
+ const streamHelper = this.getStreamingHelper();
6315
+ if (!streamHelper) {
6316
+ throw new Error("Stream helper is unexpectedly undefined in streaming context");
6317
+ }
6318
+ const parser = new IncrementalStreamParser(
6319
+ streamHelper,
6320
+ this.config.tenantId,
6321
+ contextId
6322
+ );
6323
+ for await (const delta of streamResult.partialObjectStream) {
6324
+ if (delta) {
6325
+ await parser.processObjectDelta(delta);
6273
6326
  }
6274
- },
6275
- abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6276
- });
6277
- response = {
6278
- ...response,
6279
- object: structuredResponse.object
6280
- };
6281
- textResponse = JSON.stringify(structuredResponse.object, null, 2);
6327
+ }
6328
+ await parser.finalize();
6329
+ const structuredResponse = await streamResult;
6330
+ const collectedParts = parser.getCollectedParts();
6331
+ if (collectedParts.length > 0) {
6332
+ response.formattedContent = {
6333
+ parts: collectedParts.map((part) => ({
6334
+ kind: part.kind,
6335
+ ...part.kind === "text" && { text: part.text },
6336
+ ...part.kind === "data" && { data: part.data }
6337
+ }))
6338
+ };
6339
+ }
6340
+ response = {
6341
+ ...response,
6342
+ object: structuredResponse.object
6343
+ };
6344
+ textResponse = JSON.stringify(structuredResponse.object, null, 2);
6345
+ } else {
6346
+ const structuredResponse = await ai.generateObject({
6347
+ ...structuredModelSettings,
6348
+ messages: [
6349
+ { role: "user", content: userMessage },
6350
+ ...reasoningFlow,
6351
+ {
6352
+ role: "user",
6353
+ content: await this.buildPhase2SystemPrompt()
6354
+ }
6355
+ ],
6356
+ schema: z5.z.object({
6357
+ dataComponents: z5.z.array(dataComponentsSchema)
6358
+ }),
6359
+ experimental_telemetry: {
6360
+ isEnabled: true,
6361
+ functionId: this.config.id,
6362
+ recordInputs: true,
6363
+ recordOutputs: true,
6364
+ metadata: {
6365
+ phase: "structured_generation"
6366
+ }
6367
+ },
6368
+ abortSignal: AbortSignal.timeout(phase2TimeoutMs)
6369
+ });
6370
+ response = {
6371
+ ...response,
6372
+ object: structuredResponse.object
6373
+ };
6374
+ textResponse = JSON.stringify(structuredResponse.object, null, 2);
6375
+ }
6282
6376
  } else {
6283
6377
  textResponse = response.text || "";
6284
6378
  }
@@ -6334,10 +6428,13 @@ async function resolveModelConfig(graphId, agent) {
6334
6428
  summarizer: agent.models.summarizer || agent.models.base
6335
6429
  };
6336
6430
  }
6337
- const graph = await agentsCore.getAgentGraph(dbClient_default)({
6338
- scopes: { tenantId: agent.tenantId, projectId: agent.projectId },
6339
- graphId
6340
- });
6431
+ let graph = null;
6432
+ if (graphId) {
6433
+ graph = await agentsCore.getAgentGraph(dbClient_default)({
6434
+ scopes: { tenantId: agent.tenantId, projectId: agent.projectId },
6435
+ graphId
6436
+ });
6437
+ }
6341
6438
  if (graph?.models?.base?.model) {
6342
6439
  return {
6343
6440
  base: graph.models.base,
@@ -6971,7 +7068,7 @@ var SSEStreamHelper = class {
6971
7068
  this.timestamp = timestamp;
6972
7069
  // Stream queuing for proper event ordering
6973
7070
  __publicField(this, "isTextStreaming", false);
6974
- __publicField(this, "queuedOperations", []);
7071
+ __publicField(this, "queuedEvents", []);
6975
7072
  }
6976
7073
  /**
6977
7074
  * Write the initial role message
@@ -7036,9 +7133,10 @@ var SSEStreamHelper = class {
7036
7133
  await this.writeContent(JSON.stringify(data));
7037
7134
  }
7038
7135
  /**
7039
- * Write error message
7136
+ * Write error message or error event
7040
7137
  */
7041
- async writeError(errorMessage) {
7138
+ async writeError(error) {
7139
+ const errorMessage = typeof error === "string" ? error : error.message;
7042
7140
  await this.writeContent(`
7043
7141
 
7044
7142
  ${errorMessage}`);
@@ -7062,22 +7160,6 @@ ${errorMessage}`);
7062
7160
  })
7063
7161
  });
7064
7162
  }
7065
- /**
7066
- * Write the final [DONE] message
7067
- */
7068
- async writeDone() {
7069
- await this.stream.writeSSE({
7070
- data: "[DONE]"
7071
- });
7072
- }
7073
- /**
7074
- * Complete the stream with finish reason and done message
7075
- */
7076
- async complete(finishReason = "stop") {
7077
- await this.flushQueuedOperations();
7078
- await this.writeCompletion(finishReason);
7079
- await this.writeDone();
7080
- }
7081
7163
  async writeData(type, data) {
7082
7164
  await this.stream.writeSSE({
7083
7165
  data: JSON.stringify({
@@ -7096,16 +7178,23 @@ ${errorMessage}`);
7096
7178
  })
7097
7179
  });
7098
7180
  }
7099
- async writeOperation(operation) {
7100
- if (operation.type === "status_update" && operation.ctx.label) {
7101
- operation = {
7102
- type: operation.type,
7103
- label: operation.ctx.label,
7104
- ctx: operation.ctx.data
7105
- };
7181
+ async writeSummary(summary) {
7182
+ if (this.isTextStreaming) {
7183
+ this.queuedEvents.push({
7184
+ type: "data-summary",
7185
+ event: summary
7186
+ });
7187
+ return;
7106
7188
  }
7189
+ await this.flushQueuedOperations();
7190
+ await this.writeData("data-summary", summary);
7191
+ }
7192
+ async writeOperation(operation) {
7107
7193
  if (this.isTextStreaming) {
7108
- this.queuedOperations.push(operation);
7194
+ this.queuedEvents.push({
7195
+ type: "data-operation",
7196
+ event: operation
7197
+ });
7109
7198
  return;
7110
7199
  }
7111
7200
  await this.flushQueuedOperations();
@@ -7115,15 +7204,31 @@ ${errorMessage}`);
7115
7204
  * Flush all queued operations in order after text streaming completes
7116
7205
  */
7117
7206
  async flushQueuedOperations() {
7118
- if (this.queuedOperations.length === 0) {
7207
+ if (this.queuedEvents.length === 0) {
7119
7208
  return;
7120
7209
  }
7121
- const operationsToFlush = [...this.queuedOperations];
7122
- this.queuedOperations = [];
7123
- for (const operation of operationsToFlush) {
7124
- await this.writeData("data-operation", operation);
7210
+ const eventsToFlush = [...this.queuedEvents];
7211
+ this.queuedEvents = [];
7212
+ for (const event of eventsToFlush) {
7213
+ await this.writeData(event.type, event.event);
7125
7214
  }
7126
7215
  }
7216
+ /**
7217
+ * Write the final [DONE] message
7218
+ */
7219
+ async writeDone() {
7220
+ await this.stream.writeSSE({
7221
+ data: "[DONE]"
7222
+ });
7223
+ }
7224
+ /**
7225
+ * Complete the stream with finish reason and done message
7226
+ */
7227
+ async complete(finishReason = "stop") {
7228
+ await this.flushQueuedOperations();
7229
+ await this.writeCompletion(finishReason);
7230
+ await this.writeDone();
7231
+ }
7127
7232
  };
7128
7233
  function createSSEStreamHelper(stream2, requestId2, timestamp) {
7129
7234
  return new SSEStreamHelper(stream2, requestId2, timestamp);
@@ -7143,7 +7248,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7143
7248
  __publicField(this, "isCompleted", false);
7144
7249
  // Stream queuing for proper event ordering
7145
7250
  __publicField(this, "isTextStreaming", false);
7146
- __publicField(this, "queuedOperations", []);
7251
+ __publicField(this, "queuedEvents", []);
7147
7252
  // Timing tracking for text sequences (text-end to text-start gap)
7148
7253
  __publicField(this, "lastTextEndTimestamp", 0);
7149
7254
  __publicField(this, "TEXT_GAP_THRESHOLD", 50);
@@ -7255,15 +7360,24 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7255
7360
  data
7256
7361
  });
7257
7362
  }
7258
- async writeError(errorMessage) {
7363
+ async writeError(error) {
7259
7364
  if (this.isCompleted) {
7260
7365
  console.warn("Attempted to write error to completed stream");
7261
7366
  return;
7262
7367
  }
7263
- this.writer.write({
7264
- type: "error",
7265
- errorText: errorMessage
7266
- });
7368
+ if (typeof error === "string") {
7369
+ this.writer.write({
7370
+ type: "error",
7371
+ message: error,
7372
+ severity: "error",
7373
+ timestamp: Date.now()
7374
+ });
7375
+ } else {
7376
+ this.writer.write({
7377
+ type: "error",
7378
+ ...error
7379
+ });
7380
+ }
7267
7381
  }
7268
7382
  async streamData(data) {
7269
7383
  await this.writeContent(JSON.stringify(data));
@@ -7275,20 +7389,6 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7275
7389
  }
7276
7390
  this.writer.merge(stream2);
7277
7391
  }
7278
- async writeCompletion(_finishReason = "stop") {
7279
- }
7280
- async writeDone() {
7281
- }
7282
- /**
7283
- * Complete the stream and clean up all memory
7284
- * This is the primary cleanup point to prevent memory leaks between requests
7285
- */
7286
- async complete() {
7287
- if (this.isCompleted) return;
7288
- await this.flushQueuedOperations();
7289
- this.isCompleted = true;
7290
- this.cleanup();
7291
- }
7292
7392
  /**
7293
7393
  * Clean up all memory allocations
7294
7394
  * Should be called when the stream helper is no longer needed
@@ -7302,7 +7402,7 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7302
7402
  this.sentItems.clear();
7303
7403
  this.completedItems.clear();
7304
7404
  this.textId = null;
7305
- this.queuedOperations = [];
7405
+ this.queuedEvents = [];
7306
7406
  this.isTextStreaming = false;
7307
7407
  }
7308
7408
  /**
@@ -7368,7 +7468,9 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7368
7468
  if (this.writer && !this.isCompleted) {
7369
7469
  this.writer.write({
7370
7470
  type: "error",
7371
- errorText: `Stream terminated: ${reason}`
7471
+ message: `Stream terminated: ${reason}`,
7472
+ severity: "error",
7473
+ timestamp: Date.now()
7372
7474
  });
7373
7475
  }
7374
7476
  } catch (e) {
@@ -7391,23 +7493,33 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7391
7493
  isCompleted: this.isCompleted
7392
7494
  };
7393
7495
  }
7496
+ async writeSummary(summary) {
7497
+ if (this.isCompleted) {
7498
+ console.warn("Attempted to write summary to completed stream");
7499
+ return;
7500
+ }
7501
+ const now = Date.now();
7502
+ const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
7503
+ if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
7504
+ this.queuedEvents.push({ type: "data-summary", event: summary });
7505
+ return;
7506
+ }
7507
+ await this.flushQueuedOperations();
7508
+ await this.writer.write({
7509
+ id: "id" in summary ? summary.id : void 0,
7510
+ type: "data-summary",
7511
+ data: summary
7512
+ });
7513
+ }
7394
7514
  async writeOperation(operation) {
7395
7515
  if (this.isCompleted) {
7396
7516
  console.warn("Attempted to write operation to completed stream");
7397
7517
  return;
7398
7518
  }
7399
- if (operation.type === "status_update" && operation.ctx.label) {
7400
- operation = {
7401
- type: operation.type,
7402
- label: operation.ctx.label,
7403
- // Preserve the label for the UI
7404
- ctx: operation.ctx.data
7405
- };
7406
- }
7407
7519
  const now = Date.now();
7408
7520
  const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
7409
7521
  if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
7410
- this.queuedOperations.push(operation);
7522
+ this.queuedEvents.push({ type: "data-operation", event: operation });
7411
7523
  return;
7412
7524
  }
7413
7525
  await this.flushQueuedOperations();
@@ -7421,19 +7533,33 @@ var _VercelDataStreamHelper = class _VercelDataStreamHelper {
7421
7533
  * Flush all queued operations in order after text streaming completes
7422
7534
  */
7423
7535
  async flushQueuedOperations() {
7424
- if (this.queuedOperations.length === 0) {
7536
+ if (this.queuedEvents.length === 0) {
7425
7537
  return;
7426
7538
  }
7427
- const operationsToFlush = [...this.queuedOperations];
7428
- this.queuedOperations = [];
7429
- for (const operation of operationsToFlush) {
7539
+ const eventsToFlush = [...this.queuedEvents];
7540
+ this.queuedEvents = [];
7541
+ for (const event of eventsToFlush) {
7430
7542
  this.writer.write({
7431
- id: "id" in operation ? operation.id : void 0,
7432
- type: "data-operation",
7433
- data: operation
7543
+ id: "id" in event.event ? event.event.id : void 0,
7544
+ type: event.type,
7545
+ data: event.event
7434
7546
  });
7435
7547
  }
7436
7548
  }
7549
+ async writeCompletion(_finishReason = "stop") {
7550
+ }
7551
+ async writeDone() {
7552
+ }
7553
+ /**
7554
+ * Complete the stream and clean up all memory
7555
+ * This is the primary cleanup point to prevent memory leaks between requests
7556
+ */
7557
+ async complete() {
7558
+ if (this.isCompleted) return;
7559
+ await this.flushQueuedOperations();
7560
+ this.isCompleted = true;
7561
+ this.cleanup();
7562
+ }
7437
7563
  };
7438
7564
  // Memory management - focused on connection completion cleanup
7439
7565
  __publicField(_VercelDataStreamHelper, "MAX_BUFFER_SIZE", 5 * 1024 * 1024);
@@ -7446,6 +7572,7 @@ var MCPStreamHelper = class {
7446
7572
  __publicField(this, "capturedText", "");
7447
7573
  __publicField(this, "capturedData", []);
7448
7574
  __publicField(this, "capturedOperations", []);
7575
+ __publicField(this, "capturedSummaries", []);
7449
7576
  __publicField(this, "hasError", false);
7450
7577
  __publicField(this, "errorMessage", "");
7451
7578
  __publicField(this, "sessionId");
@@ -7464,18 +7591,27 @@ var MCPStreamHelper = class {
7464
7591
  async streamData(data) {
7465
7592
  this.capturedData.push(data);
7466
7593
  }
7594
+ async streamSummary(summary) {
7595
+ this.capturedSummaries.push(summary);
7596
+ }
7597
+ async streamOperation(operation) {
7598
+ this.capturedOperations.push(operation);
7599
+ }
7467
7600
  async writeData(_type, data) {
7468
7601
  this.capturedData.push(data);
7469
7602
  }
7470
- async writeError(errorMessage) {
7471
- this.hasError = true;
7472
- this.errorMessage = errorMessage;
7473
- }
7474
- async complete() {
7603
+ async writeSummary(summary) {
7604
+ this.capturedSummaries.push(summary);
7475
7605
  }
7476
7606
  async writeOperation(operation) {
7477
7607
  this.capturedOperations.push(operation);
7478
7608
  }
7609
+ async writeError(error) {
7610
+ this.hasError = true;
7611
+ this.errorMessage = typeof error === "string" ? error : error.message;
7612
+ }
7613
+ async complete() {
7614
+ }
7479
7615
  /**
7480
7616
  * Get the captured response for MCP tool result
7481
7617
  */
@@ -7677,7 +7813,6 @@ var ExecutionHandler = class {
7677
7813
  if (errorCount >= this.MAX_ERRORS) {
7678
7814
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7679
7815
  logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7680
- await sseHelper.writeError(errorMessage2);
7681
7816
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7682
7817
  if (task) {
7683
7818
  await agentsCore.updateTask(dbClient_default)({
@@ -7818,7 +7953,6 @@ var ExecutionHandler = class {
7818
7953
  if (errorCount >= this.MAX_ERRORS) {
7819
7954
  const errorMessage2 = `Maximum error limit (${this.MAX_ERRORS}) reached`;
7820
7955
  logger19.error({ maxErrors: this.MAX_ERRORS, errorCount }, errorMessage2);
7821
- await sseHelper.writeError(errorMessage2);
7822
7956
  await sseHelper.writeOperation(errorOp(errorMessage2, currentAgentId || "system"));
7823
7957
  if (task) {
7824
7958
  await agentsCore.updateTask(dbClient_default)({
@@ -7840,7 +7974,6 @@ var ExecutionHandler = class {
7840
7974
  }
7841
7975
  const errorMessage = `Maximum transfer limit (${maxTransfers}) reached without completion`;
7842
7976
  logger19.error({ maxTransfers, iterations }, errorMessage);
7843
- await sseHelper.writeError(errorMessage);
7844
7977
  await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
7845
7978
  if (task) {
7846
7979
  await agentsCore.updateTask(dbClient_default)({
@@ -7861,8 +7994,7 @@ var ExecutionHandler = class {
7861
7994
  } catch (error) {
7862
7995
  logger19.error({ error }, "Error in execution handler");
7863
7996
  const errorMessage = error instanceof Error ? error.message : "Unknown execution error";
7864
- await sseHelper.writeError(`Execution error: ${errorMessage}`);
7865
- await sseHelper.writeOperation(errorOp(errorMessage, currentAgentId || "system"));
7997
+ await sseHelper.writeOperation(errorOp(`Execution error: ${errorMessage}`, currentAgentId || "system"));
7866
7998
  if (task) {
7867
7999
  await agentsCore.updateTask(dbClient_default)({
7868
8000
  taskId: task.id,
@@ -8150,8 +8282,8 @@ app2.openapi(chatCompletionsRoute, async (c) => {
8150
8282
  `Execution completed: ${result.success ? "success" : "failed"} after ${result.iterations} iterations`
8151
8283
  );
8152
8284
  if (!result.success) {
8153
- await sseHelper.writeError(
8154
- "Sorry, I was unable to process your request at this time. Please try again."
8285
+ await sseHelper.writeOperation(
8286
+ errorOp("Sorry, I was unable to process your request at this time. Please try again.", "system")
8155
8287
  );
8156
8288
  }
8157
8289
  await sseHelper.complete();
@@ -8325,11 +8457,11 @@ app3.openapi(chatDataStreamRoute, async (c) => {
8325
8457
  sseHelper: streamHelper
8326
8458
  });
8327
8459
  if (!result.success) {
8328
- await streamHelper.writeError("Unable to process request");
8460
+ await streamHelper.writeOperation(errorOp("Unable to process request", "system"));
8329
8461
  }
8330
8462
  } catch (err) {
8331
8463
  logger21.error({ err }, "Streaming error");
8332
- await streamHelper.writeError("Internal server error");
8464
+ await streamHelper.writeOperation(errorOp("Internal server error", "system"));
8333
8465
  } finally {
8334
8466
  if ("cleanup" in streamHelper && typeof streamHelper.cleanup === "function") {
8335
8467
  streamHelper.cleanup();