@inkeep/agents-run-api 0.0.0-dev-20250910232631 → 0.0.0-dev-20250910233151
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-3COYP2VE.js +70 -0
- package/dist/chunk-JIWNRFDU.js +236 -0
- package/dist/chunk-PKBMQBKP.js +5 -0
- package/dist/conversations-YTJWHN67.js +1 -0
- package/dist/index.cjs +9060 -0
- package/dist/index.d.cts +15 -0
- package/dist/index.d.ts +13 -9
- package/dist/index.js +8633 -27
- package/dist/instrumentation-KKYHA3A3.js +1 -0
- package/package.json +3 -3
- package/dist/__tests__/setup.d.ts +0 -4
- package/dist/__tests__/setup.d.ts.map +0 -1
- package/dist/__tests__/setup.js +0 -80
- package/dist/__tests__/utils/testProject.d.ts +0 -18
- package/dist/__tests__/utils/testProject.d.ts.map +0 -1
- package/dist/__tests__/utils/testProject.js +0 -26
- package/dist/__tests__/utils/testRequest.d.ts +0 -8
- package/dist/__tests__/utils/testRequest.d.ts.map +0 -1
- package/dist/__tests__/utils/testRequest.js +0 -32
- package/dist/__tests__/utils/testTenant.d.ts +0 -64
- package/dist/__tests__/utils/testTenant.d.ts.map +0 -1
- package/dist/__tests__/utils/testTenant.js +0 -71
- package/dist/a2a/client.d.ts +0 -182
- package/dist/a2a/client.d.ts.map +0 -1
- package/dist/a2a/client.js +0 -645
- package/dist/a2a/handlers.d.ts +0 -4
- package/dist/a2a/handlers.d.ts.map +0 -1
- package/dist/a2a/handlers.js +0 -656
- package/dist/a2a/transfer.d.ts +0 -18
- package/dist/a2a/transfer.d.ts.map +0 -1
- package/dist/a2a/transfer.js +0 -22
- package/dist/a2a/types.d.ts +0 -63
- package/dist/a2a/types.d.ts.map +0 -1
- package/dist/a2a/types.js +0 -1
- package/dist/agents/Agent.d.ts +0 -151
- package/dist/agents/Agent.d.ts.map +0 -1
- package/dist/agents/Agent.js +0 -1164
- package/dist/agents/ModelFactory.d.ts +0 -62
- package/dist/agents/ModelFactory.d.ts.map +0 -1
- package/dist/agents/ModelFactory.js +0 -208
- package/dist/agents/SystemPromptBuilder.d.ts +0 -14
- package/dist/agents/SystemPromptBuilder.d.ts.map +0 -1
- package/dist/agents/SystemPromptBuilder.js +0 -62
- package/dist/agents/ToolSessionManager.d.ts +0 -53
- package/dist/agents/ToolSessionManager.d.ts.map +0 -1
- package/dist/agents/ToolSessionManager.js +0 -106
- package/dist/agents/artifactTools.d.ts +0 -30
- package/dist/agents/artifactTools.d.ts.map +0 -1
- package/dist/agents/artifactTools.js +0 -463
- package/dist/agents/generateTaskHandler.d.ts +0 -41
- package/dist/agents/generateTaskHandler.d.ts.map +0 -1
- package/dist/agents/generateTaskHandler.js +0 -350
- package/dist/agents/relationTools.d.ts +0 -35
- package/dist/agents/relationTools.d.ts.map +0 -1
- package/dist/agents/relationTools.js +0 -246
- package/dist/agents/types.d.ts +0 -23
- package/dist/agents/types.d.ts.map +0 -1
- package/dist/agents/types.js +0 -1
- package/dist/agents/versions/V1Config.d.ts +0 -21
- package/dist/agents/versions/V1Config.d.ts.map +0 -1
- package/dist/agents/versions/V1Config.js +0 -285
- package/dist/app.d.ts +0 -5
- package/dist/app.d.ts.map +0 -1
- package/dist/app.js +0 -219
- package/dist/data/agentGraph.d.ts +0 -4
- package/dist/data/agentGraph.d.ts.map +0 -1
- package/dist/data/agentGraph.js +0 -73
- package/dist/data/agents.d.ts +0 -4
- package/dist/data/agents.d.ts.map +0 -1
- package/dist/data/agents.js +0 -78
- package/dist/data/conversations.d.ts +0 -59
- package/dist/data/conversations.d.ts.map +0 -1
- package/dist/data/conversations.js +0 -216
- package/dist/data/db/clean.d.ts +0 -6
- package/dist/data/db/clean.d.ts.map +0 -1
- package/dist/data/db/clean.js +0 -77
- package/dist/data/db/dbClient.d.ts +0 -3
- package/dist/data/db/dbClient.d.ts.map +0 -1
- package/dist/data/db/dbClient.js +0 -13
- package/dist/env.d.ts +0 -45
- package/dist/env.d.ts.map +0 -1
- package/dist/env.js +0 -64
- package/dist/handlers/executionHandler.d.ts +0 -36
- package/dist/handlers/executionHandler.d.ts.map +0 -1
- package/dist/handlers/executionHandler.js +0 -415
- package/dist/index.d.ts.map +0 -1
- package/dist/instrumentation.d.ts +0 -13
- package/dist/instrumentation.d.ts.map +0 -1
- package/dist/instrumentation.js +0 -66
- package/dist/logger.d.ts +0 -4
- package/dist/logger.d.ts.map +0 -1
- package/dist/logger.js +0 -32
- package/dist/middleware/api-key-auth.d.ts +0 -22
- package/dist/middleware/api-key-auth.d.ts.map +0 -1
- package/dist/middleware/api-key-auth.js +0 -139
- package/dist/middleware/index.d.ts +0 -2
- package/dist/middleware/index.d.ts.map +0 -1
- package/dist/middleware/index.js +0 -1
- package/dist/openapi.d.ts +0 -2
- package/dist/openapi.d.ts.map +0 -1
- package/dist/openapi.js +0 -36
- package/dist/routes/agents.d.ts +0 -10
- package/dist/routes/agents.d.ts.map +0 -1
- package/dist/routes/agents.js +0 -158
- package/dist/routes/chat.d.ts +0 -10
- package/dist/routes/chat.d.ts.map +0 -1
- package/dist/routes/chat.js +0 -307
- package/dist/routes/chatDataStream.d.ts +0 -10
- package/dist/routes/chatDataStream.d.ts.map +0 -1
- package/dist/routes/chatDataStream.js +0 -185
- package/dist/routes/mcp.d.ts +0 -10
- package/dist/routes/mcp.d.ts.map +0 -1
- package/dist/routes/mcp.js +0 -500
- package/dist/tracer.d.ts +0 -24
- package/dist/tracer.d.ts.map +0 -1
- package/dist/tracer.js +0 -107
- package/dist/types/chat.d.ts +0 -25
- package/dist/types/chat.d.ts.map +0 -1
- package/dist/types/chat.js +0 -1
- package/dist/types/execution-context.d.ts +0 -14
- package/dist/types/execution-context.d.ts.map +0 -1
- package/dist/types/execution-context.js +0 -14
- package/dist/utils/agent-operations.d.ts +0 -93
- package/dist/utils/agent-operations.d.ts.map +0 -1
- package/dist/utils/agent-operations.js +0 -78
- package/dist/utils/artifact-component-schema.d.ts +0 -29
- package/dist/utils/artifact-component-schema.d.ts.map +0 -1
- package/dist/utils/artifact-component-schema.js +0 -119
- package/dist/utils/artifact-parser.d.ts +0 -71
- package/dist/utils/artifact-parser.d.ts.map +0 -1
- package/dist/utils/artifact-parser.js +0 -253
- package/dist/utils/cleanup.d.ts +0 -19
- package/dist/utils/cleanup.d.ts.map +0 -1
- package/dist/utils/cleanup.js +0 -66
- package/dist/utils/data-component-schema.d.ts +0 -6
- package/dist/utils/data-component-schema.d.ts.map +0 -1
- package/dist/utils/data-component-schema.js +0 -43
- package/dist/utils/graph-session.d.ts +0 -230
- package/dist/utils/graph-session.d.ts.map +0 -1
- package/dist/utils/graph-session.js +0 -1199
- package/dist/utils/incremental-stream-parser.d.ts +0 -62
- package/dist/utils/incremental-stream-parser.d.ts.map +0 -1
- package/dist/utils/incremental-stream-parser.js +0 -330
- package/dist/utils/response-formatter.d.ts +0 -26
- package/dist/utils/response-formatter.d.ts.map +0 -1
- package/dist/utils/response-formatter.js +0 -158
- package/dist/utils/stream-helpers.d.ts +0 -186
- package/dist/utils/stream-helpers.d.ts.map +0 -1
- package/dist/utils/stream-helpers.js +0 -603
- package/dist/utils/stream-registry.d.ts +0 -18
- package/dist/utils/stream-registry.d.ts.map +0 -1
- package/dist/utils/stream-registry.js +0 -33
package/dist/utils/stream-helpers.js
@@ -1,603 +0,0 @@
-import { parsePartialJson } from 'ai';
-export class SSEStreamHelper {
-  stream;
-  requestId;
-  timestamp;
-  // Stream queuing for proper event ordering
-  isTextStreaming = false;
-  queuedOperations = [];
-  constructor(stream, requestId, timestamp) {
-    this.stream = stream;
-    this.requestId = requestId;
-    this.timestamp = timestamp;
-  }
-  /**
-   * Write the initial role message
-   */
-  async writeRole(role = 'assistant') {
-    await this.stream.writeSSE({
-      data: JSON.stringify({
-        id: this.requestId,
-        object: 'chat.completion.chunk',
-        created: this.timestamp,
-        choices: [
-          {
-            index: 0,
-            delta: {
-              role,
-            },
-            finish_reason: null,
-          },
-        ],
-      }),
-    });
-  }
-  /**
-   * Write content chunk
-   */
-  async writeContent(content) {
-    await this.stream.writeSSE({
-      data: JSON.stringify({
-        id: this.requestId,
-        object: 'chat.completion.chunk',
-        created: this.timestamp,
-        choices: [
-          {
-            index: 0,
-            delta: {
-              content,
-            },
-            finish_reason: null,
-          },
-        ],
-      }),
-    });
-  }
-  /**
-   * Stream text word by word with optional delay
-   */
-  async streamText(text, delayMs = 100) {
-    const words = text.split(' ');
-    // Mark that text streaming is starting
-    this.isTextStreaming = true;
-    try {
-      for (let i = 0; i < words.length; i++) {
-        await this.stream.sleep(delayMs);
-        const content = i === 0 ? words[i] : ` ${words[i]}`;
-        await this.writeContent(content);
-      }
-    }
-    finally {
-      // Mark that text streaming has finished
-      this.isTextStreaming = false;
-      // Flush any queued operations now that text sequence is complete
-      await this.flushQueuedOperations();
-    }
-  }
-  async streamData(data) {
-    await this.writeContent(JSON.stringify(data));
-  }
-  /**
-   * Write error message
-   */
-  async writeError(errorMessage) {
-    await this.writeContent(`\n\n${errorMessage}`);
-  }
-  /**
-   * Write the final completion message
-   */
-  async writeCompletion(finishReason = 'stop') {
-    await this.stream.writeSSE({
-      data: JSON.stringify({
-        id: this.requestId,
-        object: 'chat.completion.chunk',
-        created: this.timestamp,
-        choices: [
-          {
-            index: 0,
-            delta: {},
-            finish_reason: finishReason,
-          },
-        ],
-      }),
-    });
-  }
-  /**
-   * Write the final [DONE] message
-   */
-  async writeDone() {
-    await this.stream.writeSSE({
-      data: '[DONE]',
-    });
-  }
-  /**
-   * Complete the stream with finish reason and done message
-   */
-  async complete(finishReason = 'stop') {
-    // Flush any remaining queued operations before completing
-    await this.flushQueuedOperations();
-    await this.writeCompletion(finishReason);
-    await this.writeDone();
-  }
-  async writeData(type, data) {
-    await this.stream.writeSSE({
-      data: JSON.stringify({
-        id: this.requestId,
-        object: 'chat.completion.chunk',
-        created: this.timestamp,
-        choices: [
-          {
-            index: 0,
-            delta: {
-              content: JSON.stringify({ type, data }),
-            },
-            finish_reason: null,
-          },
-        ],
-      }),
-    });
-  }
-  async writeOperation(operation) {
-    if (operation.type === 'status_update' && operation.ctx.label) {
-      operation = {
-        type: operation.type,
-        label: operation.ctx.label,
-        ctx: operation.ctx.data,
-      };
-    }
-    // Queue operation if text is currently streaming
-    if (this.isTextStreaming) {
-      this.queuedOperations.push(operation);
-      return;
-    }
-    // If not streaming, flush any queued operations first, then send this one
-    await this.flushQueuedOperations();
-    await this.writeData('data-operation', operation);
-  }
-  /**
-   * Flush all queued operations in order after text streaming completes
-   */
-  async flushQueuedOperations() {
-    if (this.queuedOperations.length === 0) {
-      return;
-    }
-    const operationsToFlush = [...this.queuedOperations];
-    this.queuedOperations = []; // Clear the queue
-    for (const operation of operationsToFlush) {
-      await this.writeData('data-operation', operation);
-    }
-  }
-}
-/**
- * Factory function to create SSE stream helper
- */
-export function createSSEStreamHelper(stream, requestId, timestamp) {
-  return new SSEStreamHelper(stream, requestId, timestamp);
-}
-export class VercelDataStreamHelper {
-  writer;
-  textId = null;
-  jsonBuffer = '';
-  sentItems = new Map(); // Track what we've sent for each index
-  completedItems = new Set(); // Track completed items
-  sessionId;
-  // Memory management - focused on connection completion cleanup
-  static MAX_BUFFER_SIZE = 5 * 1024 * 1024; // 5MB limit (more generous during request)
-  isCompleted = false;
-  // Stream queuing for proper event ordering
-  isTextStreaming = false;
-  queuedOperations = [];
-  // Timing tracking for text sequences (text-end to text-start gap)
-  lastTextEndTimestamp = 0;
-  TEXT_GAP_THRESHOLD = 1000; // milliseconds - if gap between text sequences is less than this, queue operations
-  // Connection management and forced cleanup
-  connectionDropTimer;
-  MAX_LIFETIME_MS = 600_000; // 10 minutes max lifetime
-  constructor(writer) {
-    this.writer = writer;
-    // Set maximum lifetime timer to prevent memory leaks from abandoned connections
-    this.connectionDropTimer = setTimeout(() => {
-      this.forceCleanup('Connection lifetime exceeded');
-    }, this.MAX_LIFETIME_MS);
-  }
-  setSessionId(sessionId) {
-    this.sessionId = sessionId;
-  }
-  // This mirrors SSEStreamHelper API but outputs using Vercel AI SDK writer
-  async writeRole(_ = 'assistant') {
-    // noop
-  }
-  async writeContent(content) {
-    if (this.isCompleted) {
-      console.warn('Attempted to write content to completed stream');
-      return;
-    }
-    if (!this.textId)
-      this.textId = `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
-    // Only prevent catastrophic buffer growth during request
-    if (this.jsonBuffer.length + content.length > VercelDataStreamHelper.MAX_BUFFER_SIZE) {
-      // JSON-aware truncation to prevent corruption
-      const newBuffer = this.truncateJsonBufferSafely(this.jsonBuffer);
-      // If we couldn't find a safe truncation point, clear the buffer entirely
-      // This is safer than keeping potentially corrupted JSON
-      if (newBuffer.length === this.jsonBuffer.length) {
-        console.warn('VercelDataStreamHelper: Could not find safe JSON truncation point, clearing buffer');
-        this.jsonBuffer = '';
-        // Clear tracking as we're starting fresh
-        this.sentItems.clear();
-      }
-      else {
-        this.jsonBuffer = newBuffer;
-        // Update tracking indices based on the new buffer content
-        this.reindexSentItems();
-      }
-    }
-    this.jsonBuffer += content;
-    const { value, state } = await parsePartialJson(this.jsonBuffer);
-    if (!['repaired-parse', 'successful-parse'].includes(state))
-      return;
-    if (!Array.isArray(value))
-      return;
-    for (let i = 0; i < value.length; i++) {
-      const { type, ...data } = value[i];
-      // TODO: Check for kind data and JSON.stringify
-      // Create a content hash to check if this item has changed
-      const currentContent = JSON.stringify(data);
-      const lastSentContent = this.sentItems.get(i);
-      // Only send if content has changed or is new
-      if (currentContent !== lastSentContent) {
-        const chunk = {
-          type: 'data-component',
-          id: `${this.textId}-${i}`,
-          data: { type, ...data },
-        };
-        this.writer.write(chunk);
-        this.sentItems.set(i, currentContent);
-      }
-    }
-  }
-  async streamText(text, delayMs = 100) {
-    if (this.isCompleted) {
-      console.warn('Attempted to stream text to completed stream');
-      return;
-    }
-    // For plain text, write directly to the stream as text chunks
-    if (!this.textId)
-      this.textId = `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
-    // ------------------------------
-    // New Vercel data-stream v2 format
-    // ------------------------------
-    // Emit "text-start" once at the beginning, followed by a single "text-delta"
-    // for the entire text chunk, and finish with "text-end".
-    // Don't artificially split by words - let the agent determine chunk boundaries.
-    const id = this.textId;
-    // Check gap from last text-end to this text-start
-    const startTime = Date.now();
-    const gapFromLastSequence = this.lastTextEndTimestamp > 0
-      ? startTime - this.lastTextEndTimestamp
-      : Number.MAX_SAFE_INTEGER;
-    // If gap is large enough, flush any queued operations before starting new text
-    if (gapFromLastSequence >= this.TEXT_GAP_THRESHOLD) {
-      await this.flushQueuedOperations();
-    }
-    // Mark that text streaming is starting
-    this.isTextStreaming = true;
-    try {
-      this.writer.write({
-        type: 'text-start',
-        id,
-      });
-      // Optional delay before sending the text chunk
-      if (delayMs > 0) {
-        await new Promise((r) => setTimeout(r, delayMs));
-      }
-      // Send the entire text as a single delta
-      this.writer.write({
-        type: 'text-delta',
-        id,
-        delta: text,
-      });
-      // End
-      this.writer.write({
-        type: 'text-end',
-        id,
-      });
-      // Track when this text sequence ended
-      this.lastTextEndTimestamp = Date.now();
-    }
-    finally {
-      // Mark that text streaming has finished
-      this.isTextStreaming = false;
-      // DO NOT flush operations here - wait for gap threshold
-    }
-  }
-  async writeData(type, data) {
-    if (this.isCompleted) {
-      console.warn('Attempted to write data to completed stream');
-      return;
-    }
-    // For data-artifact, check if we should delay it based on text timing
-    if (type === 'data-artifact') {
-      const now = Date.now();
-      const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
-      // If we're within the gap threshold from last text-end, it means more text might be coming
-      // In this case, write the artifact but don't reset timing - let operations stay queued
-      if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
-        // Artifact arrives during or shortly after text - maintain timing continuity
-        this.writer.write({
-          type: `${type}`,
-          data,
-        });
-        return;
-      }
-    }
-    this.writer.write({
-      type: `${type}`,
-      data,
-    });
-  }
-  async writeError(errorMessage) {
-    if (this.isCompleted) {
-      console.warn('Attempted to write error to completed stream');
-      return;
-    }
-    this.writer.write({
-      type: 'error',
-      errorText: errorMessage,
-    });
-  }
-  async streamData(data) {
-    await this.writeContent(JSON.stringify(data));
-  }
-  async mergeStream(stream) {
-    if (this.isCompleted) {
-      console.warn('Attempted to merge stream to completed stream');
-      return;
-    }
-    this.writer.merge(stream);
-  }
-  async writeCompletion(_finishReason = 'stop') {
-    // Completion is handled automatically by Vercel's writer
-  }
-  async writeDone() {
-    // Done is handled automatically by Vercel's writer
-  }
-  /**
-   * Complete the stream and clean up all memory
-   * This is the primary cleanup point to prevent memory leaks between requests
-   */
-  async complete() {
-    if (this.isCompleted)
-      return;
-    // Flush any remaining queued operations before completing
-    await this.flushQueuedOperations();
-    // Mark as completed to prevent further writes
-    this.isCompleted = true;
-    // Clean up all buffers and references
-    this.cleanup();
-  }
-  /**
-   * Clean up all memory allocations
-   * Should be called when the stream helper is no longer needed
-   */
-  cleanup() {
-    // Clear the connection drop timer
-    if (this.connectionDropTimer) {
-      clearTimeout(this.connectionDropTimer);
-      this.connectionDropTimer = undefined;
-    }
-    this.jsonBuffer = '';
-    this.sentItems.clear();
-    this.completedItems.clear();
-    this.textId = null;
-    this.queuedOperations = [];
-    this.isTextStreaming = false;
-  }
-  /**
-   * JSON-aware buffer truncation that preserves complete JSON structures
-   */
-  truncateJsonBufferSafely(buffer) {
-    const keepSize = Math.floor(VercelDataStreamHelper.MAX_BUFFER_SIZE * 0.6); // Be more conservative
-    if (buffer.length <= keepSize)
-      return buffer;
-    // Start from the end and work backwards to find complete JSON structures
-    let depth = 0;
-    let inString = false;
-    let escaping = false;
-    let lastCompleteStructureEnd = -1;
-    // Scan backwards from the target keep size
-    for (let i = Math.min(keepSize + 1000, buffer.length - 1); i >= keepSize; i--) {
-      const char = buffer[i];
-      if (escaping) {
-        escaping = false;
-        continue;
-      }
-      if (char === '\\') {
-        escaping = true;
-        continue;
-      }
-      if (char === '"') {
-        inString = !inString;
-        continue;
-      }
-      if (inString)
-        continue;
-      if (char === '}' || char === ']') {
-        depth++;
-      }
-      else if (char === '{' || char === '[') {
-        depth--;
-        // If we've returned to depth 0, we have a complete structure
-        if (depth === 0) {
-          lastCompleteStructureEnd = i - 1;
-          break;
-        }
-      }
-    }
-    // If we found a safe truncation point, use it
-    if (lastCompleteStructureEnd > 0) {
-      return buffer.slice(lastCompleteStructureEnd + 1);
-    }
-    // Fallback: look for newlines between structures
-    for (let i = keepSize; i < Math.min(keepSize + 500, buffer.length); i++) {
-      if (buffer[i] === '\n' && buffer[i + 1] && buffer[i + 1].match(/[{[]]/)) {
-        return buffer.slice(i + 1);
-      }
-    }
-    // Return original buffer if no safe point found (caller will handle clearing)
-    return buffer;
-  }
-  /**
-   * Reindex sent items after buffer truncation
-   */
-  reindexSentItems() {
-    // After truncation, we need to clear sent items as indices are no longer valid
-    this.sentItems.clear();
-    this.completedItems.clear();
-  }
-  /**
-   * Force cleanup on connection drop or timeout
-   */
-  forceCleanup(reason) {
-    console.warn(`VercelDataStreamHelper: Forcing cleanup - ${reason}`);
-    // Mark as completed to prevent further writes
-    this.isCompleted = true;
-    // Clean up all resources
-    this.cleanup();
-    // Try to write an error if the writer is still available
-    try {
-      if (this.writer && !this.isCompleted) {
-        this.writer.write({
-          type: 'error',
-          errorText: `Stream terminated: ${reason}`,
-        });
-      }
-    }
-    catch (e) {
-      // Writer may be unavailable, ignore errors
-    }
-  }
-  /**
-   * Check if the stream has been completed and cleaned up
-   */
-  isStreamCompleted() {
-    return this.isCompleted;
-  }
-  /**
-   * Get current memory usage stats (for debugging/monitoring)
-   */
-  getMemoryStats() {
-    return {
-      bufferSize: this.jsonBuffer.length,
-      sentItemsCount: this.sentItems.size,
-      completedItemsCount: this.completedItems.size,
-      isCompleted: this.isCompleted,
-    };
-  }
-  async writeOperation(operation) {
-    if (this.isCompleted) {
-      console.warn('Attempted to write operation to completed stream');
-      return;
-    }
-    if (operation.type === 'status_update' && operation.ctx.label) {
-      operation = {
-        type: operation.type,
-        label: operation.ctx.label, // Preserve the label for the UI
-        ctx: operation.ctx.data,
-      };
-    }
-    // Check timing gap from last text-end
-    const now = Date.now();
-    const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
-    // ALWAYS queue operation if:
-    // 1. Text is currently streaming, OR
-    // 2. We're within the gap threshold from last text-end (more text might be coming)
-    if (this.isTextStreaming || gapFromLastTextEnd < this.TEXT_GAP_THRESHOLD) {
-      this.queuedOperations.push(operation);
-      return;
-    }
-    // If not streaming and gap is large enough, flush any queued operations first, then send this one
-    await this.flushQueuedOperations();
-    this.writer.write({
-      id: 'id' in operation ? operation.id : undefined,
-      type: 'data-operation',
-      data: operation,
-    });
-  }
-  /**
-   * Flush all queued operations in order after text streaming completes
-   */
-  async flushQueuedOperations() {
-    if (this.queuedOperations.length === 0) {
-      return;
-    }
-    const operationsToFlush = [...this.queuedOperations];
-    this.queuedOperations = []; // Clear the queue
-    for (const operation of operationsToFlush) {
-      this.writer.write({
-        id: 'id' in operation ? operation.id : undefined,
-        type: 'data-operation',
-        data: operation,
-      });
-    }
-  }
-}
-export function createVercelStreamHelper(writer) {
-  return new VercelDataStreamHelper(writer);
-}
-/**
- * MCP Stream Helper that captures content instead of streaming
- * Used for MCP tool responses which require a single response message
- */
-export class MCPStreamHelper {
-  capturedText = '';
-  capturedData = [];
-  capturedOperations = [];
-  hasError = false;
-  errorMessage = '';
-  sessionId;
-  setSessionId(sessionId) {
-    this.sessionId = sessionId;
-  }
-  async writeRole(_role) {
-    // No-op for MCP
-  }
-  async writeContent(content) {
-    this.capturedText += content;
-  }
-  async streamText(text, _delayMs) {
-    // Capture text without streaming delay
-    this.capturedText += text;
-  }
-  async streamData(data) {
-    this.capturedData.push(data);
-  }
-  async writeData(_type, data) {
-    this.capturedData.push(data);
-  }
-  async writeError(errorMessage) {
-    this.hasError = true;
-    this.errorMessage = errorMessage;
-  }
-  async complete() {
-    // No-op for MCP
-  }
-  async writeOperation(operation) {
-    this.capturedOperations.push(operation);
-  }
-  /**
-   * Get the captured response for MCP tool result
-   */
-  getCapturedResponse() {
-    return {
-      text: this.capturedText,
-      data: this.capturedData,
-      operations: this.capturedOperations,
-      hasError: this.hasError,
-      errorMessage: this.errorMessage,
-    };
-  }
-}
-export function createMCPStreamHelper() {
-  return new MCPStreamHelper();
-}
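The three removed helpers share one writeRole/writeContent/streamText/writeOperation/complete surface over different transports: SSEStreamHelper emits OpenAI-style chat.completion.chunk events over SSE, VercelDataStreamHelper writes Vercel AI SDK data-stream parts, and MCPStreamHelper captures everything for a single MCP tool result. The following is a minimal usage sketch for SSEStreamHelper only; the stream object, request id, and import path are illustrative stand-ins, not values taken from the package:

    import { createSSEStreamHelper } from './stream-helpers.js'; // illustrative path

    // Assumption: any object exposing writeSSE() and sleep() satisfies what SSEStreamHelper calls.
    const demoStream = {
      writeSSE: async ({ data }) => console.log(`data: ${data}\n`),
      sleep: (ms) => new Promise((resolve) => setTimeout(resolve, ms)),
    };

    const helper = createSSEStreamHelper(demoStream, 'chatcmpl-demo', Math.floor(Date.now() / 1000));
    await helper.writeRole('assistant');
    // Operations sent while streamText() is in flight are queued and flushed when the text finishes;
    // this one goes out immediately because no text is streaming yet.
    await helper.writeOperation({ type: 'status_update', ctx: { label: 'Searching', data: { step: 1 } } });
    await helper.streamText('Hello from the run API', 10);
    await helper.complete('stop'); // flushes the queue, writes finish_reason and [DONE]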
package/dist/utils/stream-registry.d.ts
@@ -1,18 +0,0 @@
-import type { StreamHelper } from './stream-helpers';
-/**
- * Register a StreamHelper for a specific request ID
- */
-export declare function registerStreamHelper(requestId: string, streamHelper: StreamHelper): void;
-/**
- * Get a StreamHelper by request ID
- */
-export declare function getStreamHelper(requestId: string): StreamHelper | undefined;
-/**
- * Unregister a StreamHelper for a specific request ID
- */
-export declare function unregisterStreamHelper(requestId: string): void;
-/**
- * Get registry size (for debugging)
- */
-export declare function getRegistrySize(): number;
-//# sourceMappingURL=stream-registry.d.ts.map
package/dist/utils/stream-registry.d.ts.map
@@ -1 +0,0 @@
-{"version":3,"file":"stream-registry.d.ts","sourceRoot":"","sources":["../../src/utils/stream-registry.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,kBAAkB,CAAC;AAQrD;;GAEG;AACH,wBAAgB,oBAAoB,CAAC,SAAS,EAAE,MAAM,EAAE,YAAY,EAAE,YAAY,GAAG,IAAI,CAOxF;AAED;;GAEG;AACH,wBAAgB,eAAe,CAAC,SAAS,EAAE,MAAM,GAAG,YAAY,GAAG,SAAS,CAE3E;AAED;;GAEG;AACH,wBAAgB,sBAAsB,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI,CAE9D;AAED;;GAEG;AACH,wBAAgB,eAAe,IAAI,MAAM,CAExC"}
package/dist/utils/stream-registry.js
@@ -1,33 +0,0 @@
-/**
- * Global registry for StreamHelper instances
- * Allows agents to access streamHelper via requestId across A2A boundaries
- */
-const streamHelperRegistry = new Map();
-/**
- * Register a StreamHelper for a specific request ID
- */
-export function registerStreamHelper(requestId, streamHelper) {
-  streamHelperRegistry.set(requestId, streamHelper);
-  // Set sessionId for stream helpers that support it
-  if ('setSessionId' in streamHelper && typeof streamHelper.setSessionId === 'function') {
-    streamHelper.setSessionId(requestId);
-  }
-}
-/**
- * Get a StreamHelper by request ID
- */
-export function getStreamHelper(requestId) {
-  return streamHelperRegistry.get(requestId);
-}
-/**
- * Unregister a StreamHelper for a specific request ID
- */
-export function unregisterStreamHelper(requestId) {
-  streamHelperRegistry.delete(requestId);
-}
-/**
- * Get registry size (for debugging)
- */
-export function getRegistrySize() {
-  return streamHelperRegistry.size;
-}
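The removed stream-registry.js is a module-level Map keyed by request id, which is how a helper created for one request can be looked up again across A2A boundaries. A short usage sketch, with the request id and import paths as placeholders rather than values from the package:

    import { createMCPStreamHelper } from './stream-helpers.js'; // illustrative paths
    import {
      registerStreamHelper,
      getStreamHelper,
      unregisterStreamHelper,
      getRegistrySize,
    } from './stream-registry.js';

    const requestId = 'req-123'; // hypothetical id
    const helper = createMCPStreamHelper();

    // registerStreamHelper() also forwards the request id to setSessionId() on helpers that expose it.
    registerStreamHelper(requestId, helper);

    // Later, possibly in code that only knows the request id:
    const found = getStreamHelper(requestId);
    if (found) await found.writeContent('partial result');

    unregisterStreamHelper(requestId);
    console.log(getRegistrySize()); // 0, assuming nothing else is registered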