@inkeep/agents-run-api 0.39.4 → 0.40.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/_virtual/_raw_/home/runner/work/agents/agents/agents-run-api/templates/v1/phase1/system-prompt.js +5 -0
- package/dist/_virtual/_raw_/home/runner/work/agents/agents/agents-run-api/templates/v1/phase1/thinking-preparation.js +5 -0
- package/dist/_virtual/_raw_/home/runner/work/agents/agents/agents-run-api/templates/v1/phase1/tool.js +5 -0
- package/dist/_virtual/_raw_/home/runner/work/agents/agents/agents-run-api/templates/v1/phase2/data-component.js +5 -0
- package/dist/_virtual/_raw_/home/runner/work/agents/agents/agents-run-api/templates/v1/phase2/data-components.js +5 -0
- package/dist/_virtual/_raw_/home/runner/work/agents/agents/agents-run-api/templates/v1/phase2/system-prompt.js +5 -0
- package/dist/_virtual/_raw_/home/runner/work/agents/agents/agents-run-api/templates/v1/shared/artifact-retrieval-guidance.js +5 -0
- package/dist/_virtual/_raw_/home/runner/work/agents/agents/agents-run-api/templates/v1/shared/artifact.js +5 -0
- package/dist/a2a/client.d.ts +184 -0
- package/dist/a2a/client.js +510 -0
- package/dist/a2a/handlers.d.ts +7 -0
- package/dist/a2a/handlers.js +560 -0
- package/dist/a2a/transfer.d.ts +22 -0
- package/dist/a2a/transfer.js +46 -0
- package/dist/a2a/types.d.ts +79 -0
- package/dist/a2a/types.js +22 -0
- package/dist/agents/Agent.d.ts +266 -0
- package/dist/agents/Agent.js +1927 -0
- package/dist/agents/ModelFactory.d.ts +63 -0
- package/dist/agents/ModelFactory.js +194 -0
- package/dist/agents/SystemPromptBuilder.d.ts +21 -0
- package/dist/agents/SystemPromptBuilder.js +48 -0
- package/dist/agents/ToolSessionManager.d.ts +63 -0
- package/dist/agents/ToolSessionManager.js +146 -0
- package/dist/agents/generateTaskHandler.d.ts +49 -0
- package/dist/agents/generateTaskHandler.js +521 -0
- package/dist/agents/relationTools.d.ts +57 -0
- package/dist/agents/relationTools.js +262 -0
- package/dist/agents/types.d.ts +28 -0
- package/dist/agents/types.js +1 -0
- package/dist/agents/versions/v1/Phase1Config.d.ts +27 -0
- package/dist/agents/versions/v1/Phase1Config.js +424 -0
- package/dist/agents/versions/v1/Phase2Config.d.ts +31 -0
- package/dist/agents/versions/v1/Phase2Config.js +330 -0
- package/dist/constants/execution-limits/defaults.d.ts +51 -0
- package/dist/constants/execution-limits/defaults.js +52 -0
- package/dist/constants/execution-limits/index.d.ts +6 -0
- package/dist/constants/execution-limits/index.js +21 -0
- package/dist/create-app.d.ts +9 -0
- package/dist/create-app.js +195 -0
- package/dist/data/agent.d.ts +7 -0
- package/dist/data/agent.js +72 -0
- package/dist/data/agents.d.ts +34 -0
- package/dist/data/agents.js +139 -0
- package/dist/data/conversations.d.ts +128 -0
- package/dist/data/conversations.js +522 -0
- package/dist/data/db/dbClient.d.ts +6 -0
- package/dist/data/db/dbClient.js +17 -0
- package/dist/env.d.ts +57 -0
- package/dist/env.js +1 -2
- package/dist/handlers/executionHandler.d.ts +39 -0
- package/dist/handlers/executionHandler.js +456 -0
- package/dist/index.d.ts +8 -29
- package/dist/index.js +5 -11235
- package/dist/instrumentation.d.ts +1 -2
- package/dist/instrumentation.js +66 -3
- package/dist/{logger2.js → logger.d.ts} +1 -2
- package/dist/logger.js +1 -1
- package/dist/middleware/api-key-auth.d.ts +26 -0
- package/dist/middleware/api-key-auth.js +240 -0
- package/dist/middleware/index.d.ts +2 -0
- package/dist/middleware/index.js +3 -0
- package/dist/openapi.d.ts +4 -0
- package/dist/openapi.js +54 -0
- package/dist/routes/agents.d.ts +12 -0
- package/dist/routes/agents.js +147 -0
- package/dist/routes/chat.d.ts +13 -0
- package/dist/routes/chat.js +293 -0
- package/dist/routes/chatDataStream.d.ts +13 -0
- package/dist/routes/chatDataStream.js +352 -0
- package/dist/routes/mcp.d.ts +13 -0
- package/dist/routes/mcp.js +495 -0
- package/dist/services/AgentSession.d.ts +356 -0
- package/dist/services/AgentSession.js +1208 -0
- package/dist/services/ArtifactParser.d.ts +105 -0
- package/dist/services/ArtifactParser.js +338 -0
- package/dist/services/ArtifactService.d.ts +123 -0
- package/dist/services/ArtifactService.js +612 -0
- package/dist/services/BaseCompressor.d.ts +183 -0
- package/dist/services/BaseCompressor.js +504 -0
- package/dist/services/ConversationCompressor.d.ts +32 -0
- package/dist/services/ConversationCompressor.js +91 -0
- package/dist/services/IncrementalStreamParser.d.ts +98 -0
- package/dist/services/IncrementalStreamParser.js +327 -0
- package/dist/services/MidGenerationCompressor.d.ts +63 -0
- package/dist/services/MidGenerationCompressor.js +104 -0
- package/dist/services/PendingToolApprovalManager.d.ts +62 -0
- package/dist/services/PendingToolApprovalManager.js +133 -0
- package/dist/services/ResponseFormatter.d.ts +39 -0
- package/dist/services/ResponseFormatter.js +152 -0
- package/dist/tools/NativeSandboxExecutor.d.ts +38 -0
- package/dist/tools/NativeSandboxExecutor.js +432 -0
- package/dist/tools/SandboxExecutorFactory.d.ts +36 -0
- package/dist/tools/SandboxExecutorFactory.js +80 -0
- package/dist/tools/VercelSandboxExecutor.d.ts +71 -0
- package/dist/tools/VercelSandboxExecutor.js +340 -0
- package/dist/tools/distill-conversation-history-tool.d.ts +62 -0
- package/dist/tools/distill-conversation-history-tool.js +206 -0
- package/dist/tools/distill-conversation-tool.d.ts +41 -0
- package/dist/tools/distill-conversation-tool.js +141 -0
- package/dist/tools/sandbox-utils.d.ts +18 -0
- package/dist/tools/sandbox-utils.js +53 -0
- package/dist/types/chat.d.ts +27 -0
- package/dist/types/chat.js +1 -0
- package/dist/types/execution-context.d.ts +46 -0
- package/dist/types/execution-context.js +27 -0
- package/dist/types/xml.d.ts +5 -0
- package/dist/utils/SchemaProcessor.d.ts +52 -0
- package/dist/utils/SchemaProcessor.js +182 -0
- package/dist/utils/agent-operations.d.ts +62 -0
- package/dist/utils/agent-operations.js +53 -0
- package/dist/utils/artifact-component-schema.d.ts +42 -0
- package/dist/utils/artifact-component-schema.js +186 -0
- package/dist/utils/cleanup.d.ts +21 -0
- package/dist/utils/cleanup.js +59 -0
- package/dist/utils/data-component-schema.d.ts +2 -0
- package/dist/utils/data-component-schema.js +3 -0
- package/dist/utils/default-status-schemas.d.ts +20 -0
- package/dist/utils/default-status-schemas.js +24 -0
- package/dist/utils/json-postprocessor.d.ts +13 -0
- package/dist/{json-postprocessor.cjs → utils/json-postprocessor.js} +1 -2
- package/dist/utils/model-context-utils.d.ts +39 -0
- package/dist/utils/model-context-utils.js +181 -0
- package/dist/utils/model-resolver.d.ts +6 -0
- package/dist/utils/model-resolver.js +34 -0
- package/dist/utils/schema-validation.d.ts +44 -0
- package/dist/utils/schema-validation.js +97 -0
- package/dist/utils/stream-helpers.d.ts +197 -0
- package/dist/utils/stream-helpers.js +518 -0
- package/dist/utils/stream-registry.d.ts +22 -0
- package/dist/utils/stream-registry.js +34 -0
- package/dist/utils/token-estimator.d.ts +69 -0
- package/dist/utils/token-estimator.js +53 -0
- package/dist/utils/tracer.d.ts +7 -0
- package/dist/utils/tracer.js +7 -0
- package/package.json +5 -20
- package/dist/SandboxExecutorFactory.cjs +0 -895
- package/dist/SandboxExecutorFactory.js +0 -893
- package/dist/SandboxExecutorFactory.js.map +0 -1
- package/dist/chunk-VBDAOXYI.cjs +0 -927
- package/dist/chunk-VBDAOXYI.js +0 -832
- package/dist/chunk-VBDAOXYI.js.map +0 -1
- package/dist/chunk.cjs +0 -34
- package/dist/conversations.cjs +0 -7
- package/dist/conversations.js +0 -7
- package/dist/conversations2.cjs +0 -209
- package/dist/conversations2.js +0 -180
- package/dist/conversations2.js.map +0 -1
- package/dist/dbClient.cjs +0 -9676
- package/dist/dbClient.js +0 -9670
- package/dist/dbClient.js.map +0 -1
- package/dist/dbClient2.cjs +0 -5
- package/dist/dbClient2.js +0 -5
- package/dist/env.cjs +0 -59
- package/dist/env.js.map +0 -1
- package/dist/execution-limits.cjs +0 -260
- package/dist/execution-limits.js +0 -63
- package/dist/execution-limits.js.map +0 -1
- package/dist/index.cjs +0 -11260
- package/dist/index.d.cts +0 -36
- package/dist/index.d.cts.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/instrumentation.cjs +0 -12
- package/dist/instrumentation.d.cts +0 -18
- package/dist/instrumentation.d.cts.map +0 -1
- package/dist/instrumentation.d.ts.map +0 -1
- package/dist/instrumentation2.cjs +0 -116
- package/dist/instrumentation2.js +0 -69
- package/dist/instrumentation2.js.map +0 -1
- package/dist/json-postprocessor.js +0 -20
- package/dist/json-postprocessor.js.map +0 -1
- package/dist/logger.cjs +0 -5
- package/dist/logger2.cjs +0 -1
- package/dist/nodefs.cjs +0 -29
- package/dist/nodefs.js +0 -27
- package/dist/nodefs.js.map +0 -1
- package/dist/opfs-ahp.cjs +0 -367
- package/dist/opfs-ahp.js +0 -368
- package/dist/opfs-ahp.js.map +0 -1

package/dist/utils/stream-helpers.js
@@ -0,0 +1,518 @@
+import { STREAM_BUFFER_MAX_SIZE_BYTES, STREAM_MAX_LIFETIME_MS, STREAM_TEXT_GAP_THRESHOLD_MS } from "../constants/execution-limits/index.js";
+import { parsePartialJson } from "ai";
+
+//#region src/utils/stream-helpers.ts
+var SSEStreamHelper = class {
+  isTextStreaming = false;
+  queuedEvents = [];
+  constructor(stream, requestId, timestamp) {
+    this.stream = stream;
+    this.requestId = requestId;
+    this.timestamp = timestamp;
+  }
+  /**
+  * Write the initial role message
+  */
+  async writeRole(role = "assistant") {
+    await this.stream.writeSSE({ data: JSON.stringify({
+      id: this.requestId,
+      object: "chat.completion.chunk",
+      created: this.timestamp,
+      choices: [{
+        index: 0,
+        delta: { role },
+        finish_reason: null
+      }]
+    }) });
+  }
+  /**
+  * Write content chunk
+  */
+  async writeContent(content) {
+    await this.stream.writeSSE({ data: JSON.stringify({
+      id: this.requestId,
+      object: "chat.completion.chunk",
+      created: this.timestamp,
+      choices: [{
+        index: 0,
+        delta: { content },
+        finish_reason: null
+      }]
+    }) });
+  }
+  /**
+  * Stream text word by word with optional delay
+  */
+  async streamText(text, delayMs = 100) {
+    const words = text.split(" ");
+    this.isTextStreaming = true;
+    try {
+      for (let i = 0; i < words.length; i++) {
+        await this.stream.sleep(delayMs);
+        const content = i === 0 ? words[i] : ` ${words[i]}`;
+        await this.writeContent(content);
+      }
+    } finally {
+      this.isTextStreaming = false;
+      await this.flushQueuedOperations();
+    }
+  }
+  async streamData(data) {
+    await this.writeContent(JSON.stringify(data));
+  }
+  /**
+  * Write error message or error event
+  */
+  async writeError(error) {
+    const errorMessage = typeof error === "string" ? error : error.message;
+    await this.writeContent(`\n\n${errorMessage}`);
+  }
+  /**
+  * Write the final completion message
+  */
+  async writeCompletion(finishReason = "stop") {
+    await this.stream.writeSSE({ data: JSON.stringify({
+      id: this.requestId,
+      object: "chat.completion.chunk",
+      created: this.timestamp,
+      choices: [{
+        index: 0,
+        delta: {},
+        finish_reason: finishReason
+      }]
+    }) });
+  }
+  async writeData(type, data) {
+    await this.stream.writeSSE({ data: JSON.stringify({
+      id: this.requestId,
+      object: "chat.completion.chunk",
+      created: this.timestamp,
+      choices: [{
+        index: 0,
+        delta: { content: JSON.stringify({
+          type,
+          data
+        }) },
+        finish_reason: null
+      }]
+    }) });
+  }
+  async writeSummary(summary) {
+    if (this.isTextStreaming) {
+      this.queuedEvents.push({
+        type: "data-summary",
+        event: summary
+      });
+      return;
+    }
+    await this.flushQueuedOperations();
+    await this.writeData("data-summary", summary);
+  }
+  async writeOperation(operation) {
+    if (this.isTextStreaming) {
+      this.queuedEvents.push({
+        type: "data-operation",
+        event: operation
+      });
+      return;
+    }
+    await this.flushQueuedOperations();
+    await this.writeData("data-operation", operation);
+  }
+  /**
+  * Flush all queued operations in order after text streaming completes
+  */
+  async flushQueuedOperations() {
+    if (this.queuedEvents.length === 0) return;
+    const eventsToFlush = [...this.queuedEvents];
+    this.queuedEvents = [];
+    for (const event of eventsToFlush) await this.writeData(event.type, event.event);
+  }
+  /**
+  * Write the final [DONE] message
+  */
+  async writeDone() {
+    await this.stream.writeSSE({ data: "[DONE]" });
+  }
+  /**
+  * Complete the stream with finish reason and done message
+  */
+  async complete(finishReason = "stop") {
+    await this.flushQueuedOperations();
+    await this.writeCompletion(finishReason);
+    await this.writeDone();
+  }
+};
+/**
+* Factory function to create SSE stream helper
+*/
+function createSSEStreamHelper(stream, requestId, timestamp) {
+  return new SSEStreamHelper(stream, requestId, timestamp);
+}
+var VercelDataStreamHelper = class VercelDataStreamHelper {
+  textId = null;
+  jsonBuffer = "";
+  sentItems = /* @__PURE__ */ new Map();
+  completedItems = /* @__PURE__ */ new Set();
+  sessionId;
+  static MAX_BUFFER_SIZE = STREAM_BUFFER_MAX_SIZE_BYTES;
+  isCompleted = false;
+  isTextStreaming = false;
+  queuedEvents = [];
+  lastTextEndTimestamp = 0;
+  connectionDropTimer;
+  constructor(writer) {
+    this.writer = writer;
+    this.connectionDropTimer = setTimeout(() => {
+      this.forceCleanup("Connection lifetime exceeded");
+    }, STREAM_MAX_LIFETIME_MS);
+  }
+  setSessionId(sessionId) {
+    this.sessionId = sessionId;
+  }
+  async writeRole(_ = "assistant") {}
+  async writeContent(content) {
+    if (this.isCompleted) {
+      console.warn("Attempted to write content to completed stream");
+      return;
+    }
+    if (!this.textId) this.textId = `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+    if (this.jsonBuffer.length + content.length > VercelDataStreamHelper.MAX_BUFFER_SIZE) {
+      const newBuffer = this.truncateJsonBufferSafely(this.jsonBuffer);
+      if (newBuffer.length === this.jsonBuffer.length) {
+        console.warn("VercelDataStreamHelper: Could not find safe JSON truncation point, clearing buffer");
+        this.jsonBuffer = "";
+        this.sentItems.clear();
+      } else {
+        this.jsonBuffer = newBuffer;
+        this.reindexSentItems();
+      }
+    }
+    this.jsonBuffer += content;
+    const { value, state } = await parsePartialJson(this.jsonBuffer);
+    if (!["repaired-parse", "successful-parse"].includes(state)) return;
+    if (!Array.isArray(value)) return;
+    for (let i = 0; i < value.length; i++) {
+      const { type, ...data } = value[i];
+      const currentContent = JSON.stringify(data);
+      if (currentContent !== this.sentItems.get(i)) {
+        const chunk = {
+          type: "data-component",
+          id: `${this.textId}-${i}`,
+          data: {
+            type,
+            ...data
+          }
+        };
+        this.writer.write(chunk);
+        this.sentItems.set(i, currentContent);
+      }
+    }
+  }
+  async streamText(text, delayMs = 100) {
+    if (this.isCompleted) {
+      console.warn("Attempted to stream text to completed stream");
+      return;
+    }
+    if (!this.textId) this.textId = `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+    const id = this.textId;
+    const startTime = Date.now();
+    if ((this.lastTextEndTimestamp > 0 ? startTime - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER) >= STREAM_TEXT_GAP_THRESHOLD_MS) await this.flushQueuedOperations();
+    this.isTextStreaming = true;
+    try {
+      this.writer.write({
+        type: "text-start",
+        id
+      });
+      if (delayMs > 0) await new Promise((r) => setTimeout(r, delayMs));
+      this.writer.write({
+        type: "text-delta",
+        id,
+        delta: text
+      });
+      this.writer.write({
+        type: "text-end",
+        id
+      });
+      this.lastTextEndTimestamp = Date.now();
+    } finally {
+      this.isTextStreaming = false;
+    }
+  }
+  async writeData(type, data) {
+    if (this.isCompleted) {
+      console.warn("Attempted to write data to completed stream");
+      return;
+    }
+    if (type === "data-artifact") {
+      const now = Date.now();
+      const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
+      if (this.isTextStreaming || gapFromLastTextEnd < STREAM_TEXT_GAP_THRESHOLD_MS) {
+        this.writer.write({
+          type: `${type}`,
+          data
+        });
+        return;
+      }
+    }
+    this.writer.write({
+      type: `${type}`,
+      data
+    });
+  }
+  async writeError(error) {
+    if (this.isCompleted) {
+      console.warn("Attempted to write error to completed stream");
+      return;
+    }
+    if (typeof error === "string") this.writer.write({
+      type: "error",
+      message: error,
+      severity: "error",
+      timestamp: Date.now()
+    });
+    else this.writer.write({
+      ...error,
+      type: "error"
+    });
+  }
+  async streamData(data) {
+    await this.writeContent(JSON.stringify(data));
+  }
+  async mergeStream(stream) {
+    if (this.isCompleted) {
+      console.warn("Attempted to merge stream to completed stream");
+      return;
+    }
+    this.writer.merge(stream);
+  }
+  /**
+  * Clean up all memory allocations
+  * Should be called when the stream helper is no longer needed
+  */
+  cleanup() {
+    if (this.connectionDropTimer) {
+      clearTimeout(this.connectionDropTimer);
+      this.connectionDropTimer = void 0;
+    }
+    this.jsonBuffer = "";
+    this.sentItems.clear();
+    this.completedItems.clear();
+    this.textId = null;
+    this.queuedEvents = [];
+    this.isTextStreaming = false;
+  }
+  /**
+  * JSON-aware buffer truncation that preserves complete JSON structures
+  */
+  truncateJsonBufferSafely(buffer) {
+    const keepSize = Math.floor(VercelDataStreamHelper.MAX_BUFFER_SIZE * .6);
+    if (buffer.length <= keepSize) return buffer;
+    let depth = 0;
+    let inString = false;
+    let escaping = false;
+    let lastCompleteStructureEnd = -1;
+    for (let i = Math.min(keepSize + 1e3, buffer.length - 1); i >= keepSize; i--) {
+      const char = buffer[i];
+      if (escaping) {
+        escaping = false;
+        continue;
+      }
+      if (char === "\\") {
+        escaping = true;
+        continue;
+      }
+      if (char === "\"") {
+        inString = !inString;
+        continue;
+      }
+      if (inString) continue;
+      if (char === "}" || char === "]") depth++;
+      else if (char === "{" || char === "[") {
+        depth--;
+        if (depth === 0) {
+          lastCompleteStructureEnd = i - 1;
+          break;
+        }
+      }
+    }
+    if (lastCompleteStructureEnd > 0) return buffer.slice(lastCompleteStructureEnd + 1);
+    for (let i = keepSize; i < Math.min(keepSize + 500, buffer.length); i++) if (buffer[i] === "\n" && buffer[i + 1] && buffer[i + 1].match(/[{[]]/)) return buffer.slice(i + 1);
+    return buffer;
+  }
+  /**
+  * Reindex sent items after buffer truncation
+  */
+  reindexSentItems() {
+    this.sentItems.clear();
+    this.completedItems.clear();
+  }
+  /**
+  * Force cleanup on connection drop or timeout
+  */
+  forceCleanup(reason) {
+    console.warn(`VercelDataStreamHelper: Forcing cleanup - ${reason}`);
+    this.isCompleted = true;
+    this.cleanup();
+    try {
+      if (this.writer && !this.isCompleted) this.writer.write({
+        type: "error",
+        message: `Stream terminated: ${reason}`,
+        severity: "error",
+        timestamp: Date.now()
+      });
+    } catch (_e) {}
+  }
+  /**
+  * Check if the stream has been completed and cleaned up
+  */
+  isStreamCompleted() {
+    return this.isCompleted;
+  }
+  /**
+  * Get current memory usage stats (for debugging/monitoring)
+  */
+  getMemoryStats() {
+    return {
+      bufferSize: this.jsonBuffer.length,
+      sentItemsCount: this.sentItems.size,
+      completedItemsCount: this.completedItems.size,
+      isCompleted: this.isCompleted
+    };
+  }
+  async writeSummary(summary) {
+    if (this.isCompleted) {
+      console.warn("Attempted to write summary to completed stream");
+      return;
+    }
+    const now = Date.now();
+    const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
+    if (this.isTextStreaming || gapFromLastTextEnd < STREAM_TEXT_GAP_THRESHOLD_MS) {
+      this.queuedEvents.push({
+        type: "data-summary",
+        event: summary
+      });
+      return;
+    }
+    await this.flushQueuedOperations();
+    await this.writer.write({
+      id: "id" in summary ? summary.id : void 0,
+      type: "data-summary",
+      data: summary
+    });
+  }
+  async writeOperation(operation) {
+    if (this.isCompleted) {
+      console.warn("Attempted to write operation to completed stream");
+      return;
+    }
+    const now = Date.now();
+    const gapFromLastTextEnd = this.lastTextEndTimestamp > 0 ? now - this.lastTextEndTimestamp : Number.MAX_SAFE_INTEGER;
+    if (operation.type !== "tool_call" && operation.type !== "tool_result" && (this.isTextStreaming || gapFromLastTextEnd < STREAM_TEXT_GAP_THRESHOLD_MS)) {
+      this.queuedEvents.push({
+        type: "data-operation",
+        event: operation
+      });
+      return;
+    }
+    await this.flushQueuedOperations();
+    this.writer.write({
+      id: "id" in operation ? operation.id : void 0,
+      type: "data-operation",
+      data: operation
+    });
+  }
+  /**
+  * Flush all queued operations in order after text streaming completes
+  */
+  async flushQueuedOperations() {
+    if (this.queuedEvents.length === 0) return;
+    const eventsToFlush = [...this.queuedEvents];
+    this.queuedEvents = [];
+    for (const event of eventsToFlush) this.writer.write({
+      id: "id" in event.event ? event.event.id : void 0,
+      type: event.type,
+      data: event.event
+    });
+  }
+  async writeCompletion(_finishReason = "stop") {}
+  async writeDone() {}
+  /**
+  * Complete the stream and clean up all memory
+  * This is the primary cleanup point to prevent memory leaks between requests
+  */
+  async complete() {
+    if (this.isCompleted) return;
+    await this.flushQueuedOperations();
+    this.isCompleted = true;
+    this.cleanup();
+  }
+};
+function createVercelStreamHelper(writer) {
+  return new VercelDataStreamHelper(writer);
+}
+/**
+* Buffering Stream Helper that captures content instead of streaming
+* Used for MCP tool responses and non-streaming API responses that require a single complete message
+*/
+var BufferingStreamHelper = class {
+  capturedText = "";
+  capturedData = [];
+  capturedOperations = [];
+  capturedSummaries = [];
+  hasError = false;
+  errorMessage = "";
+  sessionId;
+  setSessionId(sessionId) {
+    this.sessionId = sessionId;
+  }
+  async writeRole(_role) {}
+  async writeContent(content) {
+    this.capturedText += content;
+  }
+  async streamText(text, _delayMs) {
+    this.capturedText += text;
+  }
+  async streamData(data) {
+    this.capturedData.push(data);
+  }
+  async streamSummary(summary) {
+    this.capturedSummaries.push(summary);
+  }
+  async streamOperation(operation) {
+    this.capturedOperations.push(operation);
+  }
+  async writeData(_type, data) {
+    this.capturedData.push(data);
+  }
+  async writeSummary(summary) {
+    this.capturedSummaries.push(summary);
+  }
+  async writeOperation(operation) {
+    this.capturedOperations.push(operation);
+  }
+  async writeError(error) {
+    this.hasError = true;
+    this.errorMessage = typeof error === "string" ? error : error.message;
+  }
+  async complete() {}
+  /**
+  * Get the captured response for non-streaming output
+  */
+  getCapturedResponse() {
+    return {
+      text: this.capturedText,
+      data: this.capturedData,
+      operations: this.capturedOperations,
+      hasError: this.hasError,
+      errorMessage: this.errorMessage
+    };
+  }
+};
+function createBufferingStreamHelper() {
+  return new BufferingStreamHelper();
+}
+const createMCPStreamHelper = createBufferingStreamHelper;
+
+//#endregion
+export { BufferingStreamHelper, SSEStreamHelper, VercelDataStreamHelper, createBufferingStreamHelper, createMCPStreamHelper, createSSEStreamHelper, createVercelStreamHelper };
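
For orientation, here is a rough usage sketch of the SSE helper added above. The import path, request id, and the stream stub (anything exposing writeSSE and sleep, for example a Hono streamSSE context) are illustrative assumptions, not documented API of this package.

// Illustrative sketch only: the import specifier and the stream stub are assumptions.
import { createSSEStreamHelper } from "./dist/utils/stream-helpers.js";

// Minimal stand-in for the object the helper expects: it must expose
// writeSSE({ data }) and sleep(ms), as the SSEStreamHelper methods above require.
const stream = {
  async writeSSE({ data }) { console.log(`data: ${data}\n`); },
  sleep: (ms) => new Promise((resolve) => setTimeout(resolve, ms)),
};

const helper = createSSEStreamHelper(stream, "req-123", Math.floor(Date.now() / 1000));
await helper.writeRole();                               // emits the initial { role: "assistant" } chunk
await helper.streamText("Hello from the run API", 0);   // word-by-word content chunks, no artificial delay
await helper.complete();                                // flushes queued events, writes finish_reason, then [DONE]

The Vercel and buffering variants expose the same writeContent/writeSummary/writeOperation/complete surface, so callers can swap helpers without changing call sites.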

package/dist/utils/stream-registry.d.ts
@@ -0,0 +1,22 @@
+import { StreamHelper } from "./stream-helpers.js";
+
+//#region src/utils/stream-registry.d.ts
+
+/**
+* Register a StreamHelper for a specific request ID
+*/
+declare function registerStreamHelper(requestId: string, streamHelper: StreamHelper): void;
+/**
+* Get a StreamHelper by request ID
+*/
+declare function getStreamHelper(requestId: string): StreamHelper | undefined;
+/**
+* Unregister a StreamHelper for a specific request ID
+*/
+declare function unregisterStreamHelper(requestId: string): void;
+/**
+* Get registry size (for debugging)
+*/
+declare function getRegistrySize(): number;
+//#endregion
+export { getRegistrySize, getStreamHelper, registerStreamHelper, unregisterStreamHelper };

package/dist/utils/stream-registry.js
@@ -0,0 +1,34 @@
+//#region src/utils/stream-registry.ts
+/**
+* Global registry for StreamHelper instances
+* Allows agents to access streamHelper via requestId across A2A boundaries
+*/
+const streamHelperRegistry = /* @__PURE__ */ new Map();
+/**
+* Register a StreamHelper for a specific request ID
+*/
+function registerStreamHelper(requestId, streamHelper) {
+  streamHelperRegistry.set(requestId, streamHelper);
+  if ("setSessionId" in streamHelper && typeof streamHelper.setSessionId === "function") streamHelper.setSessionId(requestId);
+}
+/**
+* Get a StreamHelper by request ID
+*/
+function getStreamHelper(requestId) {
+  return streamHelperRegistry.get(requestId);
+}
+/**
+* Unregister a StreamHelper for a specific request ID
+*/
+function unregisterStreamHelper(requestId) {
+  streamHelperRegistry.delete(requestId);
+}
+/**
+* Get registry size (for debugging)
+*/
+function getRegistrySize() {
+  return streamHelperRegistry.size;
+}
+
+//#endregion
+export { getRegistrySize, getStreamHelper, registerStreamHelper, unregisterStreamHelper };
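
A hedged sketch of how the registry above might hand a stream helper across an A2A boundary; apart from the exported functions, the import paths and identifiers below are illustrative.

// Illustrative only: import paths and the requestId value are assumptions.
import { registerStreamHelper, getStreamHelper, unregisterStreamHelper } from "./dist/utils/stream-registry.js";
import { createBufferingStreamHelper } from "./dist/utils/stream-helpers.js";

const requestId = "req-123";
const helper = createBufferingStreamHelper();

registerStreamHelper(requestId, helper);          // also calls helper.setSessionId(requestId) when the helper supports it
const sameHelper = getStreamHelper(requestId);    // another module can look the helper up by requestId
await sameHelper?.writeContent("partial output"); // buffered rather than streamed for this helper type
unregisterStreamHelper(requestId);                // always unregister to avoid leaking entries in the module-level Map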

package/dist/utils/token-estimator.d.ts
@@ -0,0 +1,69 @@
+//#region src/utils/token-estimator.d.ts
+/**
+* Token estimation utility for context tracking.
+*
+* Uses character-based approximation (~4 characters per token) which:
+* - Works universally for all models (OpenAI, Anthropic, Gemini, custom)
+* - Requires no external dependencies
+* - Is fast (simple string length calculation)
+* - Is accurate enough for relative comparisons between context components
+*/
+/**
+* Breakdown of estimated token usage for each context component.
+* All values are approximate token counts.
+*/
+interface ContextBreakdown {
+  /** Base system prompt template tokens */
+  systemPromptTemplate: number;
+  /** Core instructions (corePrompt) tokens */
+  coreInstructions: number;
+  /** Agent-level context (prompt) tokens */
+  agentPrompt: number;
+  /** Tools section (MCP, function, relation tools) tokens */
+  toolsSection: number;
+  /** Artifacts section tokens */
+  artifactsSection: number;
+  /** Data components section tokens (Phase 2) */
+  dataComponents: number;
+  /** Artifact component instructions tokens */
+  artifactComponents: number;
+  /** Transfer instructions tokens */
+  transferInstructions: number;
+  /** Delegation instructions tokens */
+  delegationInstructions: number;
+  /** Thinking preparation instructions tokens */
+  thinkingPreparation: number;
+  /** Conversation history tokens */
+  conversationHistory: number;
+  /** Total estimated tokens */
+  total: number;
+}
+/**
+* Creates an empty context breakdown with all values set to 0.
+*/
+declare function createEmptyBreakdown(): ContextBreakdown;
+/**
+* Estimates the number of tokens in a text string using character-based approximation.
+*
+* @param text - The text to estimate tokens for
+* @returns Estimated token count (approximately text.length / 4)
+*/
+declare function estimateTokens(text: string | undefined | null): number;
+/**
+* Calculates the total from all breakdown components and updates the total field.
+*
+* @param breakdown - The context breakdown to calculate total for
+* @returns The breakdown with updated total
+*/
+declare function calculateBreakdownTotal(breakdown: ContextBreakdown): ContextBreakdown;
+/**
+* Result from prompt assembly that includes both the prompt and token breakdown.
+*/
+interface AssembleResult {
+  /** The assembled prompt string */
+  prompt: string;
+  /** Token breakdown for each component */
+  breakdown: ContextBreakdown;
+}
+//#endregion
+export { AssembleResult, ContextBreakdown, calculateBreakdownTotal, createEmptyBreakdown, estimateTokens };

package/dist/utils/token-estimator.js
@@ -0,0 +1,53 @@
+//#region src/utils/token-estimator.ts
+/**
+* Token estimation utility for context tracking.
+*
+* Uses character-based approximation (~4 characters per token) which:
+* - Works universally for all models (OpenAI, Anthropic, Gemini, custom)
+* - Requires no external dependencies
+* - Is fast (simple string length calculation)
+* - Is accurate enough for relative comparisons between context components
+*/
+const CHARS_PER_TOKEN = 4;
+/**
+* Creates an empty context breakdown with all values set to 0.
+*/
+function createEmptyBreakdown() {
+  return {
+    systemPromptTemplate: 0,
+    coreInstructions: 0,
+    agentPrompt: 0,
+    toolsSection: 0,
+    artifactsSection: 0,
+    dataComponents: 0,
+    artifactComponents: 0,
+    transferInstructions: 0,
+    delegationInstructions: 0,
+    thinkingPreparation: 0,
+    conversationHistory: 0,
+    total: 0
+  };
+}
+/**
+* Estimates the number of tokens in a text string using character-based approximation.
+*
+* @param text - The text to estimate tokens for
+* @returns Estimated token count (approximately text.length / 4)
+*/
+function estimateTokens(text) {
+  if (!text) return 0;
+  return Math.ceil(text.length / CHARS_PER_TOKEN);
+}
+/**
+* Calculates the total from all breakdown components and updates the total field.
+*
+* @param breakdown - The context breakdown to calculate total for
+* @returns The breakdown with updated total
+*/
+function calculateBreakdownTotal(breakdown) {
+  breakdown.total = breakdown.systemPromptTemplate + breakdown.coreInstructions + breakdown.agentPrompt + breakdown.toolsSection + breakdown.artifactsSection + breakdown.dataComponents + breakdown.artifactComponents + breakdown.transferInstructions + breakdown.delegationInstructions + breakdown.thinkingPreparation + breakdown.conversationHistory;
+  return breakdown;
+}
+
+//#endregion
+export { calculateBreakdownTotal, createEmptyBreakdown, estimateTokens };
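
A small illustrative sketch of the estimator in use (the import path and sample strings are assumptions); the counts are deliberately rough at ~4 characters per token.

// Illustrative only: the import path is an assumption.
import { estimateTokens, createEmptyBreakdown, calculateBreakdownTotal } from "./dist/utils/token-estimator.js";

const breakdown = createEmptyBreakdown();
breakdown.agentPrompt = estimateTokens("You are a support agent for Acme.");   // 33 chars -> ceil(33 / 4) = 9
breakdown.conversationHistory = estimateTokens("user: hi\nassistant: hello!"); // 26 chars -> ceil(26 / 4) = 7
calculateBreakdownTotal(breakdown);
console.log(breakdown.total); // 16: the sum of all component estimates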