ai 0.0.0-e27b4ed4-20240419203611
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +13 -0
- package/README.md +37 -0
- package/dist/index.d.mts +1770 -0
- package/dist/index.d.ts +1770 -0
- package/dist/index.js +2958 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +2887 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +174 -0
- package/prompts/dist/index.d.mts +267 -0
- package/prompts/dist/index.d.ts +267 -0
- package/prompts/dist/index.js +178 -0
- package/prompts/dist/index.js.map +1 -0
- package/prompts/dist/index.mjs +146 -0
- package/prompts/dist/index.mjs.map +1 -0
- package/react/dist/index.d.mts +487 -0
- package/react/dist/index.d.ts +504 -0
- package/react/dist/index.js +1310 -0
- package/react/dist/index.js.map +1 -0
- package/react/dist/index.mjs +1271 -0
- package/react/dist/index.mjs.map +1 -0
- package/react/dist/index.server.d.mts +17 -0
- package/react/dist/index.server.d.ts +17 -0
- package/react/dist/index.server.js +50 -0
- package/react/dist/index.server.js.map +1 -0
- package/react/dist/index.server.mjs +23 -0
- package/react/dist/index.server.mjs.map +1 -0
- package/rsc/dist/index.d.ts +289 -0
- package/rsc/dist/index.mjs +18 -0
- package/rsc/dist/rsc-client.d.mts +1 -0
- package/rsc/dist/rsc-client.mjs +18 -0
- package/rsc/dist/rsc-client.mjs.map +1 -0
- package/rsc/dist/rsc-server.d.mts +225 -0
- package/rsc/dist/rsc-server.mjs +1246 -0
- package/rsc/dist/rsc-server.mjs.map +1 -0
- package/rsc/dist/rsc-shared.d.mts +94 -0
- package/rsc/dist/rsc-shared.mjs +346 -0
- package/rsc/dist/rsc-shared.mjs.map +1 -0
- package/solid/dist/index.d.mts +351 -0
- package/solid/dist/index.d.ts +351 -0
- package/solid/dist/index.js +1002 -0
- package/solid/dist/index.js.map +1 -0
- package/solid/dist/index.mjs +974 -0
- package/solid/dist/index.mjs.map +1 -0
- package/svelte/dist/index.d.mts +348 -0
- package/svelte/dist/index.d.ts +348 -0
- package/svelte/dist/index.js +1556 -0
- package/svelte/dist/index.js.map +1 -0
- package/svelte/dist/index.mjs +1528 -0
- package/svelte/dist/index.mjs.map +1 -0
- package/vue/dist/index.d.mts +345 -0
- package/vue/dist/index.d.ts +345 -0
- package/vue/dist/index.js +1002 -0
- package/vue/dist/index.js.map +1 -0
- package/vue/dist/index.mjs +964 -0
- package/vue/dist/index.mjs.map +1 -0
package/dist/index.js
ADDED
@@ -0,0 +1,2958 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __create = Object.create;
|
3
|
+
var __defProp = Object.defineProperty;
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
8
|
+
var __export = (target, all) => {
|
9
|
+
for (var name in all)
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
11
|
+
};
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
14
|
+
for (let key of __getOwnPropNames(from))
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
17
|
+
}
|
18
|
+
return to;
|
19
|
+
};
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
26
|
+
mod
|
27
|
+
));
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
29
|
+
|
30
|
+
// streams/index.ts
|
31
|
+
// Public export surface of the package's CommonJS build. Each entry is a
// lazy getter (see __export) so names defined later in this bundle resolve
// correctly at access time.
var streams_exports = {};
__export(streams_exports, {
  AIStream: () => AIStream,
  AWSBedrockAnthropicMessagesStream: () => AWSBedrockAnthropicMessagesStream,
  AWSBedrockAnthropicStream: () => AWSBedrockAnthropicStream,
  AWSBedrockCohereStream: () => AWSBedrockCohereStream,
  AWSBedrockLlama2Stream: () => AWSBedrockLlama2Stream,
  AWSBedrockStream: () => AWSBedrockStream,
  AnthropicStream: () => AnthropicStream,
  AssistantResponse: () => AssistantResponse,
  CohereStream: () => CohereStream,
  GenerateObjectResult: () => GenerateObjectResult,
  GenerateTextResult: () => GenerateTextResult,
  GoogleGenerativeAIStream: () => GoogleGenerativeAIStream,
  HuggingFaceStream: () => HuggingFaceStream,
  InkeepStream: () => InkeepStream,
  LangChainStream: () => LangChainStream,
  MistralStream: () => MistralStream,
  OpenAIStream: () => OpenAIStream,
  ReplicateStream: () => ReplicateStream,
  StreamData: () => StreamData,
  StreamObjectResult: () => StreamObjectResult,
  StreamTextResult: () => StreamTextResult,
  StreamingTextResponse: () => StreamingTextResponse,
  convertDataContentToBase64String: () => convertDataContentToBase64String,
  convertDataContentToUint8Array: () => convertDataContentToUint8Array,
  createCallbacksTransformer: () => createCallbacksTransformer,
  createChunkDecoder: () => createChunkDecoder,
  createEventStreamTransformer: () => createEventStreamTransformer,
  createStreamDataTransformer: () => createStreamDataTransformer,
  experimental_AssistantResponse: () => experimental_AssistantResponse,
  experimental_StreamData: () => experimental_StreamData,
  experimental_StreamingReactResponse: () => experimental_StreamingReactResponse,
  experimental_generateObject: () => experimental_generateObject,
  experimental_generateText: () => experimental_generateText,
  experimental_streamObject: () => experimental_streamObject,
  experimental_streamText: () => experimental_streamText,
  formatStreamPart: () => formatStreamPart,
  generateId: () => generateId,
  isStreamStringEqualToType: () => isStreamStringEqualToType,
  // alias: `nanoid` resolves to the same generator as `generateId`
  nanoid: () => generateId,
  parseStreamPart: () => parseStreamPart,
  readDataStream: () => readDataStream,
  readableFromAsyncIterable: () => readableFromAsyncIterable,
  streamToResponse: () => streamToResponse,
  tool: () => tool,
  trimStartOfStreamHelper: () => trimStartOfStreamHelper
});
module.exports = __toCommonJS(streams_exports);
|
80
|
+
|
81
|
+
// core/generate-object/generate-object.ts
|
82
|
+
var import_provider5 = require("@ai-sdk/provider");
|
83
|
+
var import_provider_utils3 = require("@ai-sdk/provider-utils");
|
84
|
+
|
85
|
+
// core/generate-text/token-usage.ts
|
86
|
+
/**
 * Normalizes a provider usage record into the public token-usage shape,
 * deriving `totalTokens` as the sum of prompt and completion tokens.
 */
function calculateTokenUsage(usage) {
  const { promptTokens, completionTokens } = usage;
  return {
    promptTokens,
    completionTokens,
    totalTokens: promptTokens + completionTokens
  };
}
|
93
|
+
|
94
|
+
// core/util/detect-image-mimetype.ts
|
95
|
+
// Magic-number prefixes used to sniff the mime type of raw image bytes.
var mimeTypeSignatures = [
  { mimeType: "image/gif", bytes: [71, 73, 70] },
  { mimeType: "image/png", bytes: [137, 80, 78, 71] },
  { mimeType: "image/jpeg", bytes: [255, 216] },
  { mimeType: "image/webp", bytes: [82, 73, 70, 70] }
];
/**
 * Detects an image mime type from the leading bytes of `image`.
 * Returns `undefined` when no known signature matches.
 */
function detectImageMimeType(image) {
  const matchesSignature = ({ bytes }) =>
    image.length >= bytes.length &&
    bytes.every((expected, index) => image[index] === expected);
  const match = mimeTypeSignatures.find(matchesSignature);
  return match ? match.mimeType : void 0;
}
|
109
|
+
|
110
|
+
// core/prompt/data-content.ts
|
111
|
+
var import_provider = require("@ai-sdk/provider");
|
112
|
+
var import_provider_utils = require("@ai-sdk/provider-utils");
|
113
|
+
/**
 * Converts data content (string | ArrayBuffer | Uint8Array) to a base64
 * string. Strings are passed through unchanged (assumed already base64).
 */
function convertDataContentToBase64String(content) {
  if (typeof content === "string") {
    return content;
  }
  const bytes = content instanceof ArrayBuffer ? new Uint8Array(content) : content;
  return (0, import_provider_utils.convertUint8ArrayToBase64)(bytes);
}
|
122
|
+
/**
 * Converts data content to a Uint8Array.
 * - Uint8Array: returned as-is (no copy).
 * - ArrayBuffer: wrapped in a Uint8Array view.
 * - string: decoded as base64.
 * Throws InvalidDataContentError for anything else.
 */
function convertDataContentToUint8Array(content) {
  if (content instanceof Uint8Array) return content;
  if (content instanceof ArrayBuffer) return new Uint8Array(content);
  if (typeof content === "string") {
    return (0, import_provider_utils.convertBase64ToUint8Array)(content);
  }
  throw new import_provider.InvalidDataContentError({ content });
}
|
134
|
+
|
135
|
+
// core/prompt/convert-to-language-model-prompt.ts
|
136
|
+
// Converts a validated prompt (output of getValidatedPrompt) into the flat
// message array consumed by language-model providers.
// - A non-null `system` becomes a leading system message.
// - "prompt" inputs become a single user message with one text part.
// - "messages" inputs are mapped per role; text parts and tool messages are
//   passed through BY REFERENCE (no copy), so callers share those objects.
function convertToLanguageModelPrompt(prompt) {
  const languageModelMessages = [];
  if (prompt.system != null) {
    languageModelMessages.push({ role: "system", content: prompt.system });
  }
  switch (prompt.type) {
    case "prompt": {
      // Simple string prompt: wrap as a single-part user message.
      languageModelMessages.push({
        role: "user",
        content: [{ type: "text", text: prompt.prompt }]
      });
      break;
    }
    case "messages": {
      languageModelMessages.push(
        ...prompt.messages.map((message) => {
          switch (message.role) {
            case "user": {
              // String content is normalized into a one-element text-part array.
              if (typeof message.content === "string") {
                return {
                  role: "user",
                  content: [{ type: "text", text: message.content }]
                };
              }
              // Multi-part content: text parts pass through unchanged; image
              // parts are normalized to URL or Uint8Array form.
              return {
                role: "user",
                content: message.content.map(
                  (part) => {
                    var _a;
                    switch (part.type) {
                      case "text": {
                        return part;
                      }
                      case "image": {
                        // URL images are forwarded as-is (provider fetches them).
                        if (part.image instanceof URL) {
                          return {
                            type: "image",
                            image: part.image,
                            mimeType: part.mimeType
                          };
                        }
                        // Binary images: decode to bytes, then sniff the mime
                        // type from magic numbers when none was supplied.
                        const imageUint8 = convertDataContentToUint8Array(
                          part.image
                        );
                        return {
                          type: "image",
                          image: imageUint8,
                          mimeType: (_a = part.mimeType) != null ? _a : detectImageMimeType(imageUint8)
                        };
                      }
                    }
                  }
                )
              };
            }
            case "assistant": {
              // String content is normalized like user messages.
              if (typeof message.content === "string") {
                return {
                  role: "assistant",
                  content: [{ type: "text", text: message.content }]
                };
              }
              return { role: "assistant", content: message.content };
            }
            case "tool": {
              // Tool messages need no transformation.
              return message;
            }
          }
        })
      );
      break;
    }
    default: {
      // Exhaustiveness guard: prompt.type is "prompt" | "messages".
      const _exhaustiveCheck = prompt;
      throw new Error(`Unsupported prompt type: ${_exhaustiveCheck}`);
    }
  }
  return languageModelMessages;
}
|
215
|
+
|
216
|
+
// core/prompt/get-validated-prompt.ts
|
217
|
+
var import_provider2 = require("@ai-sdk/provider");
|
218
|
+
/**
 * Validates that exactly one of `prompt` / `messages` is set and normalizes
 * the input into a tagged union: { type: "prompt" } or { type: "messages" }.
 * Throws InvalidPromptError when neither or both are provided.
 */
function getValidatedPrompt(prompt) {
  const hasPrompt = prompt.prompt != null;
  const hasMessages = prompt.messages != null;
  if (!hasPrompt && !hasMessages) {
    throw new import_provider2.InvalidPromptError({
      prompt,
      message: "prompt or messages must be defined"
    });
  }
  if (hasPrompt && hasMessages) {
    throw new import_provider2.InvalidPromptError({
      prompt,
      message: "prompt and messages cannot be defined at the same time"
    });
  }
  if (hasPrompt) {
    return {
      type: "prompt",
      prompt: prompt.prompt,
      messages: void 0,
      system: prompt.system
    };
  }
  // only possible case bc of checks above
  return {
    type: "messages",
    prompt: void 0,
    messages: prompt.messages,
    system: prompt.system
  };
}
|
244
|
+
|
245
|
+
// core/prompt/prepare-call-settings.ts
|
246
|
+
var import_provider3 = require("@ai-sdk/provider");
|
247
|
+
/**
 * Validates language-model call settings and applies defaults
 * (temperature: 0, maxRetries: 2). Unset optional values pass through as
 * undefined. Throws InvalidArgumentError for out-of-range or wrong-typed
 * values.
 */
function prepareCallSettings({
  maxTokens,
  temperature,
  topP,
  presencePenalty,
  frequencyPenalty,
  seed,
  maxRetries
}) {
  // Single throw site so all validation errors share the same shape.
  const fail = (parameter, value, message) => {
    throw new import_provider3.InvalidArgumentError({ parameter, value, message });
  };
  if (maxTokens != null) {
    if (!Number.isInteger(maxTokens)) {
      fail("maxTokens", maxTokens, "maxTokens must be an integer");
    }
    if (maxTokens < 1) {
      fail("maxTokens", maxTokens, "maxTokens must be >= 1");
    }
  }
  if (temperature != null) {
    if (typeof temperature !== "number") {
      fail("temperature", temperature, "temperature must be a number");
    }
    if (temperature < 0 || temperature > 1) {
      fail("temperature", temperature, "temperature must be between 0 and 1 (inclusive)");
    }
  }
  if (topP != null) {
    if (typeof topP !== "number") {
      fail("topP", topP, "topP must be a number");
    }
    if (topP < 0 || topP > 1) {
      fail("topP", topP, "topP must be between 0 and 1 (inclusive)");
    }
  }
  if (presencePenalty != null) {
    if (typeof presencePenalty !== "number") {
      fail("presencePenalty", presencePenalty, "presencePenalty must be a number");
    }
    if (presencePenalty < -1 || presencePenalty > 1) {
      fail("presencePenalty", presencePenalty, "presencePenalty must be between -1 and 1 (inclusive)");
    }
  }
  if (frequencyPenalty != null) {
    if (typeof frequencyPenalty !== "number") {
      fail("frequencyPenalty", frequencyPenalty, "frequencyPenalty must be a number");
    }
    if (frequencyPenalty < -1 || frequencyPenalty > 1) {
      fail("frequencyPenalty", frequencyPenalty, "frequencyPenalty must be between -1 and 1 (inclusive)");
    }
  }
  if (seed != null && !Number.isInteger(seed)) {
    fail("seed", seed, "seed must be an integer");
  }
  if (maxRetries != null) {
    if (!Number.isInteger(maxRetries)) {
      fail("maxRetries", maxRetries, "maxRetries must be an integer");
    }
    if (maxRetries < 0) {
      fail("maxRetries", maxRetries, "maxRetries must be >= 0");
    }
  }
  return {
    maxTokens,
    temperature: temperature != null ? temperature : 0,
    topP,
    presencePenalty,
    frequencyPenalty,
    seed,
    maxRetries: maxRetries != null ? maxRetries : 2
  };
}
|
371
|
+
|
372
|
+
// core/util/convert-zod-to-json-schema.ts
|
373
|
+
var import_zod_to_json_schema = __toESM(require("zod-to-json-schema"));
|
374
|
+
/**
 * Converts a Zod schema into a JSON schema using the zod-to-json-schema
 * package (interop default export).
 */
function convertZodToJSONSchema(zodSchema) {
  const toJSONSchema = import_zod_to_json_schema.default;
  return toJSONSchema(zodSchema);
}
|
377
|
+
|
378
|
+
// core/util/retry-with-exponential-backoff.ts
|
379
|
+
var import_provider4 = require("@ai-sdk/provider");
|
380
|
+
var import_provider_utils2 = require("@ai-sdk/provider-utils");
|
381
|
+
|
382
|
+
// core/util/delay.ts
|
383
|
+
/**
 * Sleeps for `delayInMs` milliseconds (setTimeout-backed; resolves with
 * undefined).
 */
async function delay(delayInMs) {
  await new Promise((resolve) => {
    setTimeout(resolve, delayInMs);
  });
}
|
386
|
+
|
387
|
+
// core/util/retry-with-exponential-backoff.ts
|
388
|
+
/**
 * Creates a retry wrapper with exponential backoff.
 * Defaults: 2 retries, 2s initial delay, backoff factor 2.
 * The returned function invokes `f` and retries retryable API-call errors.
 */
var retryWithExponentialBackoff = ({
  maxRetries = 2,
  initialDelayInMs = 2e3,
  backoffFactor = 2
} = {}) => async (f) => _retryWithExponentialBackoff(f, {
  maxRetries,
  delayInMs: initialDelayInMs,
  backoffFactor
});
/**
 * Recursive retry driver. `errors` accumulates all failures so the final
 * RetryError can report the full history.
 */
async function _retryWithExponentialBackoff(f, {
  maxRetries,
  delayInMs,
  backoffFactor
}, errors = []) {
  try {
    return await f();
  } catch (error) {
    // User-initiated aborts are never retried.
    if (error instanceof Error && error.name === "AbortError") {
      throw error;
    }
    // Retries disabled: surface the original error unchanged.
    if (maxRetries === 0) {
      throw error;
    }
    const errorMessage = (0, import_provider_utils2.getErrorMessage)(error);
    const newErrors = [...errors, error];
    const tryNumber = newErrors.length;
    if (tryNumber > maxRetries) {
      throw new import_provider4.RetryError({
        message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
        reason: "maxRetriesExceeded",
        errors: newErrors
      });
    }
    // Only retryable API-call errors trigger another attempt, with the
    // delay scaled by the backoff factor.
    const shouldRetry = error instanceof Error && import_provider4.APICallError.isAPICallError(error) && error.isRetryable === true && tryNumber <= maxRetries;
    if (shouldRetry) {
      await delay(delayInMs);
      return _retryWithExponentialBackoff(
        f,
        { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
        newErrors
      );
    }
    // Non-retryable first failure: rethrow the original error as-is.
    if (tryNumber === 1) {
      throw error;
    }
    throw new import_provider4.RetryError({
      message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
      reason: "errorNotRetryable",
      errors: newErrors
    });
  }
}
|
439
|
+
|
440
|
+
// core/generate-object/inject-json-schema-into-system.ts
|
441
|
+
var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
|
442
|
+
var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
|
443
|
+
function injectJsonSchemaIntoSystem({
|
444
|
+
system,
|
445
|
+
schema,
|
446
|
+
schemaPrefix = DEFAULT_SCHEMA_PREFIX,
|
447
|
+
schemaSuffix = DEFAULT_SCHEMA_SUFFIX
|
448
|
+
}) {
|
449
|
+
return [
|
450
|
+
system,
|
451
|
+
system != null ? "" : null,
|
452
|
+
// add a newline if system is not null
|
453
|
+
schemaPrefix,
|
454
|
+
JSON.stringify(schema),
|
455
|
+
schemaSuffix
|
456
|
+
].filter((line) => line != null).join("\n");
|
457
|
+
}
|
458
|
+
|
459
|
+
// core/generate-object/generate-object.ts
|
460
|
+
// Generates a typed object from a language model, validated against a Zod
// schema. Dispatches on the object-generation mode ("json" | "grammar" |
// "tool"); "auto"/null resolve to the model's default mode. Throws
// NoTextGeneratedError when the model returns no usable text/tool-call, and
// rethrows the parse error when the output does not match `schema`.
async function experimental_generateObject({
  model,
  schema,
  mode,
  system,
  prompt,
  messages,
  maxRetries,
  abortSignal,
  ...settings
}) {
  var _a, _b;
  const retry = retryWithExponentialBackoff({ maxRetries });
  const jsonSchema = convertZodToJSONSchema(schema);
  if (mode === "auto" || mode == null) {
    mode = model.defaultObjectGenerationMode;
  }
  let result;
  let finishReason;
  let usage;
  let warnings;
  switch (mode) {
    case "json": {
      // Schema is injected into the system prompt; the model is asked to
      // emit raw JSON text.
      const validatedPrompt = getValidatedPrompt({
        system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
        prompt,
        messages
      });
      const generateResult = await retry(() => {
        return model.doGenerate({
          mode: { type: "object-json" },
          // NOTE(review): only this mode runs settings through
          // prepareCallSettings; "grammar" and "tool" spread raw settings —
          // confirm whether that asymmetry is intentional.
          ...prepareCallSettings(settings),
          inputFormat: validatedPrompt.type,
          prompt: convertToLanguageModelPrompt(validatedPrompt),
          abortSignal
        });
      });
      if (generateResult.text === void 0) {
        throw new import_provider5.NoTextGeneratedError();
      }
      result = generateResult.text;
      finishReason = generateResult.finishReason;
      usage = generateResult.usage;
      warnings = generateResult.warnings;
      break;
    }
    case "grammar": {
      // Same prompt injection as "json", but the provider also receives the
      // schema as a grammar constraint.
      const validatedPrompt = getValidatedPrompt({
        system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
        prompt,
        messages
      });
      const generateResult = await retry(
        () => model.doGenerate({
          mode: { type: "object-grammar", schema: jsonSchema },
          ...settings,
          inputFormat: validatedPrompt.type,
          prompt: convertToLanguageModelPrompt(validatedPrompt),
          abortSignal
        })
      );
      if (generateResult.text === void 0) {
        throw new import_provider5.NoTextGeneratedError();
      }
      result = generateResult.text;
      finishReason = generateResult.finishReason;
      usage = generateResult.usage;
      warnings = generateResult.warnings;
      break;
    }
    case "tool": {
      // The schema is exposed as a forced function tool named "json"; the
      // object is read from the first tool call's arguments.
      const validatedPrompt = getValidatedPrompt({
        system,
        prompt,
        messages
      });
      const generateResult = await retry(
        () => model.doGenerate({
          mode: {
            type: "object-tool",
            tool: {
              type: "function",
              name: "json",
              description: "Respond with a JSON object.",
              parameters: jsonSchema
            }
          },
          ...settings,
          inputFormat: validatedPrompt.type,
          prompt: convertToLanguageModelPrompt(validatedPrompt),
          abortSignal
        })
      );
      const functionArgs = (_b = (_a = generateResult.toolCalls) == null ? void 0 : _a[0]) == null ? void 0 : _b.args;
      if (functionArgs === void 0) {
        throw new import_provider5.NoTextGeneratedError();
      }
      result = functionArgs;
      finishReason = generateResult.finishReason;
      usage = generateResult.usage;
      warnings = generateResult.warnings;
      break;
    }
    case void 0: {
      // mode was "auto"/null and the model declares no default mode.
      throw new Error("Model does not have a default object generation mode.");
    }
    default: {
      // Exhaustiveness guard for unknown mode strings.
      const _exhaustiveCheck = mode;
      throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
    }
  }
  // Validate the raw JSON text / tool args against the original Zod schema.
  const parseResult = (0, import_provider_utils3.safeParseJSON)({ text: result, schema });
  if (!parseResult.success) {
    throw parseResult.error;
  }
  return new GenerateObjectResult({
    object: parseResult.value,
    finishReason,
    usage: calculateTokenUsage(usage),
    warnings
  });
}
|
582
|
+
/**
 * Result container for `experimental_generateObject`: holds the parsed
 * object, the finish reason, normalized token usage, and provider warnings.
 */
var GenerateObjectResult = class {
  constructor(options) {
    const { object, finishReason, usage, warnings } = options;
    this.object = object;
    this.finishReason = finishReason;
    this.usage = usage;
    this.warnings = warnings;
  }
};
|
590
|
+
|
591
|
+
// core/util/async-iterable-stream.ts
|
592
|
+
/**
 * Pipes `source` through a TransformStream built from `transformer` and
 * patches the resulting stream with Symbol.asyncIterator so it can be
 * consumed with `for await`. Iterating locks the stream via getReader().
 */
function createAsyncIterableStream(source, transformer) {
  const stream = source.pipeThrough(new TransformStream(transformer));
  stream[Symbol.asyncIterator] = () => {
    const reader = stream.getReader();
    return {
      async next() {
        const { done, value } = await reader.read();
        if (done) {
          return { done: true, value: void 0 };
        }
        return { done: false, value };
      }
    };
  };
  return stream;
}
|
607
|
+
|
608
|
+
// core/util/is-deep-equal-data.ts
|
609
|
+
/**
 * Structural deep equality for JSON-like data, plus Date (compared by
 * timestamp). Plain objects are compared by own enumerable keys; values
 * with different constructors are never equal.
 */
function isDeepEqualData(obj1, obj2) {
  if (obj1 === obj2) return true;
  if (obj1 == null || obj2 == null) return false;
  if (typeof obj1 !== "object" && typeof obj2 !== "object") return obj1 === obj2;
  if (obj1.constructor !== obj2.constructor) return false;
  if (obj1 instanceof Date && obj2 instanceof Date) {
    return obj1.getTime() === obj2.getTime();
  }
  if (Array.isArray(obj1)) {
    return obj1.length === obj2.length &&
      obj1.every((item, index) => isDeepEqualData(item, obj2[index]));
  }
  const keys1 = Object.keys(obj1);
  const keys2 = Object.keys(obj2);
  if (keys1.length !== keys2.length) return false;
  return keys1.every(
    (key) => keys2.includes(key) && isDeepEqualData(obj1[key], obj2[key])
  );
}
|
642
|
+
|
643
|
+
// core/util/parse-partial-json.ts
|
644
|
+
var import_secure_json_parse = __toESM(require("secure-json-parse"));
|
645
|
+
|
646
|
+
// core/util/fix-json.ts
|
647
|
+
// Repairs a truncated JSON string (e.g. a partial streaming response) into
// parseable JSON. Implements a single-pass pushdown automaton: `stack` holds
// the current parse states, `lastValidIndex` tracks the last character that
// can be kept, and after the scan the input is cut at that index and every
// still-open construct on the stack is closed (quotes, braces, brackets,
// completed literals). Whitespace handling is minimal: states that don't
// match a character simply ignore it.
function fixJson(input) {
  const stack = ["ROOT"];
  // Index of the last character that belongs to a well-formed prefix.
  let lastValidIndex = -1;
  // Start index of the literal currently being read (true/false/null).
  let literalStart = null;
  // Handles a character that may begin a JSON value; replaces the top of
  // the stack with `swapState` (where to resume after the value) and pushes
  // the state for the value being opened.
  function processValueStart(char, i, swapState) {
    {
      switch (char) {
        case '"': {
          lastValidIndex = i;
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_STRING");
          break;
        }
        case "f":
        case "t":
        case "n": {
          // first letter of false / true / null
          lastValidIndex = i;
          literalStart = i;
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_LITERAL");
          break;
        }
        case "-": {
          // a lone "-" is not yet valid JSON, so lastValidIndex is not moved
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_NUMBER");
          break;
        }
        case "0":
        case "1":
        case "2":
        case "3":
        case "4":
        case "5":
        case "6":
        case "7":
        case "8":
        case "9": {
          lastValidIndex = i;
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_NUMBER");
          break;
        }
        case "{": {
          lastValidIndex = i;
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_OBJECT_START");
          break;
        }
        case "[": {
          lastValidIndex = i;
          stack.pop();
          stack.push(swapState);
          stack.push("INSIDE_ARRAY_START");
          break;
        }
      }
    }
  }
  // Handles "," or "}" after a completed object member value.
  function processAfterObjectValue(char, i) {
    switch (char) {
      case ",": {
        stack.pop();
        stack.push("INSIDE_OBJECT_AFTER_COMMA");
        break;
      }
      case "}": {
        lastValidIndex = i;
        stack.pop();
        break;
      }
    }
  }
  // Handles "," or "]" after a completed array element.
  function processAfterArrayValue(char, i) {
    switch (char) {
      case ",": {
        stack.pop();
        stack.push("INSIDE_ARRAY_AFTER_COMMA");
        break;
      }
      case "]": {
        lastValidIndex = i;
        stack.pop();
        break;
      }
    }
  }
  // Main scan: drive the state machine over every input character.
  for (let i = 0; i < input.length; i++) {
    const char = input[i];
    const currentState = stack[stack.length - 1];
    switch (currentState) {
      case "ROOT":
        processValueStart(char, i, "FINISH");
        break;
      case "INSIDE_OBJECT_START": {
        switch (char) {
          case '"': {
            stack.pop();
            stack.push("INSIDE_OBJECT_KEY");
            break;
          }
          case "}": {
            // empty object
            stack.pop();
            break;
          }
        }
        break;
      }
      case "INSIDE_OBJECT_AFTER_COMMA": {
        switch (char) {
          case '"': {
            stack.pop();
            stack.push("INSIDE_OBJECT_KEY");
            break;
          }
        }
        break;
      }
      case "INSIDE_OBJECT_KEY": {
        switch (char) {
          case '"': {
            // closing quote of the key
            stack.pop();
            stack.push("INSIDE_OBJECT_AFTER_KEY");
            break;
          }
        }
        break;
      }
      case "INSIDE_OBJECT_AFTER_KEY": {
        switch (char) {
          case ":": {
            stack.pop();
            stack.push("INSIDE_OBJECT_BEFORE_VALUE");
            break;
          }
        }
        break;
      }
      case "INSIDE_OBJECT_BEFORE_VALUE": {
        processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
        break;
      }
      case "INSIDE_OBJECT_AFTER_VALUE": {
        processAfterObjectValue(char, i);
        break;
      }
      case "INSIDE_STRING": {
        switch (char) {
          case '"': {
            stack.pop();
            lastValidIndex = i;
            break;
          }
          case "\\": {
            // a trailing backslash must not be kept, so lastValidIndex stays
            stack.push("INSIDE_STRING_ESCAPE");
            break;
          }
          default: {
            lastValidIndex = i;
          }
        }
        break;
      }
      case "INSIDE_ARRAY_START": {
        switch (char) {
          case "]": {
            // empty array
            lastValidIndex = i;
            stack.pop();
            break;
          }
          default: {
            lastValidIndex = i;
            processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
            break;
          }
        }
        break;
      }
      case "INSIDE_ARRAY_AFTER_VALUE": {
        switch (char) {
          case ",": {
            stack.pop();
            stack.push("INSIDE_ARRAY_AFTER_COMMA");
            break;
          }
          case "]": {
            lastValidIndex = i;
            stack.pop();
            break;
          }
          default: {
            lastValidIndex = i;
            break;
          }
        }
        break;
      }
      case "INSIDE_ARRAY_AFTER_COMMA": {
        processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
        break;
      }
      case "INSIDE_STRING_ESCAPE": {
        // any escaped character completes the escape sequence
        stack.pop();
        lastValidIndex = i;
        break;
      }
      case "INSIDE_NUMBER": {
        switch (char) {
          case "0":
          case "1":
          case "2":
          case "3":
          case "4":
          case "5":
          case "6":
          case "7":
          case "8":
          case "9": {
            lastValidIndex = i;
            break;
          }
          case "e":
          case "E":
          case "-":
          case ".": {
            // exponent/sign/decimal point: number is not yet valid at this
            // character, so lastValidIndex is not advanced
            break;
          }
          case ",": {
            // number terminated by a separator: close it, then let the
            // enclosing container state consume the comma
            stack.pop();
            if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
              processAfterArrayValue(char, i);
            }
            if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
              processAfterObjectValue(char, i);
            }
            break;
          }
          case "}": {
            stack.pop();
            if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
              processAfterObjectValue(char, i);
            }
            break;
          }
          case "]": {
            stack.pop();
            if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
              processAfterArrayValue(char, i);
            }
            break;
          }
          default: {
            // any other character ends the number
            stack.pop();
            break;
          }
        }
        break;
      }
      case "INSIDE_LITERAL": {
        const partialLiteral = input.substring(literalStart, i + 1);
        if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
          // current char no longer extends a valid literal: close it and let
          // the enclosing container state consume the char
          stack.pop();
          if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
            processAfterObjectValue(char, i);
          } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
            processAfterArrayValue(char, i);
          }
        } else {
          lastValidIndex = i;
        }
        break;
      }
    }
  }
  // Keep the valid prefix, then close every construct still open on the
  // stack, from innermost (top) to outermost.
  let result = input.slice(0, lastValidIndex + 1);
  for (let i = stack.length - 1; i >= 0; i--) {
    const state = stack[i];
    switch (state) {
      case "INSIDE_STRING": {
        result += '"';
        break;
      }
      case "INSIDE_OBJECT_KEY":
      case "INSIDE_OBJECT_AFTER_KEY":
      case "INSIDE_OBJECT_AFTER_COMMA":
      case "INSIDE_OBJECT_START":
      case "INSIDE_OBJECT_BEFORE_VALUE":
      case "INSIDE_OBJECT_AFTER_VALUE": {
        result += "}";
        break;
      }
      case "INSIDE_ARRAY_START":
      case "INSIDE_ARRAY_AFTER_COMMA":
      case "INSIDE_ARRAY_AFTER_VALUE": {
        result += "]";
        break;
      }
      case "INSIDE_LITERAL": {
        // complete a truncated true/false/null from its unambiguous prefix
        const partialLiteral = input.substring(literalStart, input.length);
        if ("true".startsWith(partialLiteral)) {
          result += "true".slice(partialLiteral.length);
        } else if ("false".startsWith(partialLiteral)) {
          result += "false".slice(partialLiteral.length);
        } else if ("null".startsWith(partialLiteral)) {
          result += "null".slice(partialLiteral.length);
        }
      }
    }
  }
  return result;
}
|
962
|
+
|
963
|
+
// core/util/parse-partial-json.ts
/**
 * Best-effort parser for (possibly incomplete) JSON text from a stream.
 *
 * Tries two candidates in order: the text as-is, then the text repaired by
 * `fixJson` (which closes truncated strings/objects/arrays/literals).
 * Returns the first successfully parsed value, or `undefined` when the text
 * is nullish or neither candidate parses.
 */
function parsePartialJson(jsonText) {
  if (jsonText == null) {
    return void 0;
  }
  const candidates = [
    () => jsonText,
    // Second attempt: repair the truncated JSON before parsing.
    () => fixJson(jsonText)
  ];
  for (const getCandidate of candidates) {
    try {
      return import_secure_json_parse.default.parse(getCandidate());
    } catch (parseError) {
      // Ignore and fall through to the next candidate (or undefined).
    }
  }
  return void 0;
}
|
979
|
+
|
980
|
+
// core/generate-object/stream-object.ts
/**
 * Streams a structured object from a language model as partial JSON text.
 *
 * The object is described by the zod `schema`. Depending on `mode`, the
 * schema is either injected into the system prompt ("json", "grammar") or
 * exposed as a forced tool call ("tool"). "auto" or a missing mode falls back
 * to the model's `defaultObjectGenerationMode`.
 *
 * @returns a StreamObjectResult wrapping the raw text-delta stream.
 * @throws Error when no mode can be resolved or the mode is unknown.
 */
async function experimental_streamObject({
  model,
  schema,
  mode,
  system,
  prompt,
  messages,
  maxRetries,
  abortSignal,
  ...settings
}) {
  const retry = retryWithExponentialBackoff({ maxRetries });
  const jsonSchema = convertZodToJSONSchema(schema);
  if (mode === "auto" || mode == null) {
    mode = model.defaultObjectGenerationMode;
  }
  // "json" and "grammar" both stream the object as plain text deltas, so they
  // share one transformer; error chunks are forwarded unchanged.
  const textDeltaTransformer = {
    transform: (chunk, controller) => {
      switch (chunk.type) {
        case "text-delta":
          controller.enqueue(chunk.textDelta);
          break;
        case "error":
          controller.enqueue(chunk);
          break;
      }
    }
  };
  let callOptions;
  let transformer;
  switch (mode) {
    case "json": {
      const validatedPrompt = getValidatedPrompt({
        system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
        prompt,
        messages
      });
      callOptions = {
        mode: { type: "object-json" },
        ...prepareCallSettings(settings),
        inputFormat: validatedPrompt.type,
        prompt: convertToLanguageModelPrompt(validatedPrompt),
        abortSignal
      };
      transformer = textDeltaTransformer;
      break;
    }
    case "grammar": {
      const validatedPrompt = getValidatedPrompt({
        system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
        prompt,
        messages
      });
      callOptions = {
        mode: { type: "object-grammar", schema: jsonSchema },
        // Fix: validate/normalize call settings like the "json" mode does
        // (previously the raw `settings` object was spread here).
        ...prepareCallSettings(settings),
        inputFormat: validatedPrompt.type,
        prompt: convertToLanguageModelPrompt(validatedPrompt),
        abortSignal
      };
      transformer = textDeltaTransformer;
      break;
    }
    case "tool": {
      const validatedPrompt = getValidatedPrompt({
        system,
        prompt,
        messages
      });
      callOptions = {
        mode: {
          type: "object-tool",
          tool: {
            type: "function",
            name: "json",
            description: "Respond with a JSON object.",
            parameters: jsonSchema
          }
        },
        // Fix: validate/normalize call settings like the "json" mode does.
        ...prepareCallSettings(settings),
        inputFormat: validatedPrompt.type,
        prompt: convertToLanguageModelPrompt(validatedPrompt),
        abortSignal
      };
      // In tool mode the object arrives as tool-call argument text deltas.
      transformer = {
        transform(chunk, controller) {
          switch (chunk.type) {
            case "tool-call-delta":
              controller.enqueue(chunk.argsTextDelta);
              break;
            case "error":
              controller.enqueue(chunk);
              break;
          }
        }
      };
      break;
    }
    case void 0: {
      throw new Error("Model does not have a default object generation mode.");
    }
    default: {
      const _exhaustiveCheck = mode;
      throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
    }
  }
  const result = await retry(() => model.doStream(callOptions));
  return new StreamObjectResult({
    stream: result.stream.pipeThrough(new TransformStream(transformer)),
    warnings: result.warnings
  });
}
|
1103
|
+
/**
 * Result wrapper for `experimental_streamObject`.
 *
 * Holds the raw text-delta stream plus any provider warnings, and exposes a
 * stream of progressively-more-complete partial objects.
 */
var StreamObjectResult = class {
  constructor({
    stream,
    warnings
  }) {
    this.originalStream = stream;
    this.warnings = warnings;
  }
  /**
   * Stream of partial objects. Text deltas are accumulated and re-parsed with
   * `parsePartialJson`; a new value is emitted only when the parsed object
   * actually changed (deep comparison). Error chunks are rethrown.
   */
  get partialObjectStream() {
    let bufferedText = "";
    let lastEmittedObject = void 0;
    return createAsyncIterableStream(this.originalStream, {
      transform(chunk, controller) {
        if (typeof chunk === "object" && chunk.type === "error") {
          // Surface stream errors to the consumer.
          throw chunk.error;
        }
        if (typeof chunk !== "string") {
          return;
        }
        bufferedText += chunk;
        const parsedObject = parsePartialJson(bufferedText);
        // Suppress emissions when the partial parse did not change the object.
        if (!isDeepEqualData(lastEmittedObject, parsedObject)) {
          lastEmittedObject = parsedObject;
          controller.enqueue(parsedObject);
        }
      }
    });
  }
};
|
1133
|
+
|
1134
|
+
// core/generate-text/tool-call.ts
|
1135
|
+
var import_provider6 = require("@ai-sdk/provider");
|
1136
|
+
var import_provider_utils4 = require("@ai-sdk/provider-utils");
|
1137
|
+
/**
 * Validates a raw model tool call against the registered tools and parses its
 * JSON `args` string with the matching tool's zod schema.
 *
 * @returns a normalized `{ type: "tool-call", toolCallId, toolName, args }`.
 * @throws NoSuchToolError when no tools are registered or the name is unknown.
 * @throws InvalidToolArgumentsError when the args fail schema validation.
 */
function parseToolCall({
  toolCall,
  tools
}) {
  const { toolName, toolCallId, args } = toolCall;
  if (tools == null) {
    // No tools registered at all — any tool call is invalid.
    throw new import_provider6.NoSuchToolError({ toolName });
  }
  const matchingTool = tools[toolName];
  if (matchingTool == null) {
    throw new import_provider6.NoSuchToolError({
      toolName,
      availableTools: Object.keys(tools)
    });
  }
  const parseResult = (0, import_provider_utils4.safeParseJSON)({
    text: args,
    schema: matchingTool.parameters
  });
  if (parseResult.success === false) {
    throw new import_provider6.InvalidToolArgumentsError({
      toolName,
      toolArgs: args,
      cause: parseResult.error
    });
  }
  return {
    type: "tool-call",
    toolCallId,
    toolName,
    args: parseResult.value
  };
}
|
1170
|
+
|
1171
|
+
// core/generate-text/generate-text.ts
/**
 * Generates text with a language model (single, non-streaming call) and
 * executes any tool calls it produced.
 *
 * @returns a GenerateTextResult with text, tool calls/results, finish reason,
 *          token usage, and provider warnings.
 */
async function experimental_generateText({
  model,
  tools,
  system,
  prompt,
  messages,
  maxRetries,
  abortSignal,
  ...settings
}) {
  const retry = retryWithExponentialBackoff({ maxRetries });
  const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
  const modelResponse = await retry(
    () => model.doGenerate({
      mode: {
        type: "regular",
        // Advertise each registered tool in the provider-neutral
        // function-tool format.
        tools: tools == null ? void 0 : Object.entries(tools).map(([name, toolDefinition]) => ({
          type: "function",
          name,
          description: toolDefinition.description,
          parameters: convertZodToJSONSchema(toolDefinition.parameters)
        }))
      },
      ...prepareCallSettings(settings),
      inputFormat: validatedPrompt.type,
      prompt: convertToLanguageModelPrompt(validatedPrompt),
      abortSignal
    })
  );
  // Validate every reported tool call against the registered tools.
  const rawToolCalls = modelResponse.toolCalls != null ? modelResponse.toolCalls : [];
  const toolCalls = rawToolCalls.map(
    (modelToolCall) => parseToolCall({ toolCall: modelToolCall, tools })
  );
  const toolResults = tools == null ? [] : await executeTools({ toolCalls, tools });
  return new GenerateTextResult({
    // Always return a string so that the caller doesn't have to check for
    // undefined. An empty model response is detectable via text.length === 0.
    text: modelResponse.text != null ? modelResponse.text : "",
    toolCalls,
    toolResults,
    finishReason: modelResponse.finishReason,
    usage: calculateTokenUsage(modelResponse.usage),
    warnings: modelResponse.warnings
  });
}
|
1219
|
+
/**
 * Executes all parsed tool calls in parallel.
 *
 * Calls without a matching executable tool (no entry or no `execute`
 * function) are silently dropped from the result list.
 *
 * @returns array of `{ toolCallId, toolName, args, result }` entries, in the
 *          same relative order as the executable tool calls.
 */
async function executeTools({
  toolCalls,
  tools
}) {
  // Run every execution concurrently; non-executable calls resolve to undefined.
  const settled = await Promise.all(
    toolCalls.map(async (call) => {
      const candidate = tools[call.toolName];
      if (candidate == null || candidate.execute == null) {
        return void 0;
      }
      const executionResult = await candidate.execute(call.args);
      return {
        toolCallId: call.toolCallId,
        toolName: call.toolName,
        args: call.args,
        result: executionResult
      };
    })
  );
  return settled.filter(
    (entry) => entry != null
  );
}
|
1242
|
+
/**
 * Plain result container for `experimental_generateText`.
 *
 * Carries the generated text, validated tool calls, executed tool results,
 * finish reason, normalized token usage, and provider warnings.
 */
var GenerateTextResult = class {
  constructor({ text, toolCalls, toolResults, finishReason, usage, warnings }) {
    this.text = text;
    this.toolCalls = toolCalls;
    this.toolResults = toolResults;
    this.finishReason = finishReason;
    this.usage = usage;
    this.warnings = warnings;
  }
};
|
1252
|
+
|
1253
|
+
// core/generate-text/run-tools-transformation.ts
|
1254
|
+
var import_provider7 = require("@ai-sdk/provider");
|
1255
|
+
|
1256
|
+
// shared/generate-id.ts
// nanoid's non-secure variant is used deliberately: these IDs only need to be
// unique within a process (e.g. tracking in-flight tool executions), not
// cryptographically unpredictable.
var import_non_secure = require("nanoid/non-secure");
// Generates a 7-character alphanumeric ID from a 62-character alphabet.
var generateId = (0, import_non_secure.customAlphabet)(
  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
  7
);
|
1262
|
+
|
1263
|
+
// core/generate-text/run-tools-transformation.ts
/**
 * Transforms a raw model event stream into a merged stream that also contains
 * executed tool results.
 *
 * Model chunks flow through `forwardStream`; tool executions run
 * asynchronously and push their results (or errors) into a side-channel
 * `toolResultsStream`. Both are merged into the returned ReadableStream,
 * which only closes once the model stream has flushed AND every outstanding
 * tool execution has settled.
 */
function runToolsTransformation({
  tools,
  generatorStream
}) {
  // Becomes true when the model stream has flushed; the results stream may
  // only close after that and once no executions are outstanding.
  let canClose = false;
  // IDs of tool executions that have started but not yet settled.
  const outstandingToolCalls = /* @__PURE__ */ new Set();
  // Side channel for asynchronously produced tool results/errors.
  let toolResultsStreamController = null;
  const toolResultsStream = new ReadableStream({
    start(controller) {
      toolResultsStreamController = controller;
    }
  });
  const forwardStream = new TransformStream({
    transform(chunk, controller) {
      const chunkType = chunk.type;
      switch (chunkType) {
        case "text-delta":
        case "error": {
          // Pass through unchanged.
          controller.enqueue(chunk);
          break;
        }
        case "tool-call": {
          const toolName = chunk.toolName;
          if (tools == null) {
            // No tools registered: report as an error on the side channel,
            // but keep the stream itself alive.
            toolResultsStreamController.enqueue({
              type: "error",
              error: new import_provider7.NoSuchToolError({ toolName: chunk.toolName })
            });
            break;
          }
          const tool2 = tools[toolName];
          if (tool2 == null) {
            toolResultsStreamController.enqueue({
              type: "error",
              error: new import_provider7.NoSuchToolError({
                toolName: chunk.toolName,
                availableTools: Object.keys(tools)
              })
            });
            break;
          }
          try {
            // Validates the call and parses its arguments; throws on failure.
            const toolCall = parseToolCall({
              toolCall: chunk,
              tools
            });
            controller.enqueue(toolCall);
            if (tool2.execute != null) {
              const toolExecutionId = generateId();
              outstandingToolCalls.add(toolExecutionId);
              // Deliberately not awaited: execution runs in the background
              // and reports through the side channel.
              tool2.execute(toolCall.args).then(
                (result) => {
                  toolResultsStreamController.enqueue({
                    ...toolCall,
                    type: "tool-result",
                    result
                  });
                  outstandingToolCalls.delete(toolExecutionId);
                  // Close once the model stream flushed and this was the
                  // last outstanding execution.
                  if (canClose && outstandingToolCalls.size === 0) {
                    toolResultsStreamController.close();
                  }
                },
                (error) => {
                  toolResultsStreamController.enqueue({
                    type: "error",
                    error
                  });
                  outstandingToolCalls.delete(toolExecutionId);
                  if (canClose && outstandingToolCalls.size === 0) {
                    toolResultsStreamController.close();
                  }
                }
              );
            }
          } catch (error) {
            // parseToolCall failures (unknown tool / invalid args).
            toolResultsStreamController.enqueue({
              type: "error",
              error
            });
          }
          break;
        }
        case "finish": {
          // Normalize usage by adding the derived total.
          controller.enqueue({
            type: "finish",
            finishReason: chunk.finishReason,
            usage: {
              promptTokens: chunk.usage.promptTokens,
              completionTokens: chunk.usage.completionTokens,
              totalTokens: chunk.usage.promptTokens + chunk.usage.completionTokens
            }
          });
          break;
        }
        case "tool-call-delta": {
          // Argument deltas are intentionally dropped here; only complete
          // tool calls are forwarded.
          break;
        }
        default: {
          const _exhaustiveCheck = chunkType;
          throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);
        }
      }
    },
    flush() {
      canClose = true;
      if (outstandingToolCalls.size === 0) {
        toolResultsStreamController.close();
      }
    }
  });
  // Merge the forwarded model stream and the tool-results side channel.
  // Only the tool-results sink closes the merged stream (see flush() above
  // for the ordering guarantee).
  return new ReadableStream({
    async start(controller) {
      generatorStream.pipeThrough(forwardStream).pipeTo(
        new WritableStream({
          write(chunk) {
            controller.enqueue(chunk);
          },
          close() {
          }
        })
      );
      toolResultsStream.pipeTo(
        new WritableStream({
          write(chunk) {
            controller.enqueue(chunk);
          },
          close() {
            controller.close();
          }
        })
      );
    }
  });
}
|
1398
|
+
|
1399
|
+
// core/generate-text/stream-text.ts
/**
 * Streams text with a language model, transparently executing tool calls via
 * `runToolsTransformation`.
 *
 * @returns a StreamTextResult exposing text-only and full event streams.
 */
async function experimental_streamText({
  model,
  tools,
  system,
  prompt,
  messages,
  maxRetries,
  abortSignal,
  ...settings
}) {
  const retry = retryWithExponentialBackoff({ maxRetries });
  const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
  const { stream, warnings } = await retry(
    () => model.doStream({
      mode: {
        type: "regular",
        // Advertise each registered tool in the provider-neutral
        // function-tool format.
        tools: tools == null ? void 0 : Object.entries(tools).map(([name, toolDefinition]) => ({
          type: "function",
          name,
          description: toolDefinition.description,
          parameters: convertZodToJSONSchema(toolDefinition.parameters)
        }))
      },
      ...prepareCallSettings(settings),
      inputFormat: validatedPrompt.type,
      prompt: convertToLanguageModelPrompt(validatedPrompt),
      abortSignal
    })
  );
  return new StreamTextResult({
    stream: runToolsTransformation({
      tools,
      generatorStream: stream
    }),
    warnings
  });
}
|
1437
|
+
/**
 * Result wrapper for `experimental_streamText`.
 *
 * Holds the merged model/tool event stream plus provider warnings, and
 * exposes several views over it (text-only stream, full event stream,
 * AIStream adapter, plain-text HTTP response).
 */
var StreamTextResult = class {
  constructor({
    stream,
    warnings
  }) {
    this.originalStream = stream;
    this.warnings = warnings;
  }
  /**
  A text stream that returns only the generated text deltas. You can use it
  as either an AsyncIterable or a ReadableStream. When an error occurs, the
  stream will throw the error.
  */
  get textStream() {
    return createAsyncIterableStream(this.originalStream, {
      transform(chunk, controller) {
        switch (chunk.type) {
          case "text-delta":
            // Suppress empty deltas so consumers only ever see real text.
            if (chunk.textDelta.length > 0) {
              controller.enqueue(chunk.textDelta);
            }
            break;
          case "error":
            throw chunk.error;
        }
      }
    });
  }
  /**
  A stream with all events, including text deltas, tool calls, tool results,
  and errors. You can use it as either an AsyncIterable or a ReadableStream.
  When an error occurs, the stream will throw the error.
  */
  get fullStream() {
    return createAsyncIterableStream(this.originalStream, {
      transform(chunk, controller) {
        // Forward everything except empty text deltas.
        const isEmptyTextDelta = chunk.type === "text-delta" && chunk.textDelta.length === 0;
        if (!isEmptyTextDelta) {
          controller.enqueue(chunk);
        }
      }
    });
  }
  /**
  Converts the result to an `AIStream` object that is compatible with
  `StreamingTextResponse`. It can be used with the `useChat` and
  `useCompletion` hooks.

  @param callbacks
  Stream callbacks that will be called when the stream emits events.

  @returns an `AIStream` object.
  */
  toAIStream(callbacks) {
    const callbackStage = createCallbacksTransformer(callbacks);
    const dataStage = createStreamDataTransformer();
    return this.textStream.pipeThrough(callbackStage).pipeThrough(dataStage);
  }
  /**
  Creates a simple text stream response.
  Each text delta is encoded as UTF-8 and sent as a separate chunk.
  Non-text-delta events are ignored.
  */
  toTextStreamResponse(init) {
    const textEncoder = new TextEncoder();
    const encodeStage = new TransformStream({
      transform(chunk, controller) {
        controller.enqueue(textEncoder.encode(chunk));
      }
    });
    return new Response(this.textStream.pipeThrough(encodeStage), {
      ...init,
      status: 200,
      headers: {
        "Content-Type": "text/plain; charset=utf-8",
        ...init == null ? void 0 : init.headers
      }
    });
  }
};
|
1520
|
+
|
1521
|
+
// core/tool/tool.ts
/**
 * Identity helper for defining a tool. Exists purely so that TypeScript can
 * infer the tool's parameter/result types at the definition site; at runtime
 * it returns its argument unchanged.
 */
function tool(definition) {
  return definition;
}
|
1525
|
+
|
1526
|
+
// shared/stream-parts.ts
// Each stream part pairs a wire code prefix with a name and a `parse`
// validator that checks the decoded JSON payload and returns a typed part.

// Code "0": a plain text delta.
var textStreamPart = {
  code: "0",
  name: "text",
  parse(value) {
    if (typeof value !== "string") {
      throw new Error('"text" parts expect a string value.');
    }
    return { type: "text", value };
  }
};
// Code "1": an OpenAI-style function call with stringified name/arguments.
var functionCallStreamPart = {
  code: "1",
  name: "function_call",
  parse(value) {
    const call = value != null && typeof value === "object" && "function_call" in value ? value.function_call : null;
    const isValid = call != null && typeof call === "object" && "name" in call && "arguments" in call && typeof call.name === "string" && typeof call.arguments === "string";
    if (!isValid) {
      throw new Error(
        '"function_call" parts expect an object with a "function_call" property.'
      );
    }
    return {
      type: "function_call",
      value
    };
  }
};
// Code "2": an array of arbitrary data payloads.
var dataStreamPart = {
  code: "2",
  name: "data",
  parse(value) {
    if (!Array.isArray(value)) {
      throw new Error('"data" parts expect an array value.');
    }
    return { type: "data", value };
  }
};
// Code "3": an error message string.
var errorStreamPart = {
  code: "3",
  name: "error",
  parse(value) {
    if (typeof value !== "string") {
      throw new Error('"error" parts expect a string value.');
    }
    return { type: "error", value };
  }
};
|
1572
|
+
// Code "4": a complete assistant message with text-only content items.
var assistantMessageStreamPart = {
  code: "4",
  name: "assistant_message",
  parse(value) {
    // Each content item must be { type: "text", text: { value: string } }.
    const isTextContentItem = (item) => item != null && typeof item === "object" && "type" in item && item.type === "text" && "text" in item && item.text != null && typeof item.text === "object" && "value" in item.text && typeof item.text.value === "string";
    const isValid = value != null && typeof value === "object" && "id" in value && "role" in value && "content" in value && typeof value.id === "string" && value.role === "assistant" && Array.isArray(value.content) && value.content.every(isTextContentItem);
    if (!isValid) {
      throw new Error(
        '"assistant_message" parts expect an object with an "id", "role", and "content" property.'
      );
    }
    return {
      type: "assistant_message",
      value
    };
  }
};
// Code "5": assistant thread/message identifiers. Only the two known keys
// are copied into the parsed value; extra properties are dropped.
var assistantControlDataStreamPart = {
  code: "5",
  name: "assistant_control_data",
  parse(value) {
    const isValid = value != null && typeof value === "object" && "threadId" in value && "messageId" in value && typeof value.threadId === "string" && typeof value.messageId === "string";
    if (!isValid) {
      throw new Error(
        '"assistant_control_data" parts expect an object with a "threadId" and "messageId" property.'
      );
    }
    return {
      type: "assistant_control_data",
      value: {
        threadId: value.threadId,
        messageId: value.messageId
      }
    };
  }
};
// Code "6": an application data message with role "data".
var dataMessageStreamPart = {
  code: "6",
  name: "data_message",
  parse(value) {
    const isValid = value != null && typeof value === "object" && "role" in value && "data" in value && value.role === "data";
    if (!isValid) {
      throw new Error(
        '"data_message" parts expect an object with a "role" and "data" property.'
      );
    }
    return {
      type: "data_message",
      value
    };
  }
};
// Code "7": an array of OpenAI-style tool calls.
var toolCallStreamPart = {
  code: "7",
  name: "tool_calls",
  parse(value) {
    // Each entry needs string id/type and a function with string
    // name/arguments (the "arguments" key must be present explicitly).
    const isToolCallEntry = (tc) => tc != null && typeof tc === "object" && "id" in tc && typeof tc.id === "string" && "type" in tc && typeof tc.type === "string" && "function" in tc && tc.function != null && typeof tc.function === "object" && "arguments" in tc.function && typeof tc.function.name === "string" && typeof tc.function.arguments === "string";
    const isValid = value != null && typeof value === "object" && "tool_calls" in value && typeof value.tool_calls === "object" && value.tool_calls != null && Array.isArray(value.tool_calls) && value.tool_calls.every(isToolCallEntry);
    if (!isValid) {
      throw new Error(
        '"tool_calls" parts expect an object with a ToolCallPayload.'
      );
    }
    return {
      type: "tool_calls",
      value
    };
  }
};
// Code "8": an array of message annotations.
var messageAnnotationsStreamPart = {
  code: "8",
  name: "message_annotations",
  parse(value) {
    if (!Array.isArray(value)) {
      throw new Error('"message_annotations" parts expect an array value.');
    }
    return { type: "message_annotations", value };
  }
};
|
1649
|
+
// Registry of every stream part definition, in wire-code order.
var streamParts = [
  textStreamPart,
  functionCallStreamPart,
  dataStreamPart,
  errorStreamPart,
  assistantMessageStreamPart,
  assistantControlDataStreamPart,
  dataMessageStreamPart,
  toolCallStreamPart,
  messageAnnotationsStreamPart
];
// Lookup table: wire code prefix (e.g. "0") -> stream part definition.
// Derived from the registry so the two can never drift apart.
var streamPartsByCode = streamParts.reduce((byCode, part) => {
  byCode[part.code] = part;
  return byCode;
}, {});
// Lookup table: part name (e.g. "text") -> wire code prefix.
var StreamStringPrefixes = streamParts.reduce((byName, part) => {
  byName[part.name] = part.code;
  return byName;
}, {});
|
1682
|
+
// All recognized wire code prefixes ("0"-"8"); parseStreamPart rejects lines
// whose prefix is not in this list.
var validCodes = streamParts.map((part) => part.code);
|
1683
|
+
/**
 * Parses one wire line of the form `<code>:<json>` into a typed stream part.
 *
 * @throws Error when no ":" separator exists, the code prefix is unknown, the
 *         payload is not valid JSON, or the part's own validation fails.
 */
var parseStreamPart = (line) => {
  const separatorIndex = line.indexOf(":");
  if (separatorIndex === -1) {
    throw new Error("Failed to parse stream string. No separator found.");
  }
  const prefix = line.slice(0, separatorIndex);
  if (!validCodes.includes(prefix)) {
    throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`);
  }
  // Everything after the first ":" is the JSON payload (it may itself
  // contain ":" characters).
  const jsonValue = JSON.parse(line.slice(separatorIndex + 1));
  return streamPartsByCode[prefix].parse(jsonValue);
};
|
1697
|
+
/**
 * Serializes a stream part to its wire format: `<code>:<json>\n`.
 *
 * @param type the part name (e.g. "text", "data").
 * @param value the payload, serialized with JSON.stringify.
 * @throws Error when `type` is not a registered stream part name.
 */
function formatStreamPart(type, value) {
  const matchingPart = streamParts.find((part) => part.name === type);
  if (!matchingPart) {
    throw new Error(`Invalid stream part type: ${type}`);
  }
  return `${matchingPart.code}:${JSON.stringify(value)}\n`;
}
|
1705
|
+
|
1706
|
+
// shared/read-data-stream.ts
// Byte value of "\n"; stream lines are considered complete when a chunk ends
// with this byte.
var NEWLINE = "\n".charCodeAt(0);
/**
 * Concatenates buffered Uint8Array chunks into one array of `totalLength`
 * bytes. NOTE: empties `chunks` in place so the caller's buffer is reset for
 * the next batch.
 */
function concatChunks(chunks, totalLength) {
  const merged = new Uint8Array(totalLength);
  let writeOffset = 0;
  for (const part of chunks) {
    merged.set(part, writeOffset);
    writeOffset += part.length;
  }
  // Reset the caller's buffer.
  chunks.length = 0;
  return merged;
}
|
1718
|
+
/**
 * Async generator that reads a byte stream of newline-terminated stream-part
 * lines and yields each parsed part.
 *
 * Bytes are buffered until a chunk ends with a newline, then decoded, split
 * into lines, and parsed with `parseStreamPart`. When `isAborted()` reports
 * true after a batch, the reader is cancelled and iteration stops.
 */
async function* readDataStream(reader, {
  isAborted
} = {}) {
  const decoder = new TextDecoder();
  const pendingChunks = [];
  let pendingLength = 0;
  while (true) {
    const { value } = await reader.read();
    if (value) {
      pendingChunks.push(value);
      pendingLength += value.length;
      if (value[value.length - 1] !== NEWLINE) {
        // Line is still incomplete — keep buffering.
        continue;
      }
    }
    if (pendingChunks.length === 0) {
      // Stream ended with nothing buffered.
      break;
    }
    const buffer = concatChunks(pendingChunks, pendingLength);
    pendingLength = 0;
    const parsedParts = decoder.decode(buffer, { stream: true }).split("\n").filter((line) => line !== "").map(parseStreamPart);
    yield* parsedParts;
    if (isAborted != null && isAborted()) {
      reader.cancel();
      break;
    }
  }
}
|
1748
|
+
|
1749
|
+
// shared/utils.ts
/**
 * Creates a chunk decoder for stream responses.
 *
 * Without `complex` (falsy): returns an incremental UTF-8 decoder that maps a
 * byte chunk to a string ("" for missing chunks).
 * With `complex` (truthy): returns a decoder that splits the decoded text
 * into lines and parses each one as a stream part.
 */
function createChunkDecoder(complex) {
  const decoder = new TextDecoder();
  if (complex) {
    return (chunk) => {
      const lines = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
      return lines.map(parseStreamPart).filter(Boolean);
    };
  }
  return (chunk) => {
    if (!chunk) {
      return "";
    }
    return decoder.decode(chunk, { stream: true });
  };
}
|
1764
|
+
// Checks whether a raw stream string is a well-formed part of the given type:
// it must start with that type's code prefix (e.g. "0:") and end with the
// newline that terminates every stream-part line.
var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
|
1765
|
+
|
1766
|
+
// streams/ai-stream.ts
|
1767
|
+
var import_eventsource_parser = require("eventsource-parser");
|
1768
|
+
/**
 * Creates a TransformStream that feeds incoming bytes into an SSE
 * (eventsource) parser and emits each event's data, optionally mapped through
 * `customParser(data, { event })`.
 *
 * The stream terminates when a "[DONE]" data payload or a 'done' event is
 * seen. Falsy parser results are dropped.
 */
function createEventStreamTransformer(customParser) {
  const textDecoder = new TextDecoder();
  let eventSourceParser;
  return new TransformStream({
    async start(controller) {
      // The parser is created in start() so its callback can capture the
      // stream controller.
      eventSourceParser = (0, import_eventsource_parser.createParser)(
        (event) => {
          if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
          // @see https://replicate.com/docs/streaming
          event.event === "done") {
            controller.terminate();
            return;
          }
          if ("data" in event) {
            // Without a custom parser, the raw data string is forwarded.
            const parsedMessage = customParser ? customParser(event.data, {
              event: event.event
            }) : event.data;
            if (parsedMessage)
              controller.enqueue(parsedMessage);
          }
        }
      );
    },
    transform(chunk) {
      eventSourceParser.feed(textDecoder.decode(chunk));
    }
  });
}
|
1796
|
+
// Wraps user-supplied lifecycle callbacks (onStart / onToken / onText /
// onCompletion / onFinal) into a TransformStream that forwards text chunks as
// UTF-8 bytes while accumulating the complete response.
function createCallbacksTransformer(cb) {
  const encoder = new TextEncoder();
  const callbacks = cb || {};
  let fullResponse = "";
  return new TransformStream({
    async start() {
      if (callbacks.onStart) {
        await callbacks.onStart();
      }
    },
    async transform(message, controller) {
      // Messages are either plain strings or objects carrying a `content` field.
      const content = typeof message === "string" ? message : message.content;
      controller.enqueue(encoder.encode(content));
      fullResponse += content;
      if (callbacks.onToken) {
        await callbacks.onToken(content);
      }
      if (typeof message === "string" && callbacks.onText) {
        await callbacks.onText(message);
      }
    },
    async flush() {
      // OpenAI-style callback objects defer onFinal until after possible
      // recursive function-call streams, so only fire it here for the rest.
      const openAIStyle = isOfTypeOpenAIStreamCallbacks(callbacks);
      if (callbacks.onCompletion) {
        await callbacks.onCompletion(fullResponse);
      }
      if (!openAIStyle && callbacks.onFinal) {
        await callbacks.onFinal(fullResponse);
      }
    }
  });
}
|
1826
|
+
// True when the callbacks object was built for OpenAIStream, identified by
// the presence of the `experimental_onFunctionCall` property (prototype chain
// included, matching the `in` operator's semantics).
function isOfTypeOpenAIStreamCallbacks(callbacks) {
  return Reflect.has(callbacks, "experimental_onFunctionCall");
}
|
1829
|
+
// Returns a stateful function that strips leading whitespace from the very
// beginning of a stream: chunks are left-trimmed until the first chunk with
// visible content arrives; every later chunk passes through untouched.
function trimStartOfStreamHelper() {
  let awaitingFirstContent = true;
  return (text) => {
    if (!awaitingFirstContent) {
      return text;
    }
    const trimmed = text.trimStart();
    if (trimmed !== "") {
      awaitingFirstContent = false;
    }
    return trimmed;
  };
}
|
1840
|
+
// Converts a fetch Response carrying a server-sent-event body into a stream
// of parsed messages piped through the callbacks transformer. Non-ok
// responses yield a stream that errors with the body text (or a "No response
// body" error when there is none).
function AIStream(response, customParser, callbacks) {
  if (response.ok) {
    const body = response.body || createEmptyReadableStream();
    return body.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
  }
  if (!response.body) {
    return new ReadableStream({
      start(controller) {
        controller.error(new Error("Response error: No response body"));
      }
    });
  }
  const reader = response.body.getReader();
  return new ReadableStream({
    async start(controller) {
      const { done, value } = await reader.read();
      if (done) {
        // Error response with an already-drained body: leave the stream open,
        // matching the original behavior.
        return;
      }
      const errorText = new TextDecoder().decode(value);
      controller.error(new Error(`Response error: ${errorText}`));
    }
  });
}
|
1864
|
+
// A ReadableStream that closes immediately without emitting any chunks.
function createEmptyReadableStream() {
  return new ReadableStream({
    start: (controller) => controller.close()
  });
}
|
1871
|
+
// Adapts an async iterable into a ReadableStream. Backpressure is respected
// because the iterator is only advanced from pull(); cancellation forwards
// the reason to the iterator's return() method when one exists.
function readableFromAsyncIterable(iterable) {
  const iterator = iterable[Symbol.asyncIterator]();
  return new ReadableStream({
    async pull(controller) {
      const step = await iterator.next();
      if (step.done) {
        controller.close();
      } else {
        controller.enqueue(step.value);
      }
    },
    async cancel(reason) {
      if (iterator.return != null) {
        await iterator.return(reason);
      }
    }
  });
}
|
1887
|
+
|
1888
|
+
// streams/stream-data.ts
|
1889
|
+
// StreamData interleaves out-of-band "data" and "message_annotations" stream
// parts with the main text stream. Appended values are buffered and flushed
// just before the next text chunk passes through `this.stream`; anything left
// over is emitted when the stream flushes after close().
var StreamData = class {
  constructor() {
    this.encoder = new TextEncoder();
    // TransformStream controller, captured in start(); null until then.
    this.controller = null;
    // closing the stream is synchronous, but we want to return a promise
    // in case we're doing async work
    this.isClosedPromise = null;
    this.isClosedPromiseResolver = void 0;
    this.isClosed = false;
    // array to store appended data
    this.data = [];
    // buffered message annotations, drained the same way as `data`
    this.messageAnnotations = [];
    this.isClosedPromise = new Promise((resolve) => {
      this.isClosedPromiseResolver = resolve;
    });
    const self = this;
    this.stream = new TransformStream({
      start: async (controller) => {
        self.controller = controller;
      },
      transform: async (chunk, controller) => {
        // Emit any buffered data parts ahead of the passthrough chunk.
        if (self.data.length > 0) {
          const encodedData = self.encoder.encode(
            formatStreamPart("data", self.data)
          );
          self.data = [];
          controller.enqueue(encodedData);
        }
        if (self.messageAnnotations.length) {
          const encodedMessageAnnotations = self.encoder.encode(
            formatStreamPart("message_annotations", self.messageAnnotations)
          );
          self.messageAnnotations = [];
          controller.enqueue(encodedMessageAnnotations);
        }
        controller.enqueue(chunk);
      },
      async flush(controller) {
        // In development, warn after 3s if close() was never called, since
        // flush blocks on isClosedPromise below.
        const warningTimeout = process.env.NODE_ENV === "development" ? setTimeout(() => {
          console.warn(
            "The data stream is hanging. Did you forget to close it with `data.close()`?"
          );
        }, 3e3) : null;
        await self.isClosedPromise;
        if (warningTimeout !== null) {
          clearTimeout(warningTimeout);
        }
        // Drain anything appended after the last text chunk.
        if (self.data.length) {
          const encodedData = self.encoder.encode(
            formatStreamPart("data", self.data)
          );
          controller.enqueue(encodedData);
        }
        if (self.messageAnnotations.length) {
          const encodedData = self.encoder.encode(
            formatStreamPart("message_annotations", self.messageAnnotations)
          );
          controller.enqueue(encodedData);
        }
      }
    });
  }
  // Resolves the close promise so flush() can complete. Throws if called
  // twice or before the stream's start() has captured the controller.
  async close() {
    var _a;
    if (this.isClosed) {
      throw new Error("Data Stream has already been closed.");
    }
    if (!this.controller) {
      throw new Error("Stream controller is not initialized.");
    }
    (_a = this.isClosedPromiseResolver) == null ? void 0 : _a.call(this);
    this.isClosed = true;
  }
  // Buffers a value to be sent as part of the next "data" stream part.
  append(value) {
    if (this.isClosed) {
      throw new Error("Data Stream has already been closed.");
    }
    this.data.push(value);
  }
  // Buffers a value to be sent as part of the next "message_annotations" part.
  appendMessageAnnotation(value) {
    if (this.isClosed) {
      throw new Error("Data Stream has already been closed.");
    }
    this.messageAnnotations.push(value);
  }
};
|
1975
|
+
// Re-encodes raw text byte chunks as serialized "text" stream parts so they
// can be interleaved with StreamData parts on the wire.
function createStreamDataTransformer() {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();
  return new TransformStream({
    async transform(chunk, controller) {
      const text = decoder.decode(chunk);
      const part = formatStreamPart("text", text);
      controller.enqueue(encoder.encode(part));
    }
  });
}
|
1985
|
+
// Backwards-compatible alias of StreamData under its previous experimental name.
var experimental_StreamData = class extends StreamData {
};
|
1987
|
+
|
1988
|
+
// streams/anthropic-stream.ts
|
1989
|
+
// Creates a parser for Anthropic's (pre-Messages) completion payloads.
// Anthropic resends the full completion text on each event, so the parser
// tracks what was already emitted and yields only the new suffix; payloads
// that do not extend the previous text are returned whole.
function parseAnthropicStream() {
  let emitted = "";
  return (data) => {
    const json = JSON.parse(data);
    if ("error" in json) {
      throw new Error(`${json.error.type}: ${json.error.message}`);
    }
    if (!("completion" in json)) {
      return;
    }
    const text = json.completion;
    const extendsPrevious = text.length > emitted.length && text.startsWith(emitted);
    if (emitted === "" || extendsPrevious) {
      const delta = text.slice(emitted.length);
      emitted = text;
      return delta;
    }
    return text;
  };
}
|
2008
|
+
// Normalizes an Anthropic SDK stream into plain text chunks. Supports both
// the legacy completion shape ({ completion }) and the Messages API shape
// ({ delta: { text } }); empty strings are skipped.
async function* streamable(stream) {
  for await (const chunk of stream) {
    let text;
    if ("completion" in chunk) {
      text = chunk.completion;
    } else if ("delta" in chunk && "text" in chunk.delta) {
      text = chunk.delta.text;
    }
    if (text) {
      yield text;
    }
  }
}
|
2024
|
+
// Converts an Anthropic response into a data-protocol ReadableStream.
// Accepts either the SDK's async iterable of chunks or a fetch Response
// carrying the SSE body; in the latter case AIStream applies the callbacks
// transformer internally.
function AnthropicStream(res, cb) {
  if (Symbol.asyncIterator in res) {
    const textStream = readableFromAsyncIterable(streamable(res));
    return textStream.pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer());
  }
  return AIStream(res, parseAnthropicStream(), cb).pipeThrough(createStreamDataTransformer());
}
|
2033
|
+
|
2034
|
+
// streams/assistant-response.ts
|
2035
|
+
// AssistantResponse streams OpenAI Assistants API events to the client as a
// text/plain Response in the data-stream-part protocol. `process2` receives
// helper functions (sendMessage / sendDataMessage / forwardStream) to push
// parts onto the stream; any error it throws is forwarded as an "error" part.
function AssistantResponse({ threadId, messageId }, process2) {
  const stream = new ReadableStream({
    async start(controller) {
      var _a;
      const textEncoder = new TextEncoder();
      // Enqueue a full assistant message part.
      const sendMessage = (message) => {
        controller.enqueue(
          textEncoder.encode(formatStreamPart("assistant_message", message))
        );
      };
      // Enqueue an application-defined data message part.
      const sendDataMessage = (message) => {
        controller.enqueue(
          textEncoder.encode(formatStreamPart("data_message", message))
        );
      };
      // Enqueue an error part with the given message text.
      const sendError = (errorMessage) => {
        controller.enqueue(
          textEncoder.encode(formatStreamPart("error", errorMessage))
        );
      };
      // Forwards an Assistants run event stream to the client. Returns the
      // run data from the last "completed"/"requires_action" event seen
      // (undefined if neither occurred).
      const forwardStream = async (stream2) => {
        var _a2, _b;
        let result = void 0;
        for await (const value of stream2) {
          switch (value.event) {
            case "thread.message.created": {
              // Announce a new, initially-empty assistant message; text
              // arrives via subsequent delta events.
              controller.enqueue(
                textEncoder.encode(
                  formatStreamPart("assistant_message", {
                    id: value.data.id,
                    role: "assistant",
                    content: [{ type: "text", text: { value: "" } }]
                  })
                )
              );
              break;
            }
            case "thread.message.delta": {
              // Only the first content part is forwarded, and only when it
              // is a text delta with a non-null value.
              const content = (_a2 = value.data.delta.content) == null ? void 0 : _a2[0];
              if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
                controller.enqueue(
                  textEncoder.encode(
                    formatStreamPart("text", content.text.value)
                  )
                );
              }
              break;
            }
            case "thread.run.completed":
            case "thread.run.requires_action": {
              result = value.data;
              break;
            }
          }
        }
        return result;
      };
      // Send thread/message identifiers first so the client can correlate
      // all following parts.
      controller.enqueue(
        textEncoder.encode(
          formatStreamPart("assistant_control_data", {
            threadId,
            messageId
          })
        )
      );
      try {
        await process2({
          threadId,
          messageId,
          sendMessage,
          sendDataMessage,
          forwardStream
        });
      } catch (error) {
        sendError((_a = error.message) != null ? _a : `${error}`);
      } finally {
        controller.close();
      }
    },
    pull(controller) {
    },
    cancel() {
    }
  });
  return new Response(stream, {
    status: 200,
    headers: {
      "Content-Type": "text/plain; charset=utf-8"
    }
  });
}
|
2126
|
+
// Backwards-compatible alias of AssistantResponse under its previous experimental name.
var experimental_AssistantResponse = AssistantResponse;
|
2127
|
+
|
2128
|
+
// streams/aws-bedrock-stream.ts
|
2129
|
+
// Iterates an AWS Bedrock streaming response, decoding each event's UTF-8
// JSON payload and yielding the text delta selected by
// `extractTextDeltaFromChunk`. Events without bytes, and payloads for which
// the extractor returns null/undefined, are skipped.
async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
  const decoder = new TextDecoder();
  const events = response.body != null ? response.body : [];
  for await (const event of events) {
    const bytes = event.chunk == null ? void 0 : event.chunk.bytes;
    if (bytes == null) {
      continue;
    }
    const payload = JSON.parse(decoder.decode(bytes));
    const delta = extractTextDeltaFromChunk(payload);
    if (delta != null) {
      yield delta;
    }
  }
}
|
2144
|
+
// Bedrock Anthropic Messages API: text deltas arrive as { delta: { text } }.
function AWSBedrockAnthropicMessagesStream(response, callbacks) {
  const extractDeltaText = (chunk) => {
    const delta = chunk.delta;
    return delta == null ? void 0 : delta.text;
  };
  return AWSBedrockStream(response, callbacks, extractDeltaText);
}
|
2150
|
+
// Bedrock Anthropic (legacy completion API): text arrives as { completion }.
function AWSBedrockAnthropicStream(response, callbacks) {
  const extractCompletion = (chunk) => chunk.completion;
  return AWSBedrockStream(response, callbacks, extractCompletion);
}
|
2153
|
+
// Bedrock Cohere: text arrives as { text }; chunk may be null/undefined.
function AWSBedrockCohereStream(response, callbacks) {
  const extractText = (chunk) => chunk == null ? void 0 : chunk.text;
  return AWSBedrockStream(response, callbacks, extractText);
}
|
2156
|
+
// Bedrock Llama 2: text arrives as { generation }.
function AWSBedrockLlama2Stream(response, callbacks) {
  const extractGeneration = (chunk) => chunk.generation;
  return AWSBedrockStream(response, callbacks, extractGeneration);
}
|
2159
|
+
// Generic Bedrock adapter: extracts text deltas from the response's event
// stream, then applies the callbacks and data-protocol transformers.
function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
  const deltas = asDeltaIterable(response, extractTextDeltaFromChunk);
  const textStream = readableFromAsyncIterable(deltas);
  return textStream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
}
|
2164
|
+
|
2165
|
+
// streams/cohere-stream.ts
|
2166
|
+
// Shared decoder for Cohere's line-delimited JSON stream (readAndProcessLines).
var utf8Decoder = new TextDecoder("utf-8");
|
2167
|
+
// Parses each Cohere NDJSON line and forwards its text to the controller,
// skipping the terminal summary line (is_finished === true).
async function processLines(lines, controller) {
  for (const line of lines) {
    const parsed = JSON.parse(line);
    if (!parsed.is_finished) {
      controller.enqueue(parsed.text);
    }
  }
}
|
2175
|
+
// Reads the Cohere response body to completion, buffering partial lines
// across chunk boundaries, handing complete lines to processLines, and
// closing the controller when the stream ends.
async function readAndProcessLines(reader, controller) {
  let buffered = "";
  for (;;) {
    const { value, done } = await reader.read();
    if (done) {
      break;
    }
    buffered += utf8Decoder.decode(value, { stream: true });
    const completeLines = buffered.split(/\r\n|\n|\r/g);
    // The last split element may be an unterminated line; keep it buffered.
    buffered = completeLines.pop() || "";
    await processLines(completeLines, controller);
  }
  if (buffered) {
    await processLines([buffered], controller);
  }
  controller.close();
}
|
2193
|
+
// Wraps a Cohere fetch Response into a ReadableStream of text chunks.
// A missing body yields an immediately-closed stream.
function createParser2(res) {
  const reader = res.body ? res.body.getReader() : void 0;
  return new ReadableStream({
    async start(controller) {
      if (!reader) {
        controller.close();
        return;
      }
      await readAndProcessLines(reader, controller);
    }
  });
}
|
2206
|
+
// Yields text from a Cohere SDK stream, keeping only non-empty
// "text-generation" events.
async function* streamable2(stream) {
  for await (const chunk of stream) {
    if (chunk.eventType !== "text-generation") {
      continue;
    }
    if (chunk.text) {
      yield chunk.text;
    }
  }
}
|
2215
|
+
// Converts a Cohere response (SDK async iterable or fetch Response) into a
// data-protocol ReadableStream; both variants share the same downstream pipes.
function CohereStream(reader, callbacks) {
  const source = Symbol.asyncIterator in reader ? readableFromAsyncIterable(streamable2(reader)) : createParser2(reader);
  return source.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
}
|
2222
|
+
|
2223
|
+
// streams/google-generative-ai-stream.ts
|
2224
|
+
// Yields the text of the first content part of the first candidate for each
// chunk in a Google Generative AI streaming response; chunks without parts
// or whose first part carries no string `text` are skipped.
async function* streamable3(response) {
  for await (const chunk of response.stream) {
    const candidate = chunk.candidates == null ? void 0 : chunk.candidates[0];
    const content = candidate == null ? void 0 : candidate.content;
    const parts = content == null ? void 0 : content.parts;
    if (parts === void 0) {
      continue;
    }
    const [firstPart] = parts;
    if (typeof firstPart.text === "string") {
      yield firstPart.text;
    }
  }
}
|
2237
|
+
// Converts a Google Generative AI streaming response into a data-protocol
// ReadableStream with optional lifecycle callbacks.
function GoogleGenerativeAIStream(response, cb) {
  const textStream = readableFromAsyncIterable(streamable3(response));
  return textStream.pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer());
}
|
2240
|
+
|
2241
|
+
// streams/huggingface-stream.ts
|
2242
|
+
// Converts a Hugging Face text-generation async iterator into a
// ReadableStream of text tokens, dropping end-of-sequence markers and the
// final `generated_text` summary event (which repeats the whole generation).
function createParser3(res) {
  const trimStartOfStream = trimStartOfStreamHelper();
  return new ReadableStream({
    async pull(controller) {
      const { value, done } = await res.next();
      if (done) {
        controller.close();
        return;
      }
      const tokenText = value.token == null ? void 0 : value.token.text;
      const text = trimStartOfStream(tokenText != null ? tokenText : "");
      if (!text) {
        return;
      }
      if (value.generated_text != null && value.generated_text.length > 0) {
        return;
      }
      const stopMarkers = ["</s>", "<|endoftext|>", "<|end|>"];
      if (stopMarkers.includes(text)) {
        return;
      }
      controller.enqueue(text);
    }
  });
}
|
2265
|
+
// Converts a Hugging Face text-generation stream into a data-protocol
// ReadableStream with optional lifecycle callbacks.
function HuggingFaceStream(res, callbacks) {
  const tokenStream = createParser3(res);
  return tokenStream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
}
|
2268
|
+
|
2269
|
+
// streams/inkeep-stream.ts
|
2270
|
+
// InkeepStream adapts an Inkeep chat SSE response into a data-protocol
// ReadableStream. It tracks the chat session id and any cited records across
// events, and wraps the caller's onFinal so it also receives that metadata.
function InkeepStream(res, callbacks) {
  if (!res.body) {
    throw new Error("Response body is null");
  }
  let chat_session_id = "";
  let records_cited;
  // SSE parser: "records_cited" events update closure state and notify the
  // caller; "message_chunk" events return the text to stream onward.
  const inkeepEventParser = (data, options) => {
    var _a, _b;
    const { event } = options;
    if (event === "records_cited") {
      records_cited = JSON.parse(data);
      (_a = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a.call(callbacks, records_cited);
    }
    if (event === "message_chunk") {
      const inkeepMessageChunk = JSON.parse(data);
      chat_session_id = (_b = inkeepMessageChunk.chat_session_id) != null ? _b : chat_session_id;
      return inkeepMessageChunk.content_chunk;
    }
    return;
  };
  // onRecordsCited is handled above, so it is stripped from the callbacks
  // passed through to AIStream; onFinal is replaced with a wrapper that adds
  // the accumulated Inkeep metadata as a second argument.
  let { onRecordsCited, ...passThroughCallbacks } = callbacks || {};
  passThroughCallbacks = {
    ...passThroughCallbacks,
    onFinal: (completion) => {
      var _a;
      const inkeepOnFinalMetadata = {
        chat_session_id,
        records_cited
      };
      (_a = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a.call(callbacks, completion, inkeepOnFinalMetadata);
    }
  };
  return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
    createStreamDataTransformer()
  );
}
|
2306
|
+
|
2307
|
+
// streams/langchain-stream.ts
|
2308
|
+
// LangChainStream bridges LangChain's callback-handler interface to a
// data-protocol ReadableStream. It returns { stream, writer, handlers };
// attach `handlers` to a LangChain run so tokens written via
// handleLLMNewToken flow through the callbacks transformer. A Set of active
// run ids ensures the writable side is only closed once every started run
// (LLM / chain / tool) has ended; any error aborts the writer.
function LangChainStream(callbacks) {
  const stream = new TransformStream();
  const writer = stream.writable.getWriter();
  const runs = /* @__PURE__ */ new Set();
  // A failed run is removed from the set and the whole stream is aborted.
  const handleError = async (e, runId) => {
    runs.delete(runId);
    await writer.ready;
    await writer.abort(e);
  };
  const handleStart = async (runId) => {
    runs.add(runId);
  };
  // Close the writer only when the last tracked run finishes.
  const handleEnd = async (runId) => {
    runs.delete(runId);
    if (runs.size === 0) {
      await writer.ready;
      await writer.close();
    }
  };
  return {
    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer()),
    writer,
    handlers: {
      handleLLMNewToken: async (token) => {
        await writer.ready;
        await writer.write(token);
      },
      handleLLMStart: async (_llm, _prompts, runId) => {
        // Not awaited: handleStart only mutates the in-memory Set.
        handleStart(runId);
      },
      handleLLMEnd: async (_output, runId) => {
        await handleEnd(runId);
      },
      handleLLMError: async (e, runId) => {
        await handleError(e, runId);
      },
      handleChainStart: async (_chain, _inputs, runId) => {
        handleStart(runId);
      },
      handleChainEnd: async (_outputs, runId) => {
        await handleEnd(runId);
      },
      handleChainError: async (e, runId) => {
        await handleError(e, runId);
      },
      handleToolStart: async (_tool, _input, runId) => {
        handleStart(runId);
      },
      handleToolEnd: async (_output, runId) => {
        await handleEnd(runId);
      },
      handleToolError: async (e, runId) => {
        await handleError(e, runId);
      }
    }
  };
}
|
2365
|
+
|
2366
|
+
// streams/mistral-stream.ts
|
2367
|
+
// Yields non-empty delta content from a Mistral chat completion stream.
async function* streamable4(stream) {
  for await (const chunk of stream) {
    const firstChoice = chunk.choices[0];
    const delta = firstChoice == null ? void 0 : firstChoice.delta;
    const content = delta == null ? void 0 : delta.content;
    if (content === void 0 || content === "") {
      continue;
    }
    yield content;
  }
}
|
2377
|
+
// Converts a Mistral SDK chat stream into a data-protocol ReadableStream
// with optional lifecycle callbacks.
function MistralStream(response, callbacks) {
  return readableFromAsyncIterable(streamable4(response)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
}
|
2381
|
+
|
2382
|
+
// streams/openai-stream.ts
|
2383
|
+
// Returns a parser mapping a raw OpenAI SSE data payload (a JSON string) to
// its text or serialized function/tool-call fragment via chunkToText.
function parseOpenAIStream() {
  const extract = chunkToText();
  return (data) => {
    const json = JSON.parse(data);
    return extract(json);
  };
}
|
2387
|
+
// streamable5 adapts OpenAI SDK (and Azure OpenAI SDK) chat chunks into text
// fragments via chunkToText. Azure chunks — detected by the Azure-only
// `promptFilterResults` property — use camelCase fields and are first mapped
// onto the OpenAI wire shape.
async function* streamable5(stream) {
  const extract = chunkToText();
  for await (let chunk of stream) {
    if ("promptFilterResults" in chunk) {
      // NOTE(review): getDate() returns the day of the month, not an epoch
      // timestamp — looks suspicious; confirm the intended value.
      chunk = {
        id: chunk.id,
        created: chunk.created.getDate(),
        object: chunk.object,
        // not exposed by Azure API
        model: chunk.model,
        // not exposed by Azure API
        choices: chunk.choices.map((choice) => {
          var _a, _b, _c, _d, _e, _f, _g;
          return {
            delta: {
              content: (_a = choice.delta) == null ? void 0 : _a.content,
              function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
              role: (_c = choice.delta) == null ? void 0 : _c.role,
              tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
                index,
                id: toolCall.id,
                function: toolCall.function,
                type: toolCall.type
              })) : void 0
            },
            finish_reason: choice.finishReason,
            index: choice.index
          };
        })
      };
    }
    const text = extract(chunk);
    if (text)
      yield text;
  }
}
|
2423
|
+
// chunkToText returns a stateful mapper from a parsed OpenAI chunk (chat or
// legacy completion) to either a plain text string or an
// { isText: false, content } fragment of a partially serialized function/tool
// call. Concatenated across chunks, the fragments form a JSON string such as
// {"function_call": {"name": "...", "arguments": "..."}} which
// createFunctionCallTransformer later JSON.parses.
function chunkToText() {
  const trimStartOfStream = trimStartOfStreamHelper();
  // Tracks whether we are mid-way through emitting a function/tool-call JSON
  // fragment, so the closing braces can be appended on the finish chunk.
  let isFunctionStreamingIn;
  return (json) => {
    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
    if (isChatCompletionChunk(json)) {
      const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
      if ((_b = delta.function_call) == null ? void 0 : _b.name) {
        // First function_call chunk: open the JSON envelope.
        isFunctionStreamingIn = true;
        return {
          isText: false,
          content: `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`
        };
      } else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
        // First chunk of a tool call: open the array envelope for index 0,
        // otherwise close the previous entry and open the next.
        isFunctionStreamingIn = true;
        const toolCall = delta.tool_calls[0];
        if (toolCall.index === 0) {
          return {
            isText: false,
            content: `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`
          };
        } else {
          return {
            isText: false,
            content: `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`
          };
        }
      } else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
        // Streaming the arguments string: escape it for embedding inside the
        // surrounding JSON string literal.
        return {
          isText: false,
          content: cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments)
        };
      } else if ((_l = (_k = (_j = delta.tool_calls) == null ? void 0 : _j[0]) == null ? void 0 : _k.function) == null ? void 0 : _l.arguments) {
        return {
          isText: false,
          content: cleanupArguments((_o = (_n = (_m = delta.tool_calls) == null ? void 0 : _m[0]) == null ? void 0 : _n.function) == null ? void 0 : _o.arguments)
        };
      } else if (isFunctionStreamingIn && (((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "function_call" || ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "stop")) {
        // Close the function_call envelope.
        isFunctionStreamingIn = false;
        return {
          isText: false,
          content: '"}}'
        };
      } else if (isFunctionStreamingIn && ((_r = json.choices[0]) == null ? void 0 : _r.finish_reason) === "tool_calls") {
        // Close the tool_calls array envelope.
        isFunctionStreamingIn = false;
        return {
          isText: false,
          content: '"}}]}'
        };
      }
    }
    // Plain text path: chat delta content or legacy completion text, with
    // stream-leading whitespace trimmed.
    const text = trimStartOfStream(
      isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
    );
    return text;
  };
  // Escapes a partial arguments chunk so it can be embedded inside a JSON
  // string literal (backslashes, slashes, quotes, and control characters).
  function cleanupArguments(argumentChunk) {
    let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
    return `${escapedPartialJson}`;
  }
}
|
2484
|
+
// Symbol used to smuggle the accumulated function-call message history into
// recursive OpenAIStream invocations without widening the public callbacks shape.
var __internal__OpenAIFnMessagesSymbol = Symbol(
  "internal_openai_fn_messages"
);
|
2487
|
+
// Truthy when `data` looks like a chat completion chunk (its first choice
// carries a `delta` property). Preserves the original's truthy/falsy return
// values rather than coercing to a strict boolean.
function isChatCompletionChunk(data) {
  const firstChoice = "choices" in data && data.choices && data.choices[0];
  return firstChoice && "delta" in firstChoice;
}
|
2490
|
+
// Truthy when `data` looks like a legacy completion chunk (its first choice
// carries a `text` property). Preserves the original's truthy/falsy returns.
function isCompletion(data) {
  const firstChoice = "choices" in data && data.choices && data.choices[0];
  return firstChoice && "text" in firstChoice;
}
|
2493
|
+
// OpenAIStream converts an OpenAI (or Azure OpenAI) streaming response —
// either a fetch Response with an SSE body or the SDK's async iterable of
// chunks — into a data-protocol ReadableStream, optionally routing
// function/tool calls to the experimental callbacks.
//
// Fix: the callback-suppression expression was duplicated verbatim in both
// branches; it is now computed once, eliminating the drift risk between the
// iterable and Response code paths. Behavior is unchanged.
function OpenAIStream(res, callbacks) {
  const cb = callbacks;
  // When function/tool-call handling is active, onFinal must not fire in the
  // inner callbacks transformer: createFunctionCallTransformer invokes it
  // after the (possibly recursive) function-call round-trips complete.
  const hasExperimentalCallbacks = (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall);
  const innerCallbacks = hasExperimentalCallbacks ? {
    ...cb,
    onFinal: void 0
  } : {
    ...cb
  };
  let stream;
  if (Symbol.asyncIterator in res) {
    stream = readableFromAsyncIterable(streamable5(res)).pipeThrough(
      createCallbacksTransformer(innerCallbacks)
    );
  } else {
    stream = AIStream(res, parseOpenAIStream(), innerCallbacks);
  }
  if (cb && hasExperimentalCallbacks) {
    const functionCallTransformer = createFunctionCallTransformer(cb);
    return stream.pipeThrough(functionCallTransformer);
  } else {
    return stream.pipeThrough(createStreamDataTransformer());
  }
}
|
2526
|
+
// createFunctionCallTransformer inspects the text stream produced by
// chunkToText: if the very first chunk opens a serialized function/tool call
// ('{"function_call":' or '{"tool_calls":'), the whole call is buffered and
// dispatched to the experimental callbacks on flush — possibly recursing into
// a new OpenAIStream with the updated message history. Plain text is
// re-encoded as "text" stream parts and passed through.
function createFunctionCallTransformer(callbacks) {
  const textEncoder = new TextEncoder();
  let isFirstChunk = true;
  // Serialized function/tool-call JSON accumulated across chunks.
  let aggregatedResponse = "";
  // Everything seen (or the final string response), handed to onFinal.
  let aggregatedFinalCompletionResponse = "";
  let isFunctionStreamingIn = false;
  // Message history threaded through recursive invocations via the symbol key.
  let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
  const decode = createChunkDecoder();
  return new TransformStream({
    async transform(chunk, controller) {
      const message = decode(chunk);
      aggregatedFinalCompletionResponse += message;
      // Function/tool calls can only start on the first chunk of the stream.
      const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
      if (shouldHandleAsFunction) {
        isFunctionStreamingIn = true;
        aggregatedResponse += message;
        isFirstChunk = false;
        return;
      }
      if (!isFunctionStreamingIn) {
        controller.enqueue(
          textEncoder.encode(formatStreamPart("text", message))
        );
        return;
      } else {
        aggregatedResponse += message;
      }
    },
    async flush(controller) {
      try {
        if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
          isFunctionStreamingIn = false;
          const payload = JSON.parse(aggregatedResponse);
          let newFunctionCallMessages = [
            ...functionCallMessages
          ];
          let functionResponse = void 0;
          if (callbacks.experimental_onFunctionCall) {
            // NOTE(review): after warning, payload.function_call.arguments is
            // still dereferenced and would throw if it is undefined — confirm
            // this hard failure is intended.
            if (payload.function_call === void 0) {
              console.warn(
                "experimental_onFunctionCall should not be defined when using tools"
              );
            }
            const argumentsPayload = JSON.parse(
              payload.function_call.arguments
            );
            functionResponse = await callbacks.experimental_onFunctionCall(
              {
                name: payload.function_call.name,
                arguments: argumentsPayload
              },
              (result) => {
                // createFunctionCallMessages helper passed to the user: it
                // appends the assistant call and its result to the history.
                newFunctionCallMessages = [
                  ...functionCallMessages,
                  {
                    role: "assistant",
                    content: "",
                    function_call: payload.function_call
                  },
                  {
                    role: "function",
                    name: payload.function_call.name,
                    content: JSON.stringify(result)
                  }
                ];
                return newFunctionCallMessages;
              }
            );
          }
          if (callbacks.experimental_onToolCall) {
            const toolCalls = {
              tools: []
            };
            for (const tool2 of payload.tool_calls) {
              toolCalls.tools.push({
                id: tool2.id,
                type: "function",
                func: {
                  name: tool2.function.name,
                  arguments: JSON.parse(tool2.function.arguments)
                }
              });
            }
            let responseIndex = 0;
            try {
              functionResponse = await callbacks.experimental_onToolCall(
                toolCalls,
                (result) => {
                  if (result) {
                    const { tool_call_id, function_name, tool_call_result } = result;
                    newFunctionCallMessages = [
                      ...newFunctionCallMessages,
                      // Only append the assistant message if it's the first response
                      ...responseIndex === 0 ? [
                        {
                          role: "assistant",
                          content: "",
                          tool_calls: payload.tool_calls.map(
                            (tc) => ({
                              id: tc.id,
                              type: "function",
                              function: {
                                name: tc.function.name,
                                // we send the arguments an object to the user, but as the API expects a string, we need to stringify it
                                arguments: JSON.stringify(
                                  tc.function.arguments
                                )
                              }
                            })
                          )
                        }
                      ] : [],
                      // Append the function call result message
                      {
                        role: "tool",
                        tool_call_id,
                        name: function_name,
                        content: JSON.stringify(tool_call_result)
                      }
                    ];
                    responseIndex++;
                  }
                  return newFunctionCallMessages;
                }
              );
            } catch (e) {
              console.error("Error calling experimental_onToolCall:", e);
            }
          }
          if (!functionResponse) {
            // No follow-up from the user: forward the call itself to the client.
            controller.enqueue(
              textEncoder.encode(
                formatStreamPart(
                  payload.function_call ? "function_call" : "tool_calls",
                  // parse to prevent double-encoding:
                  JSON.parse(aggregatedResponse)
                )
              )
            );
            return;
          } else if (typeof functionResponse === "string") {
            // The user answered with plain text instead of a new stream.
            controller.enqueue(
              textEncoder.encode(formatStreamPart("text", functionResponse))
            );
            aggregatedFinalCompletionResponse = functionResponse;
            return;
          }
          // The user returned a new OpenAI response: recurse, suppressing
          // onStart (the outer stream already started).
          const filteredCallbacks = {
            ...callbacks,
            onStart: void 0
          };
          // NOTE(review): mutates the caller-provided callbacks object so the
          // recursive stream's finally-block, not this one, skips onFinal.
          callbacks.onFinal = void 0;
          const openAIStream = OpenAIStream(functionResponse, {
            ...filteredCallbacks,
            [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
          });
          const reader = openAIStream.getReader();
          while (true) {
            const { done, value } = await reader.read();
            if (done) {
              break;
            }
            controller.enqueue(value);
          }
        }
      } finally {
        if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
          await callbacks.onFinal(aggregatedFinalCompletionResponse);
        }
      }
    }
  });
}
|
2699
|
+
|
2700
|
+
// streams/replicate-stream.ts
/**
 * Creates an AIStream from a Replicate prediction object by following the
 * prediction's server-sent-events URL (`res.urls.stream`).
 *
 * @param res - Replicate prediction response; must expose `urls.stream`.
 * @param cb - AIStream callbacks forwarded unchanged.
 * @param options - optional `{ headers }` merged into the SSE fetch request.
 * @returns a ReadableStream piped through the stream-data transformer.
 * @throws Error with `res.error` when present, otherwise a missing-URL error.
 */
async function ReplicateStream(res, cb, options) {
  const urls = res.urls;
  const url = urls == null ? void 0 : urls.stream;
  if (!url) {
    // Prefer the upstream error message when Replicate reported one.
    throw new Error(res.error ? res.error : "Missing stream URL in Replicate response");
  }
  // Caller-supplied headers may override the SSE Accept header.
  const eventStream = await fetch(url, {
    method: "GET",
    headers: Object.assign(
      { Accept: "text/event-stream" },
      options == null ? void 0 : options.headers
    )
  });
  return AIStream(eventStream, void 0, cb).pipeThrough(
    createStreamDataTransformer()
  );
}
// shared/parse-complex-response.ts
/**
 * Returns a copy of `message` with `annotations` attached as a fresh array.
 * When the message is absent or the annotation list is missing/empty, the
 * original message is returned untouched (same reference).
 */
function assignAnnotationsToMessage(message, annotations) {
  const hasAnnotations = Boolean(message) && Boolean(annotations) && annotations.length > 0;
  if (!hasAnnotations) {
    return message;
  }
  return { ...message, annotations: [...annotations] };
}
/**
 * Incrementally parses a "complex" AI data stream into assistant messages.
 *
 * Reads typed parts ("text", "function_call", "tool_calls", "data",
 * "message_annotations") from `reader` via `readDataStream` and maintains
 * `prefixMap`: at most one message per kind plus an accumulating `data`
 * array. After every part, `update(mergedMessages, data)` is invoked so the
 * caller can render intermediate state; when the stream ends, `onFinish`
 * (if given) receives the final `prefixMap`, and the collected messages and
 * data are returned.
 *
 * @param reader - stream reader consumed by `readDataStream`.
 * @param abortControllerRef - optional ref; reading is treated as aborted
 *   when `abortControllerRef.current === null`.
 * @param update - called after each part with the messages touched this
 *   iteration and a copy of the accumulated data array.
 * @param onFinish - optional; called once with the final `prefixMap`.
 * @param generateId - id factory for newly created messages.
 * @param getCurrentDate - clock override used for message `createdAt`.
 * @returns `{ messages, data }` — the final text/function_call/tool_calls
 *   messages (whichever exist) and the accumulated data items.
 */
async function parseComplexResponse({
  reader,
  abortControllerRef,
  update,
  onFinish,
  generateId: generateId2 = generateId,
  getCurrentDate = () => /* @__PURE__ */ new Date()
}) {
  // One createdAt timestamp is shared by every message built in this call.
  const createdAt = getCurrentDate();
  const prefixMap = {
    data: []
  };
  let message_annotations = void 0;
  for await (const { type, value } of readDataStream(reader, {
    isAborted: () => (abortControllerRef == null ? void 0 : abortControllerRef.current) === null
  })) {
    if (type === "text") {
      if (prefixMap["text"]) {
        // Append the text delta to the existing assistant message.
        prefixMap["text"] = {
          ...prefixMap["text"],
          content: (prefixMap["text"].content || "") + value
        };
      } else {
        // First text delta: create the assistant message.
        prefixMap["text"] = {
          id: generateId2(),
          role: "assistant",
          content: value,
          createdAt
        };
      }
    }
    // NOTE(review): functionCallMessage/toolCallMessage are re-derived each
    // iteration, so `merged` (below) includes them only on iterations that
    // produced them or carried annotations; the final return still includes
    // everything stored in prefixMap.
    let functionCallMessage = null;
    if (type === "function_call") {
      prefixMap["function_call"] = {
        id: generateId2(),
        role: "assistant",
        content: "",
        function_call: value.function_call,
        name: value.function_call.name,
        createdAt
      };
      functionCallMessage = prefixMap["function_call"];
    }
    let toolCallMessage = null;
    if (type === "tool_calls") {
      prefixMap["tool_calls"] = {
        id: generateId2(),
        role: "assistant",
        content: "",
        tool_calls: value.tool_calls,
        createdAt
      };
      toolCallMessage = prefixMap["tool_calls"];
    }
    if (type === "data") {
      prefixMap["data"].push(...value);
    }
    let responseMessage = prefixMap["text"];
    if (type === "message_annotations") {
      // Accumulate annotations across parts, then re-attach them to every
      // message kind that exists so far.
      if (!message_annotations) {
        message_annotations = [...value];
      } else {
        message_annotations.push(...value);
      }
      functionCallMessage = assignAnnotationsToMessage(
        prefixMap["function_call"],
        message_annotations
      );
      toolCallMessage = assignAnnotationsToMessage(
        prefixMap["tool_calls"],
        message_annotations
      );
      responseMessage = assignAnnotationsToMessage(
        prefixMap["text"],
        message_annotations
      );
    }
    // Once any annotations exist, mirror them onto the stored messages so
    // the final return value carries them too.
    if (message_annotations == null ? void 0 : message_annotations.length) {
      const messagePrefixKeys = [
        "text",
        "function_call",
        "tool_calls"
      ];
      messagePrefixKeys.forEach((key) => {
        if (prefixMap[key]) {
          prefixMap[key].annotations = [...message_annotations];
        }
      });
    }
    // Emit a snapshot of this iteration's messages (annotations re-applied)
    // plus a defensive copy of the data array.
    const merged = [functionCallMessage, toolCallMessage, responseMessage].filter(Boolean).map((message) => ({
      ...assignAnnotationsToMessage(message, message_annotations)
    }));
    update(merged, [...prefixMap["data"]]);
  }
  onFinish == null ? void 0 : onFinish(prefixMap);
  return {
    messages: [
      prefixMap.text,
      prefixMap.function_call,
      prefixMap.tool_calls
    ].filter(Boolean),
    data: prefixMap.data
  };
}
// streams/streaming-react-response.ts
/**
 * Experimental: turns a response stream into a linked list of UI payloads.
 *
 * NOTE: the constructor ends with `return next;`, so
 * `new experimental_StreamingReactResponse(...)` actually yields a *Promise*
 * of `{ next, ui, content }` rows rather than a class instance; each
 * resolved row links to the following one via its `next` promise, and the
 * chain terminates with `next: null`.
 */
var experimental_StreamingReactResponse = class {
  constructor(res, options) {
    var _a, _b;
    // Head of the promise chain; each stream update resolves the current
    // promise with a payload plus a fresh `next` promise.
    let resolveFunc = () => {
    };
    let next = new Promise((resolve) => {
      resolveFunc = resolve;
    });
    // If the caller supplied stream data, merge its frames into the
    // response stream before parsing.
    const processedStream = (options == null ? void 0 : options.data) != null ? res.pipeThrough((_a = options == null ? void 0 : options.data) == null ? void 0 : _a.stream) : res;
    let lastPayload = void 0;
    // Deliberately not awaited: the returned chain is resolved from the
    // update/onFinish callbacks as parsing progresses.
    parseComplexResponse({
      reader: processedStream.getReader(),
      update: (merged, data) => {
        var _a2, _b2, _c;
        // Content of the first merged message; empty string when absent.
        const content = (_b2 = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b2 : "";
        // Let the caller render custom UI; fall back to the raw content.
        const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content, data })) || content;
        const payload = { ui, content };
        // Resolve the previous row and prepare the next link in the chain.
        const resolvePrevious = resolveFunc;
        const nextRow = new Promise((resolve) => {
          resolveFunc = resolve;
        });
        resolvePrevious({
          next: nextRow,
          ...payload
        });
        lastPayload = payload;
      },
      generateId: (_b = options == null ? void 0 : options.generateId) != null ? _b : generateId,
      onFinish: () => {
        // Terminate the chain by re-emitting the last payload with
        // `next: null`; nothing is emitted if no update ever fired.
        if (lastPayload !== void 0) {
          resolveFunc({
            next: null,
            ...lastPayload
          });
        }
      }
    });
    return next;
  }
};
// streams/streaming-text-response.ts
/**
 * A `Response` whose body is a streamed plain-text payload.
 *
 * @param res - ReadableStream used as the response body.
 * @param init - optional ResponseInit; its headers are merged in, but its
 *   `status` is intentionally overridden with 200.
 * @param data - optional stream-data instance whose frames are interleaved
 *   into the body via `data.stream`.
 */
var StreamingTextResponse = class extends Response {
  constructor(res, init, data) {
    // Interleave any stream-data frames before handing the body over.
    const body = data ? res.pipeThrough(data.stream) : res;
    super(body, {
      ...init,
      // A streaming text response is always delivered with status 200;
      // a caller-provided init.status is deliberately ignored.
      status: 200,
      headers: {
        "Content-Type": "text/plain; charset=utf-8",
        ...init == null ? void 0 : init.headers
      }
    });
  }
};
/**
 * Pipes a web ReadableStream into a Node.js ServerResponse-like object.
 *
 * Writes the head (default status 200, plain-text content type, merged
 * `init.headers`), copies every chunk from `res` into `response`, and ends
 * the response when the stream is exhausted.
 *
 * @param res - ReadableStream whose chunks are written to the response.
 * @param response - HTTP response object (needs writeHead/write/end).
 * @param init - optional `{ status, headers }` overrides for the head.
 */
function streamToResponse(res, response, init) {
  response.writeHead((init == null ? void 0 : init.status) || 200, {
    "Content-Type": "text/plain; charset=utf-8",
    ...init == null ? void 0 : init.headers
  });
  const reader = res.getReader();
  function read() {
    reader.read().then(({ done, value }) => {
      if (done) {
        response.end();
        return;
      }
      response.write(value);
      read();
    }).catch((error) => {
      // Bug fix: the original had no rejection handler, so a stream error
      // left the HTTP connection hanging and surfaced as an unhandled
      // promise rejection. Tear the response down instead.
      if (typeof response.destroy === "function") {
        response.destroy(error);
      } else {
        response.end();
      }
    });
  }
  read();
}
// Annotate the CommonJS export names for ESM import in node:
// (Dead code by design: the `0 &&` guard means this assignment never runs
// at runtime; Node's cjs-module-lexer statically parses it so these names
// are importable as named exports from ESM. Do not "fix" or remove it.)
0 && (module.exports = {
  AIStream,
  AWSBedrockAnthropicMessagesStream,
  AWSBedrockAnthropicStream,
  AWSBedrockCohereStream,
  AWSBedrockLlama2Stream,
  AWSBedrockStream,
  AnthropicStream,
  AssistantResponse,
  CohereStream,
  GenerateObjectResult,
  GenerateTextResult,
  GoogleGenerativeAIStream,
  HuggingFaceStream,
  InkeepStream,
  LangChainStream,
  MistralStream,
  OpenAIStream,
  ReplicateStream,
  StreamData,
  StreamObjectResult,
  StreamTextResult,
  StreamingTextResponse,
  convertDataContentToBase64String,
  convertDataContentToUint8Array,
  createCallbacksTransformer,
  createChunkDecoder,
  createEventStreamTransformer,
  createStreamDataTransformer,
  experimental_AssistantResponse,
  experimental_StreamData,
  experimental_StreamingReactResponse,
  experimental_generateObject,
  experimental_generateText,
  experimental_streamObject,
  experimental_streamText,
  formatStreamPart,
  generateId,
  isStreamStringEqualToType,
  nanoid,
  parseStreamPart,
  readDataStream,
  readableFromAsyncIterable,
  streamToResponse,
  tool,
  trimStartOfStreamHelper
});
//# sourceMappingURL=index.js.map
|