ai 3.2.45 → 3.3.1
This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/dist/index.d.mts +221 -21
- package/dist/index.d.ts +221 -21
- package/dist/index.js +640 -278
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +618 -266
- package/dist/index.mjs.map +1 -1
- package/package.json +10 -8
- package/rsc/dist/index.d.ts +3 -3
- package/rsc/dist/rsc-server.d.mts +3 -3
- package/rsc/dist/rsc-server.mjs +1299 -1033
- package/rsc/dist/rsc-server.mjs.map +1 -1
- package/rsc/dist/rsc-shared.mjs +23 -45
- package/rsc/dist/rsc-shared.mjs.map +1 -1
- package/svelte/dist/index.js +1 -1
- package/svelte/dist/index.js.map +1 -1
- package/svelte/dist/index.mjs +1 -1
- package/svelte/dist/index.mjs.map +1 -1
package/rsc/dist/rsc-server.mjs
CHANGED
@@ -1,12 +1,11 @@
 // rsc/ai-state.tsx
-import { AsyncLocalStorage } from "async_hooks";
 import * as jsondiffpatch from "jsondiffpatch";
+import { AsyncLocalStorage } from "async_hooks";

-//
-import { Suspense } from "react";
-import { Fragment, jsx, jsxs } from "react/jsx-runtime";
+// util/create-resolvable-promise.ts
 function createResolvablePromise() {
-let resolve
+let resolve;
+let reject;
 const promise = new Promise((res, rej) => {
 resolve = res;
 reject = rej;
@@ -17,43 +16,9 @@ function createResolvablePromise() {
 reject
 };
 }
-
-
-
-// current
-n
-// next
-}) => {
-const chunk = await n;
-if (chunk.done) {
-return chunk.value;
-}
-if (chunk.append) {
-return /* @__PURE__ */ jsxs(Fragment, { children: [
-c,
-/* @__PURE__ */ jsx(Suspense, { fallback: chunk.value, children: /* @__PURE__ */ jsx(R, { c: chunk.value, n: chunk.next }) })
-] });
-}
-return /* @__PURE__ */ jsx(Suspense, { fallback: chunk.value, children: /* @__PURE__ */ jsx(R, { c: chunk.value, n: chunk.next }) });
-}
-][0];
-function createSuspensedChunk(initialValue) {
-const { promise, resolve, reject } = createResolvablePromise();
-return {
-row: /* @__PURE__ */ jsx(Suspense, { fallback: initialValue, children: /* @__PURE__ */ jsx(R, { c: initialValue, n: promise }) }),
-resolve,
-reject
-};
-}
-var isFunction = (x) => typeof x === "function";
-var consumeStream = async (stream) => {
-const reader = stream.getReader();
-while (true) {
-const { done } = await reader.read();
-if (done)
-break;
-}
-};
+
+// util/is-function.ts
+var isFunction = (value) => typeof value === "function";

 // rsc/ai-state.tsx
 var asyncAIStateStorage = new AsyncLocalStorage();
@@ -115,7 +80,7 @@ function getMutableAIState(...args) {
 store.mutationDeltaResolve = resolve;
 }
 function doUpdate(newState, done) {
-var
+var _a8, _b;
 if (args.length > 0) {
 if (typeof store.currentState !== "object") {
 const key = args[0];
@@ -139,7 +104,7 @@ function getMutableAIState(...args) {
 store.currentState = newState;
 }
 }
-(_b = (
+(_b = (_a8 = store.options).onSetAIState) == null ? void 0 : _b.call(_a8, {
 key: args.length > 0 ? args[0] : void 0,
 state: store.currentState,
 done
@@ -174,209 +139,628 @@ function getMutableAIState(...args) {
 return mutableState;
 }

-// rsc/
-
-
-// core/util/retry-with-exponential-backoff.ts
-import { APICallError, RetryError } from "@ai-sdk/provider";
-import { getErrorMessage, isAbortError } from "@ai-sdk/provider-utils";
-
-// core/util/delay.ts
-async function delay(delayInMs) {
-return new Promise((resolve) => setTimeout(resolve, delayInMs));
-}
+// rsc/constants.ts
+var STREAMABLE_VALUE_TYPE = Symbol.for("ui.streamable.value");
+var DEV_DEFAULT_STREAMABLE_WARNING_TIME = 15 * 1e3;

-//
-
-
-
-
-
-
-
-
-
-
-maxRetries,
-delayInMs,
-backoffFactor
-}, errors = []) {
-try {
-return await f();
-} catch (error) {
-if (isAbortError(error)) {
-throw error;
-}
-if (maxRetries === 0) {
-throw error;
-}
-const errorMessage = getErrorMessage(error);
-const newErrors = [...errors, error];
-const tryNumber = newErrors.length;
-if (tryNumber > maxRetries) {
-throw new RetryError({
-message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
-reason: "maxRetriesExceeded",
-errors: newErrors
-});
-}
-if (error instanceof Error && APICallError.isAPICallError(error) && error.isRetryable === true && tryNumber <= maxRetries) {
-await delay(delayInMs);
-return _retryWithExponentialBackoff(
-f,
-{ maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
-newErrors
-);
+// rsc/create-suspended-chunk.tsx
+import { Suspense } from "react";
+import { Fragment, jsx, jsxs } from "react/jsx-runtime";
+var R = [
+async ({
+c: current,
+n: next
+}) => {
+const chunk = await next;
+if (chunk.done) {
+return chunk.value;
 }
-if (
-
+if (chunk.append) {
+return /* @__PURE__ */ jsxs(Fragment, { children: [
+current,
+/* @__PURE__ */ jsx(Suspense, { fallback: chunk.value, children: /* @__PURE__ */ jsx(R, { c: chunk.value, n: chunk.next }) })
+] });
 }
-
-message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
-reason: "errorNotRetryable",
-errors: newErrors
-});
+return /* @__PURE__ */ jsx(Suspense, { fallback: chunk.value, children: /* @__PURE__ */ jsx(R, { c: chunk.value, n: chunk.next }) });
 }
+][0];
+function createSuspendedChunk(initialValue) {
+const { promise, resolve, reject } = createResolvablePromise();
+return {
+row: /* @__PURE__ */ jsx(Suspense, { fallback: initialValue, children: /* @__PURE__ */ jsx(R, { c: initialValue, n: promise }) }),
+resolve,
+reject
+};
 }

-//
-
-
-
-
-
-
-
-{ mimeType: "image/webp", bytes: [82, 73, 70, 70] }
-];
-function detectImageMimeType(image) {
-for (const { bytes, mimeType } of mimeTypeSignatures) {
-if (image.length >= bytes.length && bytes.every((byte, index) => image[index] === byte)) {
-return mimeType;
+// rsc/streamable.tsx
+function createStreamableUI(initialValue) {
+let currentValue = initialValue;
+let closed = false;
+let { row, resolve, reject } = createSuspendedChunk(initialValue);
+function assertStream(method) {
+if (closed) {
+throw new Error(method + ": UI stream is already closed.");
 }
 }
-
-
-
-
-
-
-
-
-
-
-
-try {
-const response = await fetchImplementation(urlText);
-if (!response.ok) {
-throw new DownloadError({
-url: urlText,
-statusCode: response.status,
-statusText: response.statusText
-});
-}
-return {
-data: new Uint8Array(await response.arrayBuffer()),
-mimeType: (_a = response.headers.get("content-type")) != null ? _a : void 0
-};
-} catch (error) {
-if (DownloadError.isDownloadError(error)) {
-throw error;
+let warningTimeout;
+function warnUnclosedStream() {
+if (process.env.NODE_ENV === "development") {
+if (warningTimeout) {
+clearTimeout(warningTimeout);
+}
+warningTimeout = setTimeout(() => {
+console.warn(
+"The streamable UI has been slow to update. This may be a bug or a performance issue or you forgot to call `.done()`."
+);
+}, DEV_DEFAULT_STREAMABLE_WARNING_TIME);
 }
-throw new DownloadError({ url: urlText, cause: error });
-}
-}
-
-// core/prompt/data-content.ts
-import { InvalidDataContentError } from "@ai-sdk/provider";
-import {
-convertBase64ToUint8Array,
-convertUint8ArrayToBase64
-} from "@ai-sdk/provider-utils";
-function convertDataContentToUint8Array(content) {
-if (content instanceof Uint8Array) {
-return content;
 }
-
-
-
-
-
-
-
-
-}
+warnUnclosedStream();
+const streamable2 = {
+value: row,
+update(value) {
+assertStream(".update()");
+if (value === currentValue) {
+warnUnclosedStream();
+return streamable2;
+}
+const resolvable = createResolvablePromise();
+currentValue = value;
+resolve({ value: currentValue, done: false, next: resolvable.promise });
+resolve = resolvable.resolve;
+reject = resolvable.reject;
+warnUnclosedStream();
+return streamable2;
+},
+append(value) {
+assertStream(".append()");
+const resolvable = createResolvablePromise();
+currentValue = value;
+resolve({ value, done: false, append: true, next: resolvable.promise });
+resolve = resolvable.resolve;
+reject = resolvable.reject;
+warnUnclosedStream();
+return streamable2;
+},
+error(error) {
+assertStream(".error()");
+if (warningTimeout) {
+clearTimeout(warningTimeout);
+}
+closed = true;
+reject(error);
+return streamable2;
+},
+done(...args) {
+assertStream(".done()");
+if (warningTimeout) {
+clearTimeout(warningTimeout);
+}
+closed = true;
+if (args.length) {
+resolve({ value: args[0], done: true });
+return streamable2;
+}
+resolve({ value: currentValue, done: true });
+return streamable2;
 }
-}
-
-return new Uint8Array(content);
-}
-throw new InvalidDataContentError({ content });
+};
+return streamable2;
 }
-
-
-
-
-
-message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
-}) {
-super(message);
-this.name = "AI_InvalidMessageRoleError";
-this.role = role;
-}
-static isInvalidMessageRoleError(error) {
-return error instanceof Error && error.name === "AI_InvalidMessageRoleError" && typeof error.role === "string";
+var STREAMABLE_VALUE_INTERNAL_LOCK = Symbol("streamable.value.lock");
+function createStreamableValue(initialValue) {
+const isReadableStream = initialValue instanceof ReadableStream || typeof initialValue === "object" && initialValue !== null && "getReader" in initialValue && typeof initialValue.getReader === "function" && "locked" in initialValue && typeof initialValue.locked === "boolean";
+if (!isReadableStream) {
+return createStreamableValueImpl(initialValue);
 }
-
-
+const streamableValue = createStreamableValueImpl();
+streamableValue[STREAMABLE_VALUE_INTERNAL_LOCK] = true;
+(async () => {
+try {
+const reader = initialValue.getReader();
+while (true) {
+const { value, done } = await reader.read();
+if (done) {
+break;
+}
+streamableValue[STREAMABLE_VALUE_INTERNAL_LOCK] = false;
+if (typeof value === "string") {
+streamableValue.append(value);
+} else {
+streamableValue.update(value);
+}
+streamableValue[STREAMABLE_VALUE_INTERNAL_LOCK] = true;
+}
+streamableValue[STREAMABLE_VALUE_INTERNAL_LOCK] = false;
+streamableValue.done();
+} catch (e) {
+streamableValue[STREAMABLE_VALUE_INTERNAL_LOCK] = false;
+streamableValue.error(e);
+}
+})();
+return streamableValue;
+}
+function createStreamableValueImpl(initialValue) {
+let closed = false;
+let locked = false;
+let resolvable = createResolvablePromise();
+let currentValue = initialValue;
+let currentError;
+let currentPromise = resolvable.promise;
+let currentPatchValue;
+function assertStream(method) {
+if (closed) {
+throw new Error(method + ": Value stream is already closed.");
+}
+if (locked) {
+throw new Error(
+method + ": Value stream is locked and cannot be updated."
+);
+}
+}
+let warningTimeout;
+function warnUnclosedStream() {
+if (process.env.NODE_ENV === "development") {
+if (warningTimeout) {
+clearTimeout(warningTimeout);
+}
+warningTimeout = setTimeout(() => {
+console.warn(
+"The streamable value has been slow to update. This may be a bug or a performance issue or you forgot to call `.done()`."
+);
+}, DEV_DEFAULT_STREAMABLE_WARNING_TIME);
+}
+}
+warnUnclosedStream();
+function createWrapped(initialChunk) {
+let init;
+if (currentError !== void 0) {
+init = { error: currentError };
+} else {
+if (currentPatchValue && !initialChunk) {
+init = { diff: currentPatchValue };
+} else {
+init = { curr: currentValue };
+}
+}
+if (currentPromise) {
+init.next = currentPromise;
+}
+if (initialChunk) {
+init.type = STREAMABLE_VALUE_TYPE;
+}
+return init;
+}
+function updateValueStates(value) {
+currentPatchValue = void 0;
+if (typeof value === "string") {
+if (typeof currentValue === "string") {
+if (value.startsWith(currentValue)) {
+currentPatchValue = [0, value.slice(currentValue.length)];
+}
+}
+}
+currentValue = value;
+}
+const streamable2 = {
+set [STREAMABLE_VALUE_INTERNAL_LOCK](state) {
+locked = state;
+},
+get value() {
+return createWrapped(true);
+},
+update(value) {
+assertStream(".update()");
+const resolvePrevious = resolvable.resolve;
+resolvable = createResolvablePromise();
+updateValueStates(value);
+currentPromise = resolvable.promise;
+resolvePrevious(createWrapped());
+warnUnclosedStream();
+return streamable2;
+},
+append(value) {
+assertStream(".append()");
+if (typeof currentValue !== "string" && typeof currentValue !== "undefined") {
+throw new Error(
+`.append(): The current value is not a string. Received: ${typeof currentValue}`
+);
+}
+if (typeof value !== "string") {
+throw new Error(
+`.append(): The value is not a string. Received: ${typeof value}`
+);
+}
+const resolvePrevious = resolvable.resolve;
+resolvable = createResolvablePromise();
+if (typeof currentValue === "string") {
+currentPatchValue = [0, value];
+currentValue = currentValue + value;
+} else {
+currentPatchValue = void 0;
+currentValue = value;
+}
+currentPromise = resolvable.promise;
+resolvePrevious(createWrapped());
+warnUnclosedStream();
+return streamable2;
+},
+error(error) {
+assertStream(".error()");
+if (warningTimeout) {
+clearTimeout(warningTimeout);
+}
+closed = true;
+currentError = error;
+currentPromise = void 0;
+resolvable.resolve({ error });
+return streamable2;
+},
+done(...args) {
+assertStream(".done()");
+if (warningTimeout) {
+clearTimeout(warningTimeout);
+}
+closed = true;
+currentPromise = void 0;
+if (args.length) {
+updateValueStates(args[0]);
+resolvable.resolve(createWrapped());
+return streamable2;
+}
+resolvable.resolve({});
+return streamable2;
+}
+};
+return streamable2;
+}
+
+// rsc/render.ts
+import zodToJsonSchema2 from "zod-to-json-schema";
+
+// util/retry-with-exponential-backoff.ts
+import { APICallError } from "@ai-sdk/provider";
+import { getErrorMessage, isAbortError } from "@ai-sdk/provider-utils";
+
+// util/delay.ts
+async function delay(delayInMs) {
+return new Promise((resolve) => setTimeout(resolve, delayInMs));
+}
+
+// util/retry-error.ts
+import { AISDKError } from "@ai-sdk/provider";
+var name = "AI_RetryError";
+var marker = `vercel.ai.error.${name}`;
+var symbol = Symbol.for(marker);
+var _a;
+var RetryError = class extends AISDKError {
+constructor({
+message,
+reason,
+errors
+}) {
+super({ name, message });
+this[_a] = true;
+this.reason = reason;
+this.errors = errors;
+this.lastError = errors[errors.length - 1];
+}
+static isInstance(error) {
+return AISDKError.hasMarker(error, marker);
+}
+/**
+* @deprecated use `isInstance` instead
+*/
+static isRetryError(error) {
+return error instanceof Error && error.name === name && typeof error.reason === "string" && Array.isArray(error.errors);
+}
+/**
+* @deprecated Do not use this method. It will be removed in the next major version.
+*/
+toJSON() {
+return {
 name: this.name,
 message: this.message,
-
-
+reason: this.reason,
+lastError: this.lastError,
+errors: this.errors
 };
 }
 };
+_a = symbol;

-//
-
-
-
-
-}) {
-
-
-
-
-
-
-
-
-
-
-
+// util/retry-with-exponential-backoff.ts
+var retryWithExponentialBackoff = ({
+maxRetries = 2,
+initialDelayInMs = 2e3,
+backoffFactor = 2
+} = {}) => async (f) => _retryWithExponentialBackoff(f, {
+maxRetries,
+delayInMs: initialDelayInMs,
+backoffFactor
+});
+async function _retryWithExponentialBackoff(f, {
+maxRetries,
+delayInMs,
+backoffFactor
+}, errors = []) {
+try {
+return await f();
+} catch (error) {
+if (isAbortError(error)) {
+throw error;
+}
+if (maxRetries === 0) {
+throw error;
+}
+const errorMessage = getErrorMessage(error);
+const newErrors = [...errors, error];
+const tryNumber = newErrors.length;
+if (tryNumber > maxRetries) {
+throw new RetryError({
+message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
+reason: "maxRetriesExceeded",
+errors: newErrors
 });
-break;
 }
-
-
-
-
-
+if (error instanceof Error && APICallError.isAPICallError(error) && error.isRetryable === true && tryNumber <= maxRetries) {
+await delay(delayInMs);
+return _retryWithExponentialBackoff(
+f,
+{ maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
+newErrors
 );
-break;
 }
-
-
-throw new Error(`Unsupported prompt type: ${_exhaustiveCheck}`);
+if (tryNumber === 1) {
+throw error;
 }
+throw new RetryError({
+message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
+reason: "errorNotRetryable",
+errors: newErrors
+});
 }
-return languageModelMessages;
 }
-
-
-
-
-
+
+// core/prompt/convert-to-language-model-prompt.ts
+import { getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider-utils";
+
+// util/download-error.ts
+import { AISDKError as AISDKError2 } from "@ai-sdk/provider";
+var name2 = "AI_DownloadError";
+var marker2 = `vercel.ai.error.${name2}`;
+var symbol2 = Symbol.for(marker2);
+var _a2;
+var DownloadError = class extends AISDKError2 {
+constructor({
+url,
+statusCode,
+statusText,
+cause,
+message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
+}) {
+super({ name: name2, message, cause });
+this[_a2] = true;
+this.url = url;
+this.statusCode = statusCode;
+this.statusText = statusText;
+}
+static isInstance(error) {
+return AISDKError2.hasMarker(error, marker2);
+}
+/**
+* @deprecated use `isInstance` instead
+*/
+static isDownloadError(error) {
+return error instanceof Error && error.name === name2 && typeof error.url === "string" && (error.statusCode == null || typeof error.statusCode === "number") && (error.statusText == null || typeof error.statusText === "string");
+}
+/**
+* @deprecated Do not use this method. It will be removed in the next major version.
+*/
+toJSON() {
+return {
+name: this.name,
+message: this.message,
+url: this.url,
+statusCode: this.statusCode,
+statusText: this.statusText,
+cause: this.cause
+};
+}
+};
+_a2 = symbol2;
+
+// util/download.ts
+async function download({
+url,
+fetchImplementation = fetch
+}) {
+var _a8;
+const urlText = url.toString();
+try {
+const response = await fetchImplementation(urlText);
+if (!response.ok) {
+throw new DownloadError({
+url: urlText,
+statusCode: response.status,
+statusText: response.statusText
+});
+}
+return {
+data: new Uint8Array(await response.arrayBuffer()),
+mimeType: (_a8 = response.headers.get("content-type")) != null ? _a8 : void 0
+};
+} catch (error) {
+if (DownloadError.isInstance(error)) {
+throw error;
+}
+throw new DownloadError({ url: urlText, cause: error });
+}
+}
+
+// core/util/detect-image-mimetype.ts
+var mimeTypeSignatures = [
+{ mimeType: "image/gif", bytes: [71, 73, 70] },
+{ mimeType: "image/png", bytes: [137, 80, 78, 71] },
+{ mimeType: "image/jpeg", bytes: [255, 216] },
+{ mimeType: "image/webp", bytes: [82, 73, 70, 70] }
+];
+function detectImageMimeType(image) {
+for (const { bytes, mimeType } of mimeTypeSignatures) {
+if (image.length >= bytes.length && bytes.every((byte, index) => image[index] === byte)) {
+return mimeType;
+}
+}
+return void 0;
+}
+
+// core/prompt/data-content.ts
+import {
+convertBase64ToUint8Array,
+convertUint8ArrayToBase64
+} from "@ai-sdk/provider-utils";
+
+// core/prompt/invalid-data-content-error.ts
+import { AISDKError as AISDKError3 } from "@ai-sdk/provider";
+var name3 = "AI_InvalidDataContentError";
+var marker3 = `vercel.ai.error.${name3}`;
+var symbol3 = Symbol.for(marker3);
+var _a3;
+var InvalidDataContentError = class extends AISDKError3 {
+constructor({
+content,
+cause,
+message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
+}) {
+super({ name: name3, message, cause });
+this[_a3] = true;
+this.content = content;
+}
+static isInstance(error) {
+return AISDKError3.hasMarker(error, marker3);
+}
+/**
+* @deprecated use `isInstance` instead
+*/
+static isInvalidDataContentError(error) {
+return error instanceof Error && error.name === name3 && error.content != null;
+}
+/**
+* @deprecated Do not use this method. It will be removed in the next major version.
+*/
+toJSON() {
+return {
+name: this.name,
+message: this.message,
+stack: this.stack,
+cause: this.cause,
+content: this.content
+};
+}
+};
+_a3 = symbol3;
+
+// core/prompt/data-content.ts
+function convertDataContentToUint8Array(content) {
+if (content instanceof Uint8Array) {
+return content;
+}
+if (typeof content === "string") {
+try {
+return convertBase64ToUint8Array(content);
+} catch (error) {
+throw new InvalidDataContentError({
+message: "Invalid data content. Content string is not a base64-encoded media.",
+content,
+cause: error
+});
+}
+}
+if (content instanceof ArrayBuffer) {
+return new Uint8Array(content);
+}
+throw new InvalidDataContentError({ content });
+}
+
+// core/prompt/invalid-message-role-error.ts
+import { AISDKError as AISDKError4 } from "@ai-sdk/provider";
+var name4 = "AI_InvalidMessageRoleError";
+var marker4 = `vercel.ai.error.${name4}`;
+var symbol4 = Symbol.for(marker4);
+var _a4;
+var InvalidMessageRoleError = class extends AISDKError4 {
+constructor({
+role,
+message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
+}) {
+super({ name: name4, message });
+this[_a4] = true;
+this.role = role;
+}
+static isInstance(error) {
+return AISDKError4.hasMarker(error, marker4);
+}
+/**
+* @deprecated use `isInstance` instead
+*/
+static isInvalidMessageRoleError(error) {
+return error instanceof Error && error.name === name4 && typeof error.role === "string";
+}
+/**
+* @deprecated Do not use this method. It will be removed in the next major version.
+*/
+toJSON() {
+return {
+name: this.name,
+message: this.message,
+stack: this.stack,
+role: this.role
+};
+}
+};
+_a4 = symbol4;
+
+// core/prompt/convert-to-language-model-prompt.ts
+async function convertToLanguageModelPrompt({
+prompt,
+modelSupportsImageUrls = true,
+downloadImplementation = download
+}) {
+const languageModelMessages = [];
+if (prompt.system != null) {
+languageModelMessages.push({ role: "system", content: prompt.system });
+}
+const downloadedImages = modelSupportsImageUrls || prompt.messages == null ? null : await downloadImages(prompt.messages, downloadImplementation);
+const promptType = prompt.type;
+switch (promptType) {
+case "prompt": {
+languageModelMessages.push({
+role: "user",
+content: [{ type: "text", text: prompt.prompt }]
+});
+break;
+}
+case "messages": {
+languageModelMessages.push(
+...prompt.messages.map(
+(message) => convertToLanguageModelMessage(message, downloadedImages)
+)
+);
+break;
+}
+default: {
+const _exhaustiveCheck = promptType;
+throw new Error(`Unsupported prompt type: ${_exhaustiveCheck}`);
+}
+}
+return languageModelMessages;
+}
+function convertToLanguageModelMessage(message, downloadedImages) {
+const role = message.role;
+switch (role) {
+case "system": {
+return { role: "system", content: message.content };
 }
 case "user": {
 if (typeof message.content === "string") {
@@ -389,7 +773,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
 role: "user",
 content: message.content.map(
 (part) => {
-var
+var _a8, _b, _c;
 switch (part.type) {
 case "text": {
 return part;
@@ -407,7 +791,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
 return {
 type: "image",
 image: downloadedImage.data,
-mimeType: (
+mimeType: (_a8 = part.mimeType) != null ? _a8 : downloadedImage.mimeType
 };
 }
 }
@@ -556,8 +940,48 @@ function getValidatedPrompt(prompt) {
 };
 }

+// errors/invalid-argument-error.ts
+import { AISDKError as AISDKError5 } from "@ai-sdk/provider";
+var name5 = "AI_InvalidArgumentError";
+var marker5 = `vercel.ai.error.${name5}`;
+var symbol5 = Symbol.for(marker5);
+var _a5;
+var InvalidArgumentError = class extends AISDKError5 {
+constructor({
+parameter,
+value,
+message
+}) {
+super({
+name: name5,
+message: `Invalid argument for parameter ${parameter}: ${message}`
+});
+this[_a5] = true;
+this.parameter = parameter;
+this.value = value;
+}
+static isInstance(error) {
+return AISDKError5.hasMarker(error, marker5);
+}
+/**
+* @deprecated use `isInstance` instead
+*/
+static isInvalidArgumentError(error) {
+return error instanceof Error && error.name === name5 && typeof error.parameter === "string" && typeof error.value === "string";
+}
+toJSON() {
+return {
+name: this.name,
+message: this.message,
+stack: this.stack,
+parameter: this.parameter,
+value: this.value
+};
+}
+};
+_a5 = symbol5;
+
 // core/prompt/prepare-call-settings.ts
-import { InvalidArgumentError } from "@ai-sdk/provider";
 function prepareCallSettings({
 maxTokens,
 temperature,
@@ -660,797 +1084,648 @@ function prepareCallSettings({
|
|
660
1084
|
// core/types/token-usage.ts
|
661
1085
|
function calculateCompletionTokenUsage(usage) {
|
662
1086
|
return {
|
663
|
-
promptTokens: usage.promptTokens,
|
664
|
-
completionTokens: usage.completionTokens,
|
665
|
-
totalTokens: usage.promptTokens + usage.completionTokens
|
666
|
-
};
|
667
|
-
}
|
668
|
-
|
669
|
-
// core/util/schema.ts
|
670
|
-
import { validatorSymbol } from "@ai-sdk/provider-utils";
|
671
|
-
import zodToJsonSchema from "zod-to-json-schema";
|
672
|
-
var schemaSymbol = Symbol("vercel.ai.schema");
|
673
|
-
function jsonSchema(jsonSchema2, {
|
674
|
-
validate
|
675
|
-
} = {}) {
|
676
|
-
return {
|
677
|
-
[schemaSymbol]: true,
|
678
|
-
_type: void 0,
|
679
|
-
// should never be used directly
|
680
|
-
[validatorSymbol]: true,
|
681
|
-
jsonSchema: jsonSchema2,
|
682
|
-
validate
|
683
|
-
};
|
684
|
-
}
|
685
|
-
function isSchema(value) {
|
686
|
-
return typeof value === "object" && value !== null && schemaSymbol in value && value[schemaSymbol] === true && "jsonSchema" in value && "validate" in value;
|
687
|
-
}
|
688
|
-
function asSchema(schema) {
|
689
|
-
return isSchema(schema) ? schema : zodSchema(schema);
|
690
|
-
}
|
691
|
-
function zodSchema(zodSchema2) {
|
692
|
-
return jsonSchema(
|
693
|
-
// we assume that zodToJsonSchema will return a valid JSONSchema7:
|
694
|
-
zodToJsonSchema(zodSchema2),
|
695
|
-
{
|
696
|
-
validate: (value) => {
|
697
|
-
const result = zodSchema2.safeParse(value);
|
698
|
-
return result.success ? { success: true, value: result.data } : { success: false, error: result.error };
|
699
|
-
}
|
700
|
-
}
|
701
|
-
);
|
702
|
-
}
|
703
|
-
|
704
|
-
// core/util/is-non-empty-object.ts
|
705
|
-
function isNonEmptyObject(object) {
|
706
|
-
return object != null && Object.keys(object).length > 0;
|
707
|
-
}
|
708
|
-
|
709
|
-
// core/prompt/prepare-tools-and-tool-choice.ts
|
710
|
-
function prepareToolsAndToolChoice({
|
711
|
-
tools,
|
712
|
-
toolChoice
|
713
|
-
}) {
|
714
|
-
if (!isNonEmptyObject(tools)) {
|
715
|
-
return {
|
716
|
-
tools: void 0,
|
717
|
-
toolChoice: void 0
|
718
|
-
};
|
719
|
-
}
|
720
|
-
return {
|
721
|
-
tools: Object.entries(tools).map(([name, tool]) => ({
|
722
|
-
type: "function",
|
723
|
-
name,
|
724
|
-
description: tool.description,
|
725
|
-
parameters: asSchema(tool.parameters).jsonSchema
|
726
|
-
})),
|
727
|
-
toolChoice: toolChoice == null ? { type: "auto" } : typeof toolChoice === "string" ? { type: toolChoice } : { type: "tool", toolName: toolChoice.toolName }
|
728
|
-
};
|
729
|
-
}
|
730
|
-
|
731
|
-
// streams/ai-stream.ts
|
732
|
-
import {
|
733
|
-
createParser
|
734
|
-
} from "eventsource-parser";
|
735
|
-
function createEventStreamTransformer(customParser) {
|
736
|
-
const textDecoder = new TextDecoder();
|
737
|
-
let eventSourceParser;
|
738
|
-
return new TransformStream({
|
739
|
-
async start(controller) {
|
740
|
-
eventSourceParser = createParser(
|
741
|
-
(event) => {
|
742
|
-
if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
|
743
|
-
// @see https://replicate.com/docs/streaming
|
744
|
-
event.event === "done") {
|
745
|
-
controller.terminate();
|
746
|
-
return;
|
747
|
-
}
|
748
|
-
if ("data" in event) {
|
749
|
-
const parsedMessage = customParser ? customParser(event.data, {
|
750
|
-
event: event.event
|
751
|
-
}) : event.data;
|
752
|
-
if (parsedMessage)
|
753
|
-
controller.enqueue(parsedMessage);
|
754
|
-
}
|
755
|
-
}
|
756
|
-
);
|
757
|
-
},
|
758
|
-
transform(chunk) {
|
759
|
-
eventSourceParser.feed(textDecoder.decode(chunk));
|
760
|
-
}
|
761
|
-
});
|
762
|
-
}
|
763
|
-
function createCallbacksTransformer(cb) {
|
764
|
-
const textEncoder = new TextEncoder();
|
765
|
-
let aggregatedResponse = "";
|
766
|
-
const callbacks = cb || {};
|
767
|
-
return new TransformStream({
|
768
|
-
async start() {
|
769
|
-
if (callbacks.onStart)
|
770
|
-
await callbacks.onStart();
|
771
|
-
},
|
772
|
-
async transform(message, controller) {
|
773
|
-
const content = typeof message === "string" ? message : message.content;
|
774
|
-
controller.enqueue(textEncoder.encode(content));
|
775
|
-
aggregatedResponse += content;
|
776
|
-
if (callbacks.onToken)
|
777
|
-
await callbacks.onToken(content);
|
778
|
-
if (callbacks.onText && typeof message === "string") {
|
779
|
-
await callbacks.onText(message);
|
780
|
-
}
|
781
|
-
},
|
782
|
-
async flush() {
|
783
|
-
const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
|
784
|
-
if (callbacks.onCompletion) {
|
785
|
-
await callbacks.onCompletion(aggregatedResponse);
|
786
|
-
}
|
787
|
-
if (callbacks.onFinal && !isOpenAICallbacks) {
|
788
|
-
await callbacks.onFinal(aggregatedResponse);
|
789
|
-
}
|
790
|
-
}
|
791
|
-
});
|
792
|
-
}
|
793
|
-
function isOfTypeOpenAIStreamCallbacks(callbacks) {
|
794
|
-
return "experimental_onFunctionCall" in callbacks;
|
795
|
-
}
|
796
|
-
function trimStartOfStreamHelper() {
|
797
|
-
let isStreamStart = true;
|
798
|
-
return (text) => {
|
799
|
-
if (isStreamStart) {
|
800
|
-
text = text.trimStart();
|
801
|
-
if (text)
|
802
|
-
isStreamStart = false;
|
803
|
-
}
|
804
|
-
return text;
|
805
|
-
};
|
806
|
-
}
|
807
|
-
function AIStream(response, customParser, callbacks) {
|
808
|
-
if (!response.ok) {
|
809
|
-
if (response.body) {
|
810
|
-
const reader = response.body.getReader();
|
811
|
-
return new ReadableStream({
|
812
|
-
async start(controller) {
|
813
|
-
const { done, value } = await reader.read();
|
814
|
-
if (!done) {
|
815
|
-
const errorText = new TextDecoder().decode(value);
|
816
|
-
controller.error(new Error(`Response error: ${errorText}`));
|
817
|
-
}
|
818
|
-
}
|
819
|
-
});
|
820
|
-
} else {
|
821
|
-
return new ReadableStream({
|
822
|
-
start(controller) {
|
823
|
-
controller.error(new Error("Response error: No response body"));
|
824
|
-
}
|
825
|
-
});
|
826
|
-
}
|
827
|
-
}
|
828
|
-
const responseBodyStream = response.body || createEmptyReadableStream();
|
829
|
-
return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
|
830
|
-
}
|
831
|
-
function createEmptyReadableStream() {
|
832
|
-
return new ReadableStream({
|
833
|
-
start(controller) {
|
834
|
-
controller.close();
|
835
|
-
}
|
836
|
-
});
|
837
|
-
}
|
838
|
-
function readableFromAsyncIterable(iterable) {
|
839
|
-
let it = iterable[Symbol.asyncIterator]();
|
840
|
-
return new ReadableStream({
|
841
|
-
async pull(controller) {
|
842
|
-
const { done, value } = await it.next();
|
843
|
-
if (done)
|
844
|
-
controller.close();
|
845
|
-
else
|
846
|
-
controller.enqueue(value);
|
847
|
-
},
|
848
|
-
async cancel(reason) {
|
849
|
-
var _a;
|
850
|
-
await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
|
851
|
-
}
|
852
|
-
});
|
853
|
-
}
|
854
|
-
|
855
|
-
// streams/stream-data.ts
|
856
|
-
import { formatStreamPart } from "@ai-sdk/ui-utils";
|
857
|
-
function createStreamDataTransformer() {
|
858
|
-
const encoder = new TextEncoder();
|
859
|
-
const decoder = new TextDecoder();
|
860
|
-
return new TransformStream({
|
861
|
-
transform: async (chunk, controller) => {
|
862
|
-
const message = decoder.decode(chunk);
|
863
|
-
controller.enqueue(encoder.encode(formatStreamPart("text", message)));
|
864
|
-
}
|
865
|
-
});
|
866
|
-
}
|
867
|
-
|
868
|
-
// streams/openai-stream.ts
|
869
|
-
import {
|
870
|
-
createChunkDecoder,
|
871
|
-
formatStreamPart as formatStreamPart2
|
872
|
-
} from "@ai-sdk/ui-utils";
|
873
|
-
function parseOpenAIStream() {
|
874
|
-
const extract = chunkToText();
|
875
|
-
return (data) => extract(JSON.parse(data));
|
876
|
-
}
|
877
|
-
async function* streamable(stream) {
|
878
|
-
const extract = chunkToText();
|
879
|
-
for await (let chunk of stream) {
|
880
|
-
if ("promptFilterResults" in chunk) {
|
881
|
-
chunk = {
|
882
|
-
id: chunk.id,
|
883
|
-
created: chunk.created.getDate(),
|
884
|
-
object: chunk.object,
|
885
|
-
// not exposed by Azure API
|
886
|
-
model: chunk.model,
|
887
|
-
// not exposed by Azure API
|
888
|
-
choices: chunk.choices.map((choice) => {
|
889
|
-
var _a, _b, _c, _d, _e, _f, _g;
|
890
|
-
return {
|
891
|
-
delta: {
|
892
|
-
content: (_a = choice.delta) == null ? void 0 : _a.content,
|
893
|
-
function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
|
894
|
-
role: (_c = choice.delta) == null ? void 0 : _c.role,
|
895
|
-
tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
|
896
|
-
index,
|
897
|
-
id: toolCall.id,
|
898
|
-
function: toolCall.function,
|
899
|
-
type: toolCall.type
|
900
|
-
})) : void 0
|
901
|
-
},
|
902
|
-
finish_reason: choice.finishReason,
|
903
|
-
index: choice.index
|
904
|
-
};
|
905
|
-
})
|
906
|
-
};
|
907
|
-
}
|
908
|
-
const text = extract(chunk);
|
909
|
-
if (text)
|
910
|
-
yield text;
|
911
|
-
}
|
912
|
-
}
|
913
|
-
function chunkToText() {
|
914
|
-
const trimStartOfStream = trimStartOfStreamHelper();
|
915
|
-
let isFunctionStreamingIn;
|
916
|
-
return (json) => {
|
917
|
-
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
|
918
|
-
if (isChatCompletionChunk(json)) {
|
919
|
-
const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
|
920
|
-
if ((_b = delta.function_call) == null ? void 0 : _b.name) {
|
921
|
-
isFunctionStreamingIn = true;
|
922
|
-
return {
|
923
|
-
isText: false,
|
924
|
-
content: `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`
|
925
|
-
};
|
926
|
-
} else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
|
927
|
-
isFunctionStreamingIn = true;
|
928
|
-
const toolCall = delta.tool_calls[0];
|
929
|
-
if (toolCall.index === 0) {
|
930
|
-
return {
|
931
|
-
isText: false,
|
932
|
-
content: `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`
|
933
|
-
};
|
934
|
-
} else {
|
935
|
-
return {
|
936
|
-
isText: false,
|
937
|
-
content: `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`
|
938
|
-
};
|
939
|
-
}
|
940
|
-
} else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
|
941
|
-
return {
|
942
|
-
isText: false,
|
943
|
-
content: cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments)
|
944
|
-
};
|
945
|
-
} else if ((_l = (_k = (_j = delta.tool_calls) == null ? void 0 : _j[0]) == null ? void 0 : _k.function) == null ? void 0 : _l.arguments) {
|
946
|
-
return {
|
947
|
-
isText: false,
|
948
|
-
content: cleanupArguments((_o = (_n = (_m = delta.tool_calls) == null ? void 0 : _m[0]) == null ? void 0 : _n.function) == null ? void 0 : _o.arguments)
|
949
|
-
};
|
950
|
-
} else if (isFunctionStreamingIn && (((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "function_call" || ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "stop")) {
|
951
|
-
isFunctionStreamingIn = false;
|
952
|
-
return {
|
953
|
-
isText: false,
|
954
|
-
content: '"}}'
|
955
|
-
};
|
956
|
-
} else if (isFunctionStreamingIn && ((_r = json.choices[0]) == null ? void 0 : _r.finish_reason) === "tool_calls") {
|
957
|
-
isFunctionStreamingIn = false;
|
958
|
-
return {
|
959
|
-
isText: false,
|
960
|
-
content: '"}}]}'
|
961
|
-
};
|
962
|
-
}
|
963
|
-
}
|
964
|
-
const text = trimStartOfStream(
|
965
|
-
isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
|
966
|
-
);
|
967
|
-
return text;
|
968
|
-
};
|
969
|
-
function cleanupArguments(argumentChunk) {
|
970
|
-
let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
|
971
|
-
return `${escapedPartialJson}`;
|
972
|
-
}
|
973
|
-
}
|
974
|
-
var __internal__OpenAIFnMessagesSymbol = Symbol(
|
975
|
-
"internal_openai_fn_messages"
|
976
|
-
);
|
977
|
-
function isChatCompletionChunk(data) {
|
978
|
-
return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
|
979
|
-
}
|
980
|
-
function isCompletion(data) {
|
981
|
-
return "choices" in data && data.choices && data.choices[0] && "text" in data.choices[0];
|
982
|
-
}
|
983
|
-
function OpenAIStream(res, callbacks) {
|
984
|
-
const cb = callbacks;
|
985
|
-
let stream;
|
986
|
-
if (Symbol.asyncIterator in res) {
|
987
|
-
stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
|
988
|
-
createCallbacksTransformer(
|
989
|
-
(cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
|
990
|
-
...cb,
|
991
|
-
onFinal: void 0
|
992
|
-
} : {
|
993
|
-
...cb
|
994
|
-
}
|
995
|
-
)
|
996
|
-
);
|
997
|
-
} else {
|
998
|
-
stream = AIStream(
|
999
|
-
res,
|
1000
|
-
parseOpenAIStream(),
|
1001
|
-
(cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
|
1002
|
-
...cb,
|
1003
|
-
onFinal: void 0
|
1004
|
-
} : {
|
1005
|
-
...cb
|
1006
|
-
}
|
1007
|
-
);
|
1008
|
-
}
|
1009
|
-
if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
|
1010
|
-
const functionCallTransformer = createFunctionCallTransformer(cb);
|
1011
|
-
return stream.pipeThrough(functionCallTransformer);
|
1012
|
-
} else {
|
1013
|
-
return stream.pipeThrough(createStreamDataTransformer());
|
1014
|
-
}
|
1015
|
-
}
|
1016
|
-
function createFunctionCallTransformer(callbacks) {
|
1017
|
-
const textEncoder = new TextEncoder();
|
1018
|
-
let isFirstChunk = true;
|
1019
|
-
let aggregatedResponse = "";
|
1020
|
-
let aggregatedFinalCompletionResponse = "";
|
1021
|
-
let isFunctionStreamingIn = false;
|
1022
|
-
let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
|
1023
|
-
const decode = createChunkDecoder();
|
1024
|
-
return new TransformStream({
|
1025
|
-
async transform(chunk, controller) {
|
1026
|
-
const message = decode(chunk);
|
1027
|
-
aggregatedFinalCompletionResponse += message;
|
1028
|
-
const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
|
1029
|
-
if (shouldHandleAsFunction) {
|
1030
|
-
isFunctionStreamingIn = true;
|
1031
|
-
aggregatedResponse += message;
|
1032
|
-
isFirstChunk = false;
|
1033
|
-
return;
|
1034
|
-
}
|
1035
|
-
if (!isFunctionStreamingIn) {
|
1036
|
-
controller.enqueue(
|
1037
|
-
textEncoder.encode(formatStreamPart2("text", message))
|
1038
|
-
);
|
1039
|
-
return;
|
1040
|
-
} else {
|
1041
|
-
aggregatedResponse += message;
|
1042
|
-
}
|
1043
|
-
},
|
1044
|
-
async flush(controller) {
|
1045
|
-
try {
|
1046
|
-
if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
|
1047
|
-
isFunctionStreamingIn = false;
|
1048
|
-
const payload = JSON.parse(aggregatedResponse);
|
1049
|
-
let newFunctionCallMessages = [
|
1050
|
-
...functionCallMessages
|
1051
|
-
];
|
1052
|
-
let functionResponse = void 0;
|
1053
|
-
if (callbacks.experimental_onFunctionCall) {
|
1054
|
-
if (payload.function_call === void 0) {
|
1055
|
-
console.warn(
|
1056
|
-
"experimental_onFunctionCall should not be defined when using tools"
|
1057
|
-
);
|
1058
|
-
}
|
1059
|
-
const argumentsPayload = JSON.parse(
|
1060
|
-
payload.function_call.arguments
|
1061
|
-
);
|
1062
|
-
functionResponse = await callbacks.experimental_onFunctionCall(
|
1063
|
-
{
|
1064
|
-
name: payload.function_call.name,
|
1065
|
-
arguments: argumentsPayload
|
1066
|
-
},
|
1067
|
-
(result) => {
|
1068
|
-
newFunctionCallMessages = [
|
1069
|
-
...functionCallMessages,
|
1070
|
-
{
|
1071
|
-
role: "assistant",
|
1072
|
-
content: "",
|
1073
|
-
function_call: payload.function_call
|
1074
|
-
},
|
1075
|
-
{
|
1076
|
-
role: "function",
|
1077
|
-
name: payload.function_call.name,
|
1078
|
-
content: JSON.stringify(result)
|
1079
|
-
}
|
1080
|
-
];
|
1081
|
-
return newFunctionCallMessages;
|
1082
|
-
}
|
1083
|
-
);
|
1084
|
-
}
|
1085
|
-
if (callbacks.experimental_onToolCall) {
|
1086
|
-
const toolCalls = {
|
1087
|
-
tools: []
|
1088
|
-
};
|
1089
|
-
for (const tool of payload.tool_calls) {
|
1090
|
-
toolCalls.tools.push({
|
1091
|
-
id: tool.id,
|
1092
|
-
type: "function",
|
1093
|
-
func: {
|
1094
|
-
name: tool.function.name,
|
1095
|
-
arguments: JSON.parse(tool.function.arguments)
|
1096
|
-
}
|
1097
|
-
});
|
1098
|
-
}
|
1099
|
-
let responseIndex = 0;
|
1100
|
-
try {
|
1101
|
-
functionResponse = await callbacks.experimental_onToolCall(
|
1102
|
-
toolCalls,
|
1103
|
-
(result) => {
|
1104
|
-
if (result) {
|
1105
|
-
const { tool_call_id, function_name, tool_call_result } = result;
|
1106
|
-
newFunctionCallMessages = [
|
1107
|
-
...newFunctionCallMessages,
|
1108
|
-
// Only append the assistant message if it's the first response
|
1109
|
-
...responseIndex === 0 ? [
|
1110
|
-
{
|
1111
|
-
role: "assistant",
|
1112
|
-
content: "",
|
1113
|
-
tool_calls: payload.tool_calls.map(
|
1114
|
-
(tc) => ({
|
1115
|
-
id: tc.id,
|
1116
|
-
type: "function",
|
1117
|
-
function: {
|
1118
|
-
name: tc.function.name,
|
1119
|
-
// we send the arguments an object to the user, but as the API expects a string, we need to stringify it
|
1120
|
-
arguments: JSON.stringify(
|
1121
|
-
tc.function.arguments
|
1122
|
-
)
|
1123
|
-
}
|
1124
|
-
})
|
1125
|
-
)
|
1126
|
-
}
|
1127
|
-
] : [],
|
1128
|
-
// Append the function call result message
|
1129
|
-
{
|
1130
|
-
role: "tool",
|
1131
|
-
tool_call_id,
|
1132
|
-
name: function_name,
|
1133
|
-
content: JSON.stringify(tool_call_result)
|
1134
|
-
}
|
1135
|
-
];
|
1136
|
-
responseIndex++;
|
1137
|
-
}
|
1138
|
-
return newFunctionCallMessages;
|
1139
|
-
}
|
1140
|
-
);
|
1141
|
-
} catch (e) {
|
1142
|
-
console.error("Error calling experimental_onToolCall:", e);
|
1143
|
-
}
|
1144
|
-
}
|
1145
|
-
if (!functionResponse) {
|
1146
|
-
controller.enqueue(
|
1147
|
-
textEncoder.encode(
|
1148
|
-
formatStreamPart2(
|
1149
|
-
payload.function_call ? "function_call" : "tool_calls",
|
1150
|
-
// parse to prevent double-encoding:
|
1151
|
-
JSON.parse(aggregatedResponse)
|
1152
|
-
)
|
1153
|
-
)
|
1154
|
-
);
|
1155
|
-
return;
|
1156
|
-
} else if (typeof functionResponse === "string") {
|
1157
|
-
controller.enqueue(
|
1158
|
-
textEncoder.encode(formatStreamPart2("text", functionResponse))
|
1159
|
-
);
|
1160
|
-
aggregatedFinalCompletionResponse = functionResponse;
|
1161
|
-
return;
|
1162
|
-
}
|
1163
|
-
const filteredCallbacks = {
|
1164
|
-
...callbacks,
|
1165
|
-
onStart: void 0
|
1166
|
-
};
|
1167
|
-
callbacks.onFinal = void 0;
|
1168
|
-
const openAIStream = OpenAIStream(functionResponse, {
|
1169
|
-
...filteredCallbacks,
|
1170
|
-
[__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
|
1171
|
-
});
|
1172
|
-
const reader = openAIStream.getReader();
|
1173
|
-
while (true) {
|
1174
|
-
const { done, value } = await reader.read();
|
1175
|
-
if (done) {
|
1176
|
-
break;
|
1177
|
-
}
|
1178
|
-
controller.enqueue(value);
|
1179
|
-
}
|
1180
|
-
}
|
1181
|
-
} finally {
|
1182
|
-
if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
|
1183
|
-
await callbacks.onFinal(aggregatedFinalCompletionResponse);
|
1184
|
-
}
|
1087
|
+
promptTokens: usage.promptTokens,
|
1088
|
+
completionTokens: usage.completionTokens,
|
1089
|
+
totalTokens: usage.promptTokens + usage.completionTokens
|
1090
|
+
};
|
1091
|
+
}
|
1092
|
+
|
1093
|
+
// core/util/schema.ts
|
1094
|
+
import { validatorSymbol } from "@ai-sdk/provider-utils";
|
1095
|
+
import zodToJsonSchema from "zod-to-json-schema";
|
1096
|
+
var schemaSymbol = Symbol.for("vercel.ai.schema");
|
1097
|
+
function jsonSchema(jsonSchema2, {
|
1098
|
+
validate
|
1099
|
+
} = {}) {
|
1100
|
+
return {
|
1101
|
+
[schemaSymbol]: true,
|
1102
|
+
_type: void 0,
|
1103
|
+
+// should never be used directly
+[validatorSymbol]: true,
+jsonSchema: jsonSchema2,
+validate
+};
+}
+function isSchema(value) {
+return typeof value === "object" && value !== null && schemaSymbol in value && value[schemaSymbol] === true && "jsonSchema" in value && "validate" in value;
+}
+function asSchema(schema) {
+return isSchema(schema) ? schema : zodSchema(schema);
+}
+function zodSchema(zodSchema2) {
+return jsonSchema(
+// we assume that zodToJsonSchema will return a valid JSONSchema7:
+zodToJsonSchema(zodSchema2),
+{
+validate: (value) => {
+const result = zodSchema2.safeParse(value);
+return result.success ? { success: true, value: result.data } : { success: false, error: result.error };
 }
 }
-
+);
 }

-//
-
-
+// core/util/is-non-empty-object.ts
+function isNonEmptyObject(object) {
+return object != null && Object.keys(object).length > 0;
+}

-//
-function
-
-
-
-
-
-
-
+// core/prompt/prepare-tools-and-tool-choice.ts
+function prepareToolsAndToolChoice({
+tools,
+toolChoice
+}) {
+if (!isNonEmptyObject(tools)) {
+return {
+tools: void 0,
+toolChoice: void 0
+};
 }
-
-
-
-
-
-
-
-
-
-
-
-
+return {
+tools: Object.entries(tools).map(([name8, tool]) => ({
+type: "function",
+name: name8,
+description: tool.description,
+parameters: asSchema(tool.parameters).jsonSchema
+})),
+toolChoice: toolChoice == null ? { type: "auto" } : typeof toolChoice === "string" ? { type: toolChoice } : { type: "tool", toolName: toolChoice.toolName }
+};
+}
+
+// errors/invalid-tool-arguments-error.ts
+import { AISDKError as AISDKError6, getErrorMessage as getErrorMessage3 } from "@ai-sdk/provider";
+var name6 = "AI_InvalidToolArgumentsError";
+var marker6 = `vercel.ai.error.${name6}`;
+var symbol6 = Symbol.for(marker6);
+var _a6;
+var InvalidToolArgumentsError = class extends AISDKError6 {
+constructor({
+toolArgs,
+toolName,
+cause,
+message = `Invalid arguments for tool ${toolName}: ${getErrorMessage3(
+cause
+)}`
+}) {
+super({ name: name6, message, cause });
+this[_a6] = true;
+this.toolArgs = toolArgs;
+this.toolName = toolName;
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+static isInstance(error) {
+return AISDKError6.hasMarker(error, marker6);
+}
+/**
+* @deprecated use `isInstance` instead
+*/
+static isInvalidToolArgumentsError(error) {
+return error instanceof Error && error.name === name6 && typeof error.toolName === "string" && typeof error.toolArgs === "string";
+}
+/**
+* @deprecated Do not use this method. It will be removed in the next major version.
+*/
+toJSON() {
+return {
+name: this.name,
+message: this.message,
+cause: this.cause,
+stack: this.stack,
+toolName: this.toolName,
+toolArgs: this.toolArgs
+};
+}
+};
+_a6 = symbol6;
+
+// errors/no-such-tool-error.ts
+import { AISDKError as AISDKError7 } from "@ai-sdk/provider";
+var name7 = "AI_NoSuchToolError";
+var marker7 = `vercel.ai.error.${name7}`;
+var symbol7 = Symbol.for(marker7);
+var _a7;
+var NoSuchToolError = class extends AISDKError7 {
+constructor({
+toolName,
+availableTools = void 0,
+message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
+}) {
+super({ name: name7, message });
+this[_a7] = true;
+this.toolName = toolName;
+this.availableTools = availableTools;
+}
+static isInstance(error) {
+return AISDKError7.hasMarker(error, marker7);
+}
+/**
+* @deprecated use `isInstance` instead
+*/
+static isNoSuchToolError(error) {
+return error instanceof Error && error.name === name7 && "toolName" in error && error.toolName != void 0 && typeof error.name === "string";
+}
+/**
+* @deprecated Do not use this method. It will be removed in the next major version.
+*/
+toJSON() {
+return {
+name: this.name,
+message: this.message,
+stack: this.stack,
+toolName: this.toolName,
+availableTools: this.availableTools
+};
+}
+};
+_a7 = symbol7;
+
+// streams/ai-stream.ts
+import {
+createParser
+} from "eventsource-parser";
+function createEventStreamTransformer(customParser) {
+const textDecoder = new TextDecoder();
+let eventSourceParser;
+return new TransformStream({
+async start(controller) {
+eventSourceParser = createParser(
+(event) => {
+if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
+// @see https://replicate.com/docs/streaming
+event.event === "done") {
+controller.terminate();
+return;
+}
+if ("data" in event) {
+const parsedMessage = customParser ? customParser(event.data, {
+event: event.event
+}) : event.data;
+if (parsedMessage)
+controller.enqueue(parsedMessage);
+}
+}
+);
 },
-
-
-
-
-
-
-
-
-
+transform(chunk) {
+eventSourceParser.feed(textDecoder.decode(chunk));
+}
+});
+}
+function createCallbacksTransformer(cb) {
+const textEncoder = new TextEncoder();
+let aggregatedResponse = "";
+const callbacks = cb || {};
+return new TransformStream({
+async start() {
+if (callbacks.onStart)
+await callbacks.onStart();
 },
-
-
-
-
+async transform(message, controller) {
+const content = typeof message === "string" ? message : message.content;
+controller.enqueue(textEncoder.encode(content));
+aggregatedResponse += content;
+if (callbacks.onToken)
+await callbacks.onToken(content);
+if (callbacks.onText && typeof message === "string") {
+await callbacks.onText(message);
 }
-closed = true;
-reject(error);
-return streamable2;
 },
-
-
-if (
-
+async flush() {
+const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
+if (callbacks.onCompletion) {
+await callbacks.onCompletion(aggregatedResponse);
 }
-
-
-resolve({ value: args[0], done: true });
-return streamable2;
+if (callbacks.onFinal && !isOpenAICallbacks) {
+await callbacks.onFinal(aggregatedResponse);
 }
-resolve({ value: currentValue, done: true });
-return streamable2;
 }
+});
+}
+function isOfTypeOpenAIStreamCallbacks(callbacks) {
+return "experimental_onFunctionCall" in callbacks;
+}
+function trimStartOfStreamHelper() {
+let isStreamStart = true;
+return (text) => {
+if (isStreamStart) {
+text = text.trimStart();
+if (text)
+isStreamStart = false;
+}
+return text;
 };
-return streamable2;
 }
-
-
-
-
-
-
-
-
-
-
-
-while (true) {
-const { value, done } = await reader.read();
-if (done) {
-break;
+function AIStream(response, customParser, callbacks) {
+if (!response.ok) {
+if (response.body) {
+const reader = response.body.getReader();
+return new ReadableStream({
+async start(controller) {
+const { done, value } = await reader.read();
+if (!done) {
+const errorText = new TextDecoder().decode(value);
+controller.error(new Error(`Response error: ${errorText}`));
+}
 }
-
-
-
-
-
+});
+} else {
+return new ReadableStream({
+start(controller) {
+controller.error(new Error("Response error: No response body"));
 }
-
-}
-streamableValue[STREAMABLE_VALUE_INTERNAL_LOCK] = false;
-streamableValue.done();
-} catch (e) {
-streamableValue[STREAMABLE_VALUE_INTERNAL_LOCK] = false;
-streamableValue.error(e);
+});
 }
-}
-
+}
+const responseBodyStream = response.body || createEmptyReadableStream();
+return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
 }
-function
-
-
-
-let currentValue = initialValue;
-let currentError;
-let currentPromise = resolvable.promise;
-let currentPatchValue;
-function assertStream(method) {
-if (closed) {
-throw new Error(method + ": Value stream is already closed.");
+function createEmptyReadableStream() {
+return new ReadableStream({
+start(controller) {
+controller.close();
 }
-
-
-
-
+});
+}
+function readableFromAsyncIterable(iterable) {
+let it = iterable[Symbol.asyncIterator]();
+return new ReadableStream({
+async pull(controller) {
+const { done, value } = await it.next();
+if (done)
+controller.close();
+else
+controller.enqueue(value);
+},
+async cancel(reason) {
+var _a8;
+await ((_a8 = it.return) == null ? void 0 : _a8.call(it, reason));
 }
-}
-
-
-
-
-
-
-
-
-
-
-
+});
+}
+
+// streams/stream-data.ts
+import { formatStreamPart } from "@ai-sdk/ui-utils";
+function createStreamDataTransformer() {
+const encoder = new TextEncoder();
+const decoder = new TextDecoder();
+return new TransformStream({
+transform: async (chunk, controller) => {
+const message = decoder.decode(chunk);
+controller.enqueue(encoder.encode(formatStreamPart("text", message)));
+}
+});
+}
+
+// streams/openai-stream.ts
+import {
+createChunkDecoder,
+formatStreamPart as formatStreamPart2
+} from "@ai-sdk/ui-utils";
+function parseOpenAIStream() {
+const extract = chunkToText();
+return (data) => extract(JSON.parse(data));
+}
+async function* streamable(stream) {
+const extract = chunkToText();
+for await (let chunk of stream) {
+if ("promptFilterResults" in chunk) {
+chunk = {
+id: chunk.id,
+created: chunk.created.getDate(),
+object: chunk.object,
+// not exposed by Azure API
+model: chunk.model,
+// not exposed by Azure API
+choices: chunk.choices.map((choice) => {
+var _a8, _b, _c, _d, _e, _f, _g;
+return {
+delta: {
+content: (_a8 = choice.delta) == null ? void 0 : _a8.content,
+function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
+role: (_c = choice.delta) == null ? void 0 : _c.role,
+tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
+index,
+id: toolCall.id,
+function: toolCall.function,
+type: toolCall.type
+})) : void 0
+},
+finish_reason: choice.finishReason,
+index: choice.index
+};
+})
+};
 }
+const text = extract(chunk);
+if (text)
+yield text;
 }
-
-
-
-
-
-
-
-
-
-
+}
+function chunkToText() {
+const trimStartOfStream = trimStartOfStreamHelper();
+let isFunctionStreamingIn;
+return (json) => {
+var _a8, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
+if (isChatCompletionChunk(json)) {
+const delta = (_a8 = json.choices[0]) == null ? void 0 : _a8.delta;
+if ((_b = delta.function_call) == null ? void 0 : _b.name) {
+isFunctionStreamingIn = true;
+return {
+isText: false,
+content: `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`
+};
+} else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
+isFunctionStreamingIn = true;
+const toolCall = delta.tool_calls[0];
+if (toolCall.index === 0) {
+return {
+isText: false,
+content: `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`
+};
+} else {
+return {
+isText: false,
+content: `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`
+};
+}
+} else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
+return {
+isText: false,
+content: cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments)
+};
+} else if ((_l = (_k = (_j = delta.tool_calls) == null ? void 0 : _j[0]) == null ? void 0 : _k.function) == null ? void 0 : _l.arguments) {
+return {
+isText: false,
+content: cleanupArguments((_o = (_n = (_m = delta.tool_calls) == null ? void 0 : _m[0]) == null ? void 0 : _n.function) == null ? void 0 : _o.arguments)
+};
+} else if (isFunctionStreamingIn && (((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "function_call" || ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "stop")) {
+isFunctionStreamingIn = false;
+return {
+isText: false,
+content: '"}}'
+};
+} else if (isFunctionStreamingIn && ((_r = json.choices[0]) == null ? void 0 : _r.finish_reason) === "tool_calls") {
+isFunctionStreamingIn = false;
+return {
+isText: false,
+content: '"}}]}'
+};
 }
 }
-
-
-
-
-
-
-
+const text = trimStartOfStream(
+isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
+);
+return text;
+};
+function cleanupArguments(argumentChunk) {
+let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
+return `${escapedPartialJson}`;
 }
-
-
-
-
-
-
+}
+var __internal__OpenAIFnMessagesSymbol = Symbol(
+"internal_openai_fn_messages"
+);
+function isChatCompletionChunk(data) {
+return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
+}
+function isCompletion(data) {
+return "choices" in data && data.choices && data.choices[0] && "text" in data.choices[0];
+}
+function OpenAIStream(res, callbacks) {
+const cb = callbacks;
+let stream;
+if (Symbol.asyncIterator in res) {
+stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
+createCallbacksTransformer(
+(cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
+...cb,
+onFinal: void 0
+} : {
+...cb
 }
+)
+);
+} else {
+stream = AIStream(
+res,
+parseOpenAIStream(),
+(cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
+...cb,
+onFinal: void 0
+} : {
+...cb
 }
-
-currentValue = value;
+);
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
+const functionCallTransformer = createFunctionCallTransformer(cb);
+return stream.pipeThrough(functionCallTransformer);
+} else {
+return stream.pipeThrough(createStreamDataTransformer());
+}
+}
+function createFunctionCallTransformer(callbacks) {
+const textEncoder = new TextEncoder();
+let isFirstChunk = true;
+let aggregatedResponse = "";
+let aggregatedFinalCompletionResponse = "";
+let isFunctionStreamingIn = false;
+let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
+const decode = createChunkDecoder();
+return new TransformStream({
+async transform(chunk, controller) {
+const message = decode(chunk);
+aggregatedFinalCompletionResponse += message;
+const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
+if (shouldHandleAsFunction) {
+isFunctionStreamingIn = true;
+aggregatedResponse += message;
+isFirstChunk = false;
+return;
 }
-if (
-
-
+if (!isFunctionStreamingIn) {
+controller.enqueue(
+textEncoder.encode(formatStreamPart2("text", message))
 );
-
-const resolvePrevious = resolvable.resolve;
-resolvable = createResolvablePromise();
-if (typeof currentValue === "string") {
-currentPatchValue = [0, value];
-currentValue = currentValue + value;
+return;
 } else {
-
-currentValue = value;
-}
-currentPromise = resolvable.promise;
-resolvePrevious(createWrapped());
-warnUnclosedStream();
-return streamable2;
-},
-error(error) {
-assertStream(".error()");
-if (warningTimeout) {
-clearTimeout(warningTimeout);
+aggregatedResponse += message;
 }
-closed = true;
-currentError = error;
-currentPromise = void 0;
-resolvable.resolve({ error });
-return streamable2;
 },
-
-
-
-
-
-
-
-
-
-
-
+async flush(controller) {
+try {
+if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
+isFunctionStreamingIn = false;
+const payload = JSON.parse(aggregatedResponse);
+let newFunctionCallMessages = [
+...functionCallMessages
+];
+let functionResponse = void 0;
+if (callbacks.experimental_onFunctionCall) {
+if (payload.function_call === void 0) {
+console.warn(
+"experimental_onFunctionCall should not be defined when using tools"
+);
+}
+const argumentsPayload = JSON.parse(
+payload.function_call.arguments
+);
+functionResponse = await callbacks.experimental_onFunctionCall(
+{
+name: payload.function_call.name,
+arguments: argumentsPayload
+},
+(result) => {
+newFunctionCallMessages = [
+...functionCallMessages,
+{
+role: "assistant",
+content: "",
+function_call: payload.function_call
+},
+{
+role: "function",
+name: payload.function_call.name,
+content: JSON.stringify(result)
+}
+];
+return newFunctionCallMessages;
+}
+);
+}
+if (callbacks.experimental_onToolCall) {
+const toolCalls = {
+tools: []
+};
+for (const tool of payload.tool_calls) {
+toolCalls.tools.push({
+id: tool.id,
+type: "function",
+func: {
+name: tool.function.name,
+arguments: JSON.parse(tool.function.arguments)
+}
+});
+}
+let responseIndex = 0;
+try {
+functionResponse = await callbacks.experimental_onToolCall(
+toolCalls,
+(result) => {
+if (result) {
+const { tool_call_id, function_name, tool_call_result } = result;
+newFunctionCallMessages = [
+...newFunctionCallMessages,
+// Only append the assistant message if it's the first response
+...responseIndex === 0 ? [
+{
+role: "assistant",
+content: "",
+tool_calls: payload.tool_calls.map(
+(tc) => ({
+id: tc.id,
+type: "function",
+function: {
+name: tc.function.name,
+// we send the arguments an object to the user, but as the API expects a string, we need to stringify it
+arguments: JSON.stringify(
+tc.function.arguments
+)
+}
+})
+)
+}
+] : [],
+// Append the function call result message
+{
+role: "tool",
+tool_call_id,
+name: function_name,
+content: JSON.stringify(tool_call_result)
+}
+];
+responseIndex++;
+}
+return newFunctionCallMessages;
+}
+);
+} catch (e) {
+console.error("Error calling experimental_onToolCall:", e);
+}
+}
+if (!functionResponse) {
+controller.enqueue(
+textEncoder.encode(
+formatStreamPart2(
+payload.function_call ? "function_call" : "tool_calls",
+// parse to prevent double-encoding:
+JSON.parse(aggregatedResponse)
+)
+)
+);
+return;
+} else if (typeof functionResponse === "string") {
+controller.enqueue(
+textEncoder.encode(formatStreamPart2("text", functionResponse))
+);
+aggregatedFinalCompletionResponse = functionResponse;
+return;
+}
+const filteredCallbacks = {
+...callbacks,
+onStart: void 0
+};
+callbacks.onFinal = void 0;
+const openAIStream = OpenAIStream(functionResponse, {
+...filteredCallbacks,
+[__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
+});
+const reader = openAIStream.getReader();
+while (true) {
+const { done, value } = await reader.read();
+if (done) {
+break;
+}
+controller.enqueue(value);
+}
+}
+} finally {
+if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
+await callbacks.onFinal(aggregatedFinalCompletionResponse);
+}
 }
-resolvable.resolve({});
-return streamable2;
 }
-};
-
+});
+}
+
+// util/consume-stream.ts
+async function consumeStream(stream) {
+const reader = stream.getReader();
+while (true) {
+const { done } = await reader.read();
+if (done)
+break;
+}
 }
+
+// rsc/render.ts
 function render(options) {
 const ui = createStreamableUI(options.initial);
 const text = options.text ? options.text : ({ content }) => content;
 const functions = options.functions ? Object.entries(options.functions).map(
-([
+([name8, { description, parameters }]) => {
 return {
-name,
+name: name8,
 description,
 parameters: zodToJsonSchema2(parameters)
 };
 }
 ) : void 0;
 const tools = options.tools ? Object.entries(options.tools).map(
-([
+([name8, { description, parameters }]) => {
 return {
 type: "function",
 function: {
-name,
+name: name8,
 description,
 parameters: zodToJsonSchema2(parameters)
 }
@@ -1520,23 +1795,23 @@ function render(options) {
 {
 ...functions ? {
 async experimental_onFunctionCall(functionCallPayload) {
-var
+var _a8, _b;
 hasFunction = true;
 handleRender(
 functionCallPayload.arguments,
-(_b = (
+(_b = (_a8 = options.functions) == null ? void 0 : _a8[functionCallPayload.name]) == null ? void 0 : _b.render,
 ui
 );
 }
 } : {},
 ...tools ? {
 async experimental_onToolCall(toolCallPayload) {
-var
+var _a8, _b;
 hasFunction = true;
 for (const tool of toolCallPayload.tools) {
 handleRender(
 tool.func.arguments,
-(_b = (
+(_b = (_a8 = options.tools) == null ? void 0 : _a8[tool.func.name]) == null ? void 0 : _b.render,
 ui
 );
 }
@@ -1564,11 +1839,19 @@ function render(options) {
 }

 // rsc/stream-ui/stream-ui.tsx
-import {
-InvalidToolArgumentsError,
-NoSuchToolError
-} from "@ai-sdk/provider";
 import { safeParseJSON } from "@ai-sdk/provider-utils";
+
+// util/is-async-generator.ts
+function isAsyncGenerator(value) {
+return value != null && typeof value === "object" && Symbol.asyncIterator in value;
+}
+
+// util/is-generator.ts
+function isGenerator(value) {
+return value != null && typeof value === "object" && Symbol.iterator in value;
+}
+
+// rsc/stream-ui/stream-ui.tsx
 var defaultTextRenderer = ({ content }) => content;
 async function streamUI({
 model,
@@ -1601,10 +1884,10 @@ async function streamUI({
 );
 }
 if (tools) {
-for (const [
+for (const [name8, tool] of Object.entries(tools)) {
 if ("render" in tool) {
 throw new Error(
-"Tool definition in `streamUI` should not have `render` property. Use `generate` instead. Found in tool: " +
+"Tool definition in `streamUI` should not have `render` property. Use `generate` instead. Found in tool: " + name8
 );
 }
 }
@@ -1612,58 +1895,38 @@ async function streamUI({
 const ui = createStreamableUI(initial);
 const textRender = text || defaultTextRenderer;
 let finished;
-async function
+async function render2({
+args,
+renderer,
+streamableUI,
+isLastCall = false
+}) {
 if (!renderer)
 return;
-const
-
-
-
-finished = resolvable.promise;
-}
-const value = renderer(...args);
-if (value instanceof Promise || value && typeof value === "object" && "then" in value && typeof value.then === "function") {
-const node = await value;
-if (lastCall) {
-res.done(node);
-} else {
-res.update(node);
-}
-resolvable.resolve(void 0);
-} else if (value && typeof value === "object" && Symbol.asyncIterator in value) {
-const it = value;
-while (true) {
-const { done, value: value2 } = await it.next();
-if (lastCall && done) {
-res.done(value2);
-} else {
-res.update(value2);
-}
-if (done)
-break;
-}
-resolvable.resolve(void 0);
-} else if (value && typeof value === "object" && Symbol.iterator in value) {
-const it = value;
+const renderFinished = createResolvablePromise();
+finished = finished ? finished.then(() => renderFinished.promise) : renderFinished.promise;
+const rendererResult = renderer(...args);
+if (isAsyncGenerator(rendererResult) || isGenerator(rendererResult)) {
 while (true) {
-const { done, value
-
-
+const { done, value } = await rendererResult.next();
+const node = await value;
+if (isLastCall && done) {
+streamableUI.done(node);
 } else {
-
+streamableUI.update(node);
 }
 if (done)
 break;
 }
-resolvable.resolve(void 0);
 } else {
-
-
+const node = await rendererResult;
+if (isLastCall) {
+streamableUI.done(node);
 } else {
-
+streamableUI.update(node);
 }
-resolvable.resolve(void 0);
 }
+renderFinished.resolve(void 0);
 }
 const retry = retryWithExponentialBackoff({ maxRetries });
 const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
@@ -1696,11 +1959,11 @@ async function streamUI({
 switch (value.type) {
 case "text-delta": {
 content += value.textDelta;
-
-
-
-ui
-);
+render2({
+renderer: textRender,
+args: [{ content, done: false, delta: value.textDelta }],
+streamableUI: ui
+});
 break;
 }
 case "tool-call-delta": {
@@ -1731,18 +1994,18 @@ async function streamUI({
 cause: parseResult.error
 });
 }
-
-
+render2({
+renderer: tool.generate,
+args: [
 parseResult.value,
 {
 toolName,
 toolCallId: value.toolCallId
 }
 ],
-
-
-
-);
+streamableUI: ui,
+isLastCall: true
+});
 break;
 }
 case "error": {
@@ -1759,12 +2022,15 @@ async function streamUI({
 }
 }
 }
-if (hasToolCall) {
-
-
-
-
+if (!hasToolCall) {
+render2({
+renderer: textRender,
+args: [{ content, done: true }],
+streamableUI: ui,
+isLastCall: true
+});
 }
+await finished;
 } catch (error) {
 ui.error(error);
 }
@@ -1808,20 +2074,20 @@ function createAI({
 onGetUIState
 }) {
 const wrappedActions = {};
-for (const
-wrappedActions[
+for (const name8 in actions) {
+wrappedActions[name8] = wrapAction(actions[name8], {
 onSetAIState
 });
 }
 const wrappedSyncUIState = onGetUIState ? wrapAction(onGetUIState, {}) : void 0;
 const AI = async (props) => {
-var
+var _a8, _b;
 if ("useState" in React2) {
 throw new Error(
 "This component can only be used inside Server Components."
 );
 }
-let uiState = (
+let uiState = (_a8 = props.initialUIState) != null ? _a8 : initialUIState;
 let aiState = (_b = props.initialAIState) != null ? _b : initialAIState;
 let aiStateDelta = void 0;
 if (wrappedSyncUIState) {