workers-ai-provider 0.0.10 → 0.0.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +16 -5
- package/dist/index.js +32 -313
- package/dist/index.js.map +1 -1
- package/package.json +6 -5
- package/src/index.ts +10 -9
- package/src/workersai-chat-language-model.ts +30 -31
- package/src/workersai-chat-settings.ts +4 -0
- package/src/workersai-error.ts +8 -7
- package/src/workersai-models.ts +6 -0
package/dist/index.d.ts
CHANGED
@@ -7,8 +7,19 @@ interface WorkersAIChatSettings {
      Defaults to `false`.
      */
     safePrompt?: boolean;
+    /**
+     * Optionally set Cloudflare AI Gateway options.
+     */
+    gateway?: GatewayOptions;
 }
 
+/**
+ * The names of the BaseAiTextGeneration models.
+ */
+type TextGenerationModels = {
+    [K in keyof AiModels]: AiModels[K] extends BaseAiTextGeneration ? K : never;
+}[keyof AiModels];
+
 type WorkersAIChatConfig = {
     provider: string;
     binding: Ai;
@@ -16,10 +27,10 @@ type WorkersAIChatConfig = {
 declare class WorkersAIChatLanguageModel implements LanguageModelV1 {
     readonly specificationVersion = "v1";
     readonly defaultObjectGenerationMode = "json";
-    readonly modelId: BaseAiTextGenerationModels;
+    readonly modelId: TextGenerationModels;
     readonly settings: WorkersAIChatSettings;
     private readonly config;
-    constructor(modelId: BaseAiTextGenerationModels, settings: WorkersAIChatSettings, config: WorkersAIChatConfig);
+    constructor(modelId: TextGenerationModels, settings: WorkersAIChatSettings, config: WorkersAIChatConfig);
     get provider(): string;
     private getArgs;
     doGenerate(options: Parameters<LanguageModelV1["doGenerate"]>[0]): Promise<Awaited<ReturnType<LanguageModelV1["doGenerate"]>>>;
@@ -27,18 +38,18 @@ declare class WorkersAIChatLanguageModel implements LanguageModelV1 {
 }
 
 interface WorkersAI {
-    (modelId: BaseAiTextGenerationModels, settings?: WorkersAIChatSettings): WorkersAIChatLanguageModel;
+    (modelId: TextGenerationModels, settings?: WorkersAIChatSettings): WorkersAIChatLanguageModel;
     /**
      * Creates a model for text generation.
      **/
-    chat(modelId: BaseAiTextGenerationModels, settings?: WorkersAIChatSettings): WorkersAIChatLanguageModel;
+    chat(modelId: TextGenerationModels, settings?: WorkersAIChatSettings): WorkersAIChatLanguageModel;
 }
 interface WorkersAISettings {
     /**
      * Provide an `env.AI` binding to use for the AI inference.
      * You can set up an AI bindings in your Workers project
      * by adding the following this to `wrangler.toml`:
-
+
     ```toml
 [ai]
 binding = "AI"
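
Taken together, the typings now narrow model ids to the `BaseAiTextGeneration` models via `TextGenerationModels` and expose an optional `gateway` setting. A minimal usage sketch, assuming a Workers project with the `[ai]` binding shown above; the model id and gateway id are illustrative placeholders, and the exact `GatewayOptions` fields (such as `id`) come from the Workers types rather than from this diff:

```ts
import { createWorkersAI } from "workers-ai-provider";
import { generateText } from "ai";

export default {
  async fetch(_request: Request, env: { AI: Ai }): Promise<Response> {
    // env.AI is the [ai] binding = "AI" declared in wrangler.toml.
    const workersai = createWorkersAI({ binding: env.AI });

    const { text } = await generateText({
      // The model id is now checked against TextGenerationModels instead of a free-form string.
      model: workersai("@cf/meta/llama-2-7b-chat-int8", {
        // New in this release: optionally route the request through Cloudflare AI Gateway.
        gateway: { id: "my-gateway" }, // hypothetical gateway name; field shape per GatewayOptions
      }),
      prompt: "Write a haiku about Cloudflare Workers.",
    });

    return new Response(text);
  },
};
```

The practical effect of the `TextGenerationModels` mapped type is that the `modelId` argument now autocompletes and fails type-checking for models that are not `BaseAiTextGeneration` models.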
package/dist/index.js
CHANGED
@@ -1,140 +1,12 @@
-var __create = Object.create;
 var __defProp = Object.defineProperty;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
-var __getOwnPropNames = Object.getOwnPropertyNames;
-var __getProtoOf = Object.getPrototypeOf;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
-var __commonJS = (cb, mod) => function __require() {
-  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
-};
-var __copyProps = (to, from, except, desc) => {
-  if (from && typeof from === "object" || typeof from === "function") {
-    for (let key of __getOwnPropNames(from))
-      if (!__hasOwnProp.call(to, key) && key !== except)
-        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-  }
-  return to;
-};
-var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
-  // If the importer is in node compatibility mode or this is not an ESM
-  // file that has been converted to a CommonJS file using a Babel-
-  // compatible transform (i.e. "__esModule" has not been set), then set
-  // "default" to the CommonJS "module.exports" for node compatibility.
-  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
-  mod
-));
 var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
 
-// ../../node_modules/secure-json-parse/index.js
-var require_secure_json_parse = __commonJS({
-  "../../node_modules/secure-json-parse/index.js"(exports, module) {
-    "use strict";
-    var hasBuffer = typeof Buffer !== "undefined";
-    var suspectProtoRx = /"(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])"\s*:/;
-    var suspectConstructorRx = /"(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)"\s*:/;
-    function _parse(text, reviver, options) {
-      if (options == null) {
-        if (reviver !== null && typeof reviver === "object") {
-          options = reviver;
-          reviver = void 0;
-        }
-      }
-      if (hasBuffer && Buffer.isBuffer(text)) {
-        text = text.toString();
-      }
-      if (text && text.charCodeAt(0) === 65279) {
-        text = text.slice(1);
-      }
-      const obj = JSON.parse(text, reviver);
-      if (obj === null || typeof obj !== "object") {
-        return obj;
-      }
-      const protoAction = options && options.protoAction || "error";
-      const constructorAction = options && options.constructorAction || "error";
-      if (protoAction === "ignore" && constructorAction === "ignore") {
-        return obj;
-      }
-      if (protoAction !== "ignore" && constructorAction !== "ignore") {
-        if (suspectProtoRx.test(text) === false && suspectConstructorRx.test(text) === false) {
-          return obj;
-        }
-      } else if (protoAction !== "ignore" && constructorAction === "ignore") {
-        if (suspectProtoRx.test(text) === false) {
-          return obj;
-        }
-      } else {
-        if (suspectConstructorRx.test(text) === false) {
-          return obj;
-        }
-      }
-      return filter(obj, { protoAction, constructorAction, safe: options && options.safe });
-    }
-    function filter(obj, { protoAction = "error", constructorAction = "error", safe } = {}) {
-      let next = [obj];
-      while (next.length) {
-        const nodes = next;
-        next = [];
-        for (const node of nodes) {
-          if (protoAction !== "ignore" && Object.prototype.hasOwnProperty.call(node, "__proto__")) {
-            if (safe === true) {
-              return null;
-            } else if (protoAction === "error") {
-              throw new SyntaxError("Object contains forbidden prototype property");
-            }
-            delete node.__proto__;
-          }
-          if (constructorAction !== "ignore" && Object.prototype.hasOwnProperty.call(node, "constructor") && Object.prototype.hasOwnProperty.call(node.constructor, "prototype")) {
-            if (safe === true) {
-              return null;
-            } else if (constructorAction === "error") {
-              throw new SyntaxError("Object contains forbidden prototype property");
-            }
-            delete node.constructor;
-          }
-          for (const key in node) {
-            const value = node[key];
-            if (value && typeof value === "object") {
-              next.push(value);
-            }
-          }
-        }
-      }
-      return obj;
-    }
-    function parse(text, reviver, options) {
-      const stackTraceLimit = Error.stackTraceLimit;
-      Error.stackTraceLimit = 0;
-      try {
-        return _parse(text, reviver, options);
-      } finally {
-        Error.stackTraceLimit = stackTraceLimit;
-      }
-    }
-    function safeParse(text, reviver) {
-      const stackTraceLimit = Error.stackTraceLimit;
-      Error.stackTraceLimit = 0;
-      try {
-        return _parse(text, reviver, { safe: true });
-      } catch (_e) {
-        return null;
-      } finally {
-        Error.stackTraceLimit = stackTraceLimit;
-      }
-    }
-    module.exports = parse;
-    module.exports.default = parse;
-    module.exports.parse = parse;
-    module.exports.safeParse = safeParse;
-    module.exports.scan = filter;
-  }
-});
-
 // src/workersai-chat-language-model.ts
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
-import { z as z2 } from "zod";
+import { z } from "zod";
 
 // src/convert-to-workersai-chat-messages.ts
 import {
@@ -222,173 +94,6 @@ function convertToWorkersAIChatMessages(prompt) {
   return messages;
 }
 
-// ../../node_modules/nanoid/non-secure/index.js
-var customAlphabet = (alphabet, defaultSize = 21) => {
-  return (size = defaultSize) => {
-    let id = "";
-    let i = size;
-    while (i--) {
-      id += alphabet[Math.random() * alphabet.length | 0];
-    }
-    return id;
-  };
-};
-
-// ../../node_modules/@ai-sdk/provider-utils/dist/index.mjs
-var import_secure_json_parse = __toESM(require_secure_json_parse(), 1);
-import { LoadAPIKeyError } from "@ai-sdk/provider";
-import { LoadSettingError } from "@ai-sdk/provider";
-import { JSONParseError, TypeValidationError as TypeValidationError2 } from "@ai-sdk/provider";
-import { TypeValidationError } from "@ai-sdk/provider";
-import { APICallError } from "@ai-sdk/provider";
-import { APICallError as APICallError2, EmptyResponseBodyError } from "@ai-sdk/provider";
-function extractResponseHeaders(response) {
-  const headers = {};
-  response.headers.forEach((value, key) => {
-    headers[key] = value;
-  });
-  return headers;
-}
-var generateId = customAlphabet(
-  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
-  7
-);
-var validatorSymbol = Symbol("vercel.ai.validator");
-function validator(validate) {
-  return { [validatorSymbol]: true, validate };
-}
-function isValidator(value) {
-  return typeof value === "object" && value !== null && validatorSymbol in value && value[validatorSymbol] === true && "validate" in value;
-}
-function zodValidator(zodSchema) {
-  return validator((value) => {
-    const result = zodSchema.safeParse(value);
-    return result.success ? { success: true, value: result.data } : { success: false, error: result.error };
-  });
-}
-function validateTypes({
-  value,
-  schema: inputSchema
-}) {
-  const result = safeValidateTypes({ value, schema: inputSchema });
-  if (!result.success) {
-    throw new TypeValidationError({ value, cause: result.error });
-  }
-  return result.value;
-}
-function safeValidateTypes({
-  value,
-  schema: inputSchema
-}) {
-  const schema = isValidator(inputSchema) ? inputSchema : zodValidator(inputSchema);
-  try {
-    if (schema.validate == null) {
-      return { success: true, value };
-    }
-    const validationResult = schema.validate(value);
-    if (validationResult.success) {
-      return validationResult;
-    }
-    return {
-      success: false,
-      error: new TypeValidationError({
-        value,
-        cause: validationResult.error
-      })
-    };
-  } catch (error) {
-    return {
-      success: false,
-      error: TypeValidationError.isTypeValidationError(error) ? error : new TypeValidationError({ value, cause: error })
-    };
-  }
-}
-function parseJSON({
-  text,
-  schema
-}) {
-  try {
-    const value = import_secure_json_parse.default.parse(text);
-    if (schema == null) {
-      return value;
-    }
-    return validateTypes({ value, schema });
-  } catch (error) {
-    if (JSONParseError.isJSONParseError(error) || TypeValidationError2.isTypeValidationError(error)) {
-      throw error;
-    }
-    throw new JSONParseError({ text, cause: error });
-  }
-}
-var createJsonErrorResponseHandler = ({
-  errorSchema,
-  errorToMessage,
-  isRetryable
-}) => async ({ response, url, requestBodyValues }) => {
-  const responseBody = await response.text();
-  const responseHeaders = extractResponseHeaders(response);
-  if (responseBody.trim() === "") {
-    return {
-      responseHeaders,
-      value: new APICallError2({
-        message: response.statusText,
-        url,
-        requestBodyValues,
-        statusCode: response.status,
-        responseHeaders,
-        responseBody,
-        isRetryable: isRetryable == null ? void 0 : isRetryable(response)
-      })
-    };
-  }
-  try {
-    const parsedError = parseJSON({
-      text: responseBody,
-      schema: errorSchema
-    });
-    return {
-      responseHeaders,
-      value: new APICallError2({
-        message: errorToMessage(parsedError),
-        url,
-        requestBodyValues,
-        statusCode: response.status,
-        responseHeaders,
-        responseBody,
-        data: parsedError,
-        isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
-      })
-    };
-  } catch (parseError) {
-    return {
-      responseHeaders,
-      value: new APICallError2({
-        message: response.statusText,
-        url,
-        requestBodyValues,
-        statusCode: response.status,
-        responseHeaders,
-        responseBody,
-        isRetryable: isRetryable == null ? void 0 : isRetryable(response)
-      })
-    };
-  }
-};
-
-// src/workersai-error.ts
-import { z } from "zod";
-var workersAIErrorDataSchema = z.object({
-  object: z.literal("error"),
-  message: z.string(),
-  type: z.string(),
-  param: z.string().nullable(),
-  code: z.string().nullable()
-});
-var workersAIFailedResponseHandler = createJsonErrorResponseHandler({
-  errorSchema: workersAIErrorDataSchema,
-  errorToMessage: (data) => data.message
-});
-
 // src/workersai-chat-language-model.ts
 import { events } from "fetch-event-stream";
 var WorkersAIChatLanguageModel = class {
@@ -453,7 +158,8 @@ var WorkersAIChatLanguageModel = class {
         return {
           args: {
             ...baseArgs,
-            response_format: { type: "json_object" }
+            response_format: { type: "json_object" },
+            tools: void 0
           },
           warnings
         };
@@ -468,6 +174,8 @@ var WorkersAIChatLanguageModel = class {
           warnings
         };
       }
+      // @ts-expect-error - this is unreachable code
+      // TODO: fixme
       case "object-grammar": {
         throw new UnsupportedFunctionalityError2({
           functionality: "object-grammar mode"
@@ -481,21 +189,27 @@ var WorkersAIChatLanguageModel = class {
   }
   async doGenerate(options) {
     const { args, warnings } = this.getArgs(options);
-    const response = await this.config.binding.run(args.model, {
-      messages: args.messages
-    });
-    if (response instanceof ReadableStream) {
+    const output = await this.config.binding.run(
+      args.model,
+      {
+        messages: args.messages,
+        tools: args.tools
+      },
+      {
+        gateway: this.settings.gateway
+      }
+    );
+    if (output instanceof ReadableStream) {
       throw new Error("This shouldn't happen");
     }
     return {
-      text: response.response,
-      // TODO: tool calls
-      // toolCalls: response.tool_calls?.map((toolCall) => ({
-      //   toolCallType: "function",
-      //   toolCallId: toolCall.name, // TODO: what can the id be?
-      //   toolName: toolCall.name,
-      //   args: JSON.stringify(toolCall.arguments || {}),
-      // })),
+      text: output.response,
+      toolCalls: output.tool_calls?.map((toolCall) => ({
+        toolCallType: "function",
+        toolCallId: toolCall.name,
+        toolName: toolCall.name,
+        args: JSON.stringify(toolCall.arguments || {})
+      })),
       finishReason: "stop",
       // TODO: mapWorkersAIFinishReason(response.finish_reason),
       rawCall: { rawPrompt: args.messages, rawSettings: args },
@@ -512,7 +226,8 @@ var WorkersAIChatLanguageModel = class {
     const decoder = new TextDecoder();
     const response = await this.config.binding.run(args.model, {
       messages: args.messages,
-      stream: true
+      stream: true,
+      tools: args.tools
     });
     if (!(response instanceof ReadableStream)) {
       throw new Error("This shouldn't happen");
@@ -560,10 +275,10 @@ var WorkersAIChatLanguageModel = class {
     };
   }
 };
-var workersAIChatResponseSchema = z2.object({
-  response: z2.string()
+var workersAIChatResponseSchema = z.object({
+  response: z.string()
 });
-var workersAIChatChunkSchema = z2.instanceof(Uint8Array);
+var workersAIChatChunkSchema = z.instanceof(Uint8Array);
 function prepareToolsAndToolChoice(mode) {
   const tools = mode.tools?.length ? mode.tools : void 0;
   if (tools == null) {
@@ -573,7 +288,9 @@ function prepareToolsAndToolChoice(mode) {
     type: "function",
     function: {
       name: tool.name,
+      // @ts-expect-error - description is not a property of tool
       description: tool.description,
+      // @ts-expect-error - parameters is not a property of tool
       parameters: tool.parameters
     }
   }));
@@ -589,6 +306,8 @@ function prepareToolsAndToolChoice(mode) {
       return { tools: mappedTools, tool_choice: type };
     case "required":
       return { tools: mappedTools, tool_choice: "any" };
+    // workersAI does not support tool mode directly,
+    // so we filter the tools and force the tool choice through 'any'
     case "tool":
       return {
         tools: mappedTools.filter(
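
These runtime changes forward `tools` and the `gateway` setting to `binding.run()` and map the Workers AI `tool_calls` array onto the AI SDK's `toolCalls` shape, with `toolCallId` and `toolName` both set to the function name and `args` JSON-encoded. A hedged sketch of what a caller could now do with this; the tool definition and model id are illustrative and not taken from this diff:

```ts
import { createWorkersAI } from "workers-ai-provider";
import { generateText } from "ai";
import { z } from "zod";

export async function askWithTools(env: { AI: Ai }) {
  const workersai = createWorkersAI({ binding: env.AI });

  const result = await generateText({
    // Illustrative model id; any TextGenerationModels entry that supports function calling works here.
    model: workersai.chat("@hf/nousresearch/hermes-2-pro-mistral-7b"),
    tools: {
      // The provider converts this into the { type: "function", function: { name, description, parameters } }
      // entries that prepareToolsAndToolChoice() builds and passes to binding.run() as args.tools.
      getWeather: {
        description: "Get the current weather for a city",
        parameters: z.object({ city: z.string() }),
      },
    },
    prompt: "What is the weather in Paris?",
  });

  // Workers AI tool_calls are surfaced here as AI SDK tool calls.
  return result.toolCalls;
}
```

Because `toolCallId` is simply the tool name, callers that need stable per-call ids still have to generate their own.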
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../../node_modules/secure-json-parse/index.js","../src/workersai-chat-language-model.ts","../src/convert-to-workersai-chat-messages.ts","../../../node_modules/nanoid/non-secure/index.js","../../../node_modules/@ai-sdk/provider-utils/src/combine-headers.ts","../../../node_modules/@ai-sdk/provider-utils/src/convert-async-generator-to-readable-stream.ts","../../../node_modules/@ai-sdk/provider-utils/src/extract-response-headers.ts","../../../node_modules/@ai-sdk/provider-utils/src/generate-id.ts","../../../node_modules/@ai-sdk/provider-utils/src/get-error-message.ts","../../../node_modules/@ai-sdk/provider-utils/src/is-abort-error.ts","../../../node_modules/@ai-sdk/provider-utils/src/load-api-key.ts","../../../node_modules/@ai-sdk/provider-utils/src/load-setting.ts","../../../node_modules/@ai-sdk/provider-utils/src/parse-json.ts","../../../node_modules/@ai-sdk/provider-utils/src/validate-types.ts","../../../node_modules/@ai-sdk/provider-utils/src/validator.ts","../../../node_modules/@ai-sdk/provider-utils/src/post-to-api.ts","../../../node_modules/@ai-sdk/provider-utils/src/remove-undefined-entries.ts","../../../node_modules/@ai-sdk/provider-utils/src/response-handler.ts","../../../node_modules/@ai-sdk/provider-utils/src/uint8-utils.ts","../../../node_modules/@ai-sdk/provider-utils/src/without-trailing-slash.ts","../src/workersai-error.ts","../src/index.ts"],"sourcesContent":[…],"mappings":"…"}
,SAAA,YAAc,UAAU,WAAA;MACvC,CAAC;IACH;EACF,SAAS,YAAY;AACnB,WAAO;MACL;MACA,OAAO,IAAIA,cAAa;QACtB,SAAS,SAAS;QAClB;QACA;QACA,YAAY,SAAS;QACrB;QACA;QACA,aAAa,eAAA,OAAA,SAAA,YAAc,QAAA;MAC7B,CAAC;IACH;EACF;AACF;;;AGjFF,SAAS,SAAS;AAElB,IAAM,2BAA2B,EAAE,OAAO;AAAA,EACxC,QAAQ,EAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,EAAE,OAAO;AAAA,EAClB,MAAM,EAAE,OAAO;AAAA,EACf,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,EAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,iCAAiC,+BAA+B;AAAA,EAC3E,aAAa;AAAA,EACb,gBAAgB,CAAC,SAAS,KAAK;AACjC,CAAC;;;AnBGD,SAAS,cAAc;AAOhB,IAAM,6BAAN,MAA4D;AAAA,EASjE,YACE,SACA,UACA,QACA;AAZF,wBAAS,wBAAuB;AAChC,wBAAS,+BAA8B;AAEvC,wBAAS;AACT,wBAAS;AAET,wBAAiB;AAOf,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,UAAU,+BAA+B,MAAM;AAAA,IACjD;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,GAAG,0BAA0B,IAAI,EAAE;AAAA,UACxD;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,UACzC;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAIA,KAAK,kBAAkB;AACrB,cAAM,IAAIC,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACxD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ,IAAI,KAAK,OAAO;AAAA,MACzD,UAAU,KAAK;AAAA,IACjB,CAAC;AAED,QAAI,oBAAoB,gBAAgB;AACtC,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACzC;AAEA,WAAO;AAAA,MACL,MAAM,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQf,cAAc;AAAA;AAAA,MACd,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD,OAAO;AAAA;AAAA,QAEL,cAAc;AAAA,QACd,kBAAkB;AAAA,MACpB;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,UAAU,IAAI,YAAY;AAEhC,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ,IAAI,KAAK,OAAO;AAAA,MACzD,UAAU,KAAK;AAAA,MACf,QAAQ;AAAA,IACV,CAAC;AAED,QAAI,EAAE,oBAAoB,iBAAiB;AACzC,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACzC;AAEA,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,UAAU,OAAO,YAAY;AACjC,kBAAM,cAAc,QAAQ,OAAO,KAA8B;AACjE,kBAAM,SAAS,OAAO,IAAI,SAAS,WAAW,CAAC;AAC/C,6BAAiB,eAAe,QAAQ;AACtC,kBAAI,CAAC,YAAY,MAAM;AACrB;AAAA,cACF;AACA,kBAAI,YAAY,SAAS,UAAU;AACjC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,OAAO;AAAA,oBACL,cAAc;AAAA,oBACd,kBAAkB;AAAA,kBACpB;AAAA,gBACF,CAAC;AACD;AAAA,cACF;AACA,oBAAM,OAAO,KAAK,MAAM,YAAY,IAAI;AAExC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,KAAK,YAAY;AAAA,cAC9B,CAAC;AAAA,YACH;AACA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,cAAc;AAAA,cACd,OAAO;AAAA,gBACL,cAAc;AAAA,gBACd,kBAAkB;AAAA,cACpB;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD;AAAA,IACF;AAAA,EACF;AACF;AAGA,IAAM,8BAA8BC,GAAE,OAAO;AAAA,EAC3C,UAAUA,GAAE,OAAO;AACrB,CAAC;AAID,IAAM,2BAA2BA,GAAE,WAAW,UAAU;AAExD,SAAS,0BACP,MAGA;AAEA,QAAM,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AAEhD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OA
AO,QAAW,aAAa,OAAU;AAAA,EACpD;AAEA,QAAM,cAAc,MAAM,IAAI,CAAC,UAAU;AAAA,IACvC,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,YAAY,KAAK;AAAA,IACnB;AAAA,EACF,EAAE;AAEF,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,aAAa,aAAa,OAAU;AAAA,EACtD;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IACjD,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IACjD,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,aAAa,MAAM;AAAA,IAIlD,KAAK;AACH,aAAO;AAAA,QACL,OAAO,YAAY;AAAA,UACjB,CAAC,SAAS,KAAK,SAAS,SAAS,WAAW;AAAA,QAC9C;AAAA,QACA,aAAa;AAAA,MACf;AAAA,IACF,SAAS;AACP,YAAM,kBAAkB;AACxB,YAAM,IAAI,MAAM,iCAAiC,eAAe,EAAE;AAAA,IACpE;AAAA,EACF;AACF;;;AoBpQO,SAAS,gBAAgB,SAAuC;AACrE,QAAM,kBAAkB,CACtB,SACA,WAAkC,CAAC,MAEnC,IAAI,2BAA2B,SAAS,UAAU;AAAA,IAChD,UAAU;AAAA,IACV,SAAS,QAAQ;AAAA,EACnB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,OAAO;AAEhB,SAAO;AACT;","names":["UnsupportedFunctionalityError","z","TypeValidationError","APICallError","SecureJSON","TypeValidationError","APICallError","UnsupportedFunctionalityError","z"]}
|
1
|
+
{"version":3,"sources":["../src/workersai-chat-language-model.ts","../src/convert-to-workersai-chat-messages.ts","../src/index.ts"],"sourcesContent":["import {\n type LanguageModelV1,\n type LanguageModelV1CallWarning,\n type LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\nimport { z } from \"zod\";\nimport { convertToWorkersAIChatMessages } from \"./convert-to-workersai-chat-messages\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport type { TextGenerationModels } from \"./workersai-models\";\n\nimport { events } from \"fetch-event-stream\";\n\ntype WorkersAIChatConfig = {\n provider: string;\n binding: Ai;\n};\n\nexport class WorkersAIChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = \"v1\";\n readonly defaultObjectGenerationMode = \"json\";\n\n readonly modelId: TextGenerationModels;\n readonly settings: WorkersAIChatSettings;\n\n private readonly config: WorkersAIChatConfig;\n\n constructor(\n modelId: TextGenerationModels,\n settings: WorkersAIChatSettings,\n config: WorkersAIChatConfig\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1[\"doGenerate\"]>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"frequencyPenalty\",\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: \"unsupported-setting\",\n setting: \"presencePenalty\",\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: this.settings.safePrompt,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // messages:\n messages: convertToWorkersAIChatMessages(prompt),\n };\n\n switch (type) {\n case \"regular\": {\n return {\n args: { ...baseArgs, ...prepareToolsAndToolChoice(mode) },\n warnings,\n };\n }\n\n case \"object-json\": {\n return {\n args: {\n ...baseArgs,\n response_format: { type: \"json_object\" },\n tools: undefined,\n },\n warnings,\n };\n }\n\n case \"object-tool\": {\n return {\n args: {\n ...baseArgs,\n tool_choice: \"any\",\n tools: [{ type: \"function\", function: mode.tool }],\n },\n warnings,\n };\n }\n\n // @ts-expect-error - this is unreachable code\n // TODO: fixme\n case \"object-grammar\": {\n throw new UnsupportedFunctionalityError({\n functionality: \"object-grammar mode\",\n });\n }\n\n default: {\n const exhaustiveCheck = type satisfies never;\n throw new Error(`Unsupported type: ${exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1[\"doGenerate\"]>[0]\n ): Promise<Awaited<ReturnType<LanguageModelV1[\"doGenerate\"]>>> {\n const { args, warnings } = this.getArgs(options);\n\n const output = await this.config.binding.run(\n args.model,\n {\n messages: args.messages,\n tools: args.tools,\n },\n {\n gateway: this.settings.gateway,\n }\n );\n\n if (output instanceof ReadableStream) {\n throw new Error(\"This shouldn't happen\");\n }\n\n return {\n text: output.response,\n toolCalls: output.tool_calls?.map((toolCall) => ({\n toolCallType: \"function\",\n toolCallId: toolCall.name,\n toolName: toolCall.name,\n args: 
JSON.stringify(toolCall.arguments || {}),\n })),\n finishReason: \"stop\", // TODO: mapWorkersAIFinishReason(response.finish_reason),\n rawCall: { rawPrompt: args.messages, rawSettings: args },\n usage: {\n\t // TODO: mapWorkersAIUsage(response.usage),\n \t\tpromptTokens: 0,\n\t completionTokens: 0,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1[\"doStream\"]>[0]\n ): Promise<Awaited<ReturnType<LanguageModelV1[\"doStream\"]>>> {\n const { args, warnings } = this.getArgs(options);\n\n const decoder = new TextDecoder();\n\n const response = await this.config.binding.run(args.model, {\n messages: args.messages,\n stream: true,\n tools: args.tools,\n });\n\n if (!(response instanceof ReadableStream)) {\n throw new Error(\"This shouldn't happen\");\n }\n\n return {\n stream: response.pipeThrough(\n new TransformStream<Uint8Array, LanguageModelV1StreamPart>({\n async transform(chunk, controller) {\n const chunkToText = decoder.decode(chunk);\n const chunks = events(new Response(chunkToText));\n for await (const singleChunk of chunks) {\n if (!singleChunk.data) {\n continue;\n }\n if (singleChunk.data === \"[DONE]\") {\n controller.enqueue({\n type: \"finish\",\n finishReason: \"stop\",\n usage: {\n promptTokens: 0,\n completionTokens: 0,\n },\n });\n return;\n }\n const data = JSON.parse(singleChunk.data);\n\n controller.enqueue({\n type: \"text-delta\",\n textDelta: data.response ?? \"DATALOSS\",\n });\n }\n controller.enqueue({\n type: \"finish\",\n finishReason: \"stop\",\n usage: {\n promptTokens: 0,\n completionTokens: 0,\n },\n });\n },\n })\n ),\n rawCall: { rawPrompt: args.messages, rawSettings: args },\n warnings,\n };\n }\n}\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst workersAIChatResponseSchema = z.object({\n response: z.string(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst workersAIChatChunkSchema = z.instanceof(Uint8Array);\n\nfunction prepareToolsAndToolChoice(\n mode: Parameters<LanguageModelV1[\"doGenerate\"]>[0][\"mode\"] & {\n type: \"regular\";\n }\n) {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined };\n }\n\n const mappedTools = tools.map((tool) => ({\n type: \"function\",\n function: {\n name: tool.name,\n // @ts-expect-error - description is not a property of tool\n description: tool.description,\n // @ts-expect-error - parameters is not a property of tool\n parameters: tool.parameters,\n },\n }));\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return { tools: mappedTools, tool_choice: undefined };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case \"auto\":\n return { tools: mappedTools, tool_choice: type };\n case \"none\":\n return { tools: mappedTools, tool_choice: type };\n case \"required\":\n return { tools: mappedTools, tool_choice: \"any\" };\n\n // workersAI does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case \"tool\":\n return {\n tools: mappedTools.filter(\n (tool) => tool.function.name === toolChoice.toolName\n ),\n tool_choice: \"any\",\n };\n default: {\n const exhaustiveCheck = type satisfies never;\n throw new Error(`Unsupported tool choice type: ${exhaustiveCheck}`);\n }\n }\n}\n","import {\n type LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from \"@ai-sdk/provider\";\nimport type { WorkersAIChatPrompt } from \"./workersai-chat-prompt\";\n\n// TODO\nexport function convertToWorkersAIChatMessages(\n prompt: LanguageModelV1Prompt\n): WorkersAIChatPrompt {\n const messages: WorkersAIChatPrompt = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case \"system\": {\n messages.push({ role: \"system\", content });\n break;\n }\n\n case \"user\": {\n messages.push({\n role: \"user\",\n content: content\n .map((part) => {\n switch (part.type) {\n case \"text\": {\n return part.text;\n }\n case \"image\": {\n throw new UnsupportedFunctionalityError({\n functionality: \"image-part\",\n });\n }\n }\n })\n .join(\"\"),\n });\n break;\n }\n\n case \"assistant\": {\n let text = \"\";\n const toolCalls: Array<{\n id: string;\n type: \"function\";\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case \"text\": {\n text += part.text;\n break;\n }\n case \"tool-call\": {\n toolCalls.push({\n id: part.toolCallId,\n type: \"function\",\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const exhaustiveCheck = part satisfies never;\n throw new Error(`Unsupported part: ${exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: \"assistant\",\n content: text,\n tool_calls:\n toolCalls.length > 0\n ? 
toolCalls.map(({ function: { name, arguments: args } }) => ({\n id: \"null\",\n type: \"function\",\n function: { name, arguments: args },\n }))\n : undefined,\n });\n\n break;\n }\n case \"tool\": {\n for (const toolResponse of content) {\n messages.push({\n role: \"tool\",\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n });\n }\n break;\n }\n default: {\n const exhaustiveCheck = role satisfies never;\n throw new Error(`Unsupported role: ${exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { WorkersAIChatLanguageModel } from \"./workersai-chat-language-model\";\nimport type { WorkersAIChatSettings } from \"./workersai-chat-settings\";\nimport type { TextGenerationModels } from \"./workersai-models\";\n\nexport interface WorkersAI {\n (\n modelId: TextGenerationModels,\n settings?: WorkersAIChatSettings\n ): WorkersAIChatLanguageModel;\n\n /**\n * Creates a model for text generation.\n **/\n chat(\n modelId: TextGenerationModels,\n settings?: WorkersAIChatSettings\n ): WorkersAIChatLanguageModel;\n}\n\nexport interface WorkersAISettings {\n /**\n * Provide an `env.AI` binding to use for the AI inference.\n * You can set up an AI bindings in your Workers project\n * by adding the following this to `wrangler.toml`:\n\n ```toml\n[ai]\nbinding = \"AI\"\n ```\n **/\n binding: Ai;\n}\n\n/**\n * Create a Workers AI provider instance.\n **/\nexport function createWorkersAI(options: WorkersAISettings): WorkersAI {\n const createChatModel = (\n modelId: TextGenerationModels,\n settings: WorkersAIChatSettings = {}\n ) =>\n new WorkersAIChatLanguageModel(modelId, settings, {\n provider: \"workersai.chat\",\n binding: options.binding,\n });\n\n const provider = function (\n modelId: TextGenerationModels,\n settings?: WorkersAIChatSettings\n ) {\n if (new.target) {\n throw new Error(\n \"The WorkersAI model function cannot be called with the new keyword.\"\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.chat = createChatModel;\n\n return 
provider;\n}\n"],"mappings":";;;;;AAAA;AAAA,EAIE,iCAAAA;AAAA,OACK;AACP,SAAS,SAAS;;;ACNlB;AAAA,EAEE;AAAA,OACK;AAIA,SAAS,+BACd,QACqB;AACrB,QAAM,WAAgC,CAAC;AAEvC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QACN,IAAI,CAAC,SAAS;AACb,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK;AAAA,cACd;AAAA,cACA,KAAK,SAAS;AACZ,sBAAM,IAAI,8BAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC,EACA,KAAK,EAAE;AAAA,QACZ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,kBAAkB;AACxB,oBAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,YACxD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YACE,UAAU,SAAS,IACf,UAAU,IAAI,CAAC,EAAE,UAAU,EAAE,MAAM,WAAW,KAAK,EAAE,OAAO;AAAA,YAC1D,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,UAAU,EAAE,MAAM,WAAW,KAAK;AAAA,UACpC,EAAE,IACF;AAAA,QACR,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC7C,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACxD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AD9FA,SAAS,cAAc;AAOhB,IAAM,6BAAN,MAA4D;AAAA,EASjE,YACE,SACA,UACA,QACA;AAZF,wBAAS,wBAAuB;AAChC,wBAAS,+BAA8B;AAEvC,wBAAS;AACT,wBAAS;AAET,wBAAiB;AAOf,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,UAAU,+BAA+B,MAAM;AAAA,IACjD;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,GAAG,0BAA0B,IAAI,EAAE;AAAA,UACxD;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,YACvC,OAAO;AAAA,UACT;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA,MAIA,KAAK,kBAAkB;AACrB,cAAM,IAAIC,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,kBAAkB;AACxB,cAAM,IAAI,MAAM,qBAAqB,eAAe,EAAE;AAAA,MACxD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,SAAS,MAAM,KAAK,OAAO,QAAQ;AAAA,MACvC,KAAK;AAAA,MACL;AAAA,QACE,UAAU,KAAK;AAAA,QACf,OAAO,KAAK;AAAA,MACd;AAAA,MACA;AAAA,QACE,SAAS,KAAK,SAAS;AAAA,MACzB;AAAA,IACF;AAEA,QAAI,kBAAkB,gBAAgB;AACpC,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACzC;AAEA,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,WAAW,OAAO,YAAY,IAAI,CAAC,cAAc;AAAA,QAC/C,cAAc;AAAA,QACd,YAAY,SAAS;AAAA,QACrB,UAAU,SAAS;AAAA,QACnB,MAAM,KAAK,UAAU,SAAS,aAAa,CAAC,CAAC;AAAA,MAC/C,EAAE;AAAA,MACF,cAAc;AAAA;AAAA,MACd,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACv
D,OAAO;AAAA;AAAA,QAET,cAAc;AAAA,QACb,kBAAkB;AAAA,MACjB;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,UAAU,IAAI,YAAY;AAEhC,UAAM,WAAW,MAAM,KAAK,OAAO,QAAQ,IAAI,KAAK,OAAO;AAAA,MACzD,UAAU,KAAK;AAAA,MACf,QAAQ;AAAA,MACR,OAAO,KAAK;AAAA,IACd,CAAC;AAED,QAAI,EAAE,oBAAoB,iBAAiB;AACzC,YAAM,IAAI,MAAM,uBAAuB;AAAA,IACzC;AAEA,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAAuD;AAAA,UACzD,MAAM,UAAU,OAAO,YAAY;AACjC,kBAAM,cAAc,QAAQ,OAAO,KAAK;AACxC,kBAAM,SAAS,OAAO,IAAI,SAAS,WAAW,CAAC;AAC/C,6BAAiB,eAAe,QAAQ;AACtC,kBAAI,CAAC,YAAY,MAAM;AACrB;AAAA,cACF;AACA,kBAAI,YAAY,SAAS,UAAU;AACjC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,OAAO;AAAA,oBACL,cAAc;AAAA,oBACd,kBAAkB;AAAA,kBACpB;AAAA,gBACF,CAAC;AACD;AAAA,cACF;AACA,oBAAM,OAAO,KAAK,MAAM,YAAY,IAAI;AAExC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,KAAK,YAAY;AAAA,cAC9B,CAAC;AAAA,YACH;AACA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,cAAc;AAAA,cACd,OAAO;AAAA,gBACL,cAAc;AAAA,gBACd,kBAAkB;AAAA,cACpB;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,KAAK,UAAU,aAAa,KAAK;AAAA,MACvD;AAAA,IACF;AAAA,EACF;AACF;AAGA,IAAM,8BAA8B,EAAE,OAAO;AAAA,EAC3C,UAAU,EAAE,OAAO;AACrB,CAAC;AAID,IAAM,2BAA2B,EAAE,WAAW,UAAU;AAExD,SAAS,0BACP,MAGA;AAEA,QAAM,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AAEhD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,OAAU;AAAA,EACpD;AAEA,QAAM,cAAc,MAAM,IAAI,CAAC,UAAU;AAAA,IACvC,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,KAAK;AAAA;AAAA,MAEX,aAAa,KAAK;AAAA;AAAA,MAElB,YAAY,KAAK;AAAA,IACnB;AAAA,EACF,EAAE;AAEF,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,aAAa,aAAa,OAAU;AAAA,EACtD;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IACjD,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IACjD,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,aAAa,MAAM;AAAA;AAAA;AAAA,IAIlD,KAAK;AACH,aAAO;AAAA,QACL,OAAO,YAAY;AAAA,UACjB,CAAC,SAAS,KAAK,SAAS,SAAS,WAAW;AAAA,QAC9C;AAAA,QACA,aAAa;AAAA,MACf;AAAA,IACF,SAAS;AACP,YAAM,kBAAkB;AACxB,YAAM,IAAI,MAAM,iCAAiC,eAAe,EAAE;AAAA,IACpE;AAAA,EACF;AACF;;;AElQO,SAAS,gBAAgB,SAAuC;AACrE,QAAM,kBAAkB,CACtB,SACA,WAAkC,CAAC,MAEnC,IAAI,2BAA2B,SAAS,UAAU;AAAA,IAChD,UAAU;AAAA,IACV,SAAS,QAAQ;AAAA,EACnB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,OAAO;AAEhB,SAAO;AACT;","names":["UnsupportedFunctionalityError","UnsupportedFunctionalityError"]}
|
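The rebuilt `dist/index.js.map` above embeds the new sources: `doStream` now forwards `tools` to the binding, parses the SSE stream with `fetch-event-stream`, and emits `text-delta` parts until the `[DONE]` sentinel. Below is a minimal consumption sketch, assuming the `ai@^4` `streamText`/`toTextStreamResponse` API implied by the dependency bump further down; the model id and prompt are placeholders.

```ts
// Hedged sketch: streaming through the provider from a Worker.
// Assumes ai@^4 (streamText + toTextStreamResponse) and that the placeholder
// model id is one of the account's text-generation models.
import { createWorkersAI } from "workers-ai-provider";
import { streamText } from "ai";

export default {
  async fetch(_request: Request, env: { AI: Ai }): Promise<Response> {
    const workersai = createWorkersAI({ binding: env.AI });

    // streamText drives doStream(), which the new bundle backs with
    // env.AI.run({ ..., stream: true }) plus fetch-event-stream parsing.
    const result = streamText({
      model: workersai("@cf/meta/llama-3.1-8b-instruct"),
      prompt: "Write a haiku about edge networks.",
    });

    // Each text-delta enqueued by the TransformStream surfaces as a plain-text chunk.
    return result.toTextStreamResponse();
  },
};
```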
package/package.json
CHANGED
@@ -2,7 +2,7 @@
|
|
2
2
|
"name": "workers-ai-provider",
|
3
3
|
"description": "Workers AI Provider for the vercel AI SDK",
|
4
4
|
"type": "module",
|
5
|
-
"version": "0.0.
|
5
|
+
"version": "0.0.12",
|
6
6
|
"main": "dist/index.js",
|
7
7
|
"types": "dist/index.d.ts",
|
8
8
|
"repository": {
|
@@ -18,10 +18,11 @@
|
|
18
18
|
"build": "rm -rf dist && tsup src/index.ts --dts --sourcemap --format esm --target es2020"
|
19
19
|
},
|
20
20
|
"dependencies": {
|
21
|
-
"@ai-sdk/provider": "^
|
22
|
-
"ai": "^
|
21
|
+
"@ai-sdk/provider": "^1.0.2",
|
22
|
+
"@ai-sdk/provider-utils": "^1.0.22",
|
23
|
+
"ai": "^4.0.14",
|
23
24
|
"fetch-event-stream": "^0.1.5",
|
24
|
-
"zod": "^3.
|
25
|
+
"zod": "^3.24.1"
|
25
26
|
},
|
26
27
|
"files": [
|
27
28
|
"dist",
|
@@ -40,6 +41,6 @@
|
|
40
41
|
"serverless"
|
41
42
|
],
|
42
43
|
"devDependencies": {
|
43
|
-
"@cloudflare/workers-types": "^4.
|
44
|
+
"@cloudflare/workers-types": "^4.20250214.0"
|
44
45
|
}
|
45
46
|
}
|
package/src/index.ts
CHANGED
@@ -1,9 +1,10 @@
|
|
1
1
|
import { WorkersAIChatLanguageModel } from "./workersai-chat-language-model";
|
2
2
|
import type { WorkersAIChatSettings } from "./workersai-chat-settings";
|
3
|
+
import type { TextGenerationModels } from "./workersai-models";
|
3
4
|
|
4
5
|
export interface WorkersAI {
|
5
6
|
(
|
6
|
-
modelId:
|
7
|
+
modelId: TextGenerationModels,
|
7
8
|
settings?: WorkersAIChatSettings
|
8
9
|
): WorkersAIChatLanguageModel;
|
9
10
|
|
@@ -11,21 +12,21 @@ export interface WorkersAI {
|
|
11
12
|
* Creates a model for text generation.
|
12
13
|
**/
|
13
14
|
chat(
|
14
|
-
modelId:
|
15
|
+
modelId: TextGenerationModels,
|
15
16
|
settings?: WorkersAIChatSettings
|
16
17
|
): WorkersAIChatLanguageModel;
|
17
18
|
}
|
18
19
|
|
19
20
|
export interface WorkersAISettings {
|
20
21
|
/**
|
21
|
-
* Provide an `env.AI` binding to use for the AI inference.
|
22
|
-
* You can set up an AI bindings in your Workers project
|
23
|
-
* by adding the following this to `wrangler.toml`:
|
24
|
-
|
22
|
+
* Provide an `env.AI` binding to use for the AI inference.
|
23
|
+
* You can set up an AI bindings in your Workers project
|
24
|
+
* by adding the following this to `wrangler.toml`:
|
25
|
+
|
25
26
|
```toml
|
26
27
|
[ai]
|
27
28
|
binding = "AI"
|
28
|
-
```
|
29
|
+
```
|
29
30
|
**/
|
30
31
|
binding: Ai;
|
31
32
|
}
|
@@ -35,7 +36,7 @@ binding = "AI"
|
|
35
36
|
**/
|
36
37
|
export function createWorkersAI(options: WorkersAISettings): WorkersAI {
|
37
38
|
const createChatModel = (
|
38
|
-
modelId:
|
39
|
+
modelId: TextGenerationModels,
|
39
40
|
settings: WorkersAIChatSettings = {}
|
40
41
|
) =>
|
41
42
|
new WorkersAIChatLanguageModel(modelId, settings, {
|
@@ -44,7 +45,7 @@ export function createWorkersAI(options: WorkersAISettings): WorkersAI {
|
|
44
45
|
});
|
45
46
|
|
46
47
|
const provider = function (
|
47
|
-
modelId:
|
48
|
+
modelId: TextGenerationModels,
|
48
49
|
settings?: WorkersAIChatSettings
|
49
50
|
) {
|
50
51
|
if (new.target) {
|
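The `src/index.ts` changes above narrow the model id to `TextGenerationModels` while keeping the callable provider and its `.chat` alias. A minimal usage sketch under those assumptions follows; `"my-gateway"` is a hypothetical AI Gateway id, the exact `GatewayOptions` fields come from `@cloudflare/workers-types`, and the settings object is forwarded to `env.AI.run()` by the model class in the next file.

```ts
// Hedged sketch: creating the provider and routing one call through AI Gateway.
import { createWorkersAI } from "workers-ai-provider";
import { generateText } from "ai";

export default {
  async fetch(_request: Request, env: { AI: Ai }): Promise<Response> {
    const workersai = createWorkersAI({ binding: env.AI });

    // The model id is now checked against TextGenerationModels instead of a broader union.
    const model = workersai("@cf/meta/llama-3.1-8b-instruct", {
      // Hypothetical gateway id; passed through as the options argument of env.AI.run().
      gateway: { id: "my-gateway", skipCache: false },
    });

    const { text } = await generateText({
      model,
      prompt: "Explain Cloudflare Workers AI in one sentence.",
    });

    return new Response(text);
  },
};
```

`workersai.chat(...)` builds the same model, and calling the provider with `new` still throws, as the unchanged guard above shows.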
package/src/workersai-chat-language-model.ts
CHANGED
@@ -1,21 +1,13 @@
|
|
1
1
|
import {
|
2
2
|
type LanguageModelV1,
|
3
3
|
type LanguageModelV1CallWarning,
|
4
|
-
type LanguageModelV1FinishReason,
|
5
4
|
type LanguageModelV1StreamPart,
|
6
5
|
UnsupportedFunctionalityError,
|
7
6
|
} from "@ai-sdk/provider";
|
8
|
-
import type {
|
9
|
-
ParseResult,
|
10
|
-
createEventSourceResponseHandler,
|
11
|
-
createJsonResponseHandler,
|
12
|
-
postJsonToApi,
|
13
|
-
} from "@ai-sdk/provider-utils";
|
14
7
|
import { z } from "zod";
|
15
8
|
import { convertToWorkersAIChatMessages } from "./convert-to-workersai-chat-messages";
|
16
|
-
import { mapWorkersAIFinishReason } from "./map-workersai-finish-reason";
|
17
9
|
import type { WorkersAIChatSettings } from "./workersai-chat-settings";
|
18
|
-
import {
|
10
|
+
import type { TextGenerationModels } from "./workersai-models";
|
19
11
|
|
20
12
|
import { events } from "fetch-event-stream";
|
21
13
|
|
@@ -28,13 +20,13 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
|
|
28
20
|
readonly specificationVersion = "v1";
|
29
21
|
readonly defaultObjectGenerationMode = "json";
|
30
22
|
|
31
|
-
readonly modelId:
|
23
|
+
readonly modelId: TextGenerationModels;
|
32
24
|
readonly settings: WorkersAIChatSettings;
|
33
25
|
|
34
26
|
private readonly config: WorkersAIChatConfig;
|
35
27
|
|
36
28
|
constructor(
|
37
|
-
modelId:
|
29
|
+
modelId: TextGenerationModels,
|
38
30
|
settings: WorkersAIChatSettings,
|
39
31
|
config: WorkersAIChatConfig
|
40
32
|
) {
|
@@ -105,6 +97,7 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
|
|
105
97
|
args: {
|
106
98
|
...baseArgs,
|
107
99
|
response_format: { type: "json_object" },
|
100
|
+
tools: undefined,
|
108
101
|
},
|
109
102
|
warnings,
|
110
103
|
};
|
@@ -141,29 +134,35 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
|
|
141
134
|
): Promise<Awaited<ReturnType<LanguageModelV1["doGenerate"]>>> {
|
142
135
|
const { args, warnings } = this.getArgs(options);
|
143
136
|
|
144
|
-
const
|
145
|
-
|
146
|
-
|
137
|
+
const output = await this.config.binding.run(
|
138
|
+
args.model,
|
139
|
+
{
|
140
|
+
messages: args.messages,
|
141
|
+
tools: args.tools,
|
142
|
+
},
|
143
|
+
{
|
144
|
+
gateway: this.settings.gateway,
|
145
|
+
}
|
146
|
+
);
|
147
147
|
|
148
|
-
if (
|
148
|
+
if (output instanceof ReadableStream) {
|
149
149
|
throw new Error("This shouldn't happen");
|
150
150
|
}
|
151
151
|
|
152
152
|
return {
|
153
|
-
text:
|
154
|
-
|
155
|
-
|
156
|
-
|
157
|
-
|
158
|
-
|
159
|
-
|
160
|
-
// })),
|
153
|
+
text: output.response,
|
154
|
+
toolCalls: output.tool_calls?.map((toolCall) => ({
|
155
|
+
toolCallType: "function",
|
156
|
+
toolCallId: toolCall.name,
|
157
|
+
toolName: toolCall.name,
|
158
|
+
args: JSON.stringify(toolCall.arguments || {}),
|
159
|
+
})),
|
161
160
|
finishReason: "stop", // TODO: mapWorkersAIFinishReason(response.finish_reason),
|
162
161
|
rawCall: { rawPrompt: args.messages, rawSettings: args },
|
163
162
|
usage: {
|
164
|
-
|
165
|
-
|
166
|
-
|
163
|
+
// TODO: mapWorkersAIUsage(response.usage),
|
164
|
+
promptTokens: 0,
|
165
|
+
completionTokens: 0,
|
167
166
|
},
|
168
167
|
warnings,
|
169
168
|
};
|
@@ -179,6 +178,7 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
|
|
179
178
|
const response = await this.config.binding.run(args.model, {
|
180
179
|
messages: args.messages,
|
181
180
|
stream: true,
|
181
|
+
tools: args.tools,
|
182
182
|
});
|
183
183
|
|
184
184
|
if (!(response instanceof ReadableStream)) {
|
@@ -187,12 +187,9 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
|
|
187
187
|
|
188
188
|
return {
|
189
189
|
stream: response.pipeThrough(
|
190
|
-
new TransformStream<
|
191
|
-
ParseResult<z.infer<typeof workersAIChatChunkSchema>>,
|
192
|
-
LanguageModelV1StreamPart
|
193
|
-
>({
|
190
|
+
new TransformStream<Uint8Array, LanguageModelV1StreamPart>({
|
194
191
|
async transform(chunk, controller) {
|
195
|
-
const chunkToText = decoder.decode(chunk
|
192
|
+
const chunkToText = decoder.decode(chunk);
|
196
193
|
const chunks = events(new Response(chunkToText));
|
197
194
|
for await (const singleChunk of chunks) {
|
198
195
|
if (!singleChunk.data) {
|
@@ -258,7 +255,9 @@ function prepareToolsAndToolChoice(
|
|
258
255
|
type: "function",
|
259
256
|
function: {
|
260
257
|
name: tool.name,
|
258
|
+
// @ts-expect-error - description is not a property of tool
|
261
259
|
description: tool.description,
|
260
|
+
// @ts-expect-error - parameters is not a property of tool
|
262
261
|
parameters: tool.parameters,
|
263
262
|
},
|
264
263
|
}));
|
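With `tools: args.tools` now passed to `binding.run()` in both `doGenerate` and `doStream`, and `output.tool_calls` mapped back into AI SDK tool calls (note that `toolCall.name` doubles as `toolCallId` above), a tool round trip can be sketched as follows. The tool name, schema, and model id are illustrative, and the `tool()` helper with a `parameters` zod schema assumes `ai@^4`.

```ts
// Hedged sketch: one tool-call round trip against the provider.
import { createWorkersAI } from "workers-ai-provider";
import { generateText, tool } from "ai";
import { z } from "zod";

export async function runWeatherDemo(env: { AI: Ai }): Promise<string> {
  const workersai = createWorkersAI({ binding: env.AI });

  const { text, toolResults } = await generateText({
    // Placeholder id; pick a model that supports function calling.
    model: workersai("@cf/meta/llama-3.1-8b-instruct"),
    prompt: "What is the weather in Berlin?",
    tools: {
      // Serialized by prepareToolsAndToolChoice() into the tools array sent to env.AI.run().
      getWeather: tool({
        description: "Look up the current weather for a city",
        parameters: z.object({ city: z.string() }),
        execute: async ({ city }) => `It is sunny in ${city}.`,
      }),
    },
  });

  // With a single generation step, the answer may arrive as a tool result rather than text.
  return text || JSON.stringify(toolResults);
}
```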
package/src/workersai-error.ts
CHANGED
@@ -1,4 +1,6 @@
|
|
1
|
-
import {
|
1
|
+
import {
|
2
|
+
createJsonErrorResponseHandler,
|
3
|
+
} from "@ai-sdk/provider-utils";
|
2
4
|
import { z } from "zod";
|
3
5
|
|
4
6
|
const workersAIErrorDataSchema = z.object({
|
@@ -9,9 +11,8 @@ const workersAIErrorDataSchema = z.object({
|
|
9
11
|
code: z.string().nullable(),
|
10
12
|
});
|
11
13
|
|
12
|
-
export
|
13
|
-
|
14
|
-
|
15
|
-
|
16
|
-
|
17
|
-
});
|
14
|
+
export const workersAIFailedResponseHandler =
|
15
|
+
createJsonErrorResponseHandler({
|
16
|
+
errorSchema: workersAIErrorDataSchema,
|
17
|
+
errorToMessage: (data) => data.message,
|
18
|
+
});
|
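`workersAIFailedResponseHandler` is now built with `createJsonErrorResponseHandler` instead of a hand-rolled handler, although the binding-based paths above never issue their own HTTP requests. For context, here is a hedged sketch of how such a handler is conventionally wired with the `postJsonToApi` and `createJsonResponseHandler` helpers that the removed imports referenced; the URL, headers, and success schema below are hypothetical.

```ts
// Illustrative only: the published provider calls env.AI.run() directly, so this
// HTTP wiring shows the @ai-sdk/provider-utils pattern, not the shipped code path.
import {
  postJsonToApi,
  createJsonResponseHandler,
} from "@ai-sdk/provider-utils";
import { z } from "zod";
import { workersAIFailedResponseHandler } from "./workersai-error";

// Hypothetical success schema for a REST-style text-generation endpoint.
const chatResponseSchema = z.object({ response: z.string() });

export async function callRestEndpoint(prompt: string, apiToken: string) {
  const { value } = await postJsonToApi({
    url: "https://example.com/ai/run/some-model", // placeholder URL
    headers: { Authorization: `Bearer ${apiToken}` },
    body: { messages: [{ role: "user", content: prompt }] },
    // On a non-2xx response, the zod-backed handler turns the JSON error body into an APICallError.
    failedResponseHandler: workersAIFailedResponseHandler,
    successfulResponseHandler: createJsonResponseHandler(chatResponseSchema),
  });

  return value.response;
}
```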