workers-ai-provider 0.3.2 → 0.4.1
This diff compares the contents of the two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
- package/dist/index.d.ts +52 -7
- package/dist/index.js +298 -140
- package/dist/index.js.map +1 -1
- package/package.json +2 -2
- package/src/autorag-chat-language-model.ts +172 -0
- package/src/autorag-chat-settings.ts +14 -0
- package/src/index.ts +38 -0
- package/src/streaming.ts +37 -0
- package/src/utils.ts +93 -1
- package/src/workersai-chat-language-model.ts +6 -103
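The new src/autorag-chat-language-model.ts and src/autorag-chat-settings.ts, together with the larger src/index.ts, suggest this release adds an AutoRAG chat model alongside the existing Workers AI one, while src/streaming.ts and src/utils.ts hold helpers factored out of workersai-chat-language-model.ts (see the hunks below). For orientation, a minimal consumer-side sketch, assuming the public createWorkersAI factory and the AI SDK's generateText are unchanged by this bump; the model id is illustrative:

```ts
import { generateText } from "ai";
import { createWorkersAI } from "workers-ai-provider";

interface Env {
    AI: Ai; // Workers AI binding declared in wrangler.toml
}

export default {
    async fetch(_request: Request, env: Env): Promise<Response> {
        // The provider wraps the binding; model ids are passed straight through to Workers AI.
        const workersai = createWorkersAI({ binding: env.AI });

        const { text } = await generateText({
            model: workersai("@cf/meta/llama-3.1-8b-instruct"),
            prompt: "Write a haiku about Cloudflare Workers.",
        });

        return new Response(text);
    },
};
```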
package/src/workersai-chat-language-model.ts

@@ -8,9 +8,9 @@ import { convertToWorkersAIChatMessages } from "./convert-to-workersai-chat-messages";
 import type { WorkersAIChatSettings } from "./workersai-chat-settings";
 import type { TextGenerationModels } from "./workersai-models";
 
-import { events } from "fetch-event-stream";
 import { mapWorkersAIUsage } from "./map-workersai-usage";
-import type { WorkersAIChatPrompt } from "./workersai-chat-prompt";
+import { getMappedStream } from "./streaming";
+import { lastMessageWasUser, prepareToolsAndToolChoice, processToolCalls } from "./utils";
 
 type WorkersAIChatConfig = {
     provider: string;
@@ -138,9 +138,7 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
         const { gateway, safePrompt, ...passthroughOptions } = this.settings;
 
         // Extract image from messages if present
-        const { messages, images } = convertToWorkersAIChatMessages(
-            options.prompt,
-        );
+        const { messages, images } = convertToWorkersAIChatMessages(options.prompt);
 
         // TODO: support for multiple images
         if (images.length !== 0 && images.length !== 1) {
@@ -175,12 +173,7 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
                 typeof output.response === "object" && output.response !== null
                     ? JSON.stringify(output.response) // ai-sdk expects a string here
                     : output.response,
-            toolCalls: output.tool_calls?.map((toolCall) => ({
-                toolCallType: "function",
-                toolCallId: toolCall.name,
-                toolName: toolCall.name,
-                args: JSON.stringify(toolCall.arguments || {}),
-            })),
+            toolCalls: processToolCalls(output),
             finishReason: "stop", // TODO: mapWorkersAIFinishReason(response.finish_reason),
             rawCall: { rawPrompt: messages, rawSettings: args },
             usage: mapWorkersAIUsage(output),
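The new src/utils.ts (+93 lines) is not shown in this diff, but processToolCalls presumably just factors out the mapping deleted above. A rough sketch reconstructed from those deleted lines; the Workers AI output shape is assumed:

```ts
import type { LanguageModelV1FunctionToolCall } from "@ai-sdk/provider";

// Assumed shape of the Workers AI text-generation output, based on the removed code.
type WorkersAIToolCall = { name: string; arguments?: Record<string, unknown> };

export function processToolCalls(output: {
    tool_calls?: WorkersAIToolCall[];
}): LanguageModelV1FunctionToolCall[] | undefined {
    // Same mapping as the inline version: Workers AI reports no tool-call ids,
    // so the tool name doubles as the id.
    return output.tool_calls?.map(
        (toolCall): LanguageModelV1FunctionToolCall => ({
            toolCallType: "function",
            toolCallId: toolCall.name,
            toolName: toolCall.name,
            args: JSON.stringify(toolCall.arguments || {}),
        }),
    );
}
```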
@@ -194,9 +187,7 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
         const { args, warnings } = this.getArgs(options);
 
         // Extract image from messages if present
-        const { messages, images } = convertToWorkersAIChatMessages(
-            options.prompt,
-        );
+        const { messages, images } = convertToWorkersAIChatMessages(options.prompt);
 
         // [1] When the latest message is not a tool response, we use the regular generate function
         // and simulate it as a streamed response in order to satisfy the AI SDK's interface for
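The context comments above describe the tool-calling flow in doStream: with tools present and the latest message coming from the user, the provider generates non-streamed and simulates the stream. A consumer-side sketch that exercises this path, assuming an AI SDK v4 style streamText/tool/maxSteps API; the model id and tool are illustrative:

```ts
import { streamText, tool } from "ai";
import { z } from "zod";
import { createWorkersAI } from "workers-ai-provider";

// `ai` is the Workers AI binding (env.AI); the model id and tool are illustrative.
export function streamWeatherAnswer(ai: Ai): Response {
    const workersai = createWorkersAI({ binding: ai });

    const result = streamText({
        model: workersai("@cf/meta/llama-3.1-8b-instruct"),
        prompt: "What is the weather in Lisbon?",
        tools: {
            weather: tool({
                description: "Get the weather for a city",
                parameters: z.object({ city: z.string() }),
                execute: async ({ city }) => `It is sunny in ${city}.`,
            }),
        },
        // Allow a second step so the tool result is fed back to the model
        // and the follow-up answer is streamed to the client.
        maxSteps: 2,
    });

    return result.toTextStreamResponse();
}
```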
@@ -270,98 +261,10 @@ export class WorkersAIChatLanguageModel implements LanguageModelV1 {
             throw new Error("This shouldn't happen");
         }
 
-        const chunkEvent = events(new Response(response));
-        let usage = { promptTokens: 0, completionTokens: 0 };
-
         return {
-            stream: new ReadableStream({
-                async start(controller) {
-                    for await (const event of chunkEvent) {
-                        if (!event.data) {
-                            continue;
-                        }
-                        if (event.data === "[DONE]") {
-                            break;
-                        }
-                        const chunk = JSON.parse(event.data);
-                        if (chunk.usage) {
-                            usage = mapWorkersAIUsage(chunk);
-                        }
-                        chunk.response?.length &&
-                            controller.enqueue({
-                                type: "text-delta",
-                                textDelta: chunk.response,
-                            });
-                    }
-                    controller.enqueue({
-                        type: "finish",
-                        finishReason: "stop",
-                        usage: usage,
-                    });
-                    controller.close();
-                },
-            }),
+            stream: getMappedStream(new Response(response)),
             rawCall: { rawPrompt: messages, rawSettings: args },
             warnings,
         };
     }
 }
-
-function prepareToolsAndToolChoice(
-    mode: Parameters<LanguageModelV1["doGenerate"]>[0]["mode"] & {
-        type: "regular";
-    },
-) {
-    // when the tools array is empty, change it to undefined to prevent errors:
-    const tools = mode.tools?.length ? mode.tools : undefined;
-
-    if (tools == null) {
-        return { tools: undefined, tool_choice: undefined };
-    }
-
-    const mappedTools = tools.map((tool) => ({
-        type: "function",
-        function: {
-            name: tool.name,
-            // @ts-expect-error - description is not a property of tool
-            description: tool.description,
-            // @ts-expect-error - parameters is not a property of tool
-            parameters: tool.parameters,
-        },
-    }));
-
-    const toolChoice = mode.toolChoice;
-
-    if (toolChoice == null) {
-        return { tools: mappedTools, tool_choice: undefined };
-    }
-
-    const type = toolChoice.type;
-
-    switch (type) {
-        case "auto":
-            return { tools: mappedTools, tool_choice: type };
-        case "none":
-            return { tools: mappedTools, tool_choice: type };
-        case "required":
-            return { tools: mappedTools, tool_choice: "any" };
-
-        // workersAI does not support tool mode directly,
-        // so we filter the tools and force the tool choice through 'any'
-        case "tool":
-            return {
-                tools: mappedTools.filter(
-                    (tool) => tool.function.name === toolChoice.toolName,
-                ),
-                tool_choice: "any",
-            };
-        default: {
-            const exhaustiveCheck = type satisfies never;
-            throw new Error(`Unsupported tool choice type: ${exhaustiveCheck}`);
-        }
-    }
-}
-
-function lastMessageWasUser(messages: WorkersAIChatPrompt) {
-    return messages.length > 0 && messages[messages.length - 1].role === "user";
-}
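The block removed above is the best indication of what the new src/streaming.ts (+37 lines) contains: getMappedStream is presumably a direct extraction of that inline SSE handling. A hedged reconstruction, not the published file contents:

```ts
import type { LanguageModelV1StreamPart } from "@ai-sdk/provider";
import { events } from "fetch-event-stream";
import { mapWorkersAIUsage } from "./map-workersai-usage";

// Reconstructed from the inline code deleted above: parse the SSE body from
// Workers AI, emit text deltas, and finish with the last reported usage.
export function getMappedStream(response: Response): ReadableStream<LanguageModelV1StreamPart> {
    const chunkEvent = events(response);
    let usage = { promptTokens: 0, completionTokens: 0 };

    return new ReadableStream<LanguageModelV1StreamPart>({
        async start(controller) {
            for await (const event of chunkEvent) {
                if (!event.data) {
                    continue;
                }
                if (event.data === "[DONE]") {
                    break;
                }
                const chunk = JSON.parse(event.data);
                if (chunk.usage) {
                    usage = mapWorkersAIUsage(chunk);
                }
                if (chunk.response?.length) {
                    controller.enqueue({ type: "text-delta", textDelta: chunk.response });
                }
            }
            controller.enqueue({ type: "finish", finishReason: "stop", usage });
            controller.close();
        },
    });
}
```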