modelfusion 0.103.0 → 0.105.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +56 -0
- package/model-function/Delta.d.ts +1 -2
- package/model-function/executeStreamCall.cjs +6 -4
- package/model-function/executeStreamCall.d.ts +2 -2
- package/model-function/executeStreamCall.js +6 -4
- package/model-function/generate-speech/streamSpeech.cjs +1 -2
- package/model-function/generate-speech/streamSpeech.js +1 -2
- package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +25 -29
- package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +3 -1
- package/model-function/generate-structure/StructureFromTextStreamingModel.js +25 -29
- package/model-function/generate-structure/StructureGenerationModel.d.ts +2 -0
- package/model-function/generate-structure/streamStructure.cjs +7 -8
- package/model-function/generate-structure/streamStructure.d.ts +1 -1
- package/model-function/generate-structure/streamStructure.js +7 -8
- package/model-function/generate-text/PromptTemplateFullTextModel.cjs +35 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +41 -0
- package/model-function/generate-text/PromptTemplateFullTextModel.js +31 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +3 -0
- package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +2 -1
- package/model-function/generate-text/PromptTemplateTextStreamingModel.js +3 -0
- package/model-function/generate-text/TextGenerationModel.d.ts +2 -1
- package/model-function/generate-text/index.cjs +1 -0
- package/model-function/generate-text/index.d.ts +1 -0
- package/model-function/generate-text/index.js +1 -0
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +2 -1
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.d.ts +2 -2
- package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +2 -1
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs +9 -5
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.d.ts +4 -4
- package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.js +9 -5
- package/model-function/generate-text/prompt-template/ChatPrompt.cjs +38 -20
- package/model-function/generate-text/prompt-template/ChatPrompt.d.ts +33 -34
- package/model-function/generate-text/prompt-template/ChatPrompt.js +37 -18
- package/model-function/generate-text/prompt-template/ContentPart.cjs +11 -0
- package/model-function/generate-text/prompt-template/ContentPart.d.ts +30 -0
- package/model-function/generate-text/prompt-template/ContentPart.js +7 -0
- package/model-function/generate-text/prompt-template/InstructionPrompt.d.ts +7 -22
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs +40 -6
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.d.ts +16 -4
- package/model-function/generate-text/prompt-template/Llama2PromptTemplate.js +38 -5
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs +10 -5
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.d.ts +4 -4
- package/model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js +10 -5
- package/model-function/generate-text/prompt-template/TextPromptTemplate.cjs +8 -5
- package/model-function/generate-text/prompt-template/TextPromptTemplate.d.ts +4 -4
- package/model-function/generate-text/prompt-template/TextPromptTemplate.js +8 -5
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +8 -4
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.d.ts +2 -2
- package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +8 -4
- package/model-function/generate-text/prompt-template/index.cjs +1 -1
- package/model-function/generate-text/prompt-template/index.d.ts +1 -1
- package/model-function/generate-text/prompt-template/index.js +1 -1
- package/model-function/generate-text/prompt-template/trimChatPrompt.cjs +0 -2
- package/model-function/generate-text/prompt-template/trimChatPrompt.d.ts +4 -4
- package/model-function/generate-text/prompt-template/trimChatPrompt.js +0 -2
- package/model-function/generate-text/streamText.cjs +27 -28
- package/model-function/generate-text/streamText.d.ts +1 -0
- package/model-function/generate-text/streamText.js +27 -28
- package/model-provider/anthropic/AnthropicPromptTemplate.cjs +9 -4
- package/model-provider/anthropic/AnthropicPromptTemplate.d.ts +4 -4
- package/model-provider/anthropic/AnthropicPromptTemplate.js +9 -4
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +13 -4
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +8 -14
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs +44 -0
- package/model-provider/anthropic/AnthropicTextGenerationModel.test.js +42 -0
- package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -44
- package/model-provider/cohere/CohereTextGenerationModel.d.ts +47 -13
- package/model-provider/cohere/CohereTextGenerationModel.js +7 -45
- package/model-provider/cohere/CohereTextGenerationModel.test.cjs +33 -0
- package/model-provider/cohere/CohereTextGenerationModel.test.js +31 -0
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.cjs +1 -2
- package/model-provider/elevenlabs/ElevenLabsSpeechModel.js +1 -2
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +29 -17
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +4 -4
- package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +29 -17
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +7 -14
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +157 -6
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +8 -15
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.cjs +37 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.d.ts +1 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.test.js +35 -0
- package/model-provider/mistral/MistralChatModel.cjs +30 -104
- package/model-provider/mistral/MistralChatModel.d.ts +49 -16
- package/model-provider/mistral/MistralChatModel.js +30 -104
- package/model-provider/mistral/MistralChatModel.test.cjs +51 -0
- package/model-provider/mistral/MistralChatModel.test.d.ts +1 -0
- package/model-provider/mistral/MistralChatModel.test.js +49 -0
- package/model-provider/mistral/MistralPromptTemplate.cjs +13 -5
- package/model-provider/mistral/MistralPromptTemplate.d.ts +4 -4
- package/model-provider/mistral/MistralPromptTemplate.js +13 -5
- package/model-provider/ollama/OllamaChatModel.cjs +7 -43
- package/model-provider/ollama/OllamaChatModel.d.ts +63 -11
- package/model-provider/ollama/OllamaChatModel.js +7 -43
- package/model-provider/ollama/OllamaChatModel.test.cjs +27 -0
- package/model-provider/ollama/OllamaChatModel.test.d.ts +1 -0
- package/model-provider/ollama/OllamaChatModel.test.js +25 -0
- package/model-provider/ollama/OllamaChatPromptTemplate.cjs +43 -17
- package/model-provider/ollama/OllamaChatPromptTemplate.d.ts +4 -4
- package/model-provider/ollama/OllamaChatPromptTemplate.js +43 -17
- package/model-provider/ollama/OllamaCompletionModel.cjs +22 -43
- package/model-provider/ollama/OllamaCompletionModel.d.ts +65 -9
- package/model-provider/ollama/OllamaCompletionModel.js +23 -44
- package/model-provider/ollama/OllamaCompletionModel.test.cjs +101 -13
- package/model-provider/ollama/OllamaCompletionModel.test.js +78 -13
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs} +71 -15
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts} +273 -19
- package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js} +71 -15
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.cjs → OpenAIChatFunctionCallStructureGenerationModel.cjs} +18 -2
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts → OpenAIChatFunctionCallStructureGenerationModel.d.ts} +41 -11
- package/model-provider/openai/{chat/OpenAIChatFunctionCallStructureGenerationModel.js → OpenAIChatFunctionCallStructureGenerationModel.js} +18 -2
- package/model-provider/openai/{chat/OpenAIChatMessage.d.ts → OpenAIChatMessage.d.ts} +3 -3
- package/model-provider/openai/{chat/OpenAIChatModel.cjs → OpenAIChatModel.cjs} +5 -5
- package/model-provider/openai/{chat/OpenAIChatModel.d.ts → OpenAIChatModel.d.ts} +12 -12
- package/model-provider/openai/{chat/OpenAIChatModel.js → OpenAIChatModel.js} +5 -5
- package/model-provider/openai/OpenAIChatModel.test.cjs +94 -0
- package/model-provider/openai/OpenAIChatModel.test.d.ts +1 -0
- package/model-provider/openai/OpenAIChatModel.test.js +92 -0
- package/model-provider/openai/OpenAIChatPromptTemplate.cjs +114 -0
- package/model-provider/openai/OpenAIChatPromptTemplate.d.ts +20 -0
- package/model-provider/openai/OpenAIChatPromptTemplate.js +107 -0
- package/model-provider/openai/OpenAICompletionModel.cjs +32 -84
- package/model-provider/openai/OpenAICompletionModel.d.ts +29 -12
- package/model-provider/openai/OpenAICompletionModel.js +33 -85
- package/model-provider/openai/OpenAICompletionModel.test.cjs +53 -0
- package/model-provider/openai/OpenAICompletionModel.test.d.ts +1 -0
- package/model-provider/openai/OpenAICompletionModel.test.js +51 -0
- package/model-provider/openai/OpenAICostCalculator.cjs +1 -1
- package/model-provider/openai/OpenAICostCalculator.js +1 -1
- package/model-provider/openai/OpenAIFacade.cjs +2 -2
- package/model-provider/openai/OpenAIFacade.d.ts +3 -3
- package/model-provider/openai/OpenAIFacade.js +2 -2
- package/model-provider/openai/OpenAITranscriptionModel.d.ts +6 -6
- package/model-provider/openai/TikTokenTokenizer.d.ts +1 -1
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.cjs → countOpenAIChatMessageTokens.cjs} +2 -2
- package/model-provider/openai/{chat/countOpenAIChatMessageTokens.js → countOpenAIChatMessageTokens.js} +2 -2
- package/model-provider/openai/index.cjs +6 -6
- package/model-provider/openai/index.d.ts +5 -6
- package/model-provider/openai/index.js +5 -5
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +4 -4
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +6 -6
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +4 -4
- package/model-provider/stability/StabilityImageGenerationModel.d.ts +1 -1
- package/package.json +5 -5
- package/test/JsonTestServer.cjs +33 -0
- package/test/JsonTestServer.d.ts +7 -0
- package/test/JsonTestServer.js +29 -0
- package/test/StreamingTestServer.cjs +55 -0
- package/test/StreamingTestServer.d.ts +7 -0
- package/test/StreamingTestServer.js +51 -0
- package/test/arrayFromAsync.cjs +13 -0
- package/test/arrayFromAsync.d.ts +1 -0
- package/test/arrayFromAsync.js +9 -0
- package/util/streaming/createEventSourceResponseHandler.cjs +9 -0
- package/util/streaming/createEventSourceResponseHandler.d.ts +4 -0
- package/util/streaming/createEventSourceResponseHandler.js +5 -0
- package/util/streaming/createJsonStreamResponseHandler.cjs +9 -0
- package/util/streaming/createJsonStreamResponseHandler.d.ts +4 -0
- package/util/streaming/createJsonStreamResponseHandler.js +5 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.cjs +52 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseEventSourceStreamAsAsyncIterable.js +48 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.cjs +21 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.d.ts +6 -0
- package/util/streaming/parseJsonStreamAsAsyncIterable.js +17 -0
- package/model-function/generate-text/prompt-template/Content.cjs +0 -2
- package/model-function/generate-text/prompt-template/Content.d.ts +0 -20
- package/model-provider/openai/chat/OpenAIChatModel.test.cjs +0 -61
- package/model-provider/openai/chat/OpenAIChatModel.test.js +0 -59
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.cjs +0 -72
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.d.ts +0 -20
- package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +0 -65
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +0 -156
- package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +0 -19
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +0 -152
- /package/{model-function/generate-text/prompt-template/Content.js → model-provider/anthropic/AnthropicTextGenerationModel.test.d.ts} +0 -0
- /package/model-provider/{openai/chat/OpenAIChatModel.test.d.ts → cohere/CohereTextGenerationModel.test.d.ts} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.cjs → OpenAIChatMessage.cjs} +0 -0
- /package/model-provider/openai/{chat/OpenAIChatMessage.js → OpenAIChatMessage.js} +0 -0
- /package/model-provider/openai/{chat/countOpenAIChatMessageTokens.d.ts → countOpenAIChatMessageTokens.d.ts} +0 -0
@@ -1,23 +1,23 @@
 import { fail } from "assert";
-import {
-import { setupServer } from "msw/node";
+import { z } from "zod";
 import { ApiCallError } from "../../core/api/ApiCallError.js";
 import { retryNever } from "../../core/api/retryNever.js";
+import { zodSchema } from "../../core/schema/ZodSchema.js";
+import { jsonStructurePrompt } from "../../model-function/generate-structure/jsonStructurePrompt.js";
+import { streamStructure } from "../../model-function/generate-structure/streamStructure.js";
 import { generateText } from "../../model-function/generate-text/generateText.js";
+import * as TextPrompt from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
+import { streamText } from "../../model-function/generate-text/streamText.js";
+import { JsonTestServer } from "../../test/JsonTestServer.js";
+import { StreamingTestServer } from "../../test/StreamingTestServer.js";
+import { arrayFromAsync } from "../../test/arrayFromAsync.js";
 import { OllamaApiConfiguration } from "./OllamaApiConfiguration.js";
 import { OllamaCompletionModel } from "./OllamaCompletionModel.js";
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-let responseBodyJson = {};
-const server = setupServer(http.post("http://127.0.0.1:11434/api/generate", () => HttpResponse.json(responseBodyJson)));
-beforeAll(() => server.listen());
-beforeEach(() => {
-    responseBodyJson = {};
-});
-afterEach(() => server.resetHandlers());
-afterAll(() => server.close());
 describe("generateText", () => {
+    const server = new JsonTestServer("http://127.0.0.1:11434/api/generate");
+    server.setupTestEnvironment();
     it("should return the generated text", async () => {
-        responseBodyJson = {
+        server.responseBodyJson = {
             model: "test-model",
             created_at: "2023-08-04T19:22:45.499127Z",
             response: "test response",
@@ -38,7 +38,7 @@ describe("generateText", () => {
         expect(result).toEqual("test response");
     });
     it("should throw retryable ApiCallError when Ollama is overloaded", async () => {
-        responseBodyJson = {
+        server.responseBodyJson = {
             model: "",
             created_at: "0001-01-01T00:00:00Z",
             response: "",
@@ -59,3 +59,68 @@ describe("generateText", () => {
         }
     });
 });
+describe("streamText", () => {
+    const server = new StreamingTestServer("http://127.0.0.1:11434/api/generate");
+    server.setupTestEnvironment();
+    it("should return a text stream", async () => {
+        server.responseChunks = [
+            `{"model":"mistral:text","created_at":"2023-12-24T16:11:17.715003Z","response":"Hello","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-24T16:11:17.715003Z","response":", ","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-24T16:11:17.715003Z","response":"world!","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-24T16:11:19.697067Z","response":"",` +
+                `"done":true,"context":[123,456,789],"total_duration":2165354041,"load_duration":1293958,` +
+                `"prompt_eval_count":5,"prompt_eval_duration":193273000,"eval_count":136,"eval_duration":1966852000}\n`,
+        ];
+        const stream = await streamText(new OllamaCompletionModel({ model: "mistral:text" }).withTextPrompt(), "hello");
+        // note: space moved to last chunk bc of trimming
+        expect(await arrayFromAsync(stream)).toStrictEqual([
+            "Hello",
+            ",",
+            " world!",
+        ]);
+    });
+});
+describe("streamStructure", () => {
+    const server = new StreamingTestServer("http://127.0.0.1:11434/api/generate");
+    server.setupTestEnvironment();
+    it("should return a text stream", async () => {
+        server.responseChunks = [
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.253175Z","response":"{","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.273505Z","response":"\\n","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.293192Z","response":" ","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.312446Z","response":" \\"","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.332021Z","response":"name","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.351128Z","response":"\\":","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.372082Z","response":" \\"","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.391903Z","response":"M","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.411056Z","response":"ike","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.430789Z","response":"\\"","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.450216Z","response":"\\n","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.47009Z","response":"}","done":false}\n`,
+            `{"model":"mistral:text","created_at":"2023-12-25T11:48:02.48885Z","response":"","done":true,` +
+                `"total_duration":521893000,"load_duration":957666,"prompt_eval_count":74,"prompt_eval_duration":302508000,` +
+                `"eval_count":12,"eval_duration":215282000}\n`,
+        ];
+        const stream = await streamStructure(new OllamaCompletionModel({
+            model: "mistral:text",
+            format: "json",
+            raw: true,
+        })
+            .withTextPromptTemplate(TextPrompt.instruction())
+            .asStructureGenerationModel(jsonStructurePrompt((instruction, schema) => ({
+            system: "JSON schema: \n" +
+                JSON.stringify(schema.getJsonSchema()) +
+                "\n\n" +
+                "Respond only using JSON that matches the above schema.",
+            instruction,
+        }))), zodSchema(z.object({ name: z.string() })), "generate a name");
+        // note: space moved to last chunk bc of trimming
+        expect(await arrayFromAsync(stream)).toStrictEqual([
+            { isComplete: false, value: {} },
+            { isComplete: false, value: { name: "" } },
+            { isComplete: false, value: { name: "M" } },
+            { isComplete: false, value: { name: "Mike" } },
+            { isComplete: true, value: { name: "Mike" } },
+        ]);
+    });
+});
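The hunk above replaces the hand-rolled msw boilerplate (setupServer plus beforeAll/beforeEach/afterEach/afterAll in every test file) with the new JsonTestServer and StreamingTestServer helpers from package/test/. Condensed from the diff, a streaming test now looks roughly like the sketch below; the assumption that setupTestEnvironment() registers the same lifecycle hooks the removed lines declared is inferred from this diff, not shown in it:

    import { streamText } from "../../model-function/generate-text/streamText.js";
    import { StreamingTestServer } from "../../test/StreamingTestServer.js";
    import { arrayFromAsync } from "../../test/arrayFromAsync.js";
    import { OllamaCompletionModel } from "./OllamaCompletionModel.js";

    describe("streamText (sketch)", () => {
        // Presumably wires up beforeAll/afterEach/afterAll internally,
        // replacing the per-file msw setup that this diff removes.
        const server = new StreamingTestServer("http://127.0.0.1:11434/api/generate");
        server.setupTestEnvironment();

        it("collects deltas from mocked JSON-line chunks", async () => {
            server.responseChunks = [
                `{"model":"mistral:text","created_at":"2023-12-24T16:11:17.715003Z","response":"Hi","done":false}\n`,
                `{"model":"mistral:text","created_at":"2023-12-24T16:11:19.697067Z","response":"","done":true,"context":[1],"total_duration":1,"load_duration":1,"prompt_eval_count":1,"prompt_eval_duration":1,"eval_count":1,"eval_duration":1}\n`,
            ];
            const stream = await streamText(new OllamaCompletionModel({ model: "mistral:text" }).withTextPrompt(), "hi");
            // arrayFromAsync drains the async iterable into a plain array.
            expect(await arrayFromAsync(stream)).toStrictEqual(["Hi"]);
        });
    });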
package/model-provider/openai/{chat/AbstractOpenAIChatModel.cjs → AbstractOpenAIChatModel.cjs}
RENAMED
@@ -2,14 +2,14 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.OpenAIChatResponseFormat = exports.AbstractOpenAIChatModel = void 0;
 const zod_1 = require("zod");
-const callWithRetryAndThrottle_js_1 = require("
-const postToApi_js_1 = require("
-const
-const
-const
-const
-const
-const
+const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
+const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
+const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
+const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
+const createEventSourceResponseHandler_js_1 = require("../../util/streaming/createEventSourceResponseHandler.cjs");
+const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
+const OpenAIError_js_1 = require("./OpenAIError.cjs");
 /**
  * Abstract text generation model that calls an API that is compatible with the OpenAI chat API.
  *
@@ -105,9 +105,21 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
     doStreamText(prompt, options) {
         return this.callAPI(prompt, {
             ...options,
-            responseFormat: exports.OpenAIChatResponseFormat.
+            responseFormat: exports.OpenAIChatResponseFormat.deltaIterable,
         });
     }
+    extractTextDelta(delta) {
+        const chunk = delta;
+        if (chunk.object !== "chat.completion.chunk") {
+            return undefined;
+        }
+        const chatChunk = chunk;
+        const firstChoice = chatChunk.choices[0];
+        if (firstChoice.index > 0) {
+            return undefined;
+        }
+        return firstChoice.delta.content ?? undefined;
+    }
     async doGenerateToolCall(tool, prompt, options) {
         const response = await this.callAPI(prompt, {
             ...options,
@@ -220,6 +232,54 @@ const openAIChatResponseSchema = zod_1.z.object({
         total_tokens: zod_1.z.number(),
     }),
 });
+const chatCompletionChunkSchema = zod_1.z.object({
+    object: zod_1.z.literal("chat.completion.chunk"),
+    id: zod_1.z.string(),
+    choices: zod_1.z.array(zod_1.z.object({
+        delta: zod_1.z.object({
+            role: zod_1.z.enum(["assistant", "user"]).optional(),
+            content: zod_1.z.string().nullable().optional(),
+            function_call: zod_1.z
+                .object({
+                name: zod_1.z.string().optional(),
+                arguments: zod_1.z.string().optional(),
+            })
+                .optional(),
+            tool_calls: zod_1.z
+                .array(zod_1.z.object({
+                id: zod_1.z.string(),
+                type: zod_1.z.literal("function"),
+                function: zod_1.z.object({
+                    name: zod_1.z.string(),
+                    arguments: zod_1.z.string(),
+                }),
+            }))
+                .optional(),
+        }),
+        finish_reason: zod_1.z
+            .enum([
+            "stop",
+            "length",
+            "tool_calls",
+            "content_filter",
+            "function_call",
+        ])
+            .nullable()
+            .optional(),
+        index: zod_1.z.number(),
+    })),
+    created: zod_1.z.number(),
+    model: zod_1.z.string(),
+    system_fingerprint: zod_1.z.string().optional().nullable(),
+});
+const openaiChatChunkSchema = (0, ZodSchema_js_1.zodSchema)(zod_1.z.union([
+    chatCompletionChunkSchema,
+    zod_1.z.object({
+        object: zod_1.z.string().refine((obj) => obj !== "chat.completion.chunk", {
+            message: "Object must be 'chat.completion.chunk'",
+        }),
+    }),
+]));
 exports.OpenAIChatResponseFormat = {
     /**
      * Returns the response as a JSON object.
@@ -231,12 +291,8 @@ exports.OpenAIChatResponseFormat = {
     /**
      * Returns an async iterable over the text deltas (only the tex different of the first choice).
      */
-
-        stream: true,
-        handler: async ({ response }) => (0, OpenAIChatStreamIterable_js_1.createOpenAIChatDeltaIterableQueue)(response.body, (delta) => delta[0]?.delta?.content ?? ""),
-    },
-    structureDeltaIterable: {
+    deltaIterable: {
         stream: true,
-        handler:
+        handler: (0, createEventSourceResponseHandler_js_1.createEventSourceResponseHandler)(openaiChatChunkSchema),
     },
 };
package/model-provider/openai/{chat/AbstractOpenAIChatModel.d.ts → AbstractOpenAIChatModel.d.ts}
RENAMED
@@ -1,12 +1,11 @@
 import { z } from "zod";
-import { FunctionOptions } from "
-import { ApiConfiguration } from "
-import { ResponseHandler } from "
-import { AbstractModel } from "
-import {
-import {
-import {
-import { ToolDefinition } from "../../../tool/ToolDefinition.js";
+import { FunctionOptions } from "../../core/FunctionOptions.js";
+import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
+import { ResponseHandler } from "../../core/api/postToApi.js";
+import { AbstractModel } from "../../model-function/AbstractModel.js";
+import { TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
+import { TextGenerationFinishReason } from "../../model-function/generate-text/TextGenerationResult.js";
+import { ToolDefinition } from "../../tool/ToolDefinition.js";
 import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
 export interface AbstractOpenAIChatCallSettings {
     api?: ApiConfiguration;
@@ -138,7 +137,36 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
         };
     }>;
     private translateFinishReason;
-    doStreamText(prompt: OpenAIChatPrompt, options?: FunctionOptions): Promise<AsyncIterable<Delta<
+    doStreamText(prompt: OpenAIChatPrompt, options?: FunctionOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
+        object: "chat.completion.chunk";
+        model: string;
+        id: string;
+        created: number;
+        choices: {
+            delta: {
+                role?: "user" | "assistant" | undefined;
+                content?: string | null | undefined;
+                function_call?: {
+                    name?: string | undefined;
+                    arguments?: string | undefined;
+                } | undefined;
+                tool_calls?: {
+                    function: {
+                        name: string;
+                        arguments: string;
+                    };
+                    type: "function";
+                    id: string;
+                }[] | undefined;
+            };
+            index: number;
+            finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+        }[];
+        system_fingerprint?: string | null | undefined;
+    } | {
+        object: string;
+    }>>>;
+    extractTextDelta(delta: unknown): string | undefined;
     doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: OpenAIChatPrompt, options?: FunctionOptions): Promise<{
         response: {
             object: "chat.completion";
@@ -437,6 +465,210 @@ declare const openAIChatResponseSchema: z.ZodObject<{
     system_fingerprint?: string | null | undefined;
 }>;
 export type OpenAIChatResponse = z.infer<typeof openAIChatResponseSchema>;
+declare const chatCompletionChunkSchema: z.ZodObject<{
+    object: z.ZodLiteral<"chat.completion.chunk">;
+    id: z.ZodString;
+    choices: z.ZodArray<z.ZodObject<{
+        delta: z.ZodObject<{
+            role: z.ZodOptional<z.ZodEnum<["assistant", "user"]>>;
+            content: z.ZodOptional<z.ZodNullable<z.ZodString>>;
+            function_call: z.ZodOptional<z.ZodObject<{
+                name: z.ZodOptional<z.ZodString>;
+                arguments: z.ZodOptional<z.ZodString>;
+            }, "strip", z.ZodTypeAny, {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            }, {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            }>>;
+            tool_calls: z.ZodOptional<z.ZodArray<z.ZodObject<{
+                id: z.ZodString;
+                type: z.ZodLiteral<"function">;
+                function: z.ZodObject<{
+                    name: z.ZodString;
+                    arguments: z.ZodString;
+                }, "strip", z.ZodTypeAny, {
+                    name: string;
+                    arguments: string;
+                }, {
+                    name: string;
+                    arguments: string;
+                }>;
+            }, "strip", z.ZodTypeAny, {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }, {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }>, "many">>;
+        }, "strip", z.ZodTypeAny, {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        }, {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        }>;
+        finish_reason: z.ZodOptional<z.ZodNullable<z.ZodEnum<["stop", "length", "tool_calls", "content_filter", "function_call"]>>>;
+        index: z.ZodNumber;
+    }, "strip", z.ZodTypeAny, {
+        delta: {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        };
+        index: number;
+        finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+    }, {
+        delta: {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        };
+        index: number;
+        finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+    }>, "many">;
+    created: z.ZodNumber;
+    model: z.ZodString;
+    system_fingerprint: z.ZodNullable<z.ZodOptional<z.ZodString>>;
+}, "strip", z.ZodTypeAny, {
+    object: "chat.completion.chunk";
+    model: string;
+    id: string;
+    created: number;
+    choices: {
+        delta: {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        };
+        index: number;
+        finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+    }[];
+    system_fingerprint?: string | null | undefined;
+}, {
+    object: "chat.completion.chunk";
+    model: string;
+    id: string;
+    created: number;
+    choices: {
+        delta: {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        };
+        index: number;
+        finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+    }[];
+    system_fingerprint?: string | null | undefined;
+}>;
+export type OpenAIChatCompletionChunk = z.infer<typeof chatCompletionChunkSchema>;
+declare const openaiChatChunkSchema: import("../../core/schema/ZodSchema.js").ZodSchema<{
+    object: "chat.completion.chunk";
+    model: string;
+    id: string;
+    created: number;
+    choices: {
+        delta: {
+            role?: "user" | "assistant" | undefined;
+            content?: string | null | undefined;
+            function_call?: {
+                name?: string | undefined;
+                arguments?: string | undefined;
+            } | undefined;
+            tool_calls?: {
+                function: {
+                    name: string;
+                    arguments: string;
+                };
+                type: "function";
+                id: string;
+            }[] | undefined;
+        };
+        index: number;
+        finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+    }[];
+    system_fingerprint?: string | null | undefined;
+} | {
+    object: string;
+}>;
+export type OpenAIChatChunk = (typeof openaiChatChunkSchema)["_type"];
 export type OpenAIChatResponseFormatType<T> = {
     stream: boolean;
     handler: ResponseHandler<T>;
@@ -446,7 +678,7 @@ export declare const OpenAIChatResponseFormat: {
      * Returns the response as a JSON object.
      */
     json: {
-        stream:
+        stream: boolean;
        handler: ResponseHandler<{
             object: "chat.completion";
             usage: {
@@ -484,17 +716,39 @@ export declare const OpenAIChatResponseFormat: {
     /**
      * Returns an async iterable over the text deltas (only the tex different of the first choice).
      */
-
-        stream:
+    deltaIterable: {
+        stream: boolean;
         handler: ({ response }: {
             response: Response;
-        }) => Promise<AsyncIterable<Delta<
-
-
-
-
-
+        }) => Promise<AsyncIterable<import("../../index.js").Delta<{
+            object: "chat.completion.chunk";
+            model: string;
+            id: string;
+            created: number;
+            choices: {
+                delta: {
+                    role?: "user" | "assistant" | undefined;
+                    content?: string | null | undefined;
+                    function_call?: {
+                        name?: string | undefined;
+                        arguments?: string | undefined;
+                    } | undefined;
+                    tool_calls?: {
+                        function: {
+                            name: string;
+                            arguments: string;
+                        };
+                        type: "function";
+                        id: string;
+                    }[] | undefined;
+                };
+                index: number;
+                finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
+            }[];
+            system_fingerprint?: string | null | undefined;
+        } | {
+            object: string;
+        }>>>;
     };
 };
 export {};
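The openaiChatChunkSchema declared above is deliberately lenient: it is a union of a well-formed chat.completion.chunk and any other object, so unrecognized stream events still validate and are simply skipped by extractTextDelta instead of aborting the stream. A standalone illustration of that union pattern in plain zod (a sketch with the field set trimmed for brevity):

    import { z } from "zod";

    const chunkSchema = z.object({
        object: z.literal("chat.completion.chunk"),
        choices: z.array(z.object({
            index: z.number(),
            delta: z.object({ content: z.string().nullable().optional() }),
        })),
    });

    // The second branch accepts anything that is not a chunk, so an
    // unexpected event type parses successfully but yields no text delta.
    const lenientSchema = z.union([
        chunkSchema,
        z.object({
            object: z.string().refine((obj) => obj !== "chat.completion.chunk"),
        }),
    ]);

    console.log(lenientSchema.parse({ object: "ping" }).object); // "ping"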
package/model-provider/openai/{chat/AbstractOpenAIChatModel.js → AbstractOpenAIChatModel.js}
RENAMED
@@ -1,12 +1,12 @@
 import { z } from "zod";
-import { callWithRetryAndThrottle } from "
-import { createJsonResponseHandler, postJsonToApi, } from "
-import {
-import {
-import {
-import {
-import {
-import {
+import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
+import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+import { zodSchema } from "../../core/schema/ZodSchema.js";
+import { parseJSON } from "../../core/schema/parseJSON.js";
+import { AbstractModel } from "../../model-function/AbstractModel.js";
+import { createEventSourceResponseHandler } from "../../util/streaming/createEventSourceResponseHandler.js";
+import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
+import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
 /**
  * Abstract text generation model that calls an API that is compatible with the OpenAI chat API.
  *
@@ -102,9 +102,21 @@ export class AbstractOpenAIChatModel extends AbstractModel {
     doStreamText(prompt, options) {
         return this.callAPI(prompt, {
             ...options,
-            responseFormat: OpenAIChatResponseFormat.
+            responseFormat: OpenAIChatResponseFormat.deltaIterable,
         });
     }
+    extractTextDelta(delta) {
+        const chunk = delta;
+        if (chunk.object !== "chat.completion.chunk") {
+            return undefined;
+        }
+        const chatChunk = chunk;
+        const firstChoice = chatChunk.choices[0];
+        if (firstChoice.index > 0) {
+            return undefined;
+        }
+        return firstChoice.delta.content ?? undefined;
+    }
     async doGenerateToolCall(tool, prompt, options) {
         const response = await this.callAPI(prompt, {
             ...options,
@@ -216,6 +228,54 @@ const openAIChatResponseSchema = z.object({
         total_tokens: z.number(),
     }),
 });
+const chatCompletionChunkSchema = z.object({
+    object: z.literal("chat.completion.chunk"),
+    id: z.string(),
+    choices: z.array(z.object({
+        delta: z.object({
+            role: z.enum(["assistant", "user"]).optional(),
+            content: z.string().nullable().optional(),
+            function_call: z
+                .object({
+                name: z.string().optional(),
+                arguments: z.string().optional(),
+            })
+                .optional(),
+            tool_calls: z
+                .array(z.object({
+                id: z.string(),
+                type: z.literal("function"),
+                function: z.object({
+                    name: z.string(),
+                    arguments: z.string(),
+                }),
+            }))
+                .optional(),
+        }),
+        finish_reason: z
+            .enum([
+            "stop",
+            "length",
+            "tool_calls",
+            "content_filter",
+            "function_call",
+        ])
+            .nullable()
+            .optional(),
+        index: z.number(),
+    })),
+    created: z.number(),
+    model: z.string(),
+    system_fingerprint: z.string().optional().nullable(),
+});
+const openaiChatChunkSchema = zodSchema(z.union([
+    chatCompletionChunkSchema,
+    z.object({
+        object: z.string().refine((obj) => obj !== "chat.completion.chunk", {
+            message: "Object must be 'chat.completion.chunk'",
+        }),
+    }),
+]));
 export const OpenAIChatResponseFormat = {
     /**
      * Returns the response as a JSON object.
@@ -227,12 +287,8 @@ export const OpenAIChatResponseFormat = {
     /**
      * Returns an async iterable over the text deltas (only the tex different of the first choice).
      */
-
-        stream: true,
-        handler: async ({ response }) => createOpenAIChatDeltaIterableQueue(response.body, (delta) => delta[0]?.delta?.content ?? ""),
-    },
-    structureDeltaIterable: {
+    deltaIterable: {
         stream: true,
-        handler:
+        handler: createEventSourceResponseHandler(openaiChatChunkSchema),
     },
 };
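Taken together, the three AbstractOpenAIChatModel diffs move OpenAI chat streaming onto the shared util/streaming pipeline: createEventSourceResponseHandler validates each server-sent event against openaiChatChunkSchema, and extractTextDelta reduces the chunks to text. The public call site is unchanged; a typical use still looks like this sketch (the openai.ChatTextGenerator facade and withTextPrompt() are assumed from the package's public API, which this diff does not show):

    import { openai, streamText } from "modelfusion";

    const textStream = await streamText(
        openai.ChatTextGenerator({ model: "gpt-3.5-turbo" }).withTextPrompt(),
        "Write a one-line greeting."
    );

    for await (const textPart of textStream) {
        process.stdout.write(textPart); // print deltas as they arrive
    }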