modelfusion 0.40.0 → 0.41.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. package/README.md +14 -7
  2. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +3 -3
  3. package/core/FunctionEvent.d.ts +1 -1
  4. package/model-function/AsyncIterableResultPromise.d.ts +1 -1
  5. package/model-function/Delta.d.ts +8 -0
  6. package/model-function/ModelCallEvent.d.ts +1 -1
  7. package/model-function/ModelCallMetadata.d.ts +13 -0
  8. package/model-function/describe-image/ImageDescriptionEvent.d.ts +1 -1
  9. package/model-function/describe-image/ImageDescriptionModel.d.ts +6 -4
  10. package/model-function/describe-image/describeImage.cjs +7 -2
  11. package/model-function/describe-image/describeImage.d.ts +2 -2
  12. package/model-function/describe-image/describeImage.js +7 -2
  13. package/model-function/embed/EmbeddingEvent.d.ts +1 -1
  14. package/model-function/embed/EmbeddingModel.d.ts +6 -4
  15. package/model-function/embed/embed.cjs +16 -11
  16. package/model-function/embed/embed.d.ts +3 -3
  17. package/model-function/embed/embed.js +16 -11
  18. package/model-function/executeCall.cjs +26 -30
  19. package/model-function/executeCall.d.ts +19 -28
  20. package/model-function/executeCall.js +26 -30
  21. package/model-function/generate-image/ImageGenerationEvent.d.ts +1 -1
  22. package/model-function/generate-image/ImageGenerationModel.d.ts +6 -4
  23. package/model-function/generate-image/generateImage.cjs +7 -2
  24. package/model-function/generate-image/generateImage.d.ts +2 -2
  25. package/model-function/generate-image/generateImage.js +7 -2
  26. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +6 -5
  27. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +7 -5
  28. package/model-function/generate-structure/StructureFromTextGenerationModel.js +6 -5
  29. package/model-function/generate-structure/StructureGenerationEvent.d.ts +1 -1
  30. package/model-function/generate-structure/StructureGenerationModel.d.ts +15 -18
  31. package/model-function/generate-structure/StructureOrTextGenerationModel.d.ts +19 -17
  32. package/model-function/generate-structure/generateStructure.cjs +10 -8
  33. package/model-function/generate-structure/generateStructure.d.ts +2 -2
  34. package/model-function/generate-structure/generateStructure.js +10 -8
  35. package/model-function/generate-structure/generateStructureOrText.cjs +15 -8
  36. package/model-function/generate-structure/generateStructureOrText.d.ts +4 -4
  37. package/model-function/generate-structure/generateStructureOrText.js +15 -8
  38. package/model-function/generate-structure/streamStructure.cjs +4 -16
  39. package/model-function/generate-structure/streamStructure.d.ts +3 -7
  40. package/model-function/generate-structure/streamStructure.js +4 -16
  41. package/model-function/generate-text/TextGenerationEvent.d.ts +1 -1
  42. package/model-function/generate-text/TextGenerationModel.d.ts +18 -19
  43. package/model-function/generate-text/generateText.cjs +8 -9
  44. package/model-function/generate-text/generateText.d.ts +2 -2
  45. package/model-function/generate-text/generateText.js +8 -9
  46. package/model-function/generate-text/streamText.cjs +8 -21
  47. package/model-function/generate-text/streamText.d.ts +3 -7
  48. package/model-function/generate-text/streamText.js +8 -21
  49. package/model-function/index.cjs +2 -2
  50. package/model-function/index.d.ts +2 -2
  51. package/model-function/index.js +2 -2
  52. package/model-function/synthesize-speech/SpeechSynthesisEvent.d.ts +1 -1
  53. package/model-function/synthesize-speech/SpeechSynthesisModel.d.ts +3 -3
  54. package/model-function/synthesize-speech/synthesizeSpeech.cjs +7 -2
  55. package/model-function/synthesize-speech/synthesizeSpeech.d.ts +2 -2
  56. package/model-function/synthesize-speech/synthesizeSpeech.js +7 -2
  57. package/model-function/transcribe-speech/TranscriptionEvent.d.ts +1 -1
  58. package/model-function/transcribe-speech/TranscriptionModel.d.ts +6 -4
  59. package/model-function/transcribe-speech/transcribe.cjs +7 -2
  60. package/model-function/transcribe-speech/transcribe.d.ts +2 -2
  61. package/model-function/transcribe-speech/transcribe.js +7 -2
  62. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +14 -18
  63. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +11 -9
  64. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +14 -18
  65. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +13 -16
  66. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +12 -10
  67. package/model-provider/cohere/CohereTextEmbeddingModel.js +13 -16
  68. package/model-provider/cohere/CohereTextGenerationModel.cjs +29 -29
  69. package/model-provider/cohere/CohereTextGenerationModel.d.ts +24 -22
  70. package/model-provider/cohere/CohereTextGenerationModel.js +29 -29
  71. package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.cjs +10 -17
  72. package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.d.ts +2 -2
  73. package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.js +10 -17
  74. package/model-provider/huggingface/HuggingFaceImageDescriptionModel.cjs +13 -16
  75. package/model-provider/huggingface/HuggingFaceImageDescriptionModel.d.ts +9 -7
  76. package/model-provider/huggingface/HuggingFaceImageDescriptionModel.js +13 -16
  77. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +19 -25
  78. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +8 -6
  79. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +19 -25
  80. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +18 -24
  81. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +10 -8
  82. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +18 -24
  83. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +13 -16
  84. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +8 -6
  85. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +13 -16
  86. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +31 -34
  87. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +62 -60
  88. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +31 -34
  89. package/model-provider/lmnt/LmntSpeechSynthesisModel.cjs +7 -12
  90. package/model-provider/lmnt/LmntSpeechSynthesisModel.d.ts +2 -2
  91. package/model-provider/lmnt/LmntSpeechSynthesisModel.js +7 -12
  92. package/model-provider/openai/OpenAIImageGenerationModel.cjs +8 -16
  93. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +11 -11
  94. package/model-provider/openai/OpenAIImageGenerationModel.js +8 -16
  95. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +18 -24
  96. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +18 -16
  97. package/model-provider/openai/OpenAITextEmbeddingModel.js +18 -24
  98. package/model-provider/openai/OpenAITextGenerationModel.cjs +23 -27
  99. package/model-provider/openai/OpenAITextGenerationModel.d.ts +31 -33
  100. package/model-provider/openai/OpenAITextGenerationModel.js +23 -27
  101. package/model-provider/openai/OpenAITranscriptionModel.cjs +19 -28
  102. package/model-provider/openai/OpenAITranscriptionModel.d.ts +27 -7
  103. package/model-provider/openai/OpenAITranscriptionModel.js +19 -28
  104. package/model-provider/openai/chat/OpenAIChatModel.cjs +82 -86
  105. package/model-provider/openai/chat/OpenAIChatModel.d.ts +127 -50
  106. package/model-provider/openai/chat/OpenAIChatModel.js +83 -87
  107. package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +4 -3
  108. package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +2 -2
  109. package/model-provider/openai/chat/OpenAIChatStreamIterable.js +2 -1
  110. package/model-provider/stability/StabilityImageGenerationModel.cjs +16 -21
  111. package/model-provider/stability/StabilityImageGenerationModel.d.ts +13 -11
  112. package/model-provider/stability/StabilityImageGenerationModel.js +16 -21
  113. package/package.json +1 -1
  114. package/prompt/PromptFormatTextGenerationModel.cjs +6 -19
  115. package/prompt/PromptFormatTextGenerationModel.d.ts +14 -10
  116. package/prompt/PromptFormatTextGenerationModel.js +6 -19
  117. package/prompt/PromptFormatTextStreamingModel.cjs +31 -0
  118. package/prompt/PromptFormatTextStreamingModel.d.ts +13 -0
  119. package/prompt/PromptFormatTextStreamingModel.js +27 -0
  120. package/prompt/chat/trimChatPrompt.d.ts +2 -2
  121. package/prompt/index.cjs +1 -0
  122. package/prompt/index.d.ts +1 -0
  123. package/prompt/index.js +1 -0
  124. package/retriever/Retriever.d.ts +3 -6
  125. package/retriever/retrieve.cjs +2 -2
  126. package/retriever/retrieve.d.ts +3 -3
  127. package/retriever/retrieve.js +2 -2
  128. package/tool/executeTool.cjs +2 -2
  129. package/tool/executeTool.js +2 -2
  130. package/tool/useTool.cjs +2 -4
  131. package/tool/useTool.d.ts +2 -2
  132. package/tool/useTool.js +2 -4
  133. package/tool/useToolOrGenerateText.d.ts +2 -2
  134. package/util/SafeResult.d.ts +1 -1
  135. package/util/runSafe.cjs +1 -1
  136. package/util/runSafe.js +1 -1
  137. package/vector-index/VectorIndexRetriever.cjs +0 -7
  138. package/vector-index/VectorIndexRetriever.d.ts +5 -5
  139. package/vector-index/VectorIndexRetriever.js +0 -7
  140. package/vector-index/upsertIntoVectorIndex.d.ts +4 -4
  141. package/model-function/DeltaEvent.d.ts +0 -7
  142. package/model-function/ModelFunctionOptions.d.ts +0 -4
  143. /package/model-function/{DeltaEvent.cjs → Delta.cjs} +0 -0
  144. /package/model-function/{DeltaEvent.js → Delta.js} +0 -0
  145. /package/model-function/{ModelFunctionOptions.cjs → ModelCallMetadata.cjs} +0 -0
  146. /package/model-function/{ModelFunctionOptions.js → ModelCallMetadata.js} +0 -0
package/model-provider/openai/chat/OpenAIChatModel.cjs

@@ -10,7 +10,7 @@ const callWithRetryAndThrottle_js_1 = require("../../../core/api/callWithRetryAn
  const postToApi_js_1 = require("../../../core/api/postToApi.cjs");
  const AbstractModel_js_1 = require("../../../model-function/AbstractModel.cjs");
  const parsePartialJson_js_1 = require("../../../model-function/generate-structure/parsePartialJson.cjs");
- const PromptFormatTextGenerationModel_js_1 = require("../../../prompt/PromptFormatTextGenerationModel.cjs");
+ const PromptFormatTextStreamingModel_js_1 = require("../../../prompt/PromptFormatTextStreamingModel.cjs");
  const OpenAIApiConfiguration_js_1 = require("../OpenAIApiConfiguration.cjs");
  const OpenAIError_js_1 = require("../OpenAIError.cjs");
  const TikTokenTokenizer_js_1 = require("../TikTokenTokenizer.cjs");
@@ -183,27 +183,25 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
          });
      }
      async callAPI(messages, options) {
-         const { run, settings, responseFormat } = options;
-         const combinedSettings = {
-             ...this.settings,
-             ...settings,
-         };
-         const callSettings = {
-             user: this.settings.isUserIdForwardingEnabled ? run?.userId : undefined,
-             // Copied settings:
-             ...combinedSettings,
-             // map to OpenAI API names:
-             stop: combinedSettings.stopSequences,
-             maxTokens: combinedSettings.maxCompletionTokens,
-             // other settings:
-             abortSignal: run?.abortSignal,
-             messages,
-             responseFormat,
-         };
          return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
-             retry: callSettings.api?.retry,
-             throttle: callSettings.api?.throttle,
-             call: async () => callOpenAIChatCompletionAPI(callSettings),
+             retry: this.settings.api?.retry,
+             throttle: this.settings.api?.throttle,
+             call: async () => callOpenAIChatCompletionAPI({
+                 ...this.settings,
+                 // function calling:
+                 functions: options.functions ?? this.settings.functions,
+                 functionCall: options.functionCall ?? this.settings.functionCall,
+                 // map to OpenAI API names:
+                 stop: this.settings.stopSequences,
+                 maxTokens: this.settings.maxCompletionTokens,
+                 // other settings:
+                 user: this.settings.isUserIdForwardingEnabled
+                     ? options.run?.userId
+                     : undefined,
+                 abortSignal: options.run?.abortSignal,
+                 responseFormat: options.responseFormat,
+                 messages,
+             }),
          });
      }
      get settingsForEvent() {
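The reworked callAPI above drops the intermediate combinedSettings/callSettings objects and spreads this.settings directly into the request, with function-calling options (functions, functionCall) now accepted per call. A minimal sketch of how a caller might pass those per-call options, assuming OpenAIChatModel, OpenAIChatMessage, and OpenAIChatResponseFormat are re-exported from the package root as in the README; the getWeather function definition is hypothetical:

    import { OpenAIChatMessage, OpenAIChatModel, OpenAIChatResponseFormat } from "modelfusion";

    const model = new OpenAIChatModel({ model: "gpt-3.5-turbo" });

    // Per-call function definitions flow through the new callAPI options
    // and override any functions/functionCall set in the model settings.
    const response = await model.callAPI(
      [OpenAIChatMessage.user("What is the weather in Berlin?")],
      {
        responseFormat: OpenAIChatResponseFormat.json,
        functionCall: "auto",
        functions: [
          {
            name: "getWeather", // hypothetical function definition
            description: "Look up the current weather for a city.",
            parameters: {
              type: "object",
              properties: { city: { type: "string" } },
              required: ["city"],
            },
          },
        ],
      }
    );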
@@ -221,24 +219,23 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
          ];
          return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
      }
-     generateTextResponse(prompt, options) {
-         return this.callAPI(prompt, {
+     async doGenerateText(prompt, options) {
+         const response = await this.callAPI(prompt, {
              ...options,
              responseFormat: exports.OpenAIChatResponseFormat.json,
          });
+         return {
+             response,
+             text: response.choices[0].message.content,
+             usage: this.extractUsage(response),
+         };
      }
-     extractText(response) {
-         return response.choices[0].message.content;
-     }
-     generateDeltaStreamResponse(prompt, options) {
+     doStreamText(prompt, options) {
          return this.callAPI(prompt, {
              ...options,
-             responseFormat: exports.OpenAIChatResponseFormat.deltaIterable,
+             responseFormat: exports.OpenAIChatResponseFormat.textDeltaIterable,
          });
      }
-     extractTextDelta(fullDelta) {
-         return fullDelta[0]?.delta.content ?? undefined;
-     }
      /**
       * JSON generation uses the OpenAI GPT function calling API.
       * It provides a single function specification and instructs the model to provide parameters for calling the function.
@@ -246,69 +243,54 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
       *
       * @see https://platform.openai.com/docs/guides/gpt/function-calling
       */
-     generateStructureResponse(structureDefinition, prompt, options) {
-         return this.callAPI(prompt, {
+     async doGenerateStructure(structureDefinition, prompt, options) {
+         const response = await this.callAPI(prompt, {
+             ...options,
              responseFormat: exports.OpenAIChatResponseFormat.json,
-             functionId: options?.functionId,
-             settings: {
-                 ...options,
-                 functionCall: { name: structureDefinition.name },
-                 functions: [
-                     {
-                         name: structureDefinition.name,
-                         description: structureDefinition.description,
-                         parameters: structureDefinition.schema.getJsonSchema(),
-                     },
-                 ],
-             },
-             run: options?.run,
-         });
-     }
-     extractStructure(response) {
-         return secure_json_parse_1.default.parse(response.choices[0].message.function_call.arguments);
-     }
-     generateStructureStreamResponse(structureDefinition, prompt, options) {
-         return this.callAPI(prompt, {
-             responseFormat: exports.OpenAIChatResponseFormat.deltaIterable,
-             functionId: options?.functionId,
-             settings: {
-                 ...options,
-                 functionCall: { name: structureDefinition.name },
-                 functions: [
-                     {
-                         name: structureDefinition.name,
-                         description: structureDefinition.description,
-                         parameters: structureDefinition.schema.getJsonSchema(),
-                     },
-                 ],
-             },
-             run: options?.run,
+             functionCall: { name: structureDefinition.name },
+             functions: [
+                 {
+                     name: structureDefinition.name,
+                     description: structureDefinition.description,
+                     parameters: structureDefinition.schema.getJsonSchema(),
+                 },
+             ],
          });
+         return {
+             response,
+             structure: secure_json_parse_1.default.parse(response.choices[0].message.function_call.arguments),
+             usage: this.extractUsage(response),
+         };
      }
-     extractPartialStructure(fullDelta) {
-         return (0, parsePartialJson_js_1.parsePartialJson)(fullDelta[0]?.function_call?.arguments);
-     }
-     generateStructureOrTextResponse(structureDefinitions, prompt, options) {
+     async doStreamStructure(structureDefinition, prompt, options) {
          return this.callAPI(prompt, {
-             responseFormat: exports.OpenAIChatResponseFormat.json,
-             functionId: options?.functionId,
-             settings: {
-                 ...options,
-                 functionCall: "auto",
-                 functions: structureDefinitions.map((structureDefinition) => ({
+             ...options,
+             responseFormat: exports.OpenAIChatResponseFormat.structureDeltaIterable,
+             functionCall: { name: structureDefinition.name },
+             functions: [
+                 {
                      name: structureDefinition.name,
                      description: structureDefinition.description,
                      parameters: structureDefinition.schema.getJsonSchema(),
-                 })),
-             },
-             run: options?.run,
+                 },
+             ],
          });
      }
-     extractStructureAndText(response) {
+     async doGenerateStructureOrText(structureDefinitions, prompt, options) {
+         const response = await this.callAPI(prompt, {
+             ...options,
+             responseFormat: exports.OpenAIChatResponseFormat.json,
+             functionCall: "auto",
+             functions: structureDefinitions.map((structureDefinition) => ({
+                 name: structureDefinition.name,
+                 description: structureDefinition.description,
+                 parameters: structureDefinition.schema.getJsonSchema(),
+             })),
+         });
          const message = response.choices[0].message;
          const content = message.content;
          const functionCall = message.function_call;
-         return functionCall == null
+         const structureAndText = functionCall == null
              ? {
                  structure: null,
                  value: null,
@@ -319,6 +301,11 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
                  value: secure_json_parse_1.default.parse(functionCall.arguments),
                  text: content,
              };
+         return {
+             response,
+             structureAndText,
+             usage: this.extractUsage(response),
+         };
      }
      extractUsage(response) {
          return {
@@ -328,8 +315,13 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
          };
      }
      withPromptFormat(promptFormat) {
-         return new PromptFormatTextGenerationModel_js_1.PromptFormatTextGenerationModel({
-             model: this.withSettings({ stopSequences: promptFormat.stopSequences }),
+         return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+             model: this.withSettings({
+                 stopSequences: [
+                     ...(this.settings.stopSequences ?? []),
+                     ...promptFormat.stopSequences,
+                 ],
+             }),
              promptFormat,
          });
      }
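Note that withPromptFormat now merges the model's own stopSequences with the prompt format's stop sequences instead of replacing them. A rough illustration, assuming the PromptFormat interface pairs a format(prompt) mapping with the stopSequences array used above; the inline prompt format object is hypothetical, and the concrete implementations ship under package/prompt:

    import { OpenAIChatMessage, OpenAIChatModel } from "modelfusion";

    // Hypothetical prompt format: maps a plain instruction string to chat messages.
    const instructionFormat = {
      stopSequences: ["\nUser:"],
      format: (instruction: string) => [OpenAIChatMessage.user(instruction)],
    };

    const model = new OpenAIChatModel({
      model: "gpt-3.5-turbo",
      stopSequences: ["###"],
    }).withPromptFormat(instructionFormat);

    // The wrapped model's settings now contain both stop sequences: ["###", "\nUser:"].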
@@ -404,8 +396,12 @@ exports.OpenAIChatResponseFormat = {
      /**
       * Returns an async iterable over the text deltas (only the text delta of the first choice).
       */
-     deltaIterable: {
+     textDeltaIterable: {
+         stream: true,
+         handler: async ({ response }) => (0, OpenAIChatStreamIterable_js_1.createOpenAIChatDeltaIterableQueue)(response.body, (delta) => delta[0]?.delta.content ?? ""),
+     },
+     structureDeltaIterable: {
          stream: true,
-         handler: async ({ response }) => (0, OpenAIChatStreamIterable_js_1.createOpenAIChatFullDeltaIterableQueue)(response.body),
+         handler: async ({ response }) => (0, OpenAIChatStreamIterable_js_1.createOpenAIChatDeltaIterableQueue)(response.body, (delta) => (0, parsePartialJson_js_1.parsePartialJson)(delta[0]?.function_call?.arguments)),
      },
  };
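The hunks above replace the single deltaIterable response format with textDeltaIterable (plain text deltas) and structureDeltaIterable (partial JSON parsed from the streamed function-call arguments). A minimal consumption sketch using the package's top-level streamText helper, assuming it keeps the shape shown in the README of this release:

    import { OpenAIChatMessage, OpenAIChatModel, streamText } from "modelfusion";

    // streamText drives doStreamText(), which now uses OpenAIChatResponseFormat.textDeltaIterable.
    const textStream = await streamText(
      new OpenAIChatModel({ model: "gpt-3.5-turbo", maxCompletionTokens: 500 }),
      [
        OpenAIChatMessage.system("You are a story writer."),
        OpenAIChatMessage.user("Write a short story about a robot learning to love."),
      ]
    );

    for await (const textFragment of textStream) {
      process.stdout.write(textFragment);
    }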
package/model-provider/openai/chat/OpenAIChatModel.d.ts

@@ -1,18 +1,17 @@
  import z from "zod";
+ import { FunctionOptions } from "../../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../../core/api/ApiConfiguration.js";
  import { ResponseHandler } from "../../../core/api/postToApi.js";
  import { StructureDefinition } from "../../../core/structure/StructureDefinition.js";
  import { AbstractModel } from "../../../model-function/AbstractModel.js";
- import { DeltaEvent } from "../../../model-function/DeltaEvent.js";
- import { ModelFunctionOptions } from "../../../model-function/ModelFunctionOptions.js";
+ import { Delta } from "../../../model-function/Delta.js";
  import { StructureGenerationModel } from "../../../model-function/generate-structure/StructureGenerationModel.js";
  import { StructureOrTextGenerationModel } from "../../../model-function/generate-structure/StructureOrTextGenerationModel.js";
- import { TextGenerationModel, TextGenerationModelSettings } from "../../../model-function/generate-text/TextGenerationModel.js";
+ import { TextGenerationModelSettings, TextStreamingModel } from "../../../model-function/generate-text/TextGenerationModel.js";
  import { PromptFormat } from "../../../prompt/PromptFormat.js";
- import { PromptFormatTextGenerationModel } from "../../../prompt/PromptFormatTextGenerationModel.js";
+ import { PromptFormatTextStreamingModel } from "../../../prompt/PromptFormatTextStreamingModel.js";
  import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
  import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
- import { OpenAIChatDelta } from "./OpenAIChatStreamIterable.js";
  export declare const OPENAI_CHAT_MODELS: {
      "gpt-4": {
          contextWindowSize: number;
@@ -132,7 +131,7 @@ export interface OpenAIChatSettings extends TextGenerationModelSettings, Omit<Op
   *   ),
   * ]);
   */
- export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings>, StructureGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings>, StructureOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatSettings> {
+ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextStreamingModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatSettings> {
      constructor(settings: OpenAIChatSettings);
      readonly provider: "openai";
      get modelName(): OpenAIChatModelType;
@@ -145,37 +144,50 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
      countPromptTokens(messages: OpenAIChatMessage[]): Promise<number>;
      callAPI<RESULT>(messages: Array<OpenAIChatMessage>, options: {
          responseFormat: OpenAIChatResponseFormatType<RESULT>;
-     } & ModelFunctionOptions<Partial<OpenAIChatCallSettings & {
-         user?: string;
-     }>>): Promise<RESULT>;
+     } & FunctionOptions & {
+         functions?: Array<{
+             name: string;
+             description?: string;
+             parameters: unknown;
+         }>;
+         functionCall?: "none" | "auto" | {
+             name: string;
+         };
+     }): Promise<RESULT>;
      get settingsForEvent(): Partial<OpenAIChatSettings>;
-     generateTextResponse(prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<{
-         object: "chat.completion";
-         model: string;
+     doGenerateText(prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
+         response: {
+             object: "chat.completion";
+             model: string;
+             usage: {
+                 prompt_tokens: number;
+                 completion_tokens: number;
+                 total_tokens: number;
+             };
+             id: string;
+             created: number;
+             choices: {
+                 message: {
+                     content: string | null;
+                     role: "assistant";
+                     function_call?: {
+                         name: string;
+                         arguments: string;
+                     } | undefined;
+                 };
+                 finish_reason: string;
+                 index: number;
+                 logprobs?: any;
+             }[];
+         };
+         text: string;
          usage: {
-             prompt_tokens: number;
-             completion_tokens: number;
-             total_tokens: number;
+             promptTokens: number;
+             completionTokens: number;
+             totalTokens: number;
          };
-         id: string;
-         created: number;
-         choices: {
-             message: {
-                 content: string | null;
-                 role: "assistant";
-                 function_call?: {
-                     name: string;
-                     arguments: string;
-                 } | undefined;
-             };
-             finish_reason: string;
-             index: number;
-             logprobs?: any;
-         }[];
      }>;
-     extractText(response: OpenAIChatResponse): string;
-     generateDeltaStreamResponse(prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<AsyncIterable<DeltaEvent<OpenAIChatDelta>>>;
-     extractTextDelta(fullDelta: OpenAIChatDelta): string | undefined;
+     doStreamText(prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
      /**
       * JSON generation uses the OpenAI GPT function calling API.
       * It provides a single function specification and instructs the model to provide parameters for calling the function.
@@ -183,26 +195,85 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
       *
       * @see https://platform.openai.com/docs/guides/gpt/function-calling
       */
-     generateStructureResponse(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings> | undefined): PromiseLike<OpenAIChatResponse>;
-     extractStructure(response: OpenAIChatResponse): unknown;
-     generateStructureStreamResponse(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<AsyncIterable<DeltaEvent<OpenAIChatDelta>>>;
-     extractPartialStructure(fullDelta: OpenAIChatDelta): unknown | undefined;
-     generateStructureOrTextResponse(structureDefinitions: Array<StructureDefinition<string, unknown>>, prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings> | undefined): PromiseLike<OpenAIChatResponse>;
-     extractStructureAndText(response: OpenAIChatResponse): {
-         structure: null;
-         value: null;
-         text: string;
-     } | {
-         structure: string;
-         value: any;
-         text: string | null;
-     };
+     doGenerateStructure(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
+         response: {
+             object: "chat.completion";
+             model: string;
+             usage: {
+                 prompt_tokens: number;
+                 completion_tokens: number;
+                 total_tokens: number;
+             };
+             id: string;
+             created: number;
+             choices: {
+                 message: {
+                     content: string | null;
+                     role: "assistant";
+                     function_call?: {
+                         name: string;
+                         arguments: string;
+                     } | undefined;
+                 };
+                 finish_reason: string;
+                 index: number;
+                 logprobs?: any;
+             }[];
+         };
+         structure: any;
+         usage: {
+             promptTokens: number;
+             completionTokens: number;
+             totalTokens: number;
+         };
+     }>;
+     doStreamStructure(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<AsyncIterable<Delta<unknown>>>;
+     doGenerateStructureOrText(structureDefinitions: Array<StructureDefinition<string, unknown>>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
+         response: {
+             object: "chat.completion";
+             model: string;
+             usage: {
+                 prompt_tokens: number;
+                 completion_tokens: number;
+                 total_tokens: number;
+             };
+             id: string;
+             created: number;
+             choices: {
+                 message: {
+                     content: string | null;
+                     role: "assistant";
+                     function_call?: {
+                         name: string;
+                         arguments: string;
+                     } | undefined;
+                 };
+                 finish_reason: string;
+                 index: number;
+                 logprobs?: any;
+             }[];
+         };
+         structureAndText: {
+             structure: null;
+             value: null;
+             text: string;
+         } | {
+             structure: string;
+             value: any;
+             text: string | null;
+         };
+         usage: {
+             promptTokens: number;
+             completionTokens: number;
+             totalTokens: number;
+         };
+     }>;
      extractUsage(response: OpenAIChatResponse): {
          promptTokens: number;
          completionTokens: number;
          totalTokens: number;
      };
-     withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, OpenAIChatMessage[]>): PromptFormatTextGenerationModel<INPUT_PROMPT, OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings, this>;
+     withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, OpenAIChatMessage[]>): PromptFormatTextStreamingModel<INPUT_PROMPT, OpenAIChatMessage[], OpenAIChatSettings, this>;
      withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
  }
  declare const openAIChatResponseSchema: z.ZodObject<{
@@ -366,11 +437,17 @@ export declare const OpenAIChatResponseFormat: {
      /**
       * Returns an async iterable over the text deltas (only the text delta of the first choice).
       */
-     deltaIterable: {
+     textDeltaIterable: {
+         stream: true;
+         handler: ({ response }: {
+             response: Response;
+         }) => Promise<AsyncIterable<Delta<string>>>;
+     };
+     structureDeltaIterable: {
          stream: true;
          handler: ({ response }: {
              response: Response;
-         }) => Promise<AsyncIterable<DeltaEvent<OpenAIChatDelta>>>;
+         }) => Promise<AsyncIterable<Delta<unknown>>>;
      };
  };
  export {};
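The doGenerateStructure contract declared above takes a StructureDefinition whose schema must expose getJsonSchema(), and resolves to the parsed function-call arguments alongside the raw response and normalized usage. A low-level sketch written directly against that contract; the inline definition object is hypothetical (it fills in only the fields the model reads) and would need the package's Zod-based structure definition helpers, or a cast, to satisfy the full StructureDefinition type:

    import { OpenAIChatMessage, OpenAIChatModel } from "modelfusion";

    const model = new OpenAIChatModel({ model: "gpt-3.5-turbo", temperature: 0 });

    // Hypothetical hand-rolled definition; only name, description, and
    // schema.getJsonSchema() are consumed by doGenerateStructure.
    const sentimentDefinition = {
      name: "sentiment",
      description: "Write the sentiment of the text.",
      schema: {
        getJsonSchema: () => ({
          type: "object",
          properties: {
            sentiment: { type: "string", enum: ["positive", "neutral", "negative"] },
          },
          required: ["sentiment"],
        }),
      },
    };

    const { structure, usage } = await model.doGenerateStructure(sentimentDefinition, [
      OpenAIChatMessage.user("The new release is fantastic!"),
    ]);

    console.log(structure); // e.g. { sentiment: "positive" }
    console.log(usage);     // { promptTokens, completionTokens, totalTokens }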