modelfusion 0.40.1 → 0.41.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146) hide show
  1. package/README.md +14 -7
  2. package/composed-function/summarize/summarizeRecursivelyWithTextGenerationAndTokenSplitting.d.ts +3 -3
  3. package/core/FunctionEvent.d.ts +1 -1
  4. package/model-function/AsyncIterableResultPromise.d.ts +1 -1
  5. package/model-function/Delta.d.ts +8 -0
  6. package/model-function/ModelCallEvent.d.ts +1 -1
  7. package/model-function/ModelCallMetadata.d.ts +13 -0
  8. package/model-function/describe-image/ImageDescriptionEvent.d.ts +1 -1
  9. package/model-function/describe-image/ImageDescriptionModel.d.ts +6 -4
  10. package/model-function/describe-image/describeImage.cjs +7 -2
  11. package/model-function/describe-image/describeImage.d.ts +2 -2
  12. package/model-function/describe-image/describeImage.js +7 -2
  13. package/model-function/embed/EmbeddingEvent.d.ts +1 -1
  14. package/model-function/embed/EmbeddingModel.d.ts +6 -4
  15. package/model-function/embed/embed.cjs +16 -11
  16. package/model-function/embed/embed.d.ts +3 -3
  17. package/model-function/embed/embed.js +16 -11
  18. package/model-function/executeCall.cjs +26 -30
  19. package/model-function/executeCall.d.ts +19 -28
  20. package/model-function/executeCall.js +26 -30
  21. package/model-function/generate-image/ImageGenerationEvent.d.ts +1 -1
  22. package/model-function/generate-image/ImageGenerationModel.d.ts +6 -4
  23. package/model-function/generate-image/generateImage.cjs +7 -2
  24. package/model-function/generate-image/generateImage.d.ts +2 -2
  25. package/model-function/generate-image/generateImage.js +7 -2
  26. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +6 -5
  27. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +7 -5
  28. package/model-function/generate-structure/StructureFromTextGenerationModel.js +6 -5
  29. package/model-function/generate-structure/StructureGenerationEvent.d.ts +1 -1
  30. package/model-function/generate-structure/StructureGenerationModel.d.ts +15 -18
  31. package/model-function/generate-structure/StructureOrTextGenerationModel.d.ts +19 -17
  32. package/model-function/generate-structure/generateStructure.cjs +10 -8
  33. package/model-function/generate-structure/generateStructure.d.ts +2 -2
  34. package/model-function/generate-structure/generateStructure.js +10 -8
  35. package/model-function/generate-structure/generateStructureOrText.cjs +15 -8
  36. package/model-function/generate-structure/generateStructureOrText.d.ts +4 -4
  37. package/model-function/generate-structure/generateStructureOrText.js +15 -8
  38. package/model-function/generate-structure/streamStructure.cjs +4 -16
  39. package/model-function/generate-structure/streamStructure.d.ts +3 -7
  40. package/model-function/generate-structure/streamStructure.js +4 -16
  41. package/model-function/generate-text/TextGenerationEvent.d.ts +1 -1
  42. package/model-function/generate-text/TextGenerationModel.d.ts +18 -19
  43. package/model-function/generate-text/generateText.cjs +8 -9
  44. package/model-function/generate-text/generateText.d.ts +2 -2
  45. package/model-function/generate-text/generateText.js +8 -9
  46. package/model-function/generate-text/streamText.cjs +8 -21
  47. package/model-function/generate-text/streamText.d.ts +3 -7
  48. package/model-function/generate-text/streamText.js +8 -21
  49. package/model-function/index.cjs +2 -2
  50. package/model-function/index.d.ts +2 -2
  51. package/model-function/index.js +2 -2
  52. package/model-function/synthesize-speech/SpeechSynthesisEvent.d.ts +1 -1
  53. package/model-function/synthesize-speech/SpeechSynthesisModel.d.ts +3 -3
  54. package/model-function/synthesize-speech/synthesizeSpeech.cjs +7 -2
  55. package/model-function/synthesize-speech/synthesizeSpeech.d.ts +2 -2
  56. package/model-function/synthesize-speech/synthesizeSpeech.js +7 -2
  57. package/model-function/transcribe-speech/TranscriptionEvent.d.ts +1 -1
  58. package/model-function/transcribe-speech/TranscriptionModel.d.ts +6 -4
  59. package/model-function/transcribe-speech/transcribe.cjs +7 -2
  60. package/model-function/transcribe-speech/transcribe.d.ts +2 -2
  61. package/model-function/transcribe-speech/transcribe.js +7 -2
  62. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +14 -18
  63. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +11 -9
  64. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +14 -18
  65. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +13 -16
  66. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +12 -10
  67. package/model-provider/cohere/CohereTextEmbeddingModel.js +13 -16
  68. package/model-provider/cohere/CohereTextGenerationModel.cjs +25 -28
  69. package/model-provider/cohere/CohereTextGenerationModel.d.ts +24 -22
  70. package/model-provider/cohere/CohereTextGenerationModel.js +25 -28
  71. package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.cjs +10 -17
  72. package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.d.ts +2 -2
  73. package/model-provider/elevenlabs/ElevenLabsSpeechSynthesisModel.js +10 -17
  74. package/model-provider/huggingface/HuggingFaceImageDescriptionModel.cjs +13 -16
  75. package/model-provider/huggingface/HuggingFaceImageDescriptionModel.d.ts +9 -7
  76. package/model-provider/huggingface/HuggingFaceImageDescriptionModel.js +13 -16
  77. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +19 -25
  78. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +8 -6
  79. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +19 -25
  80. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +18 -24
  81. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +10 -8
  82. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +18 -24
  83. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +13 -16
  84. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +8 -6
  85. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +13 -16
  86. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +27 -33
  87. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +62 -60
  88. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +27 -33
  89. package/model-provider/lmnt/LmntSpeechSynthesisModel.cjs +7 -12
  90. package/model-provider/lmnt/LmntSpeechSynthesisModel.d.ts +2 -2
  91. package/model-provider/lmnt/LmntSpeechSynthesisModel.js +7 -12
  92. package/model-provider/openai/OpenAIImageGenerationModel.cjs +8 -16
  93. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +11 -11
  94. package/model-provider/openai/OpenAIImageGenerationModel.js +8 -16
  95. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +18 -24
  96. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +18 -16
  97. package/model-provider/openai/OpenAITextEmbeddingModel.js +18 -24
  98. package/model-provider/openai/OpenAITextGenerationModel.cjs +19 -26
  99. package/model-provider/openai/OpenAITextGenerationModel.d.ts +31 -33
  100. package/model-provider/openai/OpenAITextGenerationModel.js +19 -26
  101. package/model-provider/openai/OpenAITranscriptionModel.cjs +19 -28
  102. package/model-provider/openai/OpenAITranscriptionModel.d.ts +27 -7
  103. package/model-provider/openai/OpenAITranscriptionModel.js +19 -28
  104. package/model-provider/openai/chat/OpenAIChatModel.cjs +76 -85
  105. package/model-provider/openai/chat/OpenAIChatModel.d.ts +127 -50
  106. package/model-provider/openai/chat/OpenAIChatModel.js +77 -86
  107. package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +4 -3
  108. package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +2 -2
  109. package/model-provider/openai/chat/OpenAIChatStreamIterable.js +2 -1
  110. package/model-provider/stability/StabilityImageGenerationModel.cjs +16 -21
  111. package/model-provider/stability/StabilityImageGenerationModel.d.ts +13 -11
  112. package/model-provider/stability/StabilityImageGenerationModel.js +16 -21
  113. package/package.json +1 -1
  114. package/prompt/PromptFormatTextGenerationModel.cjs +2 -18
  115. package/prompt/PromptFormatTextGenerationModel.d.ts +14 -10
  116. package/prompt/PromptFormatTextGenerationModel.js +2 -18
  117. package/prompt/PromptFormatTextStreamingModel.cjs +31 -0
  118. package/prompt/PromptFormatTextStreamingModel.d.ts +13 -0
  119. package/prompt/PromptFormatTextStreamingModel.js +27 -0
  120. package/prompt/chat/trimChatPrompt.d.ts +2 -2
  121. package/prompt/index.cjs +1 -0
  122. package/prompt/index.d.ts +1 -0
  123. package/prompt/index.js +1 -0
  124. package/retriever/Retriever.d.ts +3 -6
  125. package/retriever/retrieve.cjs +2 -2
  126. package/retriever/retrieve.d.ts +3 -3
  127. package/retriever/retrieve.js +2 -2
  128. package/tool/executeTool.cjs +2 -2
  129. package/tool/executeTool.js +2 -2
  130. package/tool/useTool.cjs +2 -4
  131. package/tool/useTool.d.ts +2 -2
  132. package/tool/useTool.js +2 -4
  133. package/tool/useToolOrGenerateText.d.ts +2 -2
  134. package/util/SafeResult.d.ts +1 -1
  135. package/util/runSafe.cjs +1 -1
  136. package/util/runSafe.js +1 -1
  137. package/vector-index/VectorIndexRetriever.cjs +0 -7
  138. package/vector-index/VectorIndexRetriever.d.ts +5 -5
  139. package/vector-index/VectorIndexRetriever.js +0 -7
  140. package/vector-index/upsertIntoVectorIndex.d.ts +4 -4
  141. package/model-function/DeltaEvent.d.ts +0 -7
  142. package/model-function/ModelFunctionOptions.d.ts +0 -4
  143. /package/model-function/{DeltaEvent.cjs → Delta.cjs} +0 -0
  144. /package/model-function/{DeltaEvent.js → Delta.js} +0 -0
  145. /package/model-function/{ModelFunctionOptions.cjs → ModelCallMetadata.cjs} +0 -0
  146. /package/model-function/{ModelFunctionOptions.js → ModelCallMetadata.js} +0 -0
@@ -1,18 +1,17 @@
1
1
  import z from "zod";
2
+ import { FunctionOptions } from "../../../core/FunctionOptions.js";
2
3
  import { ApiConfiguration } from "../../../core/api/ApiConfiguration.js";
3
4
  import { ResponseHandler } from "../../../core/api/postToApi.js";
4
5
  import { StructureDefinition } from "../../../core/structure/StructureDefinition.js";
5
6
  import { AbstractModel } from "../../../model-function/AbstractModel.js";
6
- import { DeltaEvent } from "../../../model-function/DeltaEvent.js";
7
- import { ModelFunctionOptions } from "../../../model-function/ModelFunctionOptions.js";
7
+ import { Delta } from "../../../model-function/Delta.js";
8
8
  import { StructureGenerationModel } from "../../../model-function/generate-structure/StructureGenerationModel.js";
9
9
  import { StructureOrTextGenerationModel } from "../../../model-function/generate-structure/StructureOrTextGenerationModel.js";
10
- import { TextGenerationModel, TextGenerationModelSettings } from "../../../model-function/generate-text/TextGenerationModel.js";
10
+ import { TextGenerationModelSettings, TextStreamingModel } from "../../../model-function/generate-text/TextGenerationModel.js";
11
11
  import { PromptFormat } from "../../../prompt/PromptFormat.js";
12
- import { PromptFormatTextGenerationModel } from "../../../prompt/PromptFormatTextGenerationModel.js";
12
+ import { PromptFormatTextStreamingModel } from "../../../prompt/PromptFormatTextStreamingModel.js";
13
13
  import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
14
14
  import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
15
- import { OpenAIChatDelta } from "./OpenAIChatStreamIterable.js";
16
15
  export declare const OPENAI_CHAT_MODELS: {
17
16
  "gpt-4": {
18
17
  contextWindowSize: number;
@@ -132,7 +131,7 @@ export interface OpenAIChatSettings extends TextGenerationModelSettings, Omit<Op
132
131
  * ),
133
132
  * ]);
134
133
  */
135
- export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings>, StructureGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings>, StructureOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatSettings> {
134
+ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextStreamingModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureGenerationModel<OpenAIChatMessage[], OpenAIChatSettings>, StructureOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatSettings> {
136
135
  constructor(settings: OpenAIChatSettings);
137
136
  readonly provider: "openai";
138
137
  get modelName(): OpenAIChatModelType;
@@ -145,37 +144,50 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
145
144
  countPromptTokens(messages: OpenAIChatMessage[]): Promise<number>;
146
145
  callAPI<RESULT>(messages: Array<OpenAIChatMessage>, options: {
147
146
  responseFormat: OpenAIChatResponseFormatType<RESULT>;
148
- } & ModelFunctionOptions<Partial<OpenAIChatCallSettings & {
149
- user?: string;
150
- }>>): Promise<RESULT>;
147
+ } & FunctionOptions & {
148
+ functions?: Array<{
149
+ name: string;
150
+ description?: string;
151
+ parameters: unknown;
152
+ }>;
153
+ functionCall?: "none" | "auto" | {
154
+ name: string;
155
+ };
156
+ }): Promise<RESULT>;
151
157
  get settingsForEvent(): Partial<OpenAIChatSettings>;
152
- generateTextResponse(prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<{
153
- object: "chat.completion";
154
- model: string;
158
+ doGenerateText(prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
159
+ response: {
160
+ object: "chat.completion";
161
+ model: string;
162
+ usage: {
163
+ prompt_tokens: number;
164
+ completion_tokens: number;
165
+ total_tokens: number;
166
+ };
167
+ id: string;
168
+ created: number;
169
+ choices: {
170
+ message: {
171
+ content: string | null;
172
+ role: "assistant";
173
+ function_call?: {
174
+ name: string;
175
+ arguments: string;
176
+ } | undefined;
177
+ };
178
+ finish_reason: string;
179
+ index: number;
180
+ logprobs?: any;
181
+ }[];
182
+ };
183
+ text: string;
155
184
  usage: {
156
- prompt_tokens: number;
157
- completion_tokens: number;
158
- total_tokens: number;
185
+ promptTokens: number;
186
+ completionTokens: number;
187
+ totalTokens: number;
159
188
  };
160
- id: string;
161
- created: number;
162
- choices: {
163
- message: {
164
- content: string | null;
165
- role: "assistant";
166
- function_call?: {
167
- name: string;
168
- arguments: string;
169
- } | undefined;
170
- };
171
- finish_reason: string;
172
- index: number;
173
- logprobs?: any;
174
- }[];
175
189
  }>;
176
- extractText(response: OpenAIChatResponse): string;
177
- generateDeltaStreamResponse(prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<AsyncIterable<DeltaEvent<OpenAIChatDelta>>>;
178
- extractTextDelta(fullDelta: OpenAIChatDelta): string | undefined;
190
+ doStreamText(prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
179
191
  /**
180
192
  * JSON generation uses the OpenAI GPT function calling API.
181
193
  * It provides a single function specification and instructs the model to provide parameters for calling the function.
@@ -183,26 +195,85 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
183
195
  *
184
196
  * @see https://platform.openai.com/docs/guides/gpt/function-calling
185
197
  */
186
- generateStructureResponse(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings> | undefined): PromiseLike<OpenAIChatResponse>;
187
- extractStructure(response: OpenAIChatResponse): unknown;
188
- generateStructureStreamResponse(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<AsyncIterable<DeltaEvent<OpenAIChatDelta>>>;
189
- extractPartialStructure(fullDelta: OpenAIChatDelta): unknown | undefined;
190
- generateStructureOrTextResponse(structureDefinitions: Array<StructureDefinition<string, unknown>>, prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings> | undefined): PromiseLike<OpenAIChatResponse>;
191
- extractStructureAndText(response: OpenAIChatResponse): {
192
- structure: null;
193
- value: null;
194
- text: string;
195
- } | {
196
- structure: string;
197
- value: any;
198
- text: string | null;
199
- };
198
+ doGenerateStructure(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
199
+ response: {
200
+ object: "chat.completion";
201
+ model: string;
202
+ usage: {
203
+ prompt_tokens: number;
204
+ completion_tokens: number;
205
+ total_tokens: number;
206
+ };
207
+ id: string;
208
+ created: number;
209
+ choices: {
210
+ message: {
211
+ content: string | null;
212
+ role: "assistant";
213
+ function_call?: {
214
+ name: string;
215
+ arguments: string;
216
+ } | undefined;
217
+ };
218
+ finish_reason: string;
219
+ index: number;
220
+ logprobs?: any;
221
+ }[];
222
+ };
223
+ structure: any;
224
+ usage: {
225
+ promptTokens: number;
226
+ completionTokens: number;
227
+ totalTokens: number;
228
+ };
229
+ }>;
230
+ doStreamStructure(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<AsyncIterable<Delta<unknown>>>;
231
+ doGenerateStructureOrText(structureDefinitions: Array<StructureDefinition<string, unknown>>, prompt: OpenAIChatMessage[], options?: FunctionOptions): Promise<{
232
+ response: {
233
+ object: "chat.completion";
234
+ model: string;
235
+ usage: {
236
+ prompt_tokens: number;
237
+ completion_tokens: number;
238
+ total_tokens: number;
239
+ };
240
+ id: string;
241
+ created: number;
242
+ choices: {
243
+ message: {
244
+ content: string | null;
245
+ role: "assistant";
246
+ function_call?: {
247
+ name: string;
248
+ arguments: string;
249
+ } | undefined;
250
+ };
251
+ finish_reason: string;
252
+ index: number;
253
+ logprobs?: any;
254
+ }[];
255
+ };
256
+ structureAndText: {
257
+ structure: null;
258
+ value: null;
259
+ text: string;
260
+ } | {
261
+ structure: string;
262
+ value: any;
263
+ text: string | null;
264
+ };
265
+ usage: {
266
+ promptTokens: number;
267
+ completionTokens: number;
268
+ totalTokens: number;
269
+ };
270
+ }>;
200
271
  extractUsage(response: OpenAIChatResponse): {
201
272
  promptTokens: number;
202
273
  completionTokens: number;
203
274
  totalTokens: number;
204
275
  };
205
- withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, OpenAIChatMessage[]>): PromptFormatTextGenerationModel<INPUT_PROMPT, OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings, this>;
276
+ withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, OpenAIChatMessage[]>): PromptFormatTextStreamingModel<INPUT_PROMPT, OpenAIChatMessage[], OpenAIChatSettings, this>;
206
277
  withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
207
278
  }
208
279
  declare const openAIChatResponseSchema: z.ZodObject<{
@@ -366,11 +437,17 @@ export declare const OpenAIChatResponseFormat: {
366
437
  /**
367
438
  * Returns an async iterable over the text deltas (only the text delta of the first choice).
368
439
  */
369
- deltaIterable: {
440
+ textDeltaIterable: {
441
+ stream: true;
442
+ handler: ({ response }: {
443
+ response: Response;
444
+ }) => Promise<AsyncIterable<Delta<string>>>;
445
+ };
446
+ structureDeltaIterable: {
370
447
  stream: true;
371
448
  handler: ({ response }: {
372
449
  response: Response;
373
- }) => Promise<AsyncIterable<DeltaEvent<OpenAIChatDelta>>>;
450
+ }) => Promise<AsyncIterable<Delta<unknown>>>;
374
451
  };
375
452
  };
376
453
  export {};
@@ -4,11 +4,11 @@ import { callWithRetryAndThrottle } from "../../../core/api/callWithRetryAndThro
4
4
  import { createJsonResponseHandler, postJsonToApi, } from "../../../core/api/postToApi.js";
5
5
  import { AbstractModel } from "../../../model-function/AbstractModel.js";
6
6
  import { parsePartialJson } from "../../../model-function/generate-structure/parsePartialJson.js";
7
- import { PromptFormatTextGenerationModel } from "../../../prompt/PromptFormatTextGenerationModel.js";
7
+ import { PromptFormatTextStreamingModel } from "../../../prompt/PromptFormatTextStreamingModel.js";
8
8
  import { OpenAIApiConfiguration } from "../OpenAIApiConfiguration.js";
9
9
  import { failedOpenAICallResponseHandler } from "../OpenAIError.js";
10
10
  import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
11
- import { createOpenAIChatFullDeltaIterableQueue, } from "./OpenAIChatStreamIterable.js";
11
+ import { createOpenAIChatDeltaIterableQueue } from "./OpenAIChatStreamIterable.js";
12
12
  import { countOpenAIChatPromptTokens } from "./countOpenAIChatMessageTokens.js";
13
13
  /*
14
14
  * Available OpenAI chat models, their token limits, and pricing.
@@ -174,27 +174,25 @@ export class OpenAIChatModel extends AbstractModel {
174
174
  });
175
175
  }
176
176
  async callAPI(messages, options) {
177
- const { run, settings, responseFormat } = options;
178
- const combinedSettings = {
179
- ...this.settings,
180
- ...settings,
181
- };
182
- const callSettings = {
183
- user: this.settings.isUserIdForwardingEnabled ? run?.userId : undefined,
184
- // Copied settings:
185
- ...combinedSettings,
186
- // map to OpenAI API names:
187
- stop: combinedSettings.stopSequences,
188
- maxTokens: combinedSettings.maxCompletionTokens,
189
- // other settings:
190
- abortSignal: run?.abortSignal,
191
- messages,
192
- responseFormat,
193
- };
194
177
  return callWithRetryAndThrottle({
195
- retry: callSettings.api?.retry,
196
- throttle: callSettings.api?.throttle,
197
- call: async () => callOpenAIChatCompletionAPI(callSettings),
178
+ retry: this.settings.api?.retry,
179
+ throttle: this.settings.api?.throttle,
180
+ call: async () => callOpenAIChatCompletionAPI({
181
+ ...this.settings,
182
+ // function calling:
183
+ functions: options.functions ?? this.settings.functions,
184
+ functionCall: options.functionCall ?? this.settings.functionCall,
185
+ // map to OpenAI API names:
186
+ stop: this.settings.stopSequences,
187
+ maxTokens: this.settings.maxCompletionTokens,
188
+ // other settings:
189
+ user: this.settings.isUserIdForwardingEnabled
190
+ ? options.run?.userId
191
+ : undefined,
192
+ abortSignal: options.run?.abortSignal,
193
+ responseFormat: options.responseFormat,
194
+ messages,
195
+ }),
198
196
  });
199
197
  }
200
198
  get settingsForEvent() {
@@ -212,24 +210,23 @@ export class OpenAIChatModel extends AbstractModel {
212
210
  ];
213
211
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
214
212
  }
215
- generateTextResponse(prompt, options) {
216
- return this.callAPI(prompt, {
213
+ async doGenerateText(prompt, options) {
214
+ const response = await this.callAPI(prompt, {
217
215
  ...options,
218
216
  responseFormat: OpenAIChatResponseFormat.json,
219
217
  });
218
+ return {
219
+ response,
220
+ text: response.choices[0].message.content,
221
+ usage: this.extractUsage(response),
222
+ };
220
223
  }
221
- extractText(response) {
222
- return response.choices[0].message.content;
223
- }
224
- generateDeltaStreamResponse(prompt, options) {
224
+ doStreamText(prompt, options) {
225
225
  return this.callAPI(prompt, {
226
226
  ...options,
227
- responseFormat: OpenAIChatResponseFormat.deltaIterable,
227
+ responseFormat: OpenAIChatResponseFormat.textDeltaIterable,
228
228
  });
229
229
  }
230
- extractTextDelta(fullDelta) {
231
- return fullDelta[0]?.delta.content ?? undefined;
232
- }
233
230
  /**
234
231
  * JSON generation uses the OpenAI GPT function calling API.
235
232
  * It provides a single function specification and instructs the model to provide parameters for calling the function.
@@ -237,69 +234,54 @@ export class OpenAIChatModel extends AbstractModel {
237
234
  *
238
235
  * @see https://platform.openai.com/docs/guides/gpt/function-calling
239
236
  */
240
- generateStructureResponse(structureDefinition, prompt, options) {
241
- return this.callAPI(prompt, {
237
+ async doGenerateStructure(structureDefinition, prompt, options) {
238
+ const response = await this.callAPI(prompt, {
239
+ ...options,
242
240
  responseFormat: OpenAIChatResponseFormat.json,
243
- functionId: options?.functionId,
244
- settings: {
245
- ...options,
246
- functionCall: { name: structureDefinition.name },
247
- functions: [
248
- {
249
- name: structureDefinition.name,
250
- description: structureDefinition.description,
251
- parameters: structureDefinition.schema.getJsonSchema(),
252
- },
253
- ],
254
- },
255
- run: options?.run,
256
- });
257
- }
258
- extractStructure(response) {
259
- return SecureJSON.parse(response.choices[0].message.function_call.arguments);
260
- }
261
- generateStructureStreamResponse(structureDefinition, prompt, options) {
262
- return this.callAPI(prompt, {
263
- responseFormat: OpenAIChatResponseFormat.deltaIterable,
264
- functionId: options?.functionId,
265
- settings: {
266
- ...options,
267
- functionCall: { name: structureDefinition.name },
268
- functions: [
269
- {
270
- name: structureDefinition.name,
271
- description: structureDefinition.description,
272
- parameters: structureDefinition.schema.getJsonSchema(),
273
- },
274
- ],
275
- },
276
- run: options?.run,
241
+ functionCall: { name: structureDefinition.name },
242
+ functions: [
243
+ {
244
+ name: structureDefinition.name,
245
+ description: structureDefinition.description,
246
+ parameters: structureDefinition.schema.getJsonSchema(),
247
+ },
248
+ ],
277
249
  });
250
+ return {
251
+ response,
252
+ structure: SecureJSON.parse(response.choices[0].message.function_call.arguments),
253
+ usage: this.extractUsage(response),
254
+ };
278
255
  }
279
- extractPartialStructure(fullDelta) {
280
- return parsePartialJson(fullDelta[0]?.function_call?.arguments);
281
- }
282
- generateStructureOrTextResponse(structureDefinitions, prompt, options) {
256
+ async doStreamStructure(structureDefinition, prompt, options) {
283
257
  return this.callAPI(prompt, {
284
- responseFormat: OpenAIChatResponseFormat.json,
285
- functionId: options?.functionId,
286
- settings: {
287
- ...options,
288
- functionCall: "auto",
289
- functions: structureDefinitions.map((structureDefinition) => ({
258
+ ...options,
259
+ responseFormat: OpenAIChatResponseFormat.structureDeltaIterable,
260
+ functionCall: { name: structureDefinition.name },
261
+ functions: [
262
+ {
290
263
  name: structureDefinition.name,
291
264
  description: structureDefinition.description,
292
265
  parameters: structureDefinition.schema.getJsonSchema(),
293
- })),
294
- },
295
- run: options?.run,
266
+ },
267
+ ],
296
268
  });
297
269
  }
298
- extractStructureAndText(response) {
270
+ async doGenerateStructureOrText(structureDefinitions, prompt, options) {
271
+ const response = await this.callAPI(prompt, {
272
+ ...options,
273
+ responseFormat: OpenAIChatResponseFormat.json,
274
+ functionCall: "auto",
275
+ functions: structureDefinitions.map((structureDefinition) => ({
276
+ name: structureDefinition.name,
277
+ description: structureDefinition.description,
278
+ parameters: structureDefinition.schema.getJsonSchema(),
279
+ })),
280
+ });
299
281
  const message = response.choices[0].message;
300
282
  const content = message.content;
301
283
  const functionCall = message.function_call;
302
- return functionCall == null
284
+ const structureAndText = functionCall == null
303
285
  ? {
304
286
  structure: null,
305
287
  value: null,
@@ -310,6 +292,11 @@ export class OpenAIChatModel extends AbstractModel {
310
292
  value: SecureJSON.parse(functionCall.arguments),
311
293
  text: content,
312
294
  };
295
+ return {
296
+ response,
297
+ structureAndText,
298
+ usage: this.extractUsage(response),
299
+ };
313
300
  }
314
301
  extractUsage(response) {
315
302
  return {
@@ -319,7 +306,7 @@ export class OpenAIChatModel extends AbstractModel {
319
306
  };
320
307
  }
321
308
  withPromptFormat(promptFormat) {
322
- return new PromptFormatTextGenerationModel({
309
+ return new PromptFormatTextStreamingModel({
323
310
  model: this.withSettings({
324
311
  stopSequences: [
325
312
  ...(this.settings.stopSequences ?? []),
@@ -399,8 +386,12 @@ export const OpenAIChatResponseFormat = {
399
386
  /**
400
387
  * Returns an async iterable over the text deltas (only the text delta of the first choice).
401
388
  */
402
- deltaIterable: {
389
+ textDeltaIterable: {
390
+ stream: true,
391
+ handler: async ({ response }) => createOpenAIChatDeltaIterableQueue(response.body, (delta) => delta[0]?.delta.content ?? ""),
392
+ },
393
+ structureDeltaIterable: {
403
394
  stream: true,
404
- handler: async ({ response }) => createOpenAIChatFullDeltaIterableQueue(response.body),
395
+ handler: async ({ response }) => createOpenAIChatDeltaIterableQueue(response.body, (delta) => parsePartialJson(delta[0]?.function_call?.arguments)),
405
396
  },
406
397
  };
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
3
3
  return (mod && mod.__esModule) ? mod : { "default": mod };
4
4
  };
5
5
  Object.defineProperty(exports, "__esModule", { value: true });
6
- exports.createOpenAIChatFullDeltaIterableQueue = void 0;
6
+ exports.createOpenAIChatDeltaIterableQueue = void 0;
7
7
  const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
8
8
  const zod_1 = require("zod");
9
9
  const AsyncQueue_js_1 = require("../../../event-source/AsyncQueue.cjs");
@@ -28,7 +28,7 @@ const chatResponseStreamEventSchema = zod_1.z.object({
28
28
  model: zod_1.z.string(),
29
29
  object: zod_1.z.string(),
30
30
  });
31
- async function createOpenAIChatFullDeltaIterableQueue(stream) {
31
+ async function createOpenAIChatDeltaIterableQueue(stream, extractDeltaValue) {
32
32
  const queue = new AsyncQueue_js_1.AsyncQueue();
33
33
  const streamDelta = [];
34
34
  // process the stream asynchronously (no 'await' on purpose):
@@ -96,6 +96,7 @@ async function createOpenAIChatFullDeltaIterableQueue(stream) {
96
96
  queue.push({
97
97
  type: "delta",
98
98
  fullDelta: streamDeltaDeepCopy,
99
+ valueDelta: extractDeltaValue(streamDeltaDeepCopy),
99
100
  });
100
101
  }
101
102
  }
@@ -112,4 +113,4 @@ async function createOpenAIChatFullDeltaIterableQueue(stream) {
112
113
  });
113
114
  return queue;
114
115
  }
115
- exports.createOpenAIChatFullDeltaIterableQueue = createOpenAIChatFullDeltaIterableQueue;
116
+ exports.createOpenAIChatDeltaIterableQueue = createOpenAIChatDeltaIterableQueue;
@@ -1,4 +1,4 @@
1
- import { DeltaEvent } from "../../../model-function/DeltaEvent.js";
1
+ import { Delta } from "../../../model-function/Delta.js";
2
2
  export type OpenAIChatDelta = Array<{
3
3
  role: "assistant" | "user" | undefined;
4
4
  content: string;
@@ -16,4 +16,4 @@ export type OpenAIChatDelta = Array<{
16
16
  };
17
17
  };
18
18
  }>;
19
- export declare function createOpenAIChatFullDeltaIterableQueue(stream: ReadableStream<Uint8Array>): Promise<AsyncIterable<DeltaEvent<OpenAIChatDelta>>>;
19
+ export declare function createOpenAIChatDeltaIterableQueue<VALUE>(stream: ReadableStream<Uint8Array>, extractDeltaValue: (delta: OpenAIChatDelta) => VALUE): Promise<AsyncIterable<Delta<VALUE>>>;
@@ -22,7 +22,7 @@ const chatResponseStreamEventSchema = z.object({
22
22
  model: z.string(),
23
23
  object: z.string(),
24
24
  });
25
- export async function createOpenAIChatFullDeltaIterableQueue(stream) {
25
+ export async function createOpenAIChatDeltaIterableQueue(stream, extractDeltaValue) {
26
26
  const queue = new AsyncQueue();
27
27
  const streamDelta = [];
28
28
  // process the stream asynchonously (no 'await' on purpose):
@@ -90,6 +90,7 @@ export async function createOpenAIChatFullDeltaIterableQueue(stream) {
90
90
  queue.push({
91
91
  type: "delta",
92
92
  fullDelta: streamDeltaDeepCopy,
93
+ valueDelta: extractDeltaValue(streamDeltaDeepCopy),
93
94
  });
94
95
  }
95
96
  }
@@ -2,11 +2,11 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.StabilityImageGenerationModel = void 0;
4
4
  const zod_1 = require("zod");
5
- const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
6
5
  const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
7
6
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
8
- const StabilityError_js_1 = require("./StabilityError.cjs");
7
+ const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
9
8
  const StabilityApiConfiguration_js_1 = require("./StabilityApiConfiguration.cjs");
9
+ const StabilityError_js_1 = require("./StabilityError.cjs");
10
10
  /**
11
11
  * Create an image generation model that calls the Stability AI image generation API.
12
12
  *
@@ -43,21 +43,15 @@ class StabilityImageGenerationModel extends AbstractModel_js_1.AbstractModel {
43
43
  return this.settings.model;
44
44
  }
45
45
  async callAPI(input, options) {
46
- const run = options?.run;
47
- const settings = options?.settings;
48
- const callSettings = {
49
- // copied settings:
50
- ...this.settings,
51
- ...settings,
52
- // other settings:
53
- abortSignal: run?.abortSignal,
54
- engineId: this.settings.model,
55
- textPrompts: input,
56
- };
57
46
  return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
58
- retry: callSettings.api?.retry,
59
- throttle: callSettings.api?.throttle,
60
- call: async () => callStabilityImageGenerationAPI(callSettings),
47
+ retry: this.settings.api?.retry,
48
+ throttle: this.settings.api?.throttle,
49
+ call: async () => callStabilityImageGenerationAPI({
50
+ ...this.settings,
51
+ abortSignal: options?.run?.abortSignal,
52
+ engineId: this.settings.model,
53
+ textPrompts: input,
54
+ }),
61
55
  });
62
56
  }
63
57
  get settingsForEvent() {
@@ -75,11 +69,12 @@ class StabilityImageGenerationModel extends AbstractModel_js_1.AbstractModel {
75
69
  ];
76
70
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
77
71
  }
78
- generateImageResponse(prompt, options) {
79
- return this.callAPI(prompt, options);
80
- }
81
- extractBase64Image(response) {
82
- return response.artifacts[0].base64;
72
+ async doGenerateImage(prompt, options) {
73
+ const response = await this.callAPI(prompt, options);
74
+ return {
75
+ response,
76
+ base64Image: response.artifacts[0].base64,
77
+ };
83
78
  }
84
79
  withSettings(additionalSettings) {
85
80
  return new StabilityImageGenerationModel(Object.assign({}, this.settings, additionalSettings));
@@ -1,7 +1,7 @@
1
1
  import { z } from "zod";
2
- import { AbstractModel } from "../../model-function/AbstractModel.js";
3
- import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
2
+ import { FunctionOptions } from "../../core/FunctionOptions.js";
4
3
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
+ import { AbstractModel } from "../../model-function/AbstractModel.js";
5
5
  import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
6
6
  /**
7
7
  * Create an image generation model that calls the Stability AI image generation API.
@@ -25,20 +25,22 @@ import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-
25
25
  * ]
26
26
  * );
27
27
  */
28
- export declare class StabilityImageGenerationModel extends AbstractModel<StabilityImageGenerationModelSettings> implements ImageGenerationModel<StabilityImageGenerationPrompt, StabilityImageGenerationResponse, StabilityImageGenerationModelSettings> {
28
+ export declare class StabilityImageGenerationModel extends AbstractModel<StabilityImageGenerationModelSettings> implements ImageGenerationModel<StabilityImageGenerationPrompt, StabilityImageGenerationModelSettings> {
29
29
  constructor(settings: StabilityImageGenerationModelSettings);
30
30
  readonly provider: "stability";
31
31
  get modelName(): StabilityImageGenerationModelType;
32
- callAPI(input: StabilityImageGenerationPrompt, options?: ModelFunctionOptions<StabilityImageGenerationModelSettings>): Promise<StabilityImageGenerationResponse>;
32
+ callAPI(input: StabilityImageGenerationPrompt, options?: FunctionOptions): Promise<StabilityImageGenerationResponse>;
33
33
  get settingsForEvent(): Partial<StabilityImageGenerationModelSettings>;
34
- generateImageResponse(prompt: StabilityImageGenerationPrompt, options?: ModelFunctionOptions<StabilityImageGenerationModelSettings>): Promise<{
35
- artifacts: {
36
- seed: number;
37
- base64: string;
38
- finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
39
- }[];
34
+ doGenerateImage(prompt: StabilityImageGenerationPrompt, options?: FunctionOptions): Promise<{
35
+ response: {
36
+ artifacts: {
37
+ seed: number;
38
+ base64: string;
39
+ finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
40
+ }[];
41
+ };
42
+ base64Image: string;
40
43
  }>;
41
- extractBase64Image(response: StabilityImageGenerationResponse): string;
42
44
  withSettings(additionalSettings: StabilityImageGenerationModelSettings): this;
43
45
  }
44
46
  declare const stabilityImageGenerationModels: readonly ["stable-diffusion-v1-5", "stable-diffusion-512-v2-1", "stable-diffusion-xl-1024-v0-9", "stable-diffusion-xl-1024-v1-0"];