modelfusion 0.117.0 → 0.119.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (161)
  1. package/CHANGELOG.md +60 -0
  2. package/README.md +10 -9
  3. package/core/getFunctionCallLogger.cjs +6 -6
  4. package/core/getFunctionCallLogger.js +6 -6
  5. package/model-function/ModelCallEvent.d.ts +1 -1
  6. package/model-function/embed/EmbeddingEvent.d.ts +1 -1
  7. package/model-function/embed/EmbeddingModel.d.ts +1 -1
  8. package/model-function/embed/embed.cjs +5 -5
  9. package/model-function/embed/embed.d.ts +2 -2
  10. package/model-function/embed/embed.js +5 -5
  11. package/model-function/executeStandardCall.cjs +3 -3
  12. package/model-function/executeStandardCall.d.ts +2 -2
  13. package/model-function/executeStandardCall.js +3 -3
  14. package/model-function/generate-image/ImageGenerationEvent.d.ts +1 -1
  15. package/model-function/generate-image/ImageGenerationModel.d.ts +1 -1
  16. package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +1 -1
  17. package/model-function/generate-image/generateImage.cjs +2 -2
  18. package/model-function/generate-image/generateImage.d.ts +1 -1
  19. package/model-function/generate-image/generateImage.js +2 -2
  20. package/model-function/generate-speech/SpeechGenerationEvent.d.ts +1 -1
  21. package/model-function/generate-speech/generateSpeech.cjs +2 -2
  22. package/model-function/generate-speech/generateSpeech.d.ts +1 -1
  23. package/model-function/generate-speech/generateSpeech.js +2 -2
  24. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +10 -1
  25. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +1 -0
  26. package/model-function/generate-structure/StructureFromTextGenerationModel.js +10 -1
  27. package/model-function/generate-structure/StructureFromTextPromptTemplate.d.ts +12 -1
  28. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +1 -22
  29. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +0 -5
  30. package/model-function/generate-structure/StructureFromTextStreamingModel.js +1 -22
  31. package/model-function/generate-structure/StructureGenerationEvent.d.ts +1 -1
  32. package/model-function/generate-structure/generateStructure.cjs +2 -2
  33. package/model-function/generate-structure/generateStructure.d.ts +1 -1
  34. package/model-function/generate-structure/generateStructure.js +2 -2
  35. package/model-function/generate-structure/jsonStructurePrompt.cjs +4 -12
  36. package/model-function/generate-structure/jsonStructurePrompt.js +4 -12
  37. package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +2 -2
  38. package/model-function/generate-text/PromptTemplateTextGenerationModel.cjs +6 -0
  39. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +5 -2
  40. package/model-function/generate-text/PromptTemplateTextGenerationModel.js +6 -0
  41. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +6 -0
  42. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +3 -0
  43. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +6 -0
  44. package/model-function/generate-text/TextGenerationEvent.d.ts +1 -1
  45. package/model-function/generate-text/TextGenerationModel.d.ts +7 -4
  46. package/model-function/generate-text/generateText.cjs +3 -3
  47. package/model-function/generate-text/generateText.d.ts +1 -1
  48. package/model-function/generate-text/generateText.js +3 -3
  49. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +8 -1
  50. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.d.ts +5 -0
  51. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +6 -0
  52. package/model-function/generate-text/prompt-template/PromptTemplateProvider.cjs +2 -0
  53. package/model-function/generate-text/prompt-template/PromptTemplateProvider.d.ts +8 -0
  54. package/model-function/generate-text/prompt-template/PromptTemplateProvider.js +1 -0
  55. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +34 -1
  56. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.d.ts +9 -0
  57. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +31 -0
  58. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +28 -0
  59. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +29 -1
  60. package/model-function/generate-text/prompt-template/index.cjs +1 -0
  61. package/model-function/generate-text/prompt-template/index.d.ts +1 -0
  62. package/model-function/generate-text/prompt-template/index.js +1 -0
  63. package/model-function/generate-transcription/TranscriptionEvent.d.ts +1 -1
  64. package/model-function/generate-transcription/TranscriptionModel.d.ts +1 -1
  65. package/model-function/generate-transcription/generateTranscription.cjs +1 -1
  66. package/model-function/generate-transcription/generateTranscription.d.ts +1 -1
  67. package/model-function/generate-transcription/generateTranscription.js +1 -1
  68. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +3 -3
  69. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +1 -1
  70. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +3 -3
  71. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +3 -3
  72. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +1 -1
  73. package/model-provider/cohere/CohereTextEmbeddingModel.js +3 -3
  74. package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -3
  75. package/model-provider/cohere/CohereTextGenerationModel.d.ts +5 -4
  76. package/model-provider/cohere/CohereTextGenerationModel.js +6 -3
  77. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +3 -3
  78. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +1 -1
  79. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +3 -3
  80. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +6 -3
  81. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +5 -4
  82. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +6 -3
  83. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +15 -1
  84. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +4 -0
  85. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +13 -0
  86. package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +40 -33
  87. package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +20 -9
  88. package/model-provider/llamacpp/LlamaCppCompletionModel.js +40 -33
  89. package/model-provider/llamacpp/LlamaCppFacade.cjs +4 -3
  90. package/model-provider/llamacpp/LlamaCppFacade.d.ts +2 -1
  91. package/model-provider/llamacpp/LlamaCppFacade.js +2 -1
  92. package/model-provider/llamacpp/LlamaCppGrammars.cjs +3 -1
  93. package/model-provider/llamacpp/LlamaCppGrammars.d.ts +1 -0
  94. package/model-provider/llamacpp/LlamaCppGrammars.js +1 -0
  95. package/model-provider/llamacpp/LlamaCppPrompt.cjs +59 -0
  96. package/model-provider/llamacpp/LlamaCppPrompt.d.ts +14 -0
  97. package/model-provider/llamacpp/LlamaCppPrompt.js +31 -0
  98. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +3 -3
  99. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +1 -1
  100. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +3 -3
  101. package/model-provider/llamacpp/convertJsonSchemaToGBNF.cjs +113 -0
  102. package/model-provider/llamacpp/convertJsonSchemaToGBNF.d.ts +7 -0
  103. package/model-provider/llamacpp/convertJsonSchemaToGBNF.js +109 -0
  104. package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.cjs +150 -0
  105. package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.d.ts +1 -0
  106. package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.js +148 -0
  107. package/model-provider/llamacpp/index.cjs +2 -3
  108. package/model-provider/llamacpp/index.d.ts +1 -2
  109. package/model-provider/llamacpp/index.js +1 -2
  110. package/model-provider/mistral/MistralChatModel.cjs +6 -3
  111. package/model-provider/mistral/MistralChatModel.d.ts +5 -4
  112. package/model-provider/mistral/MistralChatModel.js +6 -3
  113. package/model-provider/mistral/MistralTextEmbeddingModel.cjs +3 -3
  114. package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +1 -1
  115. package/model-provider/mistral/MistralTextEmbeddingModel.js +3 -3
  116. package/model-provider/ollama/OllamaChatModel.cjs +3 -3
  117. package/model-provider/ollama/OllamaChatModel.d.ts +2 -2
  118. package/model-provider/ollama/OllamaChatModel.js +3 -3
  119. package/model-provider/ollama/OllamaCompletionModel.cjs +6 -3
  120. package/model-provider/ollama/OllamaCompletionModel.d.ts +15 -14
  121. package/model-provider/ollama/OllamaCompletionModel.js +6 -3
  122. package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +3 -3
  123. package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts +1 -1
  124. package/model-provider/ollama/OllamaTextEmbeddingModel.js +3 -3
  125. package/model-provider/openai/AbstractOpenAIChatModel.cjs +12 -12
  126. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +6 -6
  127. package/model-provider/openai/AbstractOpenAIChatModel.js +12 -12
  128. package/model-provider/openai/AbstractOpenAICompletionModel.cjs +9 -6
  129. package/model-provider/openai/AbstractOpenAICompletionModel.d.ts +3 -2
  130. package/model-provider/openai/AbstractOpenAICompletionModel.js +9 -6
  131. package/model-provider/openai/OpenAIImageGenerationModel.cjs +3 -3
  132. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +1 -1
  133. package/model-provider/openai/OpenAIImageGenerationModel.js +3 -3
  134. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +3 -3
  135. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +1 -1
  136. package/model-provider/openai/OpenAITextEmbeddingModel.js +3 -3
  137. package/model-provider/openai/OpenAITranscriptionModel.cjs +3 -3
  138. package/model-provider/openai/OpenAITranscriptionModel.d.ts +1 -1
  139. package/model-provider/openai/OpenAITranscriptionModel.js +3 -3
  140. package/model-provider/stability/StabilityImageGenerationModel.cjs +3 -3
  141. package/model-provider/stability/StabilityImageGenerationModel.d.ts +1 -1
  142. package/model-provider/stability/StabilityImageGenerationModel.js +3 -3
  143. package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +3 -3
  144. package/model-provider/whispercpp/WhisperCppTranscriptionModel.d.ts +1 -1
  145. package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +3 -3
  146. package/package.json +1 -1
  147. package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +2 -2
  148. package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
  149. package/tool/generate-tool-call/TextGenerationToolCallModel.js +2 -2
  150. package/tool/generate-tool-call/ToolCallGenerationEvent.d.ts +1 -1
  151. package/tool/generate-tool-call/ToolCallGenerationModel.d.ts +1 -1
  152. package/tool/generate-tool-call/generateToolCall.cjs +2 -2
  153. package/tool/generate-tool-call/generateToolCall.js +2 -2
  154. package/tool/generate-tool-calls/TextGenerationToolCallsModel.cjs +2 -2
  155. package/tool/generate-tool-calls/TextGenerationToolCallsModel.d.ts +1 -1
  156. package/tool/generate-tool-calls/TextGenerationToolCallsModel.js +2 -2
  157. package/tool/generate-tool-calls/ToolCallsGenerationEvent.d.ts +1 -1
  158. package/tool/generate-tool-calls/ToolCallsGenerationModel.d.ts +1 -1
  159. package/tool/generate-tool-calls/generateToolCalls.cjs +2 -2
  160. package/tool/generate-tool-calls/generateToolCalls.d.ts +1 -1
  161. package/tool/generate-tool-calls/generateToolCalls.js +2 -2
@@ -127,12 +127,12 @@ export class OllamaCompletionModel extends AbstractModel {
127
127
  schema: zodSchema(ollamaCompletionResponseSchema),
128
128
  }));
129
129
  }
130
- processTextGenerationResponse(response) {
130
+ processTextGenerationResponse(rawResponse) {
131
131
  return {
132
- response,
132
+ rawResponse,
133
133
  textGenerationResults: [
134
134
  {
135
- text: response.response,
135
+ text: rawResponse.response,
136
136
  finishReason: "unknown",
137
137
  },
138
138
  ],
@@ -160,6 +160,9 @@ export class OllamaCompletionModel extends AbstractModel {
160
160
  template: promptTemplate,
161
161
  });
162
162
  }
163
+ withJsonOutput() {
164
+ return this;
165
+ }
163
166
  withTextPrompt() {
164
167
  return this.withPromptTemplate({
165
168
  format(prompt) {
@@ -66,10 +66,10 @@ class OllamaTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
66
66
  };
67
67
  }
68
68
  async doEmbedValues(texts, options) {
69
- const response = await this.callAPI(texts, options);
69
+ const rawResponse = await this.callAPI(texts, options);
70
70
  return {
71
- response,
72
- embeddings: [response.embedding],
71
+ rawResponse,
72
+ embeddings: [rawResponse.embedding],
73
73
  };
74
74
  }
75
75
  withSettings(additionalSettings) {
@@ -19,7 +19,7 @@ export declare class OllamaTextEmbeddingModel extends AbstractModel<OllamaTextEm
19
19
  callAPI(texts: Array<string>, callOptions: FunctionCallOptions): Promise<OllamaTextEmbeddingResponse>;
20
20
  get settingsForEvent(): Partial<OllamaTextEmbeddingModelSettings>;
21
21
  doEmbedValues(texts: string[], options: FunctionCallOptions): Promise<{
22
- response: {
22
+ rawResponse: {
23
23
  embedding: number[];
24
24
  };
25
25
  embeddings: number[][];
@@ -63,10 +63,10 @@ export class OllamaTextEmbeddingModel extends AbstractModel {
63
63
  };
64
64
  }
65
65
  async doEmbedValues(texts, options) {
66
- const response = await this.callAPI(texts, options);
66
+ const rawResponse = await this.callAPI(texts, options);
67
67
  return {
68
- response,
69
- embeddings: [response.embedding],
68
+ rawResponse,
69
+ embeddings: [rawResponse.embedding],
70
70
  };
71
71
  }
72
72
  withSettings(additionalSettings) {
@@ -90,14 +90,14 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
90
90
  schema: (0, ZodSchema_js_1.zodSchema)(openAIChatResponseSchema),
91
91
  }));
92
92
  }
93
- processTextGenerationResponse(response) {
93
+ processTextGenerationResponse(rawResponse) {
94
94
  return {
95
- response,
96
- textGenerationResults: response.choices.map((choice) => ({
95
+ rawResponse,
96
+ textGenerationResults: rawResponse.choices.map((choice) => ({
97
97
  text: choice.message.content ?? "",
98
98
  finishReason: this.translateFinishReason(choice.finish_reason),
99
99
  })),
100
- usage: this.extractUsage(response),
100
+ usage: this.extractUsage(rawResponse),
101
101
  };
102
102
  }
103
103
  translateFinishReason(finishReason) {
@@ -133,7 +133,7 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
133
133
  return firstChoice.delta.content ?? undefined;
134
134
  }
135
135
  async doGenerateToolCall(tool, prompt, options) {
136
- const response = await this.callAPI(prompt, options, {
136
+ const rawResponse = await this.callAPI(prompt, options, {
137
137
  responseFormat: exports.OpenAIChatResponseFormat.json,
138
138
  toolChoice: {
139
139
  type: "function",
@@ -150,20 +150,20 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
150
150
  },
151
151
  ],
152
152
  });
153
- const toolCalls = response.choices[0]?.message.tool_calls;
153
+ const toolCalls = rawResponse.choices[0]?.message.tool_calls;
154
154
  return {
155
- response,
155
+ rawResponse,
156
156
  toolCall: toolCalls == null || toolCalls.length === 0
157
157
  ? null
158
158
  : {
159
159
  id: toolCalls[0].id,
160
160
  args: (0, parseJSON_js_1.parseJSON)({ text: toolCalls[0].function.arguments }),
161
161
  },
162
- usage: this.extractUsage(response),
162
+ usage: this.extractUsage(rawResponse),
163
163
  };
164
164
  }
165
165
  async doGenerateToolCalls(tools, prompt, options) {
166
- const response = await this.callAPI(prompt, options, {
166
+ const rawResponse = await this.callAPI(prompt, options, {
167
167
  responseFormat: exports.OpenAIChatResponseFormat.json,
168
168
  toolChoice: "auto",
169
169
  tools: tools.map((tool) => ({
@@ -175,16 +175,16 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
175
175
  },
176
176
  })),
177
177
  });
178
- const message = response.choices[0]?.message;
178
+ const message = rawResponse.choices[0]?.message;
179
179
  return {
180
- response,
180
+ rawResponse,
181
181
  text: message.content ?? null,
182
182
  toolCalls: message.tool_calls?.map((toolCall) => ({
183
183
  id: toolCall.id,
184
184
  name: toolCall.function.name,
185
185
  args: (0, parseJSON_js_1.parseJSON)({ text: toolCall.function.arguments }),
186
186
  })) ?? null,
187
- usage: this.extractUsage(response),
187
+ usage: this.extractUsage(rawResponse),
188
188
  };
189
189
  }
190
190
  extractUsage(response) {
@@ -90,7 +90,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
90
90
  toolChoice?: AbstractOpenAIChatSettings["toolChoice"];
91
91
  }): Promise<RESULT>;
92
92
  doGenerateTexts(prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
93
- response: {
93
+ rawResponse: {
94
94
  object: "chat.completion";
95
95
  model: string;
96
96
  usage: {
@@ -134,7 +134,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
134
134
  };
135
135
  }>;
136
136
  restoreGeneratedTexts(rawResponse: unknown): {
137
- response: {
137
+ rawResponse: {
138
138
  object: "chat.completion";
139
139
  model: string;
140
140
  usage: {
@@ -177,8 +177,8 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
177
177
  totalTokens: number;
178
178
  };
179
179
  };
180
- processTextGenerationResponse(response: OpenAIChatResponse): {
181
- response: {
180
+ processTextGenerationResponse(rawResponse: OpenAIChatResponse): {
181
+ rawResponse: {
182
182
  object: "chat.completion";
183
183
  model: string;
184
184
  usage: {
@@ -251,7 +251,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
251
251
  }>>>;
252
252
  extractTextDelta(delta: unknown): string | undefined;
253
253
  doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
254
- response: {
254
+ rawResponse: {
255
255
  object: "chat.completion";
256
256
  model: string;
257
257
  usage: {
@@ -295,7 +295,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
295
295
  };
296
296
  }>;
297
297
  doGenerateToolCalls(tools: Array<ToolDefinition<string, unknown>>, prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<{
298
- response: {
298
+ rawResponse: {
299
299
  object: "chat.completion";
300
300
  model: string;
301
301
  usage: {
@@ -87,14 +87,14 @@ export class AbstractOpenAIChatModel extends AbstractModel {
87
87
  schema: zodSchema(openAIChatResponseSchema),
88
88
  }));
89
89
  }
90
- processTextGenerationResponse(response) {
90
+ processTextGenerationResponse(rawResponse) {
91
91
  return {
92
- response,
93
- textGenerationResults: response.choices.map((choice) => ({
92
+ rawResponse,
93
+ textGenerationResults: rawResponse.choices.map((choice) => ({
94
94
  text: choice.message.content ?? "",
95
95
  finishReason: this.translateFinishReason(choice.finish_reason),
96
96
  })),
97
- usage: this.extractUsage(response),
97
+ usage: this.extractUsage(rawResponse),
98
98
  };
99
99
  }
100
100
  translateFinishReason(finishReason) {
@@ -130,7 +130,7 @@ export class AbstractOpenAIChatModel extends AbstractModel {
130
130
  return firstChoice.delta.content ?? undefined;
131
131
  }
132
132
  async doGenerateToolCall(tool, prompt, options) {
133
- const response = await this.callAPI(prompt, options, {
133
+ const rawResponse = await this.callAPI(prompt, options, {
134
134
  responseFormat: OpenAIChatResponseFormat.json,
135
135
  toolChoice: {
136
136
  type: "function",
@@ -147,20 +147,20 @@ export class AbstractOpenAIChatModel extends AbstractModel {
147
147
  },
148
148
  ],
149
149
  });
150
- const toolCalls = response.choices[0]?.message.tool_calls;
150
+ const toolCalls = rawResponse.choices[0]?.message.tool_calls;
151
151
  return {
152
- response,
152
+ rawResponse,
153
153
  toolCall: toolCalls == null || toolCalls.length === 0
154
154
  ? null
155
155
  : {
156
156
  id: toolCalls[0].id,
157
157
  args: parseJSON({ text: toolCalls[0].function.arguments }),
158
158
  },
159
- usage: this.extractUsage(response),
159
+ usage: this.extractUsage(rawResponse),
160
160
  };
161
161
  }
162
162
  async doGenerateToolCalls(tools, prompt, options) {
163
- const response = await this.callAPI(prompt, options, {
163
+ const rawResponse = await this.callAPI(prompt, options, {
164
164
  responseFormat: OpenAIChatResponseFormat.json,
165
165
  toolChoice: "auto",
166
166
  tools: tools.map((tool) => ({
@@ -172,16 +172,16 @@ export class AbstractOpenAIChatModel extends AbstractModel {
172
172
  },
173
173
  })),
174
174
  });
175
- const message = response.choices[0]?.message;
175
+ const message = rawResponse.choices[0]?.message;
176
176
  return {
177
- response,
177
+ rawResponse,
178
178
  text: message.content ?? null,
179
179
  toolCalls: message.tool_calls?.map((toolCall) => ({
180
180
  id: toolCall.id,
181
181
  name: toolCall.function.name,
182
182
  args: parseJSON({ text: toolCall.function.arguments }),
183
183
  })) ?? null,
184
- usage: this.extractUsage(response),
184
+ usage: this.extractUsage(rawResponse),
185
185
  };
186
186
  }
187
187
  extractUsage(response) {
@@ -79,19 +79,19 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
79
79
  schema: (0, ZodSchema_js_1.zodSchema)(OpenAICompletionResponseSchema),
80
80
  }));
81
81
  }
82
- processTextGenerationResponse(response) {
82
+ processTextGenerationResponse(rawResponse) {
83
83
  return {
84
- response,
85
- textGenerationResults: response.choices.map((choice) => {
84
+ rawResponse,
85
+ textGenerationResults: rawResponse.choices.map((choice) => {
86
86
  return {
87
87
  finishReason: this.translateFinishReason(choice.finish_reason),
88
88
  text: choice.text,
89
89
  };
90
90
  }),
91
91
  usage: {
92
- promptTokens: response.usage.prompt_tokens,
93
- completionTokens: response.usage.completion_tokens,
94
- totalTokens: response.usage.total_tokens,
92
+ promptTokens: rawResponse.usage.prompt_tokens,
93
+ completionTokens: rawResponse.usage.completion_tokens,
94
+ totalTokens: rawResponse.usage.total_tokens,
95
95
  },
96
96
  };
97
97
  }
@@ -120,6 +120,9 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
120
120
  }
121
121
  return chunk.choices[0].text;
122
122
  }
123
+ withJsonOutput() {
124
+ return this;
125
+ }
123
126
  }
124
127
  exports.AbstractOpenAICompletionModel = AbstractOpenAICompletionModel;
125
128
  const OpenAICompletionResponseSchema = zod_1.z.object({
@@ -31,7 +31,7 @@ export declare abstract class AbstractOpenAICompletionModel<SETTINGS extends Abs
31
31
  responseFormat: OpenAITextResponseFormatType<RESULT>;
32
32
  }): Promise<RESULT>;
33
33
  doGenerateTexts(prompt: string, options: FunctionCallOptions): Promise<{
34
- response: {
34
+ rawResponse: {
35
35
  object: "text_completion";
36
36
  model: string;
37
37
  usage: {
@@ -60,7 +60,7 @@ export declare abstract class AbstractOpenAICompletionModel<SETTINGS extends Abs
60
60
  };
61
61
  }>;
62
62
  restoreGeneratedTexts(rawResponse: unknown): {
63
- response: {
63
+ rawResponse: {
64
64
  object: "text_completion";
65
65
  model: string;
66
66
  usage: {
@@ -103,6 +103,7 @@ export declare abstract class AbstractOpenAICompletionModel<SETTINGS extends Abs
103
103
  system_fingerprint?: string | undefined;
104
104
  }>>>;
105
105
  extractTextDelta(delta: unknown): string | undefined;
106
+ withJsonOutput(): this;
106
107
  }
107
108
  declare const OpenAICompletionResponseSchema: z.ZodObject<{
108
109
  id: z.ZodString;
@@ -76,19 +76,19 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
76
76
  schema: zodSchema(OpenAICompletionResponseSchema),
77
77
  }));
78
78
  }
79
- processTextGenerationResponse(response) {
79
+ processTextGenerationResponse(rawResponse) {
80
80
  return {
81
- response,
82
- textGenerationResults: response.choices.map((choice) => {
81
+ rawResponse,
82
+ textGenerationResults: rawResponse.choices.map((choice) => {
83
83
  return {
84
84
  finishReason: this.translateFinishReason(choice.finish_reason),
85
85
  text: choice.text,
86
86
  };
87
87
  }),
88
88
  usage: {
89
- promptTokens: response.usage.prompt_tokens,
90
- completionTokens: response.usage.completion_tokens,
91
- totalTokens: response.usage.total_tokens,
89
+ promptTokens: rawResponse.usage.prompt_tokens,
90
+ completionTokens: rawResponse.usage.completion_tokens,
91
+ totalTokens: rawResponse.usage.total_tokens,
92
92
  },
93
93
  };
94
94
  }
@@ -117,6 +117,9 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
117
117
  }
118
118
  return chunk.choices[0].text;
119
119
  }
120
+ withJsonOutput() {
121
+ return this;
122
+ }
120
123
  }
121
124
  const OpenAICompletionResponseSchema = z.object({
122
125
  id: z.string(),
@@ -127,12 +127,12 @@ class OpenAIImageGenerationModel extends AbstractModel_js_1.AbstractModel {
127
127
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
128
128
  }
129
129
  async doGenerateImages(prompt, options) {
130
- const response = await this.callAPI(prompt, options, {
130
+ const rawResponse = await this.callAPI(prompt, options, {
131
131
  responseFormat: exports.OpenAIImageGenerationResponseFormat.base64Json,
132
132
  });
133
133
  return {
134
- response,
135
- base64Images: response.data.map((item) => item.b64_json),
134
+ rawResponse,
135
+ base64Images: rawResponse.data.map((item) => item.b64_json),
136
136
  };
137
137
  }
138
138
  withPromptTemplate(promptTemplate) {
@@ -52,7 +52,7 @@ export declare class OpenAIImageGenerationModel extends AbstractModel<OpenAIImag
52
52
  }): Promise<RESULT>;
53
53
  get settingsForEvent(): Partial<OpenAIImageGenerationSettings>;
54
54
  doGenerateImages(prompt: string, options: FunctionCallOptions): Promise<{
55
- response: {
55
+ rawResponse: {
56
56
  data: {
57
57
  b64_json: string;
58
58
  }[];
@@ -123,12 +123,12 @@ export class OpenAIImageGenerationModel extends AbstractModel {
123
123
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
124
124
  }
125
125
  async doGenerateImages(prompt, options) {
126
- const response = await this.callAPI(prompt, options, {
126
+ const rawResponse = await this.callAPI(prompt, options, {
127
127
  responseFormat: OpenAIImageGenerationResponseFormat.base64Json,
128
128
  });
129
129
  return {
130
- response,
131
- base64Images: response.data.map((item) => item.b64_json),
130
+ rawResponse,
131
+ base64Images: rawResponse.data.map((item) => item.b64_json),
132
132
  };
133
133
  }
134
134
  withPromptTemplate(promptTemplate) {
@@ -125,10 +125,10 @@ class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
125
125
  if (texts.length > this.maxValuesPerCall) {
126
126
  throw new Error(`The OpenAI embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
127
127
  }
128
- const response = await this.callAPI(texts, callOptions);
128
+ const rawResponse = await this.callAPI(texts, callOptions);
129
129
  return {
130
- response,
131
- embeddings: response.data.map((data) => data.embedding),
130
+ rawResponse,
131
+ embeddings: rawResponse.data.map((data) => data.embedding),
132
132
  };
133
133
  }
134
134
  withSettings(additionalSettings) {
@@ -50,7 +50,7 @@ export declare class OpenAITextEmbeddingModel extends AbstractModel<OpenAITextEm
50
50
  callAPI(texts: Array<string>, callOptions: FunctionCallOptions): Promise<OpenAITextEmbeddingResponse>;
51
51
  get settingsForEvent(): Partial<OpenAITextEmbeddingModelSettings>;
52
52
  doEmbedValues(texts: string[], callOptions: FunctionCallOptions): Promise<{
53
- response: {
53
+ rawResponse: {
54
54
  object: "list";
55
55
  model: string;
56
56
  usage: {
@@ -120,10 +120,10 @@ export class OpenAITextEmbeddingModel extends AbstractModel {
120
120
  if (texts.length > this.maxValuesPerCall) {
121
121
  throw new Error(`The OpenAI embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
122
122
  }
123
- const response = await this.callAPI(texts, callOptions);
123
+ const rawResponse = await this.callAPI(texts, callOptions);
124
124
  return {
125
- response,
126
- embeddings: response.data.map((data) => data.embedding),
125
+ rawResponse,
126
+ embeddings: rawResponse.data.map((data) => data.embedding),
127
127
  };
128
128
  }
129
129
  withSettings(additionalSettings) {
@@ -55,12 +55,12 @@ class OpenAITranscriptionModel extends AbstractModel_js_1.AbstractModel {
55
55
  return this.settings.model;
56
56
  }
57
57
  async doTranscribe(data, options) {
58
- const response = await this.callAPI(data, options, {
58
+ const rawResponse = await this.callAPI(data, options, {
59
59
  responseFormat: exports.OpenAITranscriptionResponseFormat.verboseJson,
60
60
  });
61
61
  return {
62
- response,
63
- transcription: response.text,
62
+ rawResponse,
63
+ transcription: rawResponse.text,
64
64
  };
65
65
  }
66
66
  async callAPI(data, callOptions, options) {
@@ -66,7 +66,7 @@ export declare class OpenAITranscriptionModel extends AbstractModel<OpenAITransc
66
66
  readonly provider: "openai";
67
67
  get modelName(): "whisper-1";
68
68
  doTranscribe(data: OpenAITranscriptionInput, options: FunctionCallOptions): Promise<{
69
- response: {
69
+ rawResponse: {
70
70
  text: string;
71
71
  duration: number;
72
72
  task: "transcribe";
@@ -51,12 +51,12 @@ export class OpenAITranscriptionModel extends AbstractModel {
51
51
  return this.settings.model;
52
52
  }
53
53
  async doTranscribe(data, options) {
54
- const response = await this.callAPI(data, options, {
54
+ const rawResponse = await this.callAPI(data, options, {
55
55
  responseFormat: OpenAITranscriptionResponseFormat.verboseJson,
56
56
  });
57
57
  return {
58
- response,
59
- transcription: response.text,
58
+ rawResponse,
59
+ transcription: rawResponse.text,
60
60
  };
61
61
  }
62
62
  async callAPI(data, callOptions, options) {
@@ -94,10 +94,10 @@ class StabilityImageGenerationModel extends AbstractModel_js_1.AbstractModel {
94
94
  };
95
95
  }
96
96
  async doGenerateImages(prompt, callOptions) {
97
- const response = await this.callAPI(prompt, callOptions);
97
+ const rawResponse = await this.callAPI(prompt, callOptions);
98
98
  return {
99
- response,
100
- base64Images: response.artifacts.map((artifact) => artifact.base64),
99
+ rawResponse,
100
+ base64Images: rawResponse.artifacts.map((artifact) => artifact.base64),
101
101
  };
102
102
  }
103
103
  withTextPrompt() {
@@ -67,7 +67,7 @@ export declare class StabilityImageGenerationModel extends AbstractModel<Stabili
67
67
  callAPI(input: StabilityImageGenerationPrompt, callOptions: FunctionCallOptions): Promise<StabilityImageGenerationResponse>;
68
68
  get settingsForEvent(): Partial<StabilityImageGenerationSettings>;
69
69
  doGenerateImages(prompt: StabilityImageGenerationPrompt, callOptions: FunctionCallOptions): Promise<{
70
- response: {
70
+ rawResponse: {
71
71
  artifacts: {
72
72
  base64: string;
73
73
  finishReason: "ERROR" | "SUCCESS" | "CONTENT_FILTERED";
@@ -91,10 +91,10 @@ export class StabilityImageGenerationModel extends AbstractModel {
91
91
  };
92
92
  }
93
93
  async doGenerateImages(prompt, callOptions) {
94
- const response = await this.callAPI(prompt, callOptions);
94
+ const rawResponse = await this.callAPI(prompt, callOptions);
95
95
  return {
96
- response,
97
- base64Images: response.artifacts.map((artifact) => artifact.base64),
96
+ rawResponse,
97
+ base64Images: rawResponse.artifacts.map((artifact) => artifact.base64),
98
98
  };
99
99
  }
100
100
  withTextPrompt() {
@@ -26,10 +26,10 @@ class WhisperCppTranscriptionModel extends AbstractModel_js_1.AbstractModel {
26
26
  });
27
27
  }
28
28
  async doTranscribe(data, options) {
29
- const response = await this.callAPI(data, options);
29
+ const rawResponse = await this.callAPI(data, options);
30
30
  return {
31
- response,
32
- transcription: response.text,
31
+ rawResponse,
32
+ transcription: rawResponse.text,
33
33
  };
34
34
  }
35
35
  async callAPI(data, callOptions) {
@@ -16,7 +16,7 @@ export declare class WhisperCppTranscriptionModel extends AbstractModel<WhisperC
16
16
  readonly provider: "whispercpp";
17
17
  readonly modelName: null;
18
18
  doTranscribe(data: WhisperCppTranscriptionInput, options: FunctionCallOptions): Promise<{
19
- response: {
19
+ rawResponse: {
20
20
  text: string;
21
21
  };
22
22
  transcription: string;
@@ -23,10 +23,10 @@ export class WhisperCppTranscriptionModel extends AbstractModel {
23
23
  });
24
24
  }
25
25
  async doTranscribe(data, options) {
26
- const response = await this.callAPI(data, options);
26
+ const rawResponse = await this.callAPI(data, options);
27
27
  return {
28
- response,
29
- transcription: response.text,
28
+ rawResponse,
29
+ transcription: rawResponse.text,
30
30
  };
31
31
  }
32
32
  async callAPI(data, callOptions) {
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "modelfusion",
3
3
  "description": "The TypeScript library for building AI applications.",
4
- "version": "0.117.0",
4
+ "version": "0.119.0",
5
5
  "author": "Lars Grammel",
6
6
  "license": "MIT",
7
7
  "keywords": [
@@ -30,13 +30,13 @@ class TextGenerationToolCallModel {
30
30
  return this.model.settingsForEvent;
31
31
  }
32
32
  async doGenerateToolCall(tool, prompt, options) {
33
- const { response, text, metadata } = await (0, generateText_js_1.generateText)(this.model, this.format.createPrompt(prompt, tool), {
33
+ const { rawResponse, text, metadata } = await (0, generateText_js_1.generateText)(this.model, this.format.createPrompt(prompt, tool), {
34
34
  ...options,
35
35
  fullResponse: true,
36
36
  });
37
37
  try {
38
38
  return {
39
- response,
39
+ rawResponse,
40
40
  toolCall: this.format.extractToolCall(text, tool),
41
41
  usage: metadata?.usage,
42
42
  };
@@ -20,7 +20,7 @@ export declare class TextGenerationToolCallModel<SOURCE_PROMPT, TARGET_PROMPT, M
20
20
  get settings(): TextGenerationModelSettings;
21
21
  get settingsForEvent(): Partial<MODEL["settings"]>;
22
22
  doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: SOURCE_PROMPT, options?: FunctionOptions): Promise<{
23
- response: unknown;
23
+ rawResponse: unknown;
24
24
  toolCall: {
25
25
  id: string;
26
26
  args: unknown;
@@ -27,13 +27,13 @@ export class TextGenerationToolCallModel {
27
27
  return this.model.settingsForEvent;
28
28
  }
29
29
  async doGenerateToolCall(tool, prompt, options) {
30
- const { response, text, metadata } = await generateText(this.model, this.format.createPrompt(prompt, tool), {
30
+ const { rawResponse, text, metadata } = await generateText(this.model, this.format.createPrompt(prompt, tool), {
31
31
  ...options,
32
32
  fullResponse: true,
33
33
  });
34
34
  try {
35
35
  return {
36
- response,
36
+ rawResponse,
37
37
  toolCall: this.format.extractToolCall(text, tool),
38
38
  usage: metadata?.usage,
39
39
  };
@@ -4,7 +4,7 @@ export interface ToolCallGenerationStartedEvent extends BaseModelCallStartedEven
4
4
  }
5
5
  export type ToolCallGenerationFinishedEventResult = {
6
6
  status: "success";
7
- response: unknown;
7
+ rawResponse: unknown;
8
8
  value: unknown;
9
9
  usage?: {
10
10
  promptTokens: number;
@@ -5,7 +5,7 @@ export interface ToolCallGenerationModelSettings extends ModelSettings {
5
5
  }
6
6
  export interface ToolCallGenerationModel<PROMPT, SETTINGS extends ToolCallGenerationModelSettings = ToolCallGenerationModelSettings> extends Model<SETTINGS> {
7
7
  doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: PROMPT, options?: FunctionOptions): PromiseLike<{
8
- response: unknown;
8
+ rawResponse: unknown;
9
9
  toolCall: {
10
10
  id: string;
11
11
  args: unknown;