modelfusion 0.92.1 → 0.93.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (161)
  1. package/README.md +19 -19
  2. package/model-function/{PromptFormat.d.ts → PromptTemplate.d.ts} +2 -2
  3. package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
  4. package/model-function/generate-image/{PromptFormatImageGenerationModel.cjs → PromptTemplateImageGenerationModel.cjs} +11 -11
  5. package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +20 -0
  6. package/model-function/generate-image/{PromptFormatImageGenerationModel.js → PromptTemplateImageGenerationModel.js} +9 -9
  7. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +6 -6
  8. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +4 -4
  9. package/model-function/generate-structure/StructureFromTextGenerationModel.js +6 -6
  10. package/model-function/generate-structure/{StructureFromTextPromptFormat.d.ts → StructureFromTextPromptTemplate.d.ts} +1 -1
  11. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +4 -4
  12. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +2 -2
  13. package/model-function/generate-structure/StructureFromTextStreamingModel.js +4 -4
  14. package/model-function/generate-structure/index.cjs +1 -1
  15. package/model-function/generate-structure/index.d.ts +1 -1
  16. package/model-function/generate-structure/index.js +1 -1
  17. package/model-function/generate-structure/jsonStructurePrompt.d.ts +2 -2
  18. package/model-function/generate-text/{PromptFormatTextGenerationModel.cjs → PromptTemplateTextGenerationModel.cjs} +21 -21
  19. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +35 -0
  20. package/model-function/generate-text/{PromptFormatTextGenerationModel.js → PromptTemplateTextGenerationModel.js} +19 -19
  21. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +38 -0
  22. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +16 -0
  23. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +34 -0
  24. package/model-function/generate-text/TextGenerationModel.d.ts +3 -3
  25. package/model-function/generate-text/TextGenerationPromptTemplate.d.ts +11 -0
  26. package/model-function/generate-text/index.cjs +4 -4
  27. package/model-function/generate-text/index.d.ts +4 -4
  28. package/model-function/generate-text/index.js +4 -4
  29. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.cjs → prompt-template/AlpacaPromptTemplate.cjs} +5 -2
  30. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.d.ts → prompt-template/AlpacaPromptTemplate.d.ts} +5 -5
  31. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.js → prompt-template/AlpacaPromptTemplate.js} +5 -2
  32. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.cjs +31 -0
  33. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.js +29 -0
  34. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.cjs → prompt-template/ChatMLPromptTemplate.cjs} +5 -5
  35. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.d.ts → prompt-template/ChatMLPromptTemplate.d.ts} +7 -7
  36. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.js → prompt-template/ChatMLPromptTemplate.js} +5 -5
  37. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs +49 -0
  38. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js +47 -0
  39. package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.d.ts +1 -1
  40. package/model-function/generate-text/prompt-template/Content.js +1 -0
  41. package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.d.ts +7 -0
  42. package/model-function/generate-text/prompt-template/InstructionPrompt.js +1 -0
  43. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.cjs → prompt-template/Llama2PromptTemplate.cjs} +8 -7
  44. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.d.ts → prompt-template/Llama2PromptTemplate.d.ts} +7 -6
  45. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.js → prompt-template/Llama2PromptTemplate.js} +8 -7
  46. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs +49 -0
  47. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.d.ts +1 -0
  48. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.js +47 -0
  49. package/model-function/generate-text/{prompt-format/TextPromptFormat.cjs → prompt-template/TextPromptTemplate.cjs} +3 -0
  50. package/model-function/generate-text/{prompt-format/TextPromptFormat.d.ts → prompt-template/TextPromptTemplate.d.ts} +4 -4
  51. package/model-function/generate-text/{prompt-format/TextPromptFormat.js → prompt-template/TextPromptTemplate.js} +3 -0
  52. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.cjs +49 -0
  53. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.d.ts +1 -0
  54. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.js +47 -0
  55. package/model-function/generate-text/{prompt-format/VicunaPromptFormat.d.ts → prompt-template/VicunaPromptTemplate.d.ts} +2 -2
  56. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +21 -0
  57. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.d.ts +1 -0
  58. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +19 -0
  59. package/model-function/generate-text/{prompt-format → prompt-template}/index.cjs +6 -6
  60. package/model-function/generate-text/prompt-template/index.d.ts +10 -0
  61. package/model-function/generate-text/prompt-template/index.js +10 -0
  62. package/model-function/index.cjs +2 -2
  63. package/model-function/index.d.ts +2 -2
  64. package/model-function/index.js +2 -2
  65. package/model-provider/anthropic/{AnthropicPromptFormat.cjs → AnthropicPromptTemplate.cjs} +15 -8
  66. package/model-provider/anthropic/AnthropicPromptTemplate.d.ts +17 -0
  67. package/model-provider/anthropic/{AnthropicPromptFormat.js → AnthropicPromptTemplate.js} +15 -8
  68. package/model-provider/anthropic/AnthropicPromptTemplate.test.cjs +49 -0
  69. package/model-provider/anthropic/AnthropicPromptTemplate.test.d.ts +1 -0
  70. package/model-provider/anthropic/AnthropicPromptTemplate.test.js +47 -0
  71. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +12 -12
  72. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +9 -9
  73. package/model-provider/anthropic/AnthropicTextGenerationModel.js +12 -12
  74. package/model-provider/anthropic/index.cjs +2 -2
  75. package/model-provider/anthropic/index.d.ts +1 -1
  76. package/model-provider/anthropic/index.js +1 -1
  77. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +5 -5
  78. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +4 -4
  79. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +5 -5
  80. package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.d.ts +2 -2
  81. package/model-provider/cohere/CohereTextGenerationModel.cjs +10 -10
  82. package/model-provider/cohere/CohereTextGenerationModel.d.ts +7 -7
  83. package/model-provider/cohere/CohereTextGenerationModel.js +10 -10
  84. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +4 -4
  85. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +3 -3
  86. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +4 -4
  87. package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.cjs → LlamaCppBakLLaVA1PromptTemplate.cjs} +1 -1
  88. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +11 -0
  89. package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.js → LlamaCppBakLLaVA1PromptTemplate.js} +1 -1
  90. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +12 -12
  91. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +7 -7
  92. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +12 -12
  93. package/model-provider/llamacpp/index.cjs +2 -2
  94. package/model-provider/llamacpp/index.d.ts +1 -1
  95. package/model-provider/llamacpp/index.js +1 -1
  96. package/model-provider/ollama/OllamaTextGenerationModel.cjs +9 -9
  97. package/model-provider/ollama/OllamaTextGenerationModel.d.ts +7 -7
  98. package/model-provider/ollama/OllamaTextGenerationModel.js +9 -9
  99. package/model-provider/openai/OpenAICompletionModel.cjs +10 -10
  100. package/model-provider/openai/OpenAICompletionModel.d.ts +7 -7
  101. package/model-provider/openai/OpenAICompletionModel.js +10 -10
  102. package/model-provider/openai/OpenAIImageGenerationModel.cjs +4 -4
  103. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +3 -3
  104. package/model-provider/openai/OpenAIImageGenerationModel.js +4 -4
  105. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.cjs +16 -16
  106. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +14 -14
  107. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.js +16 -16
  108. package/model-provider/openai/chat/OpenAIChatMessage.d.ts +1 -1
  109. package/model-provider/openai/chat/OpenAIChatModel.cjs +15 -15
  110. package/model-provider/openai/chat/OpenAIChatModel.d.ts +12 -12
  111. package/model-provider/openai/chat/OpenAIChatModel.js +15 -15
  112. package/model-provider/openai/chat/{OpenAIChatPromptFormat.cjs → OpenAIChatPromptTemplate.cjs} +1 -1
  113. package/model-provider/openai/chat/OpenAIChatPromptTemplate.d.ts +20 -0
  114. package/model-provider/openai/chat/{OpenAIChatPromptFormat.js → OpenAIChatPromptTemplate.js} +1 -1
  115. package/model-provider/openai/index.cjs +2 -2
  116. package/model-provider/openai/index.d.ts +1 -1
  117. package/model-provider/openai/index.js +1 -1
  118. package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +14 -14
  119. package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +11 -11
  120. package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +14 -14
  121. package/model-provider/stability/StabilityImageGenerationModel.cjs +5 -5
  122. package/model-provider/stability/StabilityImageGenerationModel.d.ts +4 -4
  123. package/model-provider/stability/StabilityImageGenerationModel.js +5 -5
  124. package/model-provider/stability/StabilityImageGenerationPrompt.d.ts +2 -2
  125. package/package.json +1 -1
  126. package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +2 -2
  127. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs +6 -6
  128. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +4 -4
  129. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js +6 -6
  130. package/model-function/generate-image/PromptFormatImageGenerationModel.d.ts +0 -20
  131. package/model-function/generate-text/PromptFormatTextGenerationModel.d.ts +0 -35
  132. package/model-function/generate-text/PromptFormatTextStreamingModel.cjs +0 -38
  133. package/model-function/generate-text/PromptFormatTextStreamingModel.d.ts +0 -16
  134. package/model-function/generate-text/PromptFormatTextStreamingModel.js +0 -34
  135. package/model-function/generate-text/TextGenerationPromptFormat.d.ts +0 -11
  136. package/model-function/generate-text/prompt-format/index.d.ts +0 -10
  137. package/model-function/generate-text/prompt-format/index.js +0 -10
  138. package/model-provider/anthropic/AnthropicPromptFormat.d.ts +0 -17
  139. package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.d.ts +0 -11
  140. package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +0 -20
  141. /package/model-function/{PromptFormat.cjs → PromptTemplate.cjs} +0 -0
  142. /package/model-function/{PromptFormat.js → PromptTemplate.js} +0 -0
  143. /package/model-function/generate-structure/{StructureFromTextPromptFormat.cjs → StructureFromTextPromptTemplate.cjs} +0 -0
  144. /package/model-function/generate-structure/{StructureFromTextPromptFormat.js → StructureFromTextPromptTemplate.js} +0 -0
  145. /package/model-function/generate-text/{TextGenerationPromptFormat.cjs → TextGenerationPromptTemplate.cjs} +0 -0
  146. /package/model-function/generate-text/{TextGenerationPromptFormat.js → TextGenerationPromptTemplate.js} +0 -0
  147. /package/model-function/generate-text/{prompt-format/Content.js → prompt-template/AlpacaPromptTemplate.test.d.ts} +0 -0
  148. /package/model-function/generate-text/{prompt-format/InstructionPrompt.js → prompt-template/ChatMLPromptTemplate.test.d.ts} +0 -0
  149. /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.cjs +0 -0
  150. /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.js +0 -0
  151. /package/model-function/generate-text/{prompt-format → prompt-template}/Content.cjs +0 -0
  152. /package/model-function/generate-text/{prompt-format → prompt-template}/Content.d.ts +0 -0
  153. /package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.cjs +0 -0
  154. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.cjs +0 -0
  155. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.d.ts +0 -0
  156. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.js +0 -0
  157. /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.cjs → prompt-template/VicunaPromptTemplate.cjs} +0 -0
  158. /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.js → prompt-template/VicunaPromptTemplate.js} +0 -0
  159. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.cjs +0 -0
  160. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.d.ts +0 -0
  161. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.js +0 -0
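
All 161 entries belong to one mechanical rename: "prompt format" becomes "prompt template" throughout the public API (withPromptFormat → withPromptTemplate, PromptFormat* classes and types → PromptTemplate*, and the prompt-format/ directory → prompt-template/). For calling code the migration looks roughly like the sketch below; the HuggingFace model class comes from the first hunk, while the model id and the template body are illustrative and not taken from this diff.

import { HuggingFaceTextGenerationModel } from "modelfusion";

// 0.92.x called model.withPromptFormat(template); 0.93.x renames it:
const model = new HuggingFaceTextGenerationModel({
  model: "tiiuae/falcon-7b-instruct", // illustrative model id
}).withPromptTemplate({
  // a TextGenerationPromptTemplate: a format function plus stop sequences
  format: (instruction: string) => `${instruction}\n\n`,
  stopSequences: [],
});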

package/model-provider/huggingface/HuggingFaceTextGenerationModel.js
@@ -2,7 +2,7 @@ import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { PromptFormatTextGenerationModel } from "../../model-function/generate-text/PromptFormatTextGenerationModel.js";
+import { PromptTemplateTextGenerationModel } from "../../model-function/generate-text/PromptTemplateTextGenerationModel.js";
 import { HuggingFaceApiConfiguration } from "./HuggingFaceApiConfiguration.js";
 import { failedHuggingFaceCallResponseHandler } from "./HuggingFaceError.js";
 /**
@@ -92,10 +92,10 @@ export class HuggingFaceTextGenerationModel extends AbstractModel {
             text: response[0].generated_text,
         };
     }
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextGenerationModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextGenerationModel({
             model: this,
-            promptFormat,
+            promptTemplate,
         });
     }
     withSettings(additionalSettings) {

package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.cjs → LlamaCppBakLLaVA1PromptTemplate.cjs}
@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.chat = exports.instruction = void 0;
-const ChatPrompt_js_1 = require("../../model-function/generate-text/prompt-format/ChatPrompt.cjs");
+const ChatPrompt_js_1 = require("../../model-function/generate-text/prompt-template/ChatPrompt.cjs");
 // default Vicuna 1 system message
 const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial intelligence assistant. " +
     "The assistant gives helpful, detailed, and polite answers to the user's questions.";

package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts (new)
@@ -0,0 +1,11 @@
+import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
+import { MultiModalChatPrompt } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
+import { MultiModalInstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
+import { LlamaCppTextGenerationPrompt } from "./LlamaCppTextGenerationModel.js";
+/**
+ * BakLLaVA 1 uses a Vicuna 1 prompt. This mapping combines it with the LlamaCpp prompt structure.
+ *
+ * @see https://github.com/SkunkworksAI/BakLLaVA
+ */
+export declare function instruction(): TextGenerationPromptTemplate<MultiModalInstructionPrompt, LlamaCppTextGenerationPrompt>;
+export declare function chat(): TextGenerationPromptTemplate<MultiModalChatPrompt, LlamaCppTextGenerationPrompt>;

package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.js → LlamaCppBakLLaVA1PromptTemplate.js}
@@ -1,4 +1,4 @@
-import { validateChatPrompt, } from "../../model-function/generate-text/prompt-format/ChatPrompt.js";
+import { validateChatPrompt, } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
 // default Vicuna 1 system message
 const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial intelligence assistant. " +
     "The assistant gives helpful, detailed, and polite answers to the user's questions.";

package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs
@@ -6,7 +6,7 @@ const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndTh
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
 const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
-const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
+const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
 const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
 const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
 const parseEventSourceStream_js_1 = require("../../util/streaming/parseEventSourceStream.cjs");
@@ -101,7 +101,7 @@ class LlamaCppTextGenerationModel extends AbstractModel_js_1.AbstractModel {
         });
     }
     withTextPrompt() {
-        return this.withPromptFormat({
+        return this.withPromptTemplate({
             format(prompt) {
                 return { text: prompt };
             },
@@ -109,31 +109,31 @@ class LlamaCppTextGenerationModel extends AbstractModel_js_1.AbstractModel {
         });
     }
     /**
-     * Maps the prompt for a text version of the Llama.cpp prompt format (without image support).
+     * Maps the prompt for a text version of the Llama.cpp prompt template (without image support).
      */
-    withTextPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+    withTextPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
             model: this.withTextPrompt().withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     /**
-     * Maps the prompt for the full Llama.cpp prompt format (incl. image support).
+     * Maps the prompt for the full Llama.cpp prompt template (incl. image support).
      */
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
             model: this.withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     withSettings(additionalSettings) {

package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts
@@ -4,9 +4,9 @@ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { Delta } from "../../model-function/Delta.js";
-import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
-import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
+import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
 import { LlamaCppTokenizer } from "./LlamaCppTokenizer.js";
 export interface LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE extends number | undefined> extends TextGenerationModelSettings {
     api?: ApiConfiguration;
@@ -110,15 +110,15 @@ export declare class LlamaCppTextGenerationModel<CONTEXT_WINDOW_SIZE extends num
        };
    }>;
    doStreamText(prompt: LlamaCppTextGenerationPrompt, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
-   withTextPrompt(): PromptFormatTextStreamingModel<string, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
+   withTextPrompt(): PromptTemplateTextStreamingModel<string, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
    /**
-    * Maps the prompt for a text version of the Llama.cpp prompt format (without image support).
+    * Maps the prompt for a text version of the Llama.cpp prompt template (without image support).
     */
-   withTextPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, PromptFormatTextStreamingModel<string, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>>;
+   withTextPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, PromptTemplateTextStreamingModel<string, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>>;
    /**
-    * Maps the prompt for the full Llama.cpp prompt format (incl. image support).
+    * Maps the prompt for the full Llama.cpp prompt template (incl. image support).
     */
-   withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, LlamaCppTextGenerationPrompt>): PromptFormatTextStreamingModel<INPUT_PROMPT, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
+   withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, LlamaCppTextGenerationPrompt>): PromptTemplateTextStreamingModel<INPUT_PROMPT, LlamaCppTextGenerationPrompt, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
    withSettings(additionalSettings: Partial<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): this;
 }
 declare const llamaCppTextGenerationResponseSchema: z.ZodObject<{

package/model-provider/llamacpp/LlamaCppTextGenerationModel.js
@@ -3,7 +3,7 @@ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottl
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
 import { ZodSchema } from "../../core/schema/ZodSchema.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { AsyncQueue } from "../../util/AsyncQueue.js";
 import { parseJSON } from "../../core/schema/parseJSON.js";
 import { parseEventSourceStream } from "../../util/streaming/parseEventSourceStream.js";
@@ -98,7 +98,7 @@ export class LlamaCppTextGenerationModel extends AbstractModel {
         });
     }
     withTextPrompt() {
-        return this.withPromptFormat({
+        return this.withPromptTemplate({
             format(prompt) {
                 return { text: prompt };
             },
@@ -106,31 +106,31 @@ export class LlamaCppTextGenerationModel extends AbstractModel {
         });
     }
     /**
-     * Maps the prompt for a text version of the Llama.cpp prompt format (without image support).
+     * Maps the prompt for a text version of the Llama.cpp prompt template (without image support).
      */
-    withTextPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel({
+    withTextPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel({
             model: this.withTextPrompt().withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     /**
-     * Maps the prompt for the full Llama.cpp prompt format (incl. image support).
+     * Maps the prompt for the full Llama.cpp prompt template (incl. image support).
      */
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel({
            model: this.withSettings({
                stopSequences: [
                    ...(this.settings.stopSequences ?? []),
-                   ...promptFormat.stopSequences,
+                   ...promptTemplate.stopSequences,
                ],
            }),
-           promptFormat,
+           promptTemplate,
        });
    }
    withSettings(additionalSettings) {
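
As the last two hunks show, withTextPromptTemplate and withPromptTemplate append the template's stopSequences to the model's own stop sequences when wrapping. A minimal sketch, assuming an illustrative model-level stop sequence and template:

import { LlamaCppTextGenerationModel } from "modelfusion";

const model = new LlamaCppTextGenerationModel({
  stopSequences: ["\n\n"], // illustrative model-level stop sequence
}).withTextPromptTemplate({
  format: (instruction: string) =>
    `### Instruction:\n${instruction}\n\n### Response:\n`,
  stopSequences: ["###"], // merged with "\n\n" by the wrapper above
});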

package/model-provider/llamacpp/index.cjs
@@ -26,9 +26,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.llamacpp = exports.LlamaCppError = exports.LlamaCppBakLLaVA1Format = void 0;
+exports.llamacpp = exports.LlamaCppError = exports.LlamaCppBakLLaVA1Prompt = void 0;
 __exportStar(require("./LlamaCppApiConfiguration.cjs"), exports);
-exports.LlamaCppBakLLaVA1Format = __importStar(require("./LlamaCppBakLLaVA1Format.cjs"));
+exports.LlamaCppBakLLaVA1Prompt = __importStar(require("./LlamaCppBakLLaVA1PromptTemplate.cjs"));
 var LlamaCppError_js_1 = require("./LlamaCppError.cjs");
 Object.defineProperty(exports, "LlamaCppError", { enumerable: true, get: function () { return LlamaCppError_js_1.LlamaCppError; } });
 exports.llamacpp = __importStar(require("./LlamaCppFacade.cjs"));

package/model-provider/llamacpp/index.d.ts
@@ -1,5 +1,5 @@
 export * from "./LlamaCppApiConfiguration.js";
-export * as LlamaCppBakLLaVA1Format from "./LlamaCppBakLLaVA1Format.js";
+export * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
 export { LlamaCppError, LlamaCppErrorData } from "./LlamaCppError.js";
 export * as llamacpp from "./LlamaCppFacade.js";
 export * from "./LlamaCppTextEmbeddingModel.js";

package/model-provider/llamacpp/index.js
@@ -1,5 +1,5 @@
 export * from "./LlamaCppApiConfiguration.js";
-export * as LlamaCppBakLLaVA1Format from "./LlamaCppBakLLaVA1Format.js";
+export * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
 export { LlamaCppError } from "./LlamaCppError.js";
 export * as llamacpp from "./LlamaCppFacade.js";
 export * from "./LlamaCppTextEmbeddingModel.js";
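
Code that imported the BakLLaVA 1 helpers must follow the namespace rename shown above. A sketch combining the renamed export with the declarations from LlamaCppBakLLaVA1PromptTemplate.d.ts (the empty settings object is illustrative):

import {
  LlamaCppBakLLaVA1Prompt, // was: LlamaCppBakLLaVA1Format
  LlamaCppTextGenerationModel,
} from "modelfusion";

// instruction() returns a TextGenerationPromptTemplate whose target type is
// LlamaCppTextGenerationPrompt, which withPromptTemplate accepts directly.
const multiModalModel = new LlamaCppTextGenerationModel({}).withPromptTemplate(
  LlamaCppBakLLaVA1Prompt.instruction()
);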

package/model-provider/ollama/OllamaTextGenerationModel.cjs
@@ -8,7 +8,7 @@ const postToApi_js_1 = require("../../core/api/postToApi.cjs");
 const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
 const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
-const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
+const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
 const TextGenerationToolCallModel_js_1 = require("../../tool/generate-tool-call/TextGenerationToolCallModel.cjs");
 const TextGenerationToolCallsOrGenerateTextModel_js_1 = require("../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs");
 const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
@@ -98,27 +98,27 @@ class OllamaTextGenerationModel extends AbstractModel_js_1.AbstractModel {
             responseFormat: exports.OllamaTextGenerationResponseFormat.deltaIterable,
         });
     }
-    asToolCallGenerationModel(promptFormat) {
+    asToolCallGenerationModel(promptTemplate) {
         return new TextGenerationToolCallModel_js_1.TextGenerationToolCallModel({
             model: this,
-            format: promptFormat,
+            format: promptTemplate,
         });
     }
-    asToolCallsOrTextGenerationModel(promptFormat) {
+    asToolCallsOrTextGenerationModel(promptTemplate) {
         return new TextGenerationToolCallsOrGenerateTextModel_js_1.TextGenerationToolCallsOrGenerateTextModel({
             model: this,
-            format: promptFormat,
+            template: promptTemplate,
         });
     }
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
             model: this.withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     withSettings(additionalSettings) {

package/model-provider/ollama/OllamaTextGenerationModel.d.ts
@@ -4,11 +4,11 @@ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { Delta } from "../../model-function/Delta.js";
-import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
-import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
-import { TextGenerationToolCallModel, ToolCallPromptFormat } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
-import { TextGenerationToolCallsOrGenerateTextModel, ToolCallsOrGenerateTextPromptFormat } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
+import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
+import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
+import { TextGenerationToolCallsOrGenerateTextModel, ToolCallsOrGenerateTextPromptTemplate } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
 /**
  * @see https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion
  */
@@ -137,9 +137,9 @@ export declare class OllamaTextGenerationModel<CONTEXT_WINDOW_SIZE extends numbe
        text: string;
    }>;
    doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
-   asToolCallGenerationModel<INPUT_PROMPT>(promptFormat: ToolCallPromptFormat<INPUT_PROMPT, string>): TextGenerationToolCallModel<INPUT_PROMPT, string, this>;
-   asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptFormat: ToolCallsOrGenerateTextPromptFormat<INPUT_PROMPT, string>): TextGenerationToolCallsOrGenerateTextModel<INPUT_PROMPT, string, this>;
-   withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, OllamaTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
+   asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallPromptTemplate<INPUT_PROMPT, string>): TextGenerationToolCallModel<INPUT_PROMPT, string, this>;
+   asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallsOrGenerateTextPromptTemplate<INPUT_PROMPT, string>): TextGenerationToolCallsOrGenerateTextModel<INPUT_PROMPT, string, this>;
+   withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, OllamaTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
    withSettings(additionalSettings: Partial<OllamaTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): this;
 }
 declare const ollamaTextGenerationResponseSchema: z.ZodObject<{

package/model-provider/ollama/OllamaTextGenerationModel.js
@@ -5,7 +5,7 @@ import { postJsonToApi } from "../../core/api/postToApi.js";
 import { ZodSchema } from "../../core/schema/ZodSchema.js";
 import { safeParseJSON } from "../../core/schema/parseJSON.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextGenerationToolCallModel, } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
 import { TextGenerationToolCallsOrGenerateTextModel, } from "../../tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js";
 import { AsyncQueue } from "../../util/AsyncQueue.js";
@@ -95,27 +95,27 @@ export class OllamaTextGenerationModel extends AbstractModel {
             responseFormat: OllamaTextGenerationResponseFormat.deltaIterable,
         });
     }
-    asToolCallGenerationModel(promptFormat) {
+    asToolCallGenerationModel(promptTemplate) {
         return new TextGenerationToolCallModel({
             model: this,
-            format: promptFormat,
+            format: promptTemplate,
         });
     }
-    asToolCallsOrTextGenerationModel(promptFormat) {
+    asToolCallsOrTextGenerationModel(promptTemplate) {
         return new TextGenerationToolCallsOrGenerateTextModel({
             model: this,
-            format: promptFormat,
+            template: promptTemplate,
         });
     }
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel({
             model: this.withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
        });
    }
    withSettings(additionalSettings) {
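
Note one asymmetry in the Ollama hunks above: the constructor key passed to TextGenerationToolCallsOrGenerateTextModel changes from format to template, while TextGenerationToolCallModel keeps format. Plain prompt mapping follows the same pattern as the other providers; a sketch with an illustrative model tag and template:

import { OllamaTextGenerationModel } from "modelfusion";

const model = new OllamaTextGenerationModel({
  model: "mistral", // illustrative Ollama model tag
}).withPromptTemplate({
  format: (instruction: string) => `[INST] ${instruction} [/INST]`,
  stopSequences: ["</s>"],
});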

package/model-provider/openai/OpenAICompletionModel.cjs
@@ -7,8 +7,8 @@ const postToApi_js_1 = require("../../core/api/postToApi.cjs");
 const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
 const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
-const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
-const TextPromptFormat_js_1 = require("../../model-function/generate-text/prompt-format/TextPromptFormat.cjs");
+const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
+const TextPromptTemplate_js_1 = require("../../model-function/generate-text/prompt-template/TextPromptTemplate.cjs");
 const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
 const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
 const parseEventSourceStream_js_1 = require("../../util/streaming/parseEventSourceStream.cjs");
@@ -239,26 +239,26 @@ class OpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
         });
     }
     /**
-     * Returns this model with an instruction prompt format.
+     * Returns this model with an instruction prompt template.
      */
     withInstructionPrompt() {
-        return this.withPromptFormat((0, TextPromptFormat_js_1.instruction)());
+        return this.withPromptTemplate((0, TextPromptTemplate_js_1.instruction)());
     }
     /**
-     * Returns this model with a chat prompt format.
+     * Returns this model with a chat prompt template.
      */
     withChatPrompt(options) {
-        return this.withPromptFormat((0, TextPromptFormat_js_1.chat)(options));
+        return this.withPromptTemplate((0, TextPromptTemplate_js_1.chat)(options));
     }
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
             model: this.withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     withSettings(additionalSettings) {

package/model-provider/openai/OpenAICompletionModel.d.ts
@@ -4,9 +4,9 @@ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { Delta } from "../../model-function/Delta.js";
-import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
-import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
+import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
 import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
 /**
  * @see https://platform.openai.com/docs/models/
@@ -174,17 +174,17 @@ export declare class OpenAICompletionModel extends AbstractModel<OpenAICompletio
    }>;
    doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
    /**
-    * Returns this model with an instruction prompt format.
+    * Returns this model with an instruction prompt template.
     */
-   withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../index.js").TextInstructionPrompt, string, OpenAICompletionModelSettings, this>;
+   withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").TextInstructionPrompt, string, OpenAICompletionModelSettings, this>;
    /**
-    * Returns this model with a chat prompt format.
+    * Returns this model with a chat prompt template.
     */
    withChatPrompt(options?: {
        user?: string;
        assistant?: string;
-   }): PromptFormatTextStreamingModel<import("../../index.js").TextChatPrompt, string, OpenAICompletionModelSettings, this>;
-   withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, OpenAICompletionModelSettings, this>;
+   }): PromptTemplateTextStreamingModel<import("../../index.js").TextChatPrompt, string, OpenAICompletionModelSettings, this>;
+   withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, OpenAICompletionModelSettings, this>;
    withSettings(additionalSettings: Partial<OpenAICompletionModelSettings>): this;
 }
 declare const OpenAICompletionResponseSchema: z.ZodObject<{

package/model-provider/openai/OpenAICompletionModel.js
@@ -4,8 +4,8 @@ import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postTo
 import { ZodSchema } from "../../core/schema/ZodSchema.js";
 import { parseJSON } from "../../core/schema/parseJSON.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
-import { chat, instruction, } from "../../model-function/generate-text/prompt-format/TextPromptFormat.js";
+import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
+import { chat, instruction, } from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
 import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
 import { AsyncQueue } from "../../util/AsyncQueue.js";
 import { parseEventSourceStream } from "../../util/streaming/parseEventSourceStream.js";
@@ -233,26 +233,26 @@ export class OpenAICompletionModel extends AbstractModel {
         });
     }
     /**
-     * Returns this model with an instruction prompt format.
+     * Returns this model with an instruction prompt template.
      */
     withInstructionPrompt() {
-        return this.withPromptFormat(instruction());
+        return this.withPromptTemplate(instruction());
     }
     /**
-     * Returns this model with a chat prompt format.
+     * Returns this model with a chat prompt template.
      */
     withChatPrompt(options) {
-        return this.withPromptFormat(chat(options));
+        return this.withPromptTemplate(chat(options));
     }
-    withPromptFormat(promptFormat) {
-        return new PromptFormatTextStreamingModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateTextStreamingModel({
             model: this.withSettings({
                 stopSequences: [
                     ...(this.settings.stopSequences ?? []),
-                    ...promptFormat.stopSequences,
+                    ...promptTemplate.stopSequences,
                 ],
             }),
-            promptFormat,
+            promptTemplate,
         });
     }
     withSettings(additionalSettings) {
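
withInstructionPrompt() and withChatPrompt() remain thin wrappers that call withPromptTemplate with the built-in text templates, as the hunk above shows. A usage sketch; the model id, the prompt, and the positional streamText(model, prompt) call reflect the 0.93-era API as best understood and are not part of this diff:

import { OpenAICompletionModel, streamText } from "modelfusion";

// Wraps the completion model with the built-in instruction text template.
const model = new OpenAICompletionModel({
  model: "gpt-3.5-turbo-instruct", // illustrative completion model
}).withInstructionPrompt();

const textStream = await streamText(model, {
  instruction: "Summarize the prompt-format → prompt-template rename.",
});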

package/model-provider/openai/OpenAIImageGenerationModel.cjs
@@ -5,7 +5,7 @@ const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
-const PromptFormatImageGenerationModel_js_1 = require("../../model-function/generate-image/PromptFormatImageGenerationModel.cjs");
+const PromptTemplateImageGenerationModel_js_1 = require("../../model-function/generate-image/PromptTemplateImageGenerationModel.cjs");
 const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
 const OpenAIError_js_1 = require("./OpenAIError.cjs");
 exports.OPENAI_IMAGE_MODELS = {
@@ -122,10 +122,10 @@ class OpenAIImageGenerationModel extends AbstractModel_js_1.AbstractModel {
             base64Image: response.data[0].b64_json,
         };
     }
-    withPromptFormat(promptFormat) {
-        return new PromptFormatImageGenerationModel_js_1.PromptFormatImageGenerationModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateImageGenerationModel_js_1.PromptTemplateImageGenerationModel({
            model: this,
-           promptFormat,
+           promptTemplate,
        });
    }
    withSettings(additionalSettings) {

package/model-provider/openai/OpenAIImageGenerationModel.d.ts
@@ -3,9 +3,9 @@ import { FunctionOptions } from "../../core/FunctionOptions.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ResponseHandler } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { PromptFormat } from "../../model-function/PromptFormat.js";
+import { PromptTemplate } from "../../model-function/PromptTemplate.js";
 import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
-import { PromptFormatImageGenerationModel } from "../../model-function/generate-image/PromptFormatImageGenerationModel.js";
+import { PromptTemplateImageGenerationModel } from "../../model-function/generate-image/PromptTemplateImageGenerationModel.js";
 export declare const OPENAI_IMAGE_MODELS: {
     "dall-e-2": {
         getCost(settings: OpenAIImageGenerationSettings): 2000 | 1800 | 1600 | null;
@@ -61,7 +61,7 @@ export declare class OpenAIImageGenerationModel extends AbstractModel<OpenAIImag
        };
        base64Image: string;
    }>;
-   withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatImageGenerationModel<INPUT_PROMPT, string, OpenAIImageGenerationSettings, this>;
+   withPromptTemplate<INPUT_PROMPT>(promptTemplate: PromptTemplate<INPUT_PROMPT, string>): PromptTemplateImageGenerationModel<INPUT_PROMPT, string, OpenAIImageGenerationSettings, this>;
    withSettings(additionalSettings: Partial<OpenAIImageGenerationSettings>): this;
 }
 export type OpenAIImageGenerationResponseFormatType<T> = {

package/model-provider/openai/OpenAIImageGenerationModel.js
@@ -2,7 +2,7 @@ import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { PromptFormatImageGenerationModel } from "../../model-function/generate-image/PromptFormatImageGenerationModel.js";
+import { PromptTemplateImageGenerationModel } from "../../model-function/generate-image/PromptTemplateImageGenerationModel.js";
 import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
 import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
 export const OPENAI_IMAGE_MODELS = {
@@ -118,10 +118,10 @@ export class OpenAIImageGenerationModel extends AbstractModel {
             base64Image: response.data[0].b64_json,
         };
     }
-    withPromptFormat(promptFormat) {
-        return new PromptFormatImageGenerationModel({
+    withPromptTemplate(promptTemplate) {
+        return new PromptTemplateImageGenerationModel({
            model: this,
-           promptFormat,
+           promptTemplate,
        });
    }
    withSettings(additionalSettings) {
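
For image generation models the wrapper takes a plain PromptTemplate, which per the .d.ts hunk above carries only a format function (no stop sequences). A closing sketch; the settings values are illustrative:

import { OpenAIImageGenerationModel } from "modelfusion";

const paintingModel = new OpenAIImageGenerationModel({
  model: "dall-e-2", // listed in OPENAI_IMAGE_MODELS above
  size: "512x512", // illustrative setting
}).withPromptTemplate({
  format: (subject: string) => `an oil painting of ${subject}, soft light`,
});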