modelfusion 0.92.1 → 0.93.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (161)
  1. package/README.md +19 -19
  2. package/model-function/{PromptFormat.d.ts → PromptTemplate.d.ts} +2 -2
  3. package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
  4. package/model-function/generate-image/{PromptFormatImageGenerationModel.cjs → PromptTemplateImageGenerationModel.cjs} +11 -11
  5. package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +20 -0
  6. package/model-function/generate-image/{PromptFormatImageGenerationModel.js → PromptTemplateImageGenerationModel.js} +9 -9
  7. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +6 -6
  8. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +4 -4
  9. package/model-function/generate-structure/StructureFromTextGenerationModel.js +6 -6
  10. package/model-function/generate-structure/{StructureFromTextPromptFormat.d.ts → StructureFromTextPromptTemplate.d.ts} +1 -1
  11. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +4 -4
  12. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +2 -2
  13. package/model-function/generate-structure/StructureFromTextStreamingModel.js +4 -4
  14. package/model-function/generate-structure/index.cjs +1 -1
  15. package/model-function/generate-structure/index.d.ts +1 -1
  16. package/model-function/generate-structure/index.js +1 -1
  17. package/model-function/generate-structure/jsonStructurePrompt.d.ts +2 -2
  18. package/model-function/generate-text/{PromptFormatTextGenerationModel.cjs → PromptTemplateTextGenerationModel.cjs} +21 -21
  19. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +35 -0
  20. package/model-function/generate-text/{PromptFormatTextGenerationModel.js → PromptTemplateTextGenerationModel.js} +19 -19
  21. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +38 -0
  22. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +16 -0
  23. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +34 -0
  24. package/model-function/generate-text/TextGenerationModel.d.ts +3 -3
  25. package/model-function/generate-text/TextGenerationPromptTemplate.d.ts +11 -0
  26. package/model-function/generate-text/index.cjs +4 -4
  27. package/model-function/generate-text/index.d.ts +4 -4
  28. package/model-function/generate-text/index.js +4 -4
  29. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.cjs → prompt-template/AlpacaPromptTemplate.cjs} +5 -2
  30. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.d.ts → prompt-template/AlpacaPromptTemplate.d.ts} +5 -5
  31. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.js → prompt-template/AlpacaPromptTemplate.js} +5 -2
  32. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.cjs +31 -0
  33. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.js +29 -0
  34. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.cjs → prompt-template/ChatMLPromptTemplate.cjs} +5 -5
  35. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.d.ts → prompt-template/ChatMLPromptTemplate.d.ts} +7 -7
  36. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.js → prompt-template/ChatMLPromptTemplate.js} +5 -5
  37. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs +49 -0
  38. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js +47 -0
  39. package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.d.ts +1 -1
  40. package/model-function/generate-text/prompt-template/Content.js +1 -0
  41. package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.d.ts +7 -0
  42. package/model-function/generate-text/prompt-template/InstructionPrompt.js +1 -0
  43. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.cjs → prompt-template/Llama2PromptTemplate.cjs} +8 -7
  44. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.d.ts → prompt-template/Llama2PromptTemplate.d.ts} +7 -6
  45. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.js → prompt-template/Llama2PromptTemplate.js} +8 -7
  46. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs +49 -0
  47. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.d.ts +1 -0
  48. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.js +47 -0
  49. package/model-function/generate-text/{prompt-format/TextPromptFormat.cjs → prompt-template/TextPromptTemplate.cjs} +3 -0
  50. package/model-function/generate-text/{prompt-format/TextPromptFormat.d.ts → prompt-template/TextPromptTemplate.d.ts} +4 -4
  51. package/model-function/generate-text/{prompt-format/TextPromptFormat.js → prompt-template/TextPromptTemplate.js} +3 -0
  52. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.cjs +49 -0
  53. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.d.ts +1 -0
  54. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.js +47 -0
  55. package/model-function/generate-text/{prompt-format/VicunaPromptFormat.d.ts → prompt-template/VicunaPromptTemplate.d.ts} +2 -2
  56. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +21 -0
  57. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.d.ts +1 -0
  58. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +19 -0
  59. package/model-function/generate-text/{prompt-format → prompt-template}/index.cjs +6 -6
  60. package/model-function/generate-text/prompt-template/index.d.ts +10 -0
  61. package/model-function/generate-text/prompt-template/index.js +10 -0
  62. package/model-function/index.cjs +2 -2
  63. package/model-function/index.d.ts +2 -2
  64. package/model-function/index.js +2 -2
  65. package/model-provider/anthropic/{AnthropicPromptFormat.cjs → AnthropicPromptTemplate.cjs} +15 -8
  66. package/model-provider/anthropic/AnthropicPromptTemplate.d.ts +17 -0
  67. package/model-provider/anthropic/{AnthropicPromptFormat.js → AnthropicPromptTemplate.js} +15 -8
  68. package/model-provider/anthropic/AnthropicPromptTemplate.test.cjs +49 -0
  69. package/model-provider/anthropic/AnthropicPromptTemplate.test.d.ts +1 -0
  70. package/model-provider/anthropic/AnthropicPromptTemplate.test.js +47 -0
  71. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +12 -12
  72. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +9 -9
  73. package/model-provider/anthropic/AnthropicTextGenerationModel.js +12 -12
  74. package/model-provider/anthropic/index.cjs +2 -2
  75. package/model-provider/anthropic/index.d.ts +1 -1
  76. package/model-provider/anthropic/index.js +1 -1
  77. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +5 -5
  78. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +4 -4
  79. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +5 -5
  80. package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.d.ts +2 -2
  81. package/model-provider/cohere/CohereTextGenerationModel.cjs +10 -10
  82. package/model-provider/cohere/CohereTextGenerationModel.d.ts +7 -7
  83. package/model-provider/cohere/CohereTextGenerationModel.js +10 -10
  84. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +4 -4
  85. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +3 -3
  86. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +4 -4
  87. package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.cjs → LlamaCppBakLLaVA1PromptTemplate.cjs} +1 -1
  88. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +11 -0
  89. package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.js → LlamaCppBakLLaVA1PromptTemplate.js} +1 -1
  90. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +12 -12
  91. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +7 -7
  92. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +12 -12
  93. package/model-provider/llamacpp/index.cjs +2 -2
  94. package/model-provider/llamacpp/index.d.ts +1 -1
  95. package/model-provider/llamacpp/index.js +1 -1
  96. package/model-provider/ollama/OllamaTextGenerationModel.cjs +9 -9
  97. package/model-provider/ollama/OllamaTextGenerationModel.d.ts +7 -7
  98. package/model-provider/ollama/OllamaTextGenerationModel.js +9 -9
  99. package/model-provider/openai/OpenAICompletionModel.cjs +10 -10
  100. package/model-provider/openai/OpenAICompletionModel.d.ts +7 -7
  101. package/model-provider/openai/OpenAICompletionModel.js +10 -10
  102. package/model-provider/openai/OpenAIImageGenerationModel.cjs +4 -4
  103. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +3 -3
  104. package/model-provider/openai/OpenAIImageGenerationModel.js +4 -4
  105. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.cjs +16 -16
  106. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +14 -14
  107. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.js +16 -16
  108. package/model-provider/openai/chat/OpenAIChatMessage.d.ts +1 -1
  109. package/model-provider/openai/chat/OpenAIChatModel.cjs +15 -15
  110. package/model-provider/openai/chat/OpenAIChatModel.d.ts +12 -12
  111. package/model-provider/openai/chat/OpenAIChatModel.js +15 -15
  112. package/model-provider/openai/chat/{OpenAIChatPromptFormat.cjs → OpenAIChatPromptTemplate.cjs} +1 -1
  113. package/model-provider/openai/chat/OpenAIChatPromptTemplate.d.ts +20 -0
  114. package/model-provider/openai/chat/{OpenAIChatPromptFormat.js → OpenAIChatPromptTemplate.js} +1 -1
  115. package/model-provider/openai/index.cjs +2 -2
  116. package/model-provider/openai/index.d.ts +1 -1
  117. package/model-provider/openai/index.js +1 -1
  118. package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +14 -14
  119. package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +11 -11
  120. package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +14 -14
  121. package/model-provider/stability/StabilityImageGenerationModel.cjs +5 -5
  122. package/model-provider/stability/StabilityImageGenerationModel.d.ts +4 -4
  123. package/model-provider/stability/StabilityImageGenerationModel.js +5 -5
  124. package/model-provider/stability/StabilityImageGenerationPrompt.d.ts +2 -2
  125. package/package.json +1 -1
  126. package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +2 -2
  127. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs +6 -6
  128. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +4 -4
  129. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js +6 -6
  130. package/model-function/generate-image/PromptFormatImageGenerationModel.d.ts +0 -20
  131. package/model-function/generate-text/PromptFormatTextGenerationModel.d.ts +0 -35
  132. package/model-function/generate-text/PromptFormatTextStreamingModel.cjs +0 -38
  133. package/model-function/generate-text/PromptFormatTextStreamingModel.d.ts +0 -16
  134. package/model-function/generate-text/PromptFormatTextStreamingModel.js +0 -34
  135. package/model-function/generate-text/TextGenerationPromptFormat.d.ts +0 -11
  136. package/model-function/generate-text/prompt-format/index.d.ts +0 -10
  137. package/model-function/generate-text/prompt-format/index.js +0 -10
  138. package/model-provider/anthropic/AnthropicPromptFormat.d.ts +0 -17
  139. package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.d.ts +0 -11
  140. package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +0 -20
  141. /package/model-function/{PromptFormat.cjs → PromptTemplate.cjs} +0 -0
  142. /package/model-function/{PromptFormat.js → PromptTemplate.js} +0 -0
  143. /package/model-function/generate-structure/{StructureFromTextPromptFormat.cjs → StructureFromTextPromptTemplate.cjs} +0 -0
  144. /package/model-function/generate-structure/{StructureFromTextPromptFormat.js → StructureFromTextPromptTemplate.js} +0 -0
  145. /package/model-function/generate-text/{TextGenerationPromptFormat.cjs → TextGenerationPromptTemplate.cjs} +0 -0
  146. /package/model-function/generate-text/{TextGenerationPromptFormat.js → TextGenerationPromptTemplate.js} +0 -0
  147. /package/model-function/generate-text/{prompt-format/Content.js → prompt-template/AlpacaPromptTemplate.test.d.ts} +0 -0
  148. /package/model-function/generate-text/{prompt-format/InstructionPrompt.js → prompt-template/ChatMLPromptTemplate.test.d.ts} +0 -0
  149. /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.cjs +0 -0
  150. /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.js +0 -0
  151. /package/model-function/generate-text/{prompt-format → prompt-template}/Content.cjs +0 -0
  152. /package/model-function/generate-text/{prompt-format → prompt-template}/Content.d.ts +0 -0
  153. /package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.cjs +0 -0
  154. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.cjs +0 -0
  155. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.d.ts +0 -0
  156. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.js +0 -0
  157. /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.cjs → prompt-template/VicunaPromptTemplate.cjs} +0 -0
  158. /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.js → prompt-template/VicunaPromptTemplate.js} +0 -0
  159. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.cjs +0 -0
  160. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.d.ts +0 -0
  161. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.js +0 -0
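Taken together, the renames above replace the "prompt format" abstraction with "prompt template": PromptFormat becomes PromptTemplate, withPromptFormat becomes withPromptTemplate, the prompt-format/ directory moves to prompt-template/, and the provider-specific *PromptFormat modules become *PromptTemplate modules. The release also adds snapshot tests for the templates and, for the Anthropic instruction template, handling of a responsePrefix field. The TypeScript sketch below is a hedged illustration based on the new test files in this diff, not official documentation; the import from the package root assumes that the anthropic provider index (which re-exports AnthropicPromptTemplate) is itself re-exported by modelfusion's main entry point.

// Hedged sketch based on the AnthropicPromptTemplate tests added in 0.93.0.
// Assumption: AnthropicPromptTemplate is importable from the package root.
import { AnthropicPromptTemplate } from "modelfusion";

// Instruction prompt; responsePrefix handling is new in this release and is
// appended after the "\n\nAssistant:" prefix.
const instructionPrompt = AnthropicPromptTemplate.instruction().format({
  system: "You are a helpful assistant.",
  instruction: "Summarize the release notes.",
  responsePrefix: "Summary:",
});

// Chat prompt; user and assistant turns are joined with the "\n\nHuman:" and
// "\n\nAssistant:" prefixes that the template now stores as constants.
const chatPrompt = AnthropicPromptTemplate.chat().format({
  messages: [
    { role: "user", content: "1st user message" },
    { role: "assistant", content: "assistant message" },
    { role: "user", content: "2nd user message" },
  ],
});

console.log(instructionPrompt);
console.log(chatPrompt);

The per-file diffs follow, starting with the Anthropic provider.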
package/model-provider/anthropic/{AnthropicPromptFormat.js → AnthropicPromptTemplate.js} +15 -8
@@ -1,4 +1,6 @@
- import { validateChatPrompt, } from "../../model-function/generate-text/prompt-format/ChatPrompt.js";
+ import { validateChatPrompt, } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
+ const HUMAN_PREFIX = "\n\nHuman:";
+ const ASSISTANT_PREFIX = "\n\nAssistant:";
  /**
  * Formats a text prompt as an Anthropic prompt.
  */
@@ -6,9 +8,9 @@ export function text() {
  return {
  format(prompt) {
  let text = "";
- text += "\n\nHuman:";
+ text += HUMAN_PREFIX;
  text += prompt;
- text += "\n\nAssistant:";
+ text += ASSISTANT_PREFIX;
  return text;
  },
  stopSequences: [],
@@ -21,9 +23,12 @@ export function instruction() {
  return {
  format(prompt) {
  let text = prompt.system ?? "";
- text += "\n\nHuman:";
+ text += HUMAN_PREFIX;
  text += prompt.instruction;
- text += "\n\nAssistant:";
+ text += ASSISTANT_PREFIX;
+ if (prompt.responsePrefix != null) {
+ text += prompt.responsePrefix;
+ }
  return text;
  },
  stopSequences: [],
@@ -42,11 +47,13 @@ export function chat() {
  for (const { role, content } of prompt.messages) {
  switch (role) {
  case "user": {
- text += `\n\nHuman:${content}`;
+ text += HUMAN_PREFIX;
+ text += content;
  break;
  }
  case "assistant": {
- text += `\n\nAssistant:${content}`;
+ text += ASSISTANT_PREFIX;
+ text += content;
  break;
  }
  default: {
@@ -56,7 +63,7 @@ export function chat() {
  }
  }
  // AI message prefix:
- text += `\n\nAssistant:`;
+ text += ASSISTANT_PREFIX;
  return text;
  },
  stopSequences: [],
package/model-provider/anthropic/AnthropicPromptTemplate.test.cjs +49 -0
@@ -0,0 +1,49 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const AnthropicPromptTemplate_js_1 = require("./AnthropicPromptTemplate.cjs");
+ describe("text prompt", () => {
+ it("should format prompt", () => {
+ const prompt = (0, AnthropicPromptTemplate_js_1.text)().format("prompt");
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("instruction prompt", () => {
+ it("should format prompt with instruction", () => {
+ const prompt = (0, AnthropicPromptTemplate_js_1.instruction)().format({
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with system and instruction", () => {
+ const prompt = (0, AnthropicPromptTemplate_js_1.instruction)().format({
+ system: "system",
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with instruction and response prefix", () => {
+ const prompt = (0, AnthropicPromptTemplate_js_1.instruction)().format({
+ instruction: "instruction",
+ responsePrefix: "response prefix",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("chat prompt", () => {
+ it("should format prompt with user message", () => {
+ const prompt = (0, AnthropicPromptTemplate_js_1.chat)().format({
+ messages: [{ role: "user", content: "user message" }],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with user-assistant-user messages", () => {
+ const prompt = (0, AnthropicPromptTemplate_js_1.chat)().format({
+ messages: [
+ { role: "user", content: "1st user message" },
+ { role: "assistant", content: "assistant message" },
+ { role: "user", content: "2nd user message" },
+ ],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
package/model-provider/anthropic/AnthropicPromptTemplate.test.js +47 -0
@@ -0,0 +1,47 @@
+ import { chat, instruction, text } from "./AnthropicPromptTemplate.js";
+ describe("text prompt", () => {
+ it("should format prompt", () => {
+ const prompt = text().format("prompt");
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("instruction prompt", () => {
+ it("should format prompt with instruction", () => {
+ const prompt = instruction().format({
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with system and instruction", () => {
+ const prompt = instruction().format({
+ system: "system",
+ instruction: "instruction",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with instruction and response prefix", () => {
+ const prompt = instruction().format({
+ instruction: "instruction",
+ responsePrefix: "response prefix",
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
+ describe("chat prompt", () => {
+ it("should format prompt with user message", () => {
+ const prompt = chat().format({
+ messages: [{ role: "user", content: "user message" }],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ it("should format prompt with user-assistant-user messages", () => {
+ const prompt = chat().format({
+ messages: [
+ { role: "user", content: "1st user message" },
+ { role: "assistant", content: "assistant message" },
+ { role: "user", content: "2nd user message" },
+ ],
+ });
+ expect(prompt).toMatchSnapshot();
+ });
+ });
package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +12 -12
@@ -6,13 +6,13 @@ const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndTh
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
  const parseEventSourceStream_js_1 = require("../../util/streaming/parseEventSourceStream.cjs");
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
- const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
+ const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
  const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
  const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
  const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
  const AnthropicApiConfiguration_js_1 = require("./AnthropicApiConfiguration.cjs");
  const AnthropicError_js_1 = require("./AnthropicError.cjs");
- const AnthropicPromptFormat_js_1 = require("./AnthropicPromptFormat.cjs");
+ const AnthropicPromptTemplate_js_1 = require("./AnthropicPromptTemplate.cjs");
  exports.ANTHROPIC_TEXT_GENERATION_MODELS = {
  "claude-instant-1": {
  contextWindowSize: 100000,
@@ -110,32 +110,32 @@ class AnthropicTextGenerationModel extends AbstractModel_js_1.AbstractModel {
  });
  }
  /**
- * Returns this model with a text prompt format.
+ * Returns this model with a text prompt template.
  */
  withTextPrompt() {
- return this.withPromptFormat((0, AnthropicPromptFormat_js_1.text)());
+ return this.withPromptTemplate((0, AnthropicPromptTemplate_js_1.text)());
  }
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
  withInstructionPrompt() {
- return this.withPromptFormat((0, AnthropicPromptFormat_js_1.instruction)());
+ return this.withPromptTemplate((0, AnthropicPromptTemplate_js_1.instruction)());
  }
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
  withChatPrompt() {
- return this.withPromptFormat((0, AnthropicPromptFormat_js_1.chat)());
+ return this.withPromptTemplate((0, AnthropicPromptTemplate_js_1.chat)());
  }
- withPromptFormat(promptFormat) {
- return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+ withPromptTemplate(promptTemplate) {
+ return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
  model: this.withSettings({
  stopSequences: [
  ...(this.settings.stopSequences ?? []),
- ...promptFormat.stopSequences,
+ ...promptTemplate.stopSequences,
  ],
  }),
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +9 -9
@@ -4,9 +4,9 @@ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { ResponseHandler } from "../../core/api/postToApi.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { Delta } from "../../model-function/Delta.js";
- import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+ import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
  import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
- import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
+ import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
  import { CohereTokenizer } from "./CohereTokenizer.js";
  export declare const ANTHROPIC_TEXT_GENERATION_MODELS: {
  "claude-instant-1": {
  contextWindowSize: number;
@@ -59,18 +59,18 @@ export declare class AnthropicTextGenerationModel extends AbstractModel<Anthropi
  }>;
  doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
  /**
- * Returns this model with a text prompt format.
+ * Returns this model with a text prompt template.
  */
- withTextPrompt(): PromptFormatTextStreamingModel<string, string, AnthropicTextGenerationModelSettings, this>;
+ withTextPrompt(): PromptTemplateTextStreamingModel<string, string, AnthropicTextGenerationModelSettings, this>;
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
- withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../index.js").TextInstructionPrompt, string, AnthropicTextGenerationModelSettings, this>;
+ withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").TextInstructionPrompt, string, AnthropicTextGenerationModelSettings, this>;
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
- withChatPrompt(): PromptFormatTextStreamingModel<import("../../index.js").TextChatPrompt, string, AnthropicTextGenerationModelSettings, this>;
- withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, AnthropicTextGenerationModelSettings, this>;
+ withChatPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").TextChatPrompt, string, AnthropicTextGenerationModelSettings, this>;
+ withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, AnthropicTextGenerationModelSettings, this>;
  withSettings(additionalSettings: Partial<AnthropicTextGenerationModelSettings>): this;
  }
  declare const anthropicTextGenerationResponseSchema: z.ZodObject<{
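The generic withPromptTemplate hook keeps the old behavior under the new name: it wraps the model in a PromptTemplateTextStreamingModel and merges the template's stopSequences into the model settings, as the implementation above shows. A hedged sketch of a custom template follows; it assumes TextGenerationPromptTemplate is importable from the package root and that the interface consists of a format function plus stopSequences, the same shape as the built-in templates in this diff. The QuestionPrompt type is hypothetical.

// Hedged sketch of a custom prompt template (shape inferred from this diff).
import type { TextGenerationPromptTemplate } from "modelfusion"; // import path is an assumption

type QuestionPrompt = { question: string }; // hypothetical input type

const questionTemplate: TextGenerationPromptTemplate<QuestionPrompt, string> = {
  // Map the structured prompt to the plain string the model expects.
  format: (prompt) => `\n\nHuman: ${prompt.question}\n\nAssistant:`,
  // Merged into the wrapped model's stopSequences by withPromptTemplate.
  stopSequences: ["\n\nHuman:"],
};

// anthropicModel.withPromptTemplate(questionTemplate) would then return a
// PromptTemplateTextStreamingModel<QuestionPrompt, string, ...> (anthropicModel
// stands in for an existing AnthropicTextGenerationModel instance).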
package/model-provider/anthropic/AnthropicTextGenerationModel.js +12 -12
@@ -3,13 +3,13 @@ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottl
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
  import { parseEventSourceStream } from "../../util/streaming/parseEventSourceStream.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+ import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
  import { AsyncQueue } from "../../util/AsyncQueue.js";
  import { ZodSchema } from "../../core/schema/ZodSchema.js";
  import { parseJSON } from "../../core/schema/parseJSON.js";
  import { AnthropicApiConfiguration } from "./AnthropicApiConfiguration.js";
  import { failedAnthropicCallResponseHandler } from "./AnthropicError.js";
- import { instruction, chat, text } from "./AnthropicPromptFormat.js";
+ import { instruction, chat, text } from "./AnthropicPromptTemplate.js";
  export const ANTHROPIC_TEXT_GENERATION_MODELS = {
  "claude-instant-1": {
  contextWindowSize: 100000,
@@ -107,32 +107,32 @@ export class AnthropicTextGenerationModel extends AbstractModel {
  });
  }
  /**
- * Returns this model with a text prompt format.
+ * Returns this model with a text prompt template.
  */
  withTextPrompt() {
- return this.withPromptFormat(text());
+ return this.withPromptTemplate(text());
  }
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
  withInstructionPrompt() {
- return this.withPromptFormat(instruction());
+ return this.withPromptTemplate(instruction());
  }
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
  withChatPrompt() {
- return this.withPromptFormat(chat());
+ return this.withPromptTemplate(chat());
  }
- withPromptFormat(promptFormat) {
- return new PromptFormatTextStreamingModel({
+ withPromptTemplate(promptTemplate) {
+ return new PromptTemplateTextStreamingModel({
  model: this.withSettings({
  stopSequences: [
  ...(this.settings.stopSequences ?? []),
- ...promptFormat.stopSequences,
+ ...promptTemplate.stopSequences,
  ],
  }),
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
package/model-provider/anthropic/index.cjs +2 -2
@@ -26,11 +26,11 @@ var __importStar = (this && this.__importStar) || function (mod) {
  return result;
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.AnthropicPromptFormat = exports.anthropic = exports.anthropicErrorDataSchema = exports.AnthropicError = void 0;
+ exports.AnthropicPromptTemplate = exports.anthropic = exports.anthropicErrorDataSchema = exports.AnthropicError = void 0;
  __exportStar(require("./AnthropicApiConfiguration.cjs"), exports);
  var AnthropicError_js_1 = require("./AnthropicError.cjs");
  Object.defineProperty(exports, "AnthropicError", { enumerable: true, get: function () { return AnthropicError_js_1.AnthropicError; } });
  Object.defineProperty(exports, "anthropicErrorDataSchema", { enumerable: true, get: function () { return AnthropicError_js_1.anthropicErrorDataSchema; } });
  exports.anthropic = __importStar(require("./AnthropicFacade.cjs"));
- exports.AnthropicPromptFormat = __importStar(require("./AnthropicPromptFormat.cjs"));
+ exports.AnthropicPromptTemplate = __importStar(require("./AnthropicPromptTemplate.cjs"));
  __exportStar(require("./AnthropicTextGenerationModel.cjs"), exports);
package/model-provider/anthropic/index.d.ts +1 -1
@@ -1,5 +1,5 @@
  export * from "./AnthropicApiConfiguration.js";
  export { AnthropicError, anthropicErrorDataSchema } from "./AnthropicError.js";
  export * as anthropic from "./AnthropicFacade.js";
- export * as AnthropicPromptFormat from "./AnthropicPromptFormat.js";
+ export * as AnthropicPromptTemplate from "./AnthropicPromptTemplate.js";
  export * from "./AnthropicTextGenerationModel.js";
package/model-provider/anthropic/index.js +1 -1
@@ -1,5 +1,5 @@
  export * from "./AnthropicApiConfiguration.js";
  export { AnthropicError, anthropicErrorDataSchema } from "./AnthropicError.js";
  export * as anthropic from "./AnthropicFacade.js";
- export * as AnthropicPromptFormat from "./AnthropicPromptFormat.js";
+ export * as AnthropicPromptTemplate from "./AnthropicPromptTemplate.js";
  export * from "./AnthropicTextGenerationModel.js";
package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +5 -5
@@ -5,7 +5,7 @@ const zod_1 = require("zod");
  const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
- const PromptFormatImageGenerationModel_js_1 = require("../../model-function/generate-image/PromptFormatImageGenerationModel.cjs");
+ const PromptTemplateImageGenerationModel_js_1 = require("../../model-function/generate-image/PromptTemplateImageGenerationModel.cjs");
  const Automatic1111ApiConfiguration_js_1 = require("./Automatic1111ApiConfiguration.cjs");
  const Automatic1111Error_js_1 = require("./Automatic1111Error.cjs");
  const Automatic1111ImageGenerationPrompt_js_1 = require("./Automatic1111ImageGenerationPrompt.cjs");
@@ -54,12 +54,12 @@ class Automatic1111ImageGenerationModel extends AbstractModel_js_1.AbstractModel
  };
  }
  withTextPrompt() {
- return this.withPromptFormat((0, Automatic1111ImageGenerationPrompt_js_1.mapBasicPromptToAutomatic1111Format)());
+ return this.withPromptTemplate((0, Automatic1111ImageGenerationPrompt_js_1.mapBasicPromptToAutomatic1111Format)());
  }
- withPromptFormat(promptFormat) {
- return new PromptFormatImageGenerationModel_js_1.PromptFormatImageGenerationModel({
+ withPromptTemplate(promptTemplate) {
+ return new PromptTemplateImageGenerationModel_js_1.PromptTemplateImageGenerationModel({
  model: this,
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +4 -4
@@ -2,9 +2,9 @@ import { z } from "zod";
  import { FunctionOptions } from "../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { PromptFormat } from "../../model-function/PromptFormat.js";
+ import { PromptTemplate } from "../../model-function/PromptTemplate.js";
  import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
- import { PromptFormatImageGenerationModel } from "../../model-function/generate-image/PromptFormatImageGenerationModel.js";
+ import { PromptTemplateImageGenerationModel } from "../../model-function/generate-image/PromptTemplateImageGenerationModel.js";
  import { Automatic1111ImageGenerationPrompt } from "./Automatic1111ImageGenerationPrompt.js";
  /**
  * Create an image generation model that calls the AUTOMATIC1111 Stable Diffusion Web UI API.
@@ -25,8 +25,8 @@ export declare class Automatic1111ImageGenerationModel extends AbstractModel<Aut
  };
  base64Image: string;
  }>;
- withTextPrompt(): PromptFormatImageGenerationModel<string, Automatic1111ImageGenerationPrompt, Automatic1111ImageGenerationSettings, this>;
- withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, Automatic1111ImageGenerationPrompt>): PromptFormatImageGenerationModel<INPUT_PROMPT, Automatic1111ImageGenerationPrompt, Automatic1111ImageGenerationSettings, this>;
+ withTextPrompt(): PromptTemplateImageGenerationModel<string, Automatic1111ImageGenerationPrompt, Automatic1111ImageGenerationSettings, this>;
+ withPromptTemplate<INPUT_PROMPT>(promptTemplate: PromptTemplate<INPUT_PROMPT, Automatic1111ImageGenerationPrompt>): PromptTemplateImageGenerationModel<INPUT_PROMPT, Automatic1111ImageGenerationPrompt, Automatic1111ImageGenerationSettings, this>;
  withSettings(additionalSettings: Automatic1111ImageGenerationSettings): this;
  }
  export interface Automatic1111ImageGenerationSettings extends ImageGenerationModelSettings {
package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +5 -5
@@ -2,7 +2,7 @@ import { z } from "zod";
  import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { PromptFormatImageGenerationModel } from "../../model-function/generate-image/PromptFormatImageGenerationModel.js";
+ import { PromptTemplateImageGenerationModel } from "../../model-function/generate-image/PromptTemplateImageGenerationModel.js";
  import { Automatic1111ApiConfiguration } from "./Automatic1111ApiConfiguration.js";
  import { failedAutomatic1111CallResponseHandler } from "./Automatic1111Error.js";
  import { mapBasicPromptToAutomatic1111Format, } from "./Automatic1111ImageGenerationPrompt.js";
@@ -51,12 +51,12 @@ export class Automatic1111ImageGenerationModel extends AbstractModel {
  };
  }
  withTextPrompt() {
- return this.withPromptFormat(mapBasicPromptToAutomatic1111Format());
+ return this.withPromptTemplate(mapBasicPromptToAutomatic1111Format());
  }
- withPromptFormat(promptFormat) {
- return new PromptFormatImageGenerationModel({
+ withPromptTemplate(promptTemplate) {
+ return new PromptTemplateImageGenerationModel({
  model: this,
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.d.ts +2 -2
@@ -1,4 +1,4 @@
- import { PromptFormat } from "../../model-function/PromptFormat.js";
+ import { PromptTemplate } from "../../model-function/PromptTemplate.js";
  export type Automatic1111ImageGenerationPrompt = {
  prompt: string;
  negativePrompt?: string;
@@ -7,4 +7,4 @@ export type Automatic1111ImageGenerationPrompt = {
  /**
  * Formats a basic text prompt as an Automatic1111 prompt.
  */
- export declare function mapBasicPromptToAutomatic1111Format(): PromptFormat<string, Automatic1111ImageGenerationPrompt>;
+ export declare function mapBasicPromptToAutomatic1111Format(): PromptTemplate<string, Automatic1111ImageGenerationPrompt>;
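The image-generation side gets the same rename: PromptFormat becomes PromptTemplate and PromptFormatImageGenerationModel becomes PromptTemplateImageGenerationModel, with mapBasicPromptToAutomatic1111Format now returning a PromptTemplate<string, Automatic1111ImageGenerationPrompt>. The sketch below is a hedged example of a custom image prompt template; it assumes PromptTemplate only requires a format function (stop sequences are a text-generation concern) and that both types are importable from the package root.

// Hedged sketch; type shape and import path are assumptions noted above.
import type { PromptTemplate, Automatic1111ImageGenerationPrompt } from "modelfusion";

const withDefaultNegativePrompt: PromptTemplate<string, Automatic1111ImageGenerationPrompt> = {
  // Map a plain text prompt onto the Automatic1111 prompt shape from the .d.ts above.
  format: (prompt) => ({
    prompt,
    negativePrompt: "blurry, low quality", // example value
  }),
};

// imageModel.withPromptTemplate(withDefaultNegativePrompt) would wrap the model
// in a PromptTemplateImageGenerationModel, per the Automatic1111 diffs above.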
package/model-provider/cohere/CohereTextGenerationModel.cjs +10 -10
@@ -6,8 +6,8 @@ const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndTh
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
  const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
- const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
- const TextPromptFormat_js_1 = require("../../model-function/generate-text/prompt-format/TextPromptFormat.cjs");
+ const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
+ const TextPromptTemplate_js_1 = require("../../model-function/generate-text/prompt-template/TextPromptTemplate.cjs");
  const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
  const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
  const parseJsonStream_js_1 = require("../../util/streaming/parseJsonStream.cjs");
@@ -134,26 +134,26 @@ class CohereTextGenerationModel extends AbstractModel_js_1.AbstractModel {
  return fullDelta.delta;
  }
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
  withInstructionPrompt() {
- return this.withPromptFormat((0, TextPromptFormat_js_1.instruction)());
+ return this.withPromptTemplate((0, TextPromptTemplate_js_1.instruction)());
  }
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
  withChatPrompt(options) {
- return this.withPromptFormat((0, TextPromptFormat_js_1.chat)(options));
+ return this.withPromptTemplate((0, TextPromptTemplate_js_1.chat)(options));
  }
- withPromptFormat(promptFormat) {
- return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+ withPromptTemplate(promptTemplate) {
+ return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
  model: this.withSettings({
  stopSequences: [
  ...(this.settings.stopSequences ?? []),
- ...promptFormat.stopSequences,
+ ...promptTemplate.stopSequences,
  ],
  }),
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
package/model-provider/cohere/CohereTextGenerationModel.d.ts +7 -7
@@ -4,9 +4,9 @@ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { ResponseHandler } from "../../core/api/postToApi.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
  import { Delta } from "../../model-function/Delta.js";
- import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+ import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
  import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
- import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
+ import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
  import { CohereTokenizer } from "./CohereTokenizer.js";
  export declare const COHERE_TEXT_GENERATION_MODELS: {
  command: {
@@ -85,17 +85,17 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextG
  doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
  extractTextDelta(fullDelta: CohereTextGenerationDelta): string | undefined;
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
- withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../index.js").TextInstructionPrompt, string, CohereTextGenerationModelSettings, this>;
+ withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").TextInstructionPrompt, string, CohereTextGenerationModelSettings, this>;
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
  withChatPrompt(options?: {
  user?: string;
  assistant?: string;
- }): PromptFormatTextStreamingModel<import("../../index.js").TextChatPrompt, string, CohereTextGenerationModelSettings, this>;
- withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, CohereTextGenerationModelSettings, this>;
+ }): PromptTemplateTextStreamingModel<import("../../index.js").TextChatPrompt, string, CohereTextGenerationModelSettings, this>;
+ withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, CohereTextGenerationModelSettings, this>;
  withSettings(additionalSettings: Partial<CohereTextGenerationModelSettings>): this;
  }
  declare const cohereTextGenerationResponseSchema: z.ZodObject<{
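For Cohere, withChatPrompt keeps its optional user/assistant options (presumably the role labels applied when the chat prompt is flattened to plain text by the generic TextPromptTemplate this model imports); only the return type changes from PromptFormatTextStreamingModel to PromptTemplateTextStreamingModel. A hedged sketch, assuming CohereTextGenerationModel is exported from the package root and that cohereModel is an already constructed instance (construction is not part of this diff):

// Hedged sketch; cohereModel stands in for an existing CohereTextGenerationModel.
import type { CohereTextGenerationModel } from "modelfusion"; // import path is an assumption

declare const cohereModel: CohereTextGenerationModel;

// Same call as in 0.92.1; the wrapper type is now PromptTemplateTextStreamingModel.
const chatModel = cohereModel.withChatPrompt({ user: "Customer", assistant: "Support" });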
package/model-provider/cohere/CohereTextGenerationModel.js +10 -10
@@ -3,8 +3,8 @@ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottl
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
  import { ZodSchema } from "../../core/schema/ZodSchema.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
- import { chat, instruction, } from "../../model-function/generate-text/prompt-format/TextPromptFormat.js";
+ import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
+ import { chat, instruction, } from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
  import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
  import { AsyncQueue } from "../../util/AsyncQueue.js";
  import { parseJsonStream } from "../../util/streaming/parseJsonStream.js";
@@ -131,26 +131,26 @@ export class CohereTextGenerationModel extends AbstractModel {
  return fullDelta.delta;
  }
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
  withInstructionPrompt() {
- return this.withPromptFormat(instruction());
+ return this.withPromptTemplate(instruction());
  }
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
  withChatPrompt(options) {
- return this.withPromptFormat(chat(options));
+ return this.withPromptTemplate(chat(options));
  }
- withPromptFormat(promptFormat) {
- return new PromptFormatTextStreamingModel({
+ withPromptTemplate(promptTemplate) {
+ return new PromptTemplateTextStreamingModel({
  model: this.withSettings({
  stopSequences: [
  ...(this.settings.stopSequences ?? []),
- ...promptFormat.stopSequences,
+ ...promptTemplate.stopSequences,
  ],
  }),
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +4 -4
@@ -5,7 +5,7 @@ const zod_1 = require("zod");
  const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
- const PromptFormatTextGenerationModel_js_1 = require("../../model-function/generate-text/PromptFormatTextGenerationModel.cjs");
+ const PromptTemplateTextGenerationModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextGenerationModel.cjs");
  const HuggingFaceApiConfiguration_js_1 = require("./HuggingFaceApiConfiguration.cjs");
  const HuggingFaceError_js_1 = require("./HuggingFaceError.cjs");
  /**
@@ -95,10 +95,10 @@ class HuggingFaceTextGenerationModel extends AbstractModel_js_1.AbstractModel {
  text: response[0].generated_text,
  };
  }
- withPromptFormat(promptFormat) {
- return new PromptFormatTextGenerationModel_js_1.PromptFormatTextGenerationModel({
+ withPromptTemplate(promptTemplate) {
+ return new PromptTemplateTextGenerationModel_js_1.PromptTemplateTextGenerationModel({
  model: this,
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +3 -3
@@ -2,9 +2,9 @@ import { z } from "zod";
  import { FunctionOptions } from "../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { PromptFormatTextGenerationModel } from "../../model-function/generate-text/PromptFormatTextGenerationModel.js";
+ import { PromptTemplateTextGenerationModel } from "../../model-function/generate-text/PromptTemplateTextGenerationModel.js";
  import { TextGenerationModel, TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
- import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
+ import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
  export interface HuggingFaceTextGenerationModelSettings extends TextGenerationModelSettings {
  api?: ApiConfiguration;
  model: string;
@@ -53,7 +53,7 @@ export declare class HuggingFaceTextGenerationModel extends AbstractModel<Huggin
  }[];
  text: string;
  }>;
- withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextGenerationModel<INPUT_PROMPT, string, HuggingFaceTextGenerationModelSettings, this>;
+ withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextGenerationModel<INPUT_PROMPT, string, HuggingFaceTextGenerationModelSettings, this>;
  withSettings(additionalSettings: Partial<HuggingFaceTextGenerationModelSettings>): this;
  }
  declare const huggingFaceTextGenerationResponseSchema: z.ZodArray<z.ZodObject<{