modelfusion 0.92.1 → 0.93.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (161)
  1. package/README.md +19 -19
  2. package/model-function/{PromptFormat.d.ts → PromptTemplate.d.ts} +2 -2
  3. package/model-function/generate-image/ImageGenerationModel.d.ts +2 -2
  4. package/model-function/generate-image/{PromptFormatImageGenerationModel.cjs → PromptTemplateImageGenerationModel.cjs} +11 -11
  5. package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +20 -0
  6. package/model-function/generate-image/{PromptFormatImageGenerationModel.js → PromptTemplateImageGenerationModel.js} +9 -9
  7. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +6 -6
  8. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +4 -4
  9. package/model-function/generate-structure/StructureFromTextGenerationModel.js +6 -6
  10. package/model-function/generate-structure/{StructureFromTextPromptFormat.d.ts → StructureFromTextPromptTemplate.d.ts} +1 -1
  11. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +4 -4
  12. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +2 -2
  13. package/model-function/generate-structure/StructureFromTextStreamingModel.js +4 -4
  14. package/model-function/generate-structure/index.cjs +1 -1
  15. package/model-function/generate-structure/index.d.ts +1 -1
  16. package/model-function/generate-structure/index.js +1 -1
  17. package/model-function/generate-structure/jsonStructurePrompt.d.ts +2 -2
  18. package/model-function/generate-text/{PromptFormatTextGenerationModel.cjs → PromptTemplateTextGenerationModel.cjs} +21 -21
  19. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +35 -0
  20. package/model-function/generate-text/{PromptFormatTextGenerationModel.js → PromptTemplateTextGenerationModel.js} +19 -19
  21. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +38 -0
  22. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +16 -0
  23. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +34 -0
  24. package/model-function/generate-text/TextGenerationModel.d.ts +3 -3
  25. package/model-function/generate-text/TextGenerationPromptTemplate.d.ts +11 -0
  26. package/model-function/generate-text/index.cjs +4 -4
  27. package/model-function/generate-text/index.d.ts +4 -4
  28. package/model-function/generate-text/index.js +4 -4
  29. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.cjs → prompt-template/AlpacaPromptTemplate.cjs} +5 -2
  30. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.d.ts → prompt-template/AlpacaPromptTemplate.d.ts} +5 -5
  31. package/model-function/generate-text/{prompt-format/AlpacaPromptFormat.js → prompt-template/AlpacaPromptTemplate.js} +5 -2
  32. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.cjs +31 -0
  33. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.test.js +29 -0
  34. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.cjs → prompt-template/ChatMLPromptTemplate.cjs} +5 -5
  35. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.d.ts → prompt-template/ChatMLPromptTemplate.d.ts} +7 -7
  36. package/model-function/generate-text/{prompt-format/ChatMLPromptFormat.js → prompt-template/ChatMLPromptTemplate.js} +5 -5
  37. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.cjs +49 -0
  38. package/model-function/generate-text/prompt-template/ChatMLPromptTemplate.test.js +47 -0
  39. package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.d.ts +1 -1
  40. package/model-function/generate-text/prompt-template/Content.js +1 -0
  41. package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.d.ts +7 -0
  42. package/model-function/generate-text/prompt-template/InstructionPrompt.js +1 -0
  43. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.cjs → prompt-template/Llama2PromptTemplate.cjs} +8 -7
  44. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.d.ts → prompt-template/Llama2PromptTemplate.d.ts} +7 -6
  45. package/model-function/generate-text/{prompt-format/Llama2PromptFormat.js → prompt-template/Llama2PromptTemplate.js} +8 -7
  46. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.cjs +49 -0
  47. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.d.ts +1 -0
  48. package/model-function/generate-text/prompt-template/Llama2PromptTemplate.test.js +47 -0
  49. package/model-function/generate-text/{prompt-format/TextPromptFormat.cjs → prompt-template/TextPromptTemplate.cjs} +3 -0
  50. package/model-function/generate-text/{prompt-format/TextPromptFormat.d.ts → prompt-template/TextPromptTemplate.d.ts} +4 -4
  51. package/model-function/generate-text/{prompt-format/TextPromptFormat.js → prompt-template/TextPromptTemplate.js} +3 -0
  52. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.cjs +49 -0
  53. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.d.ts +1 -0
  54. package/model-function/generate-text/prompt-template/TextPromptTemplate.test.js +47 -0
  55. package/model-function/generate-text/{prompt-format/VicunaPromptFormat.d.ts → prompt-template/VicunaPromptTemplate.d.ts} +2 -2
  56. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +21 -0
  57. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.d.ts +1 -0
  58. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +19 -0
  59. package/model-function/generate-text/{prompt-format → prompt-template}/index.cjs +6 -6
  60. package/model-function/generate-text/prompt-template/index.d.ts +10 -0
  61. package/model-function/generate-text/prompt-template/index.js +10 -0
  62. package/model-function/index.cjs +2 -2
  63. package/model-function/index.d.ts +2 -2
  64. package/model-function/index.js +2 -2
  65. package/model-provider/anthropic/{AnthropicPromptFormat.cjs → AnthropicPromptTemplate.cjs} +15 -8
  66. package/model-provider/anthropic/AnthropicPromptTemplate.d.ts +17 -0
  67. package/model-provider/anthropic/{AnthropicPromptFormat.js → AnthropicPromptTemplate.js} +15 -8
  68. package/model-provider/anthropic/AnthropicPromptTemplate.test.cjs +49 -0
  69. package/model-provider/anthropic/AnthropicPromptTemplate.test.d.ts +1 -0
  70. package/model-provider/anthropic/AnthropicPromptTemplate.test.js +47 -0
  71. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +12 -12
  72. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +9 -9
  73. package/model-provider/anthropic/AnthropicTextGenerationModel.js +12 -12
  74. package/model-provider/anthropic/index.cjs +2 -2
  75. package/model-provider/anthropic/index.d.ts +1 -1
  76. package/model-provider/anthropic/index.js +1 -1
  77. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +5 -5
  78. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +4 -4
  79. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +5 -5
  80. package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.d.ts +2 -2
  81. package/model-provider/cohere/CohereTextGenerationModel.cjs +10 -10
  82. package/model-provider/cohere/CohereTextGenerationModel.d.ts +7 -7
  83. package/model-provider/cohere/CohereTextGenerationModel.js +10 -10
  84. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +4 -4
  85. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +3 -3
  86. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +4 -4
  87. package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.cjs → LlamaCppBakLLaVA1PromptTemplate.cjs} +1 -1
  88. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +11 -0
  89. package/model-provider/llamacpp/{LlamaCppBakLLaVA1Format.js → LlamaCppBakLLaVA1PromptTemplate.js} +1 -1
  90. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +12 -12
  91. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +7 -7
  92. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +12 -12
  93. package/model-provider/llamacpp/index.cjs +2 -2
  94. package/model-provider/llamacpp/index.d.ts +1 -1
  95. package/model-provider/llamacpp/index.js +1 -1
  96. package/model-provider/ollama/OllamaTextGenerationModel.cjs +9 -9
  97. package/model-provider/ollama/OllamaTextGenerationModel.d.ts +7 -7
  98. package/model-provider/ollama/OllamaTextGenerationModel.js +9 -9
  99. package/model-provider/openai/OpenAICompletionModel.cjs +10 -10
  100. package/model-provider/openai/OpenAICompletionModel.d.ts +7 -7
  101. package/model-provider/openai/OpenAICompletionModel.js +10 -10
  102. package/model-provider/openai/OpenAIImageGenerationModel.cjs +4 -4
  103. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +3 -3
  104. package/model-provider/openai/OpenAIImageGenerationModel.js +4 -4
  105. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.cjs +16 -16
  106. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +14 -14
  107. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.js +16 -16
  108. package/model-provider/openai/chat/OpenAIChatMessage.d.ts +1 -1
  109. package/model-provider/openai/chat/OpenAIChatModel.cjs +15 -15
  110. package/model-provider/openai/chat/OpenAIChatModel.d.ts +12 -12
  111. package/model-provider/openai/chat/OpenAIChatModel.js +15 -15
  112. package/model-provider/openai/chat/{OpenAIChatPromptFormat.cjs → OpenAIChatPromptTemplate.cjs} +1 -1
  113. package/model-provider/openai/chat/OpenAIChatPromptTemplate.d.ts +20 -0
  114. package/model-provider/openai/chat/{OpenAIChatPromptFormat.js → OpenAIChatPromptTemplate.js} +1 -1
  115. package/model-provider/openai/index.cjs +2 -2
  116. package/model-provider/openai/index.d.ts +1 -1
  117. package/model-provider/openai/index.js +1 -1
  118. package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +14 -14
  119. package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +11 -11
  120. package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +14 -14
  121. package/model-provider/stability/StabilityImageGenerationModel.cjs +5 -5
  122. package/model-provider/stability/StabilityImageGenerationModel.d.ts +4 -4
  123. package/model-provider/stability/StabilityImageGenerationModel.js +5 -5
  124. package/model-provider/stability/StabilityImageGenerationPrompt.d.ts +2 -2
  125. package/package.json +1 -1
  126. package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +2 -2
  127. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.cjs +6 -6
  128. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.d.ts +4 -4
  129. package/tool/generate-tool-calls-or-text/TextGenerationToolCallsOrGenerateTextModel.js +6 -6
  130. package/model-function/generate-image/PromptFormatImageGenerationModel.d.ts +0 -20
  131. package/model-function/generate-text/PromptFormatTextGenerationModel.d.ts +0 -35
  132. package/model-function/generate-text/PromptFormatTextStreamingModel.cjs +0 -38
  133. package/model-function/generate-text/PromptFormatTextStreamingModel.d.ts +0 -16
  134. package/model-function/generate-text/PromptFormatTextStreamingModel.js +0 -34
  135. package/model-function/generate-text/TextGenerationPromptFormat.d.ts +0 -11
  136. package/model-function/generate-text/prompt-format/index.d.ts +0 -10
  137. package/model-function/generate-text/prompt-format/index.js +0 -10
  138. package/model-provider/anthropic/AnthropicPromptFormat.d.ts +0 -17
  139. package/model-provider/llamacpp/LlamaCppBakLLaVA1Format.d.ts +0 -11
  140. package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +0 -20
  141. /package/model-function/{PromptFormat.cjs → PromptTemplate.cjs} +0 -0
  142. /package/model-function/{PromptFormat.js → PromptTemplate.js} +0 -0
  143. /package/model-function/generate-structure/{StructureFromTextPromptFormat.cjs → StructureFromTextPromptTemplate.cjs} +0 -0
  144. /package/model-function/generate-structure/{StructureFromTextPromptFormat.js → StructureFromTextPromptTemplate.js} +0 -0
  145. /package/model-function/generate-text/{TextGenerationPromptFormat.cjs → TextGenerationPromptTemplate.cjs} +0 -0
  146. /package/model-function/generate-text/{TextGenerationPromptFormat.js → TextGenerationPromptTemplate.js} +0 -0
  147. /package/model-function/generate-text/{prompt-format/Content.js → prompt-template/AlpacaPromptTemplate.test.d.ts} +0 -0
  148. /package/model-function/generate-text/{prompt-format/InstructionPrompt.js → prompt-template/ChatMLPromptTemplate.test.d.ts} +0 -0
  149. /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.cjs +0 -0
  150. /package/model-function/generate-text/{prompt-format → prompt-template}/ChatPrompt.js +0 -0
  151. /package/model-function/generate-text/{prompt-format → prompt-template}/Content.cjs +0 -0
  152. /package/model-function/generate-text/{prompt-format → prompt-template}/Content.d.ts +0 -0
  153. /package/model-function/generate-text/{prompt-format → prompt-template}/InstructionPrompt.cjs +0 -0
  154. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.cjs +0 -0
  155. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.d.ts +0 -0
  156. /package/model-function/generate-text/{prompt-format → prompt-template}/InvalidPromptError.js +0 -0
  157. /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.cjs → prompt-template/VicunaPromptTemplate.cjs} +0 -0
  158. /package/model-function/generate-text/{prompt-format/VicunaPromptFormat.js → prompt-template/VicunaPromptTemplate.js} +0 -0
  159. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.cjs +0 -0
  160. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.d.ts +0 -0
  161. /package/model-function/generate-text/{prompt-format → prompt-template}/trimChatPrompt.js +0 -0
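Almost every change in this release is a mechanical rename of the "prompt format" concept to "prompt template": files, classes, methods, and parameters move from `*PromptFormat*` to `*PromptTemplate*`, as the file moves above show. A hedged before/after sketch of the call-site impact; the `openai.ChatTextGenerator` facade and the root import are assumptions based on the library's documented usage and are not part of this diff, while the method and namespace names are taken from the hunks below:

```ts
import { openai, OpenAIChatPrompt } from "modelfusion"; // assumed root exports

// 0.92.x (old): model.withPromptFormat(OpenAIChatPromptFormat.instruction())
// 0.93.x (new): both the method and the namespace export are renamed.
const model = openai
  .ChatTextGenerator({ model: "gpt-3.5-turbo" }) // illustrative model id
  .withPromptTemplate(OpenAIChatPrompt.instruction());
```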
@@ -7,9 +7,9 @@ exports.OpenAIChatFunctionCallStructureGenerationModel = void 0;
  const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
  const StructureParseError_js_1 = require("../../../model-function/generate-structure/StructureParseError.cjs");
  const AbstractOpenAIChatModel_js_1 = require("./AbstractOpenAIChatModel.cjs");
- const OpenAIChatPromptFormat_js_1 = require("./OpenAIChatPromptFormat.cjs");
+ const OpenAIChatPromptTemplate_js_1 = require("./OpenAIChatPromptTemplate.cjs");
  class OpenAIChatFunctionCallStructureGenerationModel {
- constructor({ model, fnName, fnDescription, promptFormat, }) {
+ constructor({ model, fnName, fnDescription, promptTemplate, }) {
  Object.defineProperty(this, "model", {
  enumerable: true,
  configurable: true,
@@ -28,7 +28,7 @@ class OpenAIChatFunctionCallStructureGenerationModel {
  writable: true,
  value: void 0
  });
- Object.defineProperty(this, "promptFormat", {
+ Object.defineProperty(this, "promptTemplate", {
  enumerable: true,
  configurable: true,
  writable: true,
@@ -37,7 +37,7 @@ class OpenAIChatFunctionCallStructureGenerationModel {
  this.model = model;
  this.fnName = fnName;
  this.fnDescription = fnDescription;
- this.promptFormat = promptFormat;
+ this.promptTemplate = promptTemplate;
  }
  get modelInformation() {
  return this.model.modelInformation;
@@ -49,29 +49,29 @@ class OpenAIChatFunctionCallStructureGenerationModel {
  return this.model.settingsForEvent;
  }
  /**
- * Returns this model with a text prompt format.
+ * Returns this model with a text prompt template.
  */
  withTextPrompt() {
- return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.text)());
+ return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.text)());
  }
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
  withInstructionPrompt() {
- return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.instruction)());
+ return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.instruction)());
  }
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
  withChatPrompt() {
- return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.chat)());
+ return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.chat)());
  }
- withPromptFormat(promptFormat) {
+ withPromptTemplate(promptTemplate) {
  return new OpenAIChatFunctionCallStructureGenerationModel({
  model: this.model,
  fnName: this.fnName,
  fnDescription: this.fnDescription,
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
@@ -79,7 +79,7 @@ class OpenAIChatFunctionCallStructureGenerationModel {
  model: this.model.withSettings(additionalSettings),
  fnName: this.fnName,
  fnDescription: this.fnDescription,
- promptFormat: this.promptFormat,
+ promptTemplate: this.promptTemplate,
  });
  }
  /**
@@ -91,12 +91,12 @@ class OpenAIChatFunctionCallStructureGenerationModel {
  */
  async doGenerateStructure(schema, prompt, // first argument of the function
  options) {
- const expandedPrompt = this.promptFormat.format(prompt);
+ const expandedPrompt = this.promptTemplate.format(prompt);
  const response = await this.model
  .withSettings({
  stopSequences: [
  ...(this.settings.stopSequences ?? []),
- ...this.promptFormat.stopSequences,
+ ...this.promptTemplate.stopSequences,
  ],
  })
  .callAPI(expandedPrompt, {
@@ -129,7 +129,7 @@ class OpenAIChatFunctionCallStructureGenerationModel {
  }
  async doStreamStructure(schema, prompt, // first argument of the function
  options) {
- const expandedPrompt = this.promptFormat.format(prompt);
+ const expandedPrompt = this.promptTemplate.format(prompt);
  return this.model.callAPI(expandedPrompt, {
  ...options,
  responseFormat: AbstractOpenAIChatModel_js_1.OpenAIChatResponseFormat.structureDeltaIterable,
@@ -2,37 +2,37 @@ import { FunctionOptions } from "../../../core/FunctionOptions.js";
  import { JsonSchemaProducer } from "../../../core/schema/JsonSchemaProducer.js";
  import { Schema } from "../../../core/schema/Schema.js";
  import { StructureGenerationModel } from "../../../model-function/generate-structure/StructureGenerationModel.js";
- import { TextGenerationPromptFormat } from "../../../model-function/generate-text/TextGenerationPromptFormat.js";
+ import { TextGenerationPromptTemplate } from "../../../model-function/generate-text/TextGenerationPromptTemplate.js";
  import { OpenAIChatPrompt } from "./AbstractOpenAIChatModel.js";
  import { OpenAIChatModel, OpenAIChatSettings } from "./OpenAIChatModel";
- export declare class OpenAIChatFunctionCallStructureGenerationModel<PROMPT_FORMAT extends TextGenerationPromptFormat<unknown, OpenAIChatPrompt>> implements StructureGenerationModel<Parameters<PROMPT_FORMAT["format"]>[0], // first argument of the function
+ export declare class OpenAIChatFunctionCallStructureGenerationModel<PROMPT_TEMPLATE extends TextGenerationPromptTemplate<unknown, OpenAIChatPrompt>> implements StructureGenerationModel<Parameters<PROMPT_TEMPLATE["format"]>[0], // first argument of the function
  OpenAIChatSettings> {
  readonly model: OpenAIChatModel;
  readonly fnName: string;
  readonly fnDescription?: string;
- readonly promptFormat: PROMPT_FORMAT;
- constructor({ model, fnName, fnDescription, promptFormat, }: {
+ readonly promptTemplate: PROMPT_TEMPLATE;
+ constructor({ model, fnName, fnDescription, promptTemplate, }: {
  model: OpenAIChatModel;
  fnName: string;
  fnDescription?: string;
- promptFormat: PROMPT_FORMAT;
+ promptTemplate: PROMPT_TEMPLATE;
  });
  get modelInformation(): import("../../../index.js").ModelInformation;
  get settings(): OpenAIChatSettings;
  get settingsForEvent(): Partial<OpenAIChatSettings>;
  /**
- * Returns this model with a text prompt format.
+ * Returns this model with a text prompt template.
  */
- withTextPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<string, import("./OpenAIChatMessage.js").OpenAIChatMessage[]>>;
+ withTextPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptTemplate<string, import("./OpenAIChatMessage.js").OpenAIChatMessage[]>>;
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
- withInstructionPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<import("../../../index.js").MultiModalInstructionPrompt | import("../../../index.js").TextInstructionPrompt, import("./OpenAIChatMessage.js").OpenAIChatMessage[]>>;
+ withInstructionPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptTemplate<import("../../../index.js").MultiModalInstructionPrompt | import("../../../index.js").TextInstructionPrompt, import("./OpenAIChatMessage.js").OpenAIChatMessage[]>>;
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
- withChatPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<import("../../../index.js").TextChatPrompt | import("../../../index.js").MultiModalChatPrompt, import("./OpenAIChatMessage.js").OpenAIChatMessage[]>>;
- withPromptFormat<TARGET_PROMPT_FORMAT extends TextGenerationPromptFormat<unknown, OpenAIChatPrompt>>(promptFormat: TARGET_PROMPT_FORMAT): OpenAIChatFunctionCallStructureGenerationModel<TARGET_PROMPT_FORMAT>;
+ withChatPrompt(): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptTemplate<import("../../../index.js").TextChatPrompt | import("../../../index.js").MultiModalChatPrompt, import("./OpenAIChatMessage.js").OpenAIChatMessage[]>>;
+ withPromptTemplate<TARGET_PROMPT_FORMAT extends TextGenerationPromptTemplate<unknown, OpenAIChatPrompt>>(promptTemplate: TARGET_PROMPT_FORMAT): OpenAIChatFunctionCallStructureGenerationModel<TARGET_PROMPT_FORMAT>;
  withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
  /**
  * JSON generation uses the OpenAI GPT function calling API.
@@ -41,7 +41,7 @@ OpenAIChatSettings> {
  *
  * @see https://platform.openai.com/docs/guides/gpt/function-calling
  */
- doGenerateStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_FORMAT["format"]>[0], // first argument of the function
+ doGenerateStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_TEMPLATE["format"]>[0], // first argument of the function
  options?: FunctionOptions): Promise<{
  response: {
  object: "chat.completion";
@@ -84,6 +84,6 @@ OpenAIChatSettings> {
  totalTokens: number;
  };
  }>;
- doStreamStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_FORMAT["format"]>[0], // first argument of the function
+ doStreamStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_TEMPLATE["format"]>[0], // first argument of the function
  options?: FunctionOptions): Promise<AsyncIterable<import("../../../index.js").Delta<unknown>>>;
  }
@@ -1,9 +1,9 @@
  import SecureJSON from "secure-json-parse";
  import { StructureParseError } from "../../../model-function/generate-structure/StructureParseError.js";
  import { OpenAIChatResponseFormat, } from "./AbstractOpenAIChatModel.js";
- import { chat, instruction, text } from "./OpenAIChatPromptFormat.js";
+ import { chat, instruction, text } from "./OpenAIChatPromptTemplate.js";
  export class OpenAIChatFunctionCallStructureGenerationModel {
- constructor({ model, fnName, fnDescription, promptFormat, }) {
+ constructor({ model, fnName, fnDescription, promptTemplate, }) {
  Object.defineProperty(this, "model", {
  enumerable: true,
  configurable: true,
@@ -22,7 +22,7 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
  writable: true,
  value: void 0
  });
- Object.defineProperty(this, "promptFormat", {
+ Object.defineProperty(this, "promptTemplate", {
  enumerable: true,
  configurable: true,
  writable: true,
@@ -31,7 +31,7 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
  this.model = model;
  this.fnName = fnName;
  this.fnDescription = fnDescription;
- this.promptFormat = promptFormat;
+ this.promptTemplate = promptTemplate;
  }
  get modelInformation() {
  return this.model.modelInformation;
@@ -43,29 +43,29 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
  return this.model.settingsForEvent;
  }
  /**
- * Returns this model with a text prompt format.
+ * Returns this model with a text prompt template.
  */
  withTextPrompt() {
- return this.withPromptFormat(text());
+ return this.withPromptTemplate(text());
  }
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
  withInstructionPrompt() {
- return this.withPromptFormat(instruction());
+ return this.withPromptTemplate(instruction());
  }
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
  withChatPrompt() {
- return this.withPromptFormat(chat());
+ return this.withPromptTemplate(chat());
  }
- withPromptFormat(promptFormat) {
+ withPromptTemplate(promptTemplate) {
  return new OpenAIChatFunctionCallStructureGenerationModel({
  model: this.model,
  fnName: this.fnName,
  fnDescription: this.fnDescription,
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
@@ -73,7 +73,7 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
  model: this.model.withSettings(additionalSettings),
  fnName: this.fnName,
  fnDescription: this.fnDescription,
- promptFormat: this.promptFormat,
+ promptTemplate: this.promptTemplate,
  });
  }
  /**
@@ -85,12 +85,12 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
  */
  async doGenerateStructure(schema, prompt, // first argument of the function
  options) {
- const expandedPrompt = this.promptFormat.format(prompt);
+ const expandedPrompt = this.promptTemplate.format(prompt);
  const response = await this.model
  .withSettings({
  stopSequences: [
  ...(this.settings.stopSequences ?? []),
- ...this.promptFormat.stopSequences,
+ ...this.promptTemplate.stopSequences,
  ],
  })
  .callAPI(expandedPrompt, {
@@ -123,7 +123,7 @@ export class OpenAIChatFunctionCallStructureGenerationModel {
  }
  async doStreamStructure(schema, prompt, // first argument of the function
  options) {
- const expandedPrompt = this.promptFormat.format(prompt);
+ const expandedPrompt = this.promptTemplate.format(prompt);
  return this.model.callAPI(expandedPrompt, {
  ...options,
  responseFormat: OpenAIChatResponseFormat.structureDeltaIterable,
@@ -1,4 +1,4 @@
- import { MultiModalInput } from "../../../model-function/generate-text/prompt-format/Content.js";
+ import { MultiModalInput } from "../../../model-function/generate-text/prompt-template/Content.js";
  import { ToolCall } from "../../../tool/ToolCall.js";
  export type OpenAIChatMessage = {
  role: "system";
@@ -2,11 +2,11 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.OpenAIChatModel = exports.calculateOpenAIChatCostInMillicents = exports.isOpenAIChatModel = exports.getOpenAIChatModelInformation = exports.OPENAI_CHAT_MODELS = void 0;
  const StructureFromTextStreamingModel_js_1 = require("../../../model-function/generate-structure/StructureFromTextStreamingModel.cjs");
- const PromptFormatTextStreamingModel_js_1 = require("../../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
+ const PromptTemplateTextStreamingModel_js_1 = require("../../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
  const TikTokenTokenizer_js_1 = require("../TikTokenTokenizer.cjs");
  const AbstractOpenAIChatModel_js_1 = require("./AbstractOpenAIChatModel.cjs");
  const OpenAIChatFunctionCallStructureGenerationModel_js_1 = require("./OpenAIChatFunctionCallStructureGenerationModel.cjs");
- const OpenAIChatPromptFormat_js_1 = require("./OpenAIChatPromptFormat.cjs");
+ const OpenAIChatPromptTemplate_js_1 = require("./OpenAIChatPromptTemplate.cjs");
  const countOpenAIChatMessageTokens_js_1 = require("./countOpenAIChatMessageTokens.cjs");
  /*
  * Available OpenAI chat models, their token limits, and pricing.
@@ -216,42 +216,42 @@ class OpenAIChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpenAIChatMod
  model: this,
  fnName,
  fnDescription,
- promptFormat: (0, OpenAIChatPromptFormat_js_1.identity)(),
+ promptTemplate: (0, OpenAIChatPromptTemplate_js_1.identity)(),
  });
  }
- asStructureGenerationModel(promptFormat) {
+ asStructureGenerationModel(promptTemplate) {
  return new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
  model: this,
- format: promptFormat,
+ template: promptTemplate,
  });
  }
  /**
- * Returns this model with a text prompt format.
+ * Returns this model with a text prompt template.
  */
  withTextPrompt() {
- return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.text)());
+ return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.text)());
  }
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
  withInstructionPrompt() {
- return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.instruction)());
+ return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.instruction)());
  }
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
  withChatPrompt() {
- return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.chat)());
+ return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.chat)());
  }
- withPromptFormat(promptFormat) {
- return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+ withPromptTemplate(promptTemplate) {
+ return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
  model: this.withSettings({
  stopSequences: [
  ...(this.settings.stopSequences ?? []),
- ...promptFormat.stopSequences,
+ ...promptTemplate.stopSequences,
  ],
  }),
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
@@ -1,8 +1,8 @@
- import { StructureFromTextPromptFormat } from "../../../model-function/generate-structure/StructureFromTextPromptFormat.js";
+ import { StructureFromTextPromptTemplate } from "../../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
  import { StructureFromTextStreamingModel } from "../../../model-function/generate-structure/StructureFromTextStreamingModel.js";
- import { PromptFormatTextStreamingModel } from "../../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+ import { PromptTemplateTextStreamingModel } from "../../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
  import { TextGenerationModelSettings, TextStreamingModel } from "../../../model-function/generate-text/TextGenerationModel.js";
- import { TextGenerationPromptFormat } from "../../../model-function/generate-text/TextGenerationPromptFormat.js";
+ import { TextGenerationPromptTemplate } from "../../../model-function/generate-text/TextGenerationPromptTemplate.js";
  import { ToolCallGenerationModel } from "../../../tool/generate-tool-call/ToolCallGenerationModel.js";
  import { ToolCallsOrTextGenerationModel } from "../../../tool/generate-tool-calls-or-text/ToolCallsOrTextGenerationModel.js";
  import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
@@ -142,21 +142,21 @@ export declare class OpenAIChatModel extends AbstractOpenAIChatModel<OpenAIChatS
  asFunctionCallStructureGenerationModel({ fnName, fnDescription, }: {
  fnName: string;
  fnDescription?: string;
- }): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptFormat<import("./OpenAIChatMessage.js").OpenAIChatMessage[], import("./OpenAIChatMessage.js").OpenAIChatMessage[]>>;
- asStructureGenerationModel<INPUT_PROMPT>(promptFormat: StructureFromTextPromptFormat<INPUT_PROMPT, OpenAIChatPrompt>): StructureFromTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, this>;
+ }): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptTemplate<import("./OpenAIChatMessage.js").OpenAIChatMessage[], import("./OpenAIChatMessage.js").OpenAIChatMessage[]>>;
+ asStructureGenerationModel<INPUT_PROMPT>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): StructureFromTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, this>;
  /**
- * Returns this model with a text prompt format.
+ * Returns this model with a text prompt template.
  */
- withTextPrompt(): PromptFormatTextStreamingModel<string, OpenAIChatPrompt, OpenAIChatSettings, this>;
+ withTextPrompt(): PromptTemplateTextStreamingModel<string, OpenAIChatPrompt, OpenAIChatSettings, this>;
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
- withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../../index.js").MultiModalInstructionPrompt | import("../../../index.js").TextInstructionPrompt, OpenAIChatPrompt, OpenAIChatSettings, this>;
+ withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../../index.js").MultiModalInstructionPrompt | import("../../../index.js").TextInstructionPrompt, OpenAIChatPrompt, OpenAIChatSettings, this>;
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
- withChatPrompt(): PromptFormatTextStreamingModel<import("../../../index.js").TextChatPrompt | import("../../../index.js").MultiModalChatPrompt, OpenAIChatPrompt, OpenAIChatSettings, this>;
- withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, OpenAIChatPrompt>): PromptFormatTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAIChatSettings, this>;
+ withChatPrompt(): PromptTemplateTextStreamingModel<import("../../../index.js").TextChatPrompt | import("../../../index.js").MultiModalChatPrompt, OpenAIChatPrompt, OpenAIChatSettings, this>;
+ withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): PromptTemplateTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAIChatSettings, this>;
  withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
  }
  export {};
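The declaration changes above show that `withTextPrompt()` now returns a `PromptTemplateTextStreamingModel<string, OpenAIChatPrompt, ...>`, i.e. the chat model wrapped so it accepts a plain string prompt. A minimal usage sketch, assuming the `generateText` and `openai` facade exports from the package root; their call shapes are taken from the library's documentation, not from this diff:

```ts
import { generateText, openai } from "modelfusion"; // assumed root exports

// The string prompt is expanded into OpenAIChatMessage[] by the text() template.
const haiku = await generateText(
  openai.ChatTextGenerator({ model: "gpt-3.5-turbo" }).withTextPrompt(),
  "Write a haiku about renaming APIs."
);
```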
@@ -1,9 +1,9 @@
  import { StructureFromTextStreamingModel } from "../../../model-function/generate-structure/StructureFromTextStreamingModel.js";
- import { PromptFormatTextStreamingModel } from "../../../model-function/generate-text/PromptFormatTextStreamingModel.js";
+ import { PromptTemplateTextStreamingModel } from "../../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
  import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
  import { AbstractOpenAIChatModel, } from "./AbstractOpenAIChatModel.js";
  import { OpenAIChatFunctionCallStructureGenerationModel } from "./OpenAIChatFunctionCallStructureGenerationModel.js";
- import { chat, identity, instruction, text } from "./OpenAIChatPromptFormat.js";
+ import { chat, identity, instruction, text, } from "./OpenAIChatPromptTemplate.js";
  import { countOpenAIChatPromptTokens } from "./countOpenAIChatMessageTokens.js";
  /*
  * Available OpenAI chat models, their token limits, and pricing.
@@ -210,42 +210,42 @@ export class OpenAIChatModel extends AbstractOpenAIChatModel {
  model: this,
  fnName,
  fnDescription,
- promptFormat: identity(),
+ promptTemplate: identity(),
  });
  }
- asStructureGenerationModel(promptFormat) {
+ asStructureGenerationModel(promptTemplate) {
  return new StructureFromTextStreamingModel({
  model: this,
- format: promptFormat,
+ template: promptTemplate,
  });
  }
  /**
- * Returns this model with a text prompt format.
+ * Returns this model with a text prompt template.
  */
  withTextPrompt() {
- return this.withPromptFormat(text());
+ return this.withPromptTemplate(text());
  }
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
  withInstructionPrompt() {
- return this.withPromptFormat(instruction());
+ return this.withPromptTemplate(instruction());
  }
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
  withChatPrompt() {
- return this.withPromptFormat(chat());
+ return this.withPromptTemplate(chat());
  }
- withPromptFormat(promptFormat) {
- return new PromptFormatTextStreamingModel({
+ withPromptTemplate(promptTemplate) {
+ return new PromptTemplateTextStreamingModel({
  model: this.withSettings({
  stopSequences: [
  ...(this.settings.stopSequences ?? []),
- ...promptFormat.stopSequences,
+ ...promptTemplate.stopSequences,
  ],
  }),
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
@@ -1,7 +1,7 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.chat = exports.instruction = exports.text = exports.identity = void 0;
- const ChatPrompt_js_1 = require("../../../model-function/generate-text/prompt-format/ChatPrompt.cjs");
+ const ChatPrompt_js_1 = require("../../../model-function/generate-text/prompt-template/ChatPrompt.cjs");
  const OpenAIChatMessage_js_1 = require("./OpenAIChatMessage.cjs");
  /**
  * OpenAIMessage[] identity chat format.
@@ -0,0 +1,20 @@
+ import { TextGenerationPromptTemplate } from "../../../model-function/generate-text/TextGenerationPromptTemplate.js";
+ import { MultiModalChatPrompt, TextChatPrompt } from "../../../model-function/generate-text/prompt-template/ChatPrompt.js";
+ import { MultiModalInstructionPrompt, TextInstructionPrompt } from "../../../model-function/generate-text/prompt-template/InstructionPrompt.js";
+ import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
+ /**
+ * OpenAIMessage[] identity chat format.
+ */
+ export declare function identity(): TextGenerationPromptTemplate<Array<OpenAIChatMessage>, Array<OpenAIChatMessage>>;
+ /**
+ * Formats a text prompt as an OpenAI chat prompt.
+ */
+ export declare function text(): TextGenerationPromptTemplate<string, Array<OpenAIChatMessage>>;
+ /**
+ * Formats an instruction prompt as an OpenAI chat prompt.
+ */
+ export declare function instruction(): TextGenerationPromptTemplate<MultiModalInstructionPrompt | TextInstructionPrompt, Array<OpenAIChatMessage>>;
+ /**
+ * Formats a chat prompt as an OpenAI chat prompt.
+ */
+ export declare function chat(): TextGenerationPromptTemplate<MultiModalChatPrompt | TextChatPrompt, Array<OpenAIChatMessage>>;
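The new `OpenAIChatPromptTemplate.d.ts` above declares the four template factories (`identity`, `text`, `instruction`, `chat`). The convenience methods elsewhere in this diff are thin wrappers over them, so the two calls below are equivalent; the model construction and root imports are illustrative assumptions, while the method and function names come from the hunks in this diff:

```ts
import { openai, OpenAIChatPrompt } from "modelfusion"; // assumed root exports

const base = openai.ChatTextGenerator({ model: "gpt-4" }); // illustrative model id

// Equivalent per the OpenAIChatModel diff: withInstructionPrompt() simply calls
// withPromptTemplate(instruction()).
const a = base.withInstructionPrompt();
const b = base.withPromptTemplate(OpenAIChatPrompt.instruction());
```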
@@ -1,4 +1,4 @@
- import { validateChatPrompt } from "../../../model-function/generate-text/prompt-format/ChatPrompt.js";
+ import { validateChatPrompt } from "../../../model-function/generate-text/prompt-template/ChatPrompt.js";
  import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
  /**
  * OpenAIMessage[] identity chat format.
@@ -26,7 +26,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
  return result;
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.OpenAIChatPromptFormat = exports.openai = void 0;
+ exports.OpenAIChatPrompt = exports.openai = void 0;
  __exportStar(require("./AzureOpenAIApiConfiguration.cjs"), exports);
  __exportStar(require("./OpenAIApiConfiguration.cjs"), exports);
  __exportStar(require("./OpenAICompletionModel.cjs"), exports);
@@ -40,5 +40,5 @@ __exportStar(require("./TikTokenTokenizer.cjs"), exports);
  __exportStar(require("./chat/AbstractOpenAIChatModel.cjs"), exports);
  __exportStar(require("./chat/OpenAIChatMessage.cjs"), exports);
  __exportStar(require("./chat/OpenAIChatModel.cjs"), exports);
- exports.OpenAIChatPromptFormat = __importStar(require("./chat/OpenAIChatPromptFormat.cjs"));
+ exports.OpenAIChatPrompt = __importStar(require("./chat/OpenAIChatPromptTemplate.cjs"));
  __exportStar(require("./chat/countOpenAIChatMessageTokens.cjs"), exports);
@@ -12,6 +12,6 @@ export * from "./TikTokenTokenizer.js";
  export * from "./chat/AbstractOpenAIChatModel.js";
  export * from "./chat/OpenAIChatMessage.js";
  export * from "./chat/OpenAIChatModel.js";
- export * as OpenAIChatPromptFormat from "./chat/OpenAIChatPromptFormat.js";
+ export * as OpenAIChatPrompt from "./chat/OpenAIChatPromptTemplate.js";
  export { OpenAIChatDelta } from "./chat/OpenAIChatStreamIterable.js";
  export * from "./chat/countOpenAIChatMessageTokens.js";
@@ -11,5 +11,5 @@ export * from "./TikTokenTokenizer.js";
  export * from "./chat/AbstractOpenAIChatModel.js";
  export * from "./chat/OpenAIChatMessage.js";
  export * from "./chat/OpenAIChatModel.js";
- export * as OpenAIChatPromptFormat from "./chat/OpenAIChatPromptFormat.js";
+ export * as OpenAIChatPrompt from "./chat/OpenAIChatPromptTemplate.js";
  export * from "./chat/countOpenAIChatMessageTokens.js";
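Both provider index files above rename the namespace export: `OpenAIChatPromptFormat` becomes `OpenAIChatPrompt`, now backed by `OpenAIChatPromptTemplate.js`. At the import level the migration looks roughly like this; importing from the package root is an assumption, while the export names come from the index diffs:

```ts
// 0.92.x
// import { OpenAIChatPromptFormat } from "modelfusion";
// const template = OpenAIChatPromptFormat.chat();

// 0.93.x
import { OpenAIChatPrompt } from "modelfusion"; // assumed root re-export
const template = OpenAIChatPrompt.chat();
```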
@@ -2,9 +2,9 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.OpenAICompatibleChatModel = void 0;
  const StructureFromTextStreamingModel_js_1 = require("../../model-function/generate-structure/StructureFromTextStreamingModel.cjs");
- const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
+ const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
  const AbstractOpenAIChatModel_js_1 = require("../openai/chat/AbstractOpenAIChatModel.cjs");
- const OpenAIChatPromptFormat_js_1 = require("../openai/chat/OpenAIChatPromptFormat.cjs");
+ const OpenAIChatPromptTemplate_js_1 = require("../openai/chat/OpenAIChatPromptTemplate.cjs");
  /**
  * Create a text generation model that calls an API that is compatible with OpenAI's chat API.
  *
@@ -58,39 +58,39 @@ class OpenAICompatibleChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpe
  ];
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
  }
- asStructureGenerationModel(promptFormat) {
+ asStructureGenerationModel(promptTemplate) {
  return new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
  model: this,
- format: promptFormat,
+ template: promptTemplate,
  });
  }
  /**
- * Returns this model with a text prompt format.
+ * Returns this model with a text prompt template.
  */
  withTextPrompt() {
- return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.text)());
+ return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.text)());
  }
  /**
- * Returns this model with an instruction prompt format.
+ * Returns this model with an instruction prompt template.
  */
  withInstructionPrompt() {
- return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.instruction)());
+ return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.instruction)());
  }
  /**
- * Returns this model with a chat prompt format.
+ * Returns this model with a chat prompt template.
  */
  withChatPrompt() {
- return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.chat)());
+ return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.chat)());
  }
- withPromptFormat(promptFormat) {
- return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
+ withPromptTemplate(promptTemplate) {
+ return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
  model: this.withSettings({
  stopSequences: [
  ...(this.settings.stopSequences ?? []),
- ...promptFormat.stopSequences,
+ ...promptTemplate.stopSequences,
  ],
  }),
- promptFormat,
+ promptTemplate,
  });
  }
  withSettings(additionalSettings) {
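As the `withPromptTemplate` implementations throughout this diff show, a text generation prompt template is consumed through exactly two members: `format(prompt)` and `stopSequences`, the latter being merged into the model's stop sequences. A hedged sketch of a custom template under that shape; the interface name comes from the new `TextGenerationPromptTemplate.d.ts` in the file list, while the root re-export and the exact field list are inferred from usage rather than confirmed by this diff:

```ts
import type { TextGenerationPromptTemplate } from "modelfusion"; // assumed root re-export

// Maps a plain question string to the raw prompt text sent to a completion model.
const qaTemplate: TextGenerationPromptTemplate<string, string> = {
  format: (question) => `### Question\n${question}\n\n### Answer\n`,
  // withPromptTemplate spreads these into settings.stopSequences, per the hunks above.
  stopSequences: ["\n### Question"],
};
```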