modelfusion 0.119.1 → 0.121.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/CHANGELOG.md +32 -0
  2. package/README.md +5 -5
  3. package/core/FunctionOptions.d.ts +7 -2
  4. package/core/executeFunction.cjs +1 -7
  5. package/core/executeFunction.d.ts +2 -2
  6. package/core/executeFunction.js +1 -7
  7. package/core/executeFunctionCall.cjs +3 -2
  8. package/core/executeFunctionCall.d.ts +2 -2
  9. package/core/executeFunctionCall.js +3 -2
  10. package/core/getFunctionCallLogger.cjs +22 -7
  11. package/core/getFunctionCallLogger.js +22 -7
  12. package/model-function/executeStandardCall.cjs +2 -2
  13. package/model-function/executeStandardCall.js +2 -2
  14. package/model-function/executeStreamCall.cjs +2 -2
  15. package/model-function/executeStreamCall.js +2 -2
  16. package/model-function/generate-text/prompt-template/SynthiaPromptTemplate.cjs +78 -0
  17. package/model-function/generate-text/prompt-template/SynthiaPromptTemplate.d.ts +35 -0
  18. package/model-function/generate-text/prompt-template/SynthiaPromptTemplate.js +72 -0
  19. package/model-function/generate-text/prompt-template/SynthiaPromptTemplate.test.cjs +60 -0
  20. package/model-function/generate-text/prompt-template/SynthiaPromptTemplate.test.d.ts +1 -0
  21. package/model-function/generate-text/prompt-template/SynthiaPromptTemplate.test.js +58 -0
  22. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +11 -13
  23. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.d.ts +1 -1
  24. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +11 -13
  25. package/model-function/generate-text/prompt-template/index.cjs +2 -1
  26. package/model-function/generate-text/prompt-template/index.d.ts +1 -0
  27. package/model-function/generate-text/prompt-template/index.js +1 -0
  28. package/model-provider/llamacpp/LlamaCppPrompt.cjs +35 -1
  29. package/model-provider/llamacpp/LlamaCppPrompt.d.ts +33 -0
  30. package/model-provider/llamacpp/LlamaCppPrompt.js +34 -0
  31. package/model-provider/ollama/OllamaCompletionModel.cjs +22 -19
  32. package/model-provider/ollama/OllamaCompletionModel.d.ts +13 -4
  33. package/model-provider/ollama/OllamaCompletionModel.js +22 -19
  34. package/model-provider/ollama/OllamaCompletionModel.test.cjs +3 -27
  35. package/model-provider/ollama/OllamaCompletionModel.test.js +3 -4
  36. package/model-provider/ollama/OllamaCompletionPrompt.cjs +91 -0
  37. package/model-provider/ollama/OllamaCompletionPrompt.d.ts +45 -0
  38. package/model-provider/ollama/OllamaCompletionPrompt.js +63 -0
  39. package/model-provider/ollama/OllamaFacade.cjs +25 -1
  40. package/model-provider/ollama/OllamaFacade.d.ts +1 -0
  41. package/model-provider/ollama/OllamaFacade.js +1 -0
  42. package/package.json +1 -1
  43. package/tool/Tool.d.ts +2 -2
  44. package/tool/execute-tool/executeTool.cjs +3 -2
  45. package/tool/execute-tool/executeTool.js +3 -2
package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs
@@ -25,19 +25,17 @@ exports.text = text;
  /**
   * Formats an instruction prompt as a Vicuna prompt.
   */
- function instruction() {
-     return {
-         format(prompt) {
-             let text = prompt.system != null
-                 ? `${prompt.system}\n\n`
-                 : `${DEFAULT_SYSTEM_MESSAGE}\n\n`;
-             text += `USER: ${(0, ContentPart_js_1.validateContentIsString)(prompt.instruction, prompt)}\n`;
-             text += `ASSISTANT: `;
-             return text;
-         },
-         stopSequences: [`\nUSER:`],
-     };
- }
+ const instruction = () => ({
+     stopSequences: [`\nUSER:`],
+     format(prompt) {
+         let text = prompt.system != null
+             ? `${prompt.system}\n\n`
+             : `${DEFAULT_SYSTEM_MESSAGE}\n\n`;
+         text += `USER: ${(0, ContentPart_js_1.validateContentIsString)(prompt.instruction, prompt)}\n`;
+         text += `ASSISTANT: `;
+         return text;
+     },
+ });
  exports.instruction = instruction;
  /**
   * Formats a chat prompt as a Vicuna prompt.
package/model-function/generate-text/prompt-template/VicunaPromptTemplate.d.ts
@@ -8,7 +8,7 @@ export declare function text(): TextGenerationPromptTemplate<string, string>;
  /**
   * Formats an instruction prompt as a Vicuna prompt.
   */
- export declare function instruction(): TextGenerationPromptTemplate<InstructionPrompt, string>;
+ export declare const instruction: () => TextGenerationPromptTemplate<InstructionPrompt, string>;
  /**
   * Formats a chat prompt as a Vicuna prompt.
   *
package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js
@@ -21,19 +21,17 @@ export function text() {
  /**
   * Formats an instruction prompt as a Vicuna prompt.
   */
- export function instruction() {
-     return {
-         format(prompt) {
-             let text = prompt.system != null
-                 ? `${prompt.system}\n\n`
-                 : `${DEFAULT_SYSTEM_MESSAGE}\n\n`;
-             text += `USER: ${validateContentIsString(prompt.instruction, prompt)}\n`;
-             text += `ASSISTANT: `;
-             return text;
-         },
-         stopSequences: [`\nUSER:`],
-     };
- }
+ export const instruction = () => ({
+     stopSequences: [`\nUSER:`],
+     format(prompt) {
+         let text = prompt.system != null
+             ? `${prompt.system}\n\n`
+             : `${DEFAULT_SYSTEM_MESSAGE}\n\n`;
+         text += `USER: ${validateContentIsString(prompt.instruction, prompt)}\n`;
+         text += `ASSISTANT: `;
+         return text;
+     },
+ });
  /**
   * Formats a chat prompt as a Vicuna prompt.
   *
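The Vicuna change is behavior-preserving: `instruction()` still returns the same template object, it is just declared as an arrow-function const like the other template factories. A minimal sketch of what the template produces, assuming the `VicunaPrompt` namespace re-export from the prompt-template index reaches the package root:

```ts
import { VicunaPrompt } from "modelfusion";

const template = VicunaPrompt.instruction();

// Per the format() implementation shown above:
const text = template.format({
  system: "You are a friendly assistant.",
  instruction: "Summarize the plot of Hamlet.",
});
// "You are a friendly assistant.\n\nUSER: Summarize the plot of Hamlet.\nASSISTANT: "

console.log(template.stopSequences); // ["\nUSER:"]
```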
package/model-function/generate-text/prompt-template/index.cjs
@@ -26,7 +26,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
      for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.VicunaPrompt = exports.TextPrompt = exports.NeuralChatPrompt = exports.MistralInstructPrompt = exports.Llama2Prompt = exports.ChatMLPrompt = exports.AlpacaPrompt = void 0;
+ exports.VicunaPrompt = exports.TextPrompt = exports.SynthiaPrompt = exports.NeuralChatPrompt = exports.MistralInstructPrompt = exports.Llama2Prompt = exports.ChatMLPrompt = exports.AlpacaPrompt = void 0;
  exports.AlpacaPrompt = __importStar(require("./AlpacaPromptTemplate.cjs"));
  exports.ChatMLPrompt = __importStar(require("./ChatMLPromptTemplate.cjs"));
  __exportStar(require("./ChatPrompt.cjs"), exports);
@@ -37,6 +37,7 @@ exports.Llama2Prompt = __importStar(require("./Llama2PromptTemplate.cjs"));
  exports.MistralInstructPrompt = __importStar(require("./MistralInstructPromptTemplate.cjs"));
  exports.NeuralChatPrompt = __importStar(require("./NeuralChatPromptTemplate.cjs"));
  __exportStar(require("./PromptTemplateProvider.cjs"), exports);
+ exports.SynthiaPrompt = __importStar(require("./SynthiaPromptTemplate.cjs"));
  exports.TextPrompt = __importStar(require("./TextPromptTemplate.cjs"));
  exports.VicunaPrompt = __importStar(require("./VicunaPromptTemplate.cjs"));
  __exportStar(require("./trimChatPrompt.cjs"), exports);
package/model-function/generate-text/prompt-template/index.d.ts
@@ -8,6 +8,7 @@ export * as Llama2Prompt from "./Llama2PromptTemplate.js";
  export * as MistralInstructPrompt from "./MistralInstructPromptTemplate.js";
  export * as NeuralChatPrompt from "./NeuralChatPromptTemplate.js";
  export * from "./PromptTemplateProvider.js";
+ export * as SynthiaPrompt from "./SynthiaPromptTemplate.js";
  export * as TextPrompt from "./TextPromptTemplate.js";
  export * as VicunaPrompt from "./VicunaPromptTemplate.js";
  export * from "./trimChatPrompt.js";
package/model-function/generate-text/prompt-template/index.js
@@ -8,6 +8,7 @@ export * as Llama2Prompt from "./Llama2PromptTemplate.js";
  export * as MistralInstructPrompt from "./MistralInstructPromptTemplate.js";
  export * as NeuralChatPrompt from "./NeuralChatPromptTemplate.js";
  export * from "./PromptTemplateProvider.js";
+ export * as SynthiaPrompt from "./SynthiaPromptTemplate.js";
  export * as TextPrompt from "./TextPromptTemplate.js";
  export * as VicunaPrompt from "./VicunaPromptTemplate.js";
  export * from "./trimChatPrompt.js";
package/model-provider/llamacpp/LlamaCppPrompt.cjs
@@ -23,12 +23,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
      return result;
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.BakLLaVA1 = exports.Vicuna = exports.Alpaca = exports.NeuralChat = exports.Llama2 = exports.ChatML = exports.Mistral = exports.Text = exports.asLlamaCppTextPromptTemplateProvider = exports.asLlamaCppPromptTemplate = void 0;
+ exports.BakLLaVA1 = exports.Vicuna = exports.Synthia = exports.Alpaca = exports.NeuralChat = exports.Llama2 = exports.ChatML = exports.Mistral = exports.Text = exports.asLlamaCppTextPromptTemplateProvider = exports.asLlamaCppPromptTemplate = void 0;
  const alpacaPrompt = __importStar(require("../../model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs"));
  const chatMlPrompt = __importStar(require("../../model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs"));
  const llama2Prompt = __importStar(require("../../model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs"));
  const mistralPrompt = __importStar(require("../../model-function/generate-text/prompt-template/MistralInstructPromptTemplate.cjs"));
  const neuralChatPrompt = __importStar(require("../../model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs"));
+ const synthiaPrompt = __importStar(require("../../model-function/generate-text/prompt-template/SynthiaPromptTemplate.cjs"));
  const textPrompt = __importStar(require("../../model-function/generate-text/prompt-template/TextPromptTemplate.cjs"));
  const vicunaPrompt = __importStar(require("../../model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs"));
  const LlamaCppBakLLaVA1Prompt = __importStar(require("./LlamaCppBakLLaVA1PromptTemplate.cjs"));
@@ -50,10 +51,43 @@ function asLlamaCppTextPromptTemplateProvider(promptTemplateProvider) {
  }
  exports.asLlamaCppTextPromptTemplateProvider = asLlamaCppTextPromptTemplateProvider;
  exports.Text = asLlamaCppTextPromptTemplateProvider(textPrompt);
+ /**
+  * Formats text, instruction or chat prompts as a Mistral instruct prompt.
+  *
+  * Note that Mistral does not support system prompts. We emulate them.
+  *
+  * Text prompt:
+  * ```
+  * <s>[INST] { instruction } [/INST]
+  * ```
+  *
+  * Instruction prompt when system prompt is set:
+  * ```
+  * <s>[INST] ${ system prompt } [/INST] </s>[INST] ${instruction} [/INST] ${ response prefix }
+  * ```
+  *
+  * Instruction prompt template when there is no system prompt:
+  * ```
+  * <s>[INST] ${ instruction } [/INST] ${ response prefix }
+  * ```
+  *
+  * Chat prompt when system prompt is set:
+  * ```
+  * <s>[INST] ${ system prompt } [/INST] </s> [INST] ${ user msg 1 } [/INST] ${ model response 1 } [INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+  * ```
+  *
+  * Chat prompt when there is no system prompt:
+  * ```
+  * <s>[INST] ${ user msg 1 } [/INST] ${ model response 1 } </s>[INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+  * ```
+  *
+  * @see https://docs.mistral.ai/models/#chat-template
+  */
  exports.Mistral = asLlamaCppTextPromptTemplateProvider(mistralPrompt);
  exports.ChatML = asLlamaCppTextPromptTemplateProvider(chatMlPrompt);
  exports.Llama2 = asLlamaCppTextPromptTemplateProvider(llama2Prompt);
  exports.NeuralChat = asLlamaCppTextPromptTemplateProvider(neuralChatPrompt);
  exports.Alpaca = asLlamaCppTextPromptTemplateProvider(alpacaPrompt);
+ exports.Synthia = asLlamaCppTextPromptTemplateProvider(synthiaPrompt);
  exports.Vicuna = asLlamaCppTextPromptTemplateProvider(vicunaPrompt);
  exports.BakLLaVA1 = LlamaCppBakLLaVA1Prompt;
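Besides the relocated Mistral documentation, the notable addition here is the `Synthia` prompt template provider. A hedged usage sketch, under two assumptions: that the llama.cpp facade re-exports this module (e.g. as `llamacpp.prompt`), and that the completion model accepts a `promptTemplate` setting like the Ollama model later in this diff:

```ts
import { llamacpp, streamText } from "modelfusion";

const model = llamacpp
  .CompletionTextGenerator({
    promptTemplate: llamacpp.prompt.Synthia, // new in this release
    maxGenerationTokens: 512,
  })
  .withInstructionPrompt(); // generic instruction prompts, formatted for Synthia

const textStream = await streamText(model, {
  system: "You are a helpful assistant.",
  instruction: "Explain quicksort in two sentences.",
});

for await (const textPart of textStream) {
  process.stdout.write(textPart);
}
```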
package/model-provider/llamacpp/LlamaCppPrompt.d.ts
@@ -5,10 +5,43 @@ import { LlamaCppCompletionPrompt } from "./LlamaCppCompletionModel.js";
  export declare function asLlamaCppPromptTemplate<SOURCE_PROMPT>(promptTemplate: TextGenerationPromptTemplate<SOURCE_PROMPT, string>): TextGenerationPromptTemplate<SOURCE_PROMPT, LlamaCppCompletionPrompt>;
  export declare function asLlamaCppTextPromptTemplateProvider(promptTemplateProvider: TextGenerationPromptTemplateProvider<string>): TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
  export declare const Text: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
+ /**
+  * Formats text, instruction or chat prompts as a Mistral instruct prompt.
+  *
+  * Note that Mistral does not support system prompts. We emulate them.
+  *
+  * Text prompt:
+  * ```
+  * <s>[INST] { instruction } [/INST]
+  * ```
+  *
+  * Instruction prompt when system prompt is set:
+  * ```
+  * <s>[INST] ${ system prompt } [/INST] </s>[INST] ${instruction} [/INST] ${ response prefix }
+  * ```
+  *
+  * Instruction prompt template when there is no system prompt:
+  * ```
+  * <s>[INST] ${ instruction } [/INST] ${ response prefix }
+  * ```
+  *
+  * Chat prompt when system prompt is set:
+  * ```
+  * <s>[INST] ${ system prompt } [/INST] </s> [INST] ${ user msg 1 } [/INST] ${ model response 1 } [INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+  * ```
+  *
+  * Chat prompt when there is no system prompt:
+  * ```
+  * <s>[INST] ${ user msg 1 } [/INST] ${ model response 1 } </s>[INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+  * ```
+  *
+  * @see https://docs.mistral.ai/models/#chat-template
+  */
  export declare const Mistral: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
  export declare const ChatML: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
  export declare const Llama2: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
  export declare const NeuralChat: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
  export declare const Alpaca: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
+ export declare const Synthia: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
  export declare const Vicuna: TextGenerationPromptTemplateProvider<LlamaCppCompletionPrompt>;
  export declare const BakLLaVA1: typeof LlamaCppBakLLaVA1Prompt;
package/model-provider/llamacpp/LlamaCppPrompt.js
@@ -3,6 +3,7 @@ import * as chatMlPrompt from "../../model-function/generate-text/prompt-templat
  import * as llama2Prompt from "../../model-function/generate-text/prompt-template/Llama2PromptTemplate.js";
  import * as mistralPrompt from "../../model-function/generate-text/prompt-template/MistralInstructPromptTemplate.js";
  import * as neuralChatPrompt from "../../model-function/generate-text/prompt-template/NeuralChatPromptTemplate.js";
+ import * as synthiaPrompt from "../../model-function/generate-text/prompt-template/SynthiaPromptTemplate.js";
  import * as textPrompt from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
  import * as vicunaPrompt from "../../model-function/generate-text/prompt-template/VicunaPromptTemplate.js";
  import * as LlamaCppBakLLaVA1Prompt from "./LlamaCppBakLLaVA1PromptTemplate.js";
@@ -22,10 +23,43 @@ export function asLlamaCppTextPromptTemplateProvider(promptTemplateProvider) {
      };
  }
  export const Text = asLlamaCppTextPromptTemplateProvider(textPrompt);
+ /**
+  * Formats text, instruction or chat prompts as a Mistral instruct prompt.
+  *
+  * Note that Mistral does not support system prompts. We emulate them.
+  *
+  * Text prompt:
+  * ```
+  * <s>[INST] { instruction } [/INST]
+  * ```
+  *
+  * Instruction prompt when system prompt is set:
+  * ```
+  * <s>[INST] ${ system prompt } [/INST] </s>[INST] ${instruction} [/INST] ${ response prefix }
+  * ```
+  *
+  * Instruction prompt template when there is no system prompt:
+  * ```
+  * <s>[INST] ${ instruction } [/INST] ${ response prefix }
+  * ```
+  *
+  * Chat prompt when system prompt is set:
+  * ```
+  * <s>[INST] ${ system prompt } [/INST] </s> [INST] ${ user msg 1 } [/INST] ${ model response 1 } [INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+  * ```
+  *
+  * Chat prompt when there is no system prompt:
+  * ```
+  * <s>[INST] ${ user msg 1 } [/INST] ${ model response 1 } </s>[INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+  * ```
+  *
+  * @see https://docs.mistral.ai/models/#chat-template
+  */
  export const Mistral = asLlamaCppTextPromptTemplateProvider(mistralPrompt);
  export const ChatML = asLlamaCppTextPromptTemplateProvider(chatMlPrompt);
  export const Llama2 = asLlamaCppTextPromptTemplateProvider(llama2Prompt);
  export const NeuralChat = asLlamaCppTextPromptTemplateProvider(neuralChatPrompt);
  export const Alpaca = asLlamaCppTextPromptTemplateProvider(alpacaPrompt);
+ export const Synthia = asLlamaCppTextPromptTemplateProvider(synthiaPrompt);
  export const Vicuna = asLlamaCppTextPromptTemplateProvider(vicunaPrompt);
  export const BakLLaVA1 = LlamaCppBakLLaVA1Prompt;
package/model-provider/ollama/OllamaCompletionModel.cjs
@@ -9,12 +9,14 @@ const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
  const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
  const validateTypes_js_1 = require("../../core/schema/validateTypes.cjs");
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
+ const StructureFromTextStreamingModel_js_1 = require("../../model-function/generate-structure/StructureFromTextStreamingModel.cjs");
  const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
  const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
  const TextGenerationToolCallModel_js_1 = require("../../tool/generate-tool-call/TextGenerationToolCallModel.cjs");
  const TextGenerationToolCallsModel_js_1 = require("../../tool/generate-tool-calls/TextGenerationToolCallsModel.cjs");
  const createJsonStreamResponseHandler_js_1 = require("../../util/streaming/createJsonStreamResponseHandler.cjs");
  const OllamaApiConfiguration_js_1 = require("./OllamaApiConfiguration.cjs");
+ const OllamaCompletionPrompt_js_1 = require("./OllamaCompletionPrompt.cjs");
  const OllamaError_js_1 = require("./OllamaError.cjs");
  class OllamaCompletionModel extends AbstractModel_js_1.AbstractModel {
      constructor(settings) {
@@ -151,6 +153,17 @@ class OllamaCompletionModel extends AbstractModel_js_1.AbstractModel {
          const chunk = delta;
          return chunk.done === true ? undefined : chunk.response;
      }
+     asStructureGenerationModel(promptTemplate) {
+         return "adaptModel" in promptTemplate
+             ? new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
+                 model: promptTemplate.adaptModel(this),
+                 template: promptTemplate,
+             })
+             : new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
+                 model: this,
+                 template: promptTemplate,
+             });
+     }
      asToolCallGenerationModel(promptTemplate) {
          return new TextGenerationToolCallModel_js_1.TextGenerationToolCallModel({
              model: this,
@@ -163,30 +176,20 @@ class OllamaCompletionModel extends AbstractModel_js_1.AbstractModel {
              template: promptTemplate,
          });
      }
+     get promptTemplateProvider() {
+         return this.settings.promptTemplate ?? OllamaCompletionPrompt_js_1.Text;
+     }
      withJsonOutput() {
          return this;
      }
      withTextPrompt() {
-         return this.withPromptTemplate({
-             format(prompt) {
-                 return { prompt };
-             },
-             stopSequences: [],
-         });
+         return this.withPromptTemplate(this.promptTemplateProvider.text());
      }
-     /**
-      * Maps the prompt for a text version of the Ollama completion prompt template (without image support).
-      */
-     withTextPromptTemplate(promptTemplate) {
-         return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
-             model: this.withTextPrompt().withSettings({
-                 stopSequences: [
-                     ...(this.settings.stopSequences ?? []),
-                     ...promptTemplate.stopSequences,
-                 ],
-             }),
-             promptTemplate,
-         });
+     withInstructionPrompt() {
+         return this.withPromptTemplate(this.promptTemplateProvider.instruction());
+     }
+     withChatPrompt() {
+         return this.withPromptTemplate(this.promptTemplateProvider.chat());
      }
      withPromptTemplate(promptTemplate) {
          return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
package/model-provider/ollama/OllamaCompletionModel.d.ts
@@ -3,9 +3,14 @@ import { FunctionCallOptions } from "../../core/FunctionOptions.js";
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { ResponseHandler } from "../../core/api/postToApi.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
+ import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
+ import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
  import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
+ import { ChatPrompt } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
+ import { InstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
+ import { TextGenerationPromptTemplateProvider } from "../../model-function/generate-text/prompt-template/PromptTemplateProvider.js";
  import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
  import { TextGenerationToolCallsModel } from "../../tool/generate-tool-calls/TextGenerationToolCallsModel.js";
  import { ToolCallsPromptTemplate } from "../../tool/generate-tool-calls/ToolCallsPromptTemplate.js";
@@ -39,6 +44,10 @@ export interface OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE extends numbe
      raw?: boolean;
      system?: string;
      context?: number[];
+     /**
+      * Prompt template provider that is used when calling `.withTextPrompt()`, `withInstructionPrompt()` or `withChatPrompt()`.
+      */
+     promptTemplate?: TextGenerationPromptTemplateProvider<OllamaCompletionPrompt>;
  }
  export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number | undefined> extends AbstractModel<OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>> implements TextStreamingModel<OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>> {
      constructor(settings: OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>);
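The new `promptTemplate` setting is what drives the `withTextPrompt()`, `withInstructionPrompt()` and `withChatPrompt()` shorthands declared below. A hedged usage sketch, assuming the updated OllamaFacade re-exports the providers from OllamaCompletionPrompt (e.g. as `ollama.prompt`):

```ts
import { ollama, streamText } from "modelfusion";

const model = ollama
  .CompletionTextGenerator({
    model: "mistral",
    promptTemplate: ollama.prompt.Mistral, // facade re-export assumed
    raw: true, // the template already produces the final prompt string (the updated test below does the same)
  })
  .withChatPrompt();

const textStream = await streamText(model, {
  system: "You are a celebrated poet.",
  messages: [{ role: "user", content: "Write a haiku about autumn." }],
});
```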
@@ -128,14 +137,14 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
          context?: number[] | undefined;
      }>>>;
      extractTextDelta(delta: unknown): string | undefined;
+     asStructureGenerationModel<INPUT_PROMPT, OllamaCompletionPrompt>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt> | FlexibleStructureFromTextPromptTemplate<INPUT_PROMPT, unknown>): StructureFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>> | StructureFromTextStreamingModel<INPUT_PROMPT, OllamaCompletionPrompt, TextStreamingModel<OllamaCompletionPrompt, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>>;
      asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt>): TextGenerationToolCallModel<INPUT_PROMPT, OllamaCompletionPrompt, this>;
      asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallsPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt>): TextGenerationToolCallsModel<INPUT_PROMPT, OllamaCompletionPrompt, this>;
+     private get promptTemplateProvider();
      withJsonOutput(): this;
      withTextPrompt(): PromptTemplateTextStreamingModel<string, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, this>;
-     /**
-      * Maps the prompt for a text version of the Ollama completion prompt template (without image support).
-      */
-     withTextPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, PromptTemplateTextStreamingModel<string, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, this>>;
+     withInstructionPrompt(): PromptTemplateTextStreamingModel<InstructionPrompt, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, this>;
+     withChatPrompt(): PromptTemplateTextStreamingModel<ChatPrompt, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, this>;
      withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt>): PromptTemplateTextStreamingModel<INPUT_PROMPT, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, this>;
      withSettings(additionalSettings: Partial<OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>>): this;
  }
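The new `asStructureGenerationModel` also accepts flexible templates: when a template carries an `adaptModel` function (the duck-typed `"adaptModel" in promptTemplate` check in the implementation hunks above), it gets to adapt the model first, which is why `jsonStructurePrompt.text()` can now be applied directly, without the removed `withTextPromptTemplate` step. Condensed from the updated test below:

```ts
import { jsonStructurePrompt, ollama, streamStructure, zodSchema } from "modelfusion";
import { z } from "zod";

// jsonStructurePrompt.text() is a flexible template that carries adaptModel:
const stream = await streamStructure(
  ollama
    .CompletionTextGenerator({ model: "mistral:text", format: "json", raw: true })
    .asStructureGenerationModel(jsonStructurePrompt.text()),
  zodSchema(z.object({ name: z.string() })),
  "generate a name"
);

for await (const part of stream) {
  console.log(part.isComplete, part.value);
}
```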
package/model-provider/ollama/OllamaCompletionModel.js
@@ -6,12 +6,14 @@ import { zodSchema } from "../../core/schema/ZodSchema.js";
  import { safeParseJSON } from "../../core/schema/parseJSON.js";
  import { validateTypes } from "../../core/schema/validateTypes.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
+ import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
  import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
  import { TextGenerationToolCallModel, } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
  import { TextGenerationToolCallsModel } from "../../tool/generate-tool-calls/TextGenerationToolCallsModel.js";
  import { createJsonStreamResponseHandler } from "../../util/streaming/createJsonStreamResponseHandler.js";
  import { OllamaApiConfiguration } from "./OllamaApiConfiguration.js";
+ import { Text } from "./OllamaCompletionPrompt.js";
  import { failedOllamaCallResponseHandler } from "./OllamaError.js";
  export class OllamaCompletionModel extends AbstractModel {
      constructor(settings) {
@@ -148,6 +150,17 @@ export class OllamaCompletionModel extends AbstractModel {
          const chunk = delta;
          return chunk.done === true ? undefined : chunk.response;
      }
+     asStructureGenerationModel(promptTemplate) {
+         return "adaptModel" in promptTemplate
+             ? new StructureFromTextStreamingModel({
+                 model: promptTemplate.adaptModel(this),
+                 template: promptTemplate,
+             })
+             : new StructureFromTextStreamingModel({
+                 model: this,
+                 template: promptTemplate,
+             });
+     }
      asToolCallGenerationModel(promptTemplate) {
          return new TextGenerationToolCallModel({
              model: this,
@@ -160,30 +173,20 @@ export class OllamaCompletionModel extends AbstractModel {
              template: promptTemplate,
          });
      }
+     get promptTemplateProvider() {
+         return this.settings.promptTemplate ?? Text;
+     }
      withJsonOutput() {
          return this;
      }
      withTextPrompt() {
-         return this.withPromptTemplate({
-             format(prompt) {
-                 return { prompt };
-             },
-             stopSequences: [],
-         });
+         return this.withPromptTemplate(this.promptTemplateProvider.text());
      }
-     /**
-      * Maps the prompt for a text version of the Ollama completion prompt template (without image support).
-      */
-     withTextPromptTemplate(promptTemplate) {
-         return new PromptTemplateTextStreamingModel({
-             model: this.withTextPrompt().withSettings({
-                 stopSequences: [
-                     ...(this.settings.stopSequences ?? []),
-                     ...promptTemplate.stopSequences,
-                 ],
-             }),
-             promptTemplate,
-         });
+     withInstructionPrompt() {
+         return this.withPromptTemplate(this.promptTemplateProvider.instruction());
+     }
+     withChatPrompt() {
+         return this.withPromptTemplate(this.promptTemplateProvider.chat());
      }
      withPromptTemplate(promptTemplate) {
          return new PromptTemplateTextStreamingModel({
package/model-provider/ollama/OllamaCompletionModel.test.cjs
@@ -1,27 +1,4 @@
  "use strict";
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-     if (k2 === undefined) k2 = k;
-     var desc = Object.getOwnPropertyDescriptor(m, k);
-     if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-       desc = { enumerable: true, get: function() { return m[k]; } };
-     }
-     Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
-     if (k2 === undefined) k2 = k;
-     o[k2] = m[k];
- }));
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-     Object.defineProperty(o, "default", { enumerable: true, value: v });
- }) : function(o, v) {
-     o["default"] = v;
- });
- var __importStar = (this && this.__importStar) || function (mod) {
-     if (mod && mod.__esModule) return mod;
-     var result = {};
-     if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-     __setModuleDefault(result, mod);
-     return result;
- };
  Object.defineProperty(exports, "__esModule", { value: true });
  const assert_1 = require("assert");
  const zod_1 = require("zod");
@@ -31,13 +8,13 @@ const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
  const jsonStructurePrompt_js_1 = require("../../model-function/generate-structure/jsonStructurePrompt.cjs");
  const streamStructure_js_1 = require("../../model-function/generate-structure/streamStructure.cjs");
  const generateText_js_1 = require("../../model-function/generate-text/generateText.cjs");
- const TextPrompt = __importStar(require("../../model-function/generate-text/prompt-template/TextPromptTemplate.cjs"));
  const streamText_js_1 = require("../../model-function/generate-text/streamText.cjs");
  const JsonTestServer_js_1 = require("../../test/JsonTestServer.cjs");
  const StreamingTestServer_js_1 = require("../../test/StreamingTestServer.cjs");
  const arrayFromAsync_js_1 = require("../../test/arrayFromAsync.cjs");
  const OllamaApiConfiguration_js_1 = require("./OllamaApiConfiguration.cjs");
  const OllamaCompletionModel_js_1 = require("./OllamaCompletionModel.cjs");
+ const OllamaCompletionPrompt_js_1 = require("./OllamaCompletionPrompt.cjs");
  describe("generateText", () => {
      const server = new JsonTestServer_js_1.JsonTestServer("http://127.0.0.1:11434/api/generate");
      server.setupTestEnvironment();
@@ -128,11 +105,10 @@ describe("streamStructure", () => {
      ];
      const stream = await (0, streamStructure_js_1.streamStructure)(new OllamaCompletionModel_js_1.OllamaCompletionModel({
          model: "mistral:text",
+         promptTemplate: OllamaCompletionPrompt_js_1.Text,
          format: "json",
          raw: true,
-     })
-         .withTextPromptTemplate(TextPrompt.instruction())
-         .asStructureGenerationModel(jsonStructurePrompt_js_1.jsonStructurePrompt.text()), (0, ZodSchema_js_1.zodSchema)(zod_1.z.object({ name: zod_1.z.string() })), "generate a name");
+     }).asStructureGenerationModel(jsonStructurePrompt_js_1.jsonStructurePrompt.text()), (0, ZodSchema_js_1.zodSchema)(zod_1.z.object({ name: zod_1.z.string() })), "generate a name");
      // note: space moved to last chunk bc of trimming
      expect(await (0, arrayFromAsync_js_1.arrayFromAsync)(stream)).toStrictEqual([
          { isComplete: false, value: {} },
package/model-provider/ollama/OllamaCompletionModel.test.js
@@ -6,13 +6,13 @@ import { zodSchema } from "../../core/schema/ZodSchema.js";
  import { jsonStructurePrompt } from "../../model-function/generate-structure/jsonStructurePrompt.js";
  import { streamStructure } from "../../model-function/generate-structure/streamStructure.js";
  import { generateText } from "../../model-function/generate-text/generateText.js";
- import * as TextPrompt from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
  import { streamText } from "../../model-function/generate-text/streamText.js";
  import { JsonTestServer } from "../../test/JsonTestServer.js";
  import { StreamingTestServer } from "../../test/StreamingTestServer.js";
  import { arrayFromAsync } from "../../test/arrayFromAsync.js";
  import { OllamaApiConfiguration } from "./OllamaApiConfiguration.js";
  import { OllamaCompletionModel } from "./OllamaCompletionModel.js";
+ import { Text } from "./OllamaCompletionPrompt.js";
  describe("generateText", () => {
      const server = new JsonTestServer("http://127.0.0.1:11434/api/generate");
      server.setupTestEnvironment();
@@ -103,11 +103,10 @@ describe("streamStructure", () => {
      ];
      const stream = await streamStructure(new OllamaCompletionModel({
          model: "mistral:text",
+         promptTemplate: Text,
          format: "json",
          raw: true,
-     })
-         .withTextPromptTemplate(TextPrompt.instruction())
-         .asStructureGenerationModel(jsonStructurePrompt.text()), zodSchema(z.object({ name: z.string() })), "generate a name");
+     }).asStructureGenerationModel(jsonStructurePrompt.text()), zodSchema(z.object({ name: z.string() })), "generate a name");
      // note: space moved to last chunk bc of trimming
      expect(await arrayFromAsync(stream)).toStrictEqual([
          { isComplete: false, value: {} },
package/model-provider/ollama/OllamaCompletionPrompt.cjs
@@ -0,0 +1,91 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     var desc = Object.getOwnPropertyDescriptor(m, k);
+     if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+       desc = { enumerable: true, get: function() { return m[k]; } };
+     }
+     Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+     if (k2 === undefined) k2 = k;
+     o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+     Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+     o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || function (mod) {
+     if (mod && mod.__esModule) return mod;
+     var result = {};
+     if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+     __setModuleDefault(result, mod);
+     return result;
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.Vicuna = exports.Synthia = exports.Alpaca = exports.NeuralChat = exports.Llama2 = exports.ChatML = exports.Mistral = exports.Text = exports.asOllamaCompletionTextPromptTemplateProvider = exports.asOllamaCompletionPromptTemplate = void 0;
+ const alpacaPrompt = __importStar(require("../../model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs"));
+ const chatMlPrompt = __importStar(require("../../model-function/generate-text/prompt-template/ChatMLPromptTemplate.cjs"));
+ const llama2Prompt = __importStar(require("../../model-function/generate-text/prompt-template/Llama2PromptTemplate.cjs"));
+ const mistralPrompt = __importStar(require("../../model-function/generate-text/prompt-template/MistralInstructPromptTemplate.cjs"));
+ const neuralChatPrompt = __importStar(require("../../model-function/generate-text/prompt-template/NeuralChatPromptTemplate.cjs"));
+ const synthiaPrompt = __importStar(require("../../model-function/generate-text/prompt-template/SynthiaPromptTemplate.cjs"));
+ const textPrompt = __importStar(require("../../model-function/generate-text/prompt-template/TextPromptTemplate.cjs"));
+ const vicunaPrompt = __importStar(require("../../model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs"));
+ function asOllamaCompletionPromptTemplate(promptTemplate) {
+     return {
+         format: (prompt) => ({
+             prompt: promptTemplate.format(prompt),
+         }),
+         stopSequences: promptTemplate.stopSequences,
+     };
+ }
+ exports.asOllamaCompletionPromptTemplate = asOllamaCompletionPromptTemplate;
+ function asOllamaCompletionTextPromptTemplateProvider(promptTemplateProvider) {
+     return {
+         text: () => asOllamaCompletionPromptTemplate(promptTemplateProvider.text()),
+         instruction: () => asOllamaCompletionPromptTemplate(promptTemplateProvider.instruction()),
+         chat: () => asOllamaCompletionPromptTemplate(promptTemplateProvider.chat()),
+     };
+ }
+ exports.asOllamaCompletionTextPromptTemplateProvider = asOllamaCompletionTextPromptTemplateProvider;
+ exports.Text = asOllamaCompletionTextPromptTemplateProvider(textPrompt);
+ /**
+  * Formats text, instruction or chat prompts as a Mistral instruct prompt.
+  *
+  * Note that Mistral does not support system prompts. We emulate them.
+  *
+  * Text prompt:
+  * ```
+  * <s>[INST] { instruction } [/INST]
+  * ```
+  *
+  * Instruction prompt when system prompt is set:
+  * ```
+  * <s>[INST] ${ system prompt } [/INST] </s>[INST] ${instruction} [/INST] ${ response prefix }
+  * ```
+  *
+  * Instruction prompt template when there is no system prompt:
+  * ```
+  * <s>[INST] ${ instruction } [/INST] ${ response prefix }
+  * ```
+  *
+  * Chat prompt when system prompt is set:
+  * ```
+  * <s>[INST] ${ system prompt } [/INST] </s> [INST] ${ user msg 1 } [/INST] ${ model response 1 } [INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+  * ```
+  *
+  * Chat prompt when there is no system prompt:
+  * ```
+  * <s>[INST] ${ user msg 1 } [/INST] ${ model response 1 } </s>[INST] ${ user msg 2 } [/INST] ${ model response 2 } [INST] ${ user msg 3 } [/INST]
+  * ```
+  *
+  * @see https://docs.mistral.ai/models/#chat-template
+  */
+ exports.Mistral = asOllamaCompletionTextPromptTemplateProvider(mistralPrompt);
+ exports.ChatML = asOllamaCompletionTextPromptTemplateProvider(chatMlPrompt);
+ exports.Llama2 = asOllamaCompletionTextPromptTemplateProvider(llama2Prompt);
+ exports.NeuralChat = asOllamaCompletionTextPromptTemplateProvider(neuralChatPrompt);
+ exports.Alpaca = asOllamaCompletionTextPromptTemplateProvider(alpacaPrompt);
+ exports.Synthia = asOllamaCompletionTextPromptTemplateProvider(synthiaPrompt);
+ exports.Vicuna = asOllamaCompletionTextPromptTemplateProvider(vicunaPrompt);
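
The shape of the adapter is the whole story of this new file: `asOllamaCompletionPromptTemplate` moves each template's string output into the `prompt` field expected by the Ollama completion API and passes the stop sequences through untouched. A rough sketch of the resulting values (the deep import path is assumed and may not be part of the documented public API):

```ts
import { Mistral, Text } from "modelfusion/model-provider/ollama/OllamaCompletionPrompt.js"; // path assumed

// Default provider: the string goes straight into the prompt field,
// reproducing the old withTextPrompt() behavior.
Text.text().format("hello");
// => { prompt: "hello" }

// Mistral provider: the instruct wrapping from the doc comment above is applied first.
Mistral.instruction().format({ instruction: "hello" });
// => { prompt: "<s>[INST] hello [/INST] " } (approximate, per the template docs)
```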