modelfusion 0.117.0 → 0.119.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (161)
  1. package/CHANGELOG.md +60 -0
  2. package/README.md +10 -9
  3. package/core/getFunctionCallLogger.cjs +6 -6
  4. package/core/getFunctionCallLogger.js +6 -6
  5. package/model-function/ModelCallEvent.d.ts +1 -1
  6. package/model-function/embed/EmbeddingEvent.d.ts +1 -1
  7. package/model-function/embed/EmbeddingModel.d.ts +1 -1
  8. package/model-function/embed/embed.cjs +5 -5
  9. package/model-function/embed/embed.d.ts +2 -2
  10. package/model-function/embed/embed.js +5 -5
  11. package/model-function/executeStandardCall.cjs +3 -3
  12. package/model-function/executeStandardCall.d.ts +2 -2
  13. package/model-function/executeStandardCall.js +3 -3
  14. package/model-function/generate-image/ImageGenerationEvent.d.ts +1 -1
  15. package/model-function/generate-image/ImageGenerationModel.d.ts +1 -1
  16. package/model-function/generate-image/PromptTemplateImageGenerationModel.d.ts +1 -1
  17. package/model-function/generate-image/generateImage.cjs +2 -2
  18. package/model-function/generate-image/generateImage.d.ts +1 -1
  19. package/model-function/generate-image/generateImage.js +2 -2
  20. package/model-function/generate-speech/SpeechGenerationEvent.d.ts +1 -1
  21. package/model-function/generate-speech/generateSpeech.cjs +2 -2
  22. package/model-function/generate-speech/generateSpeech.d.ts +1 -1
  23. package/model-function/generate-speech/generateSpeech.js +2 -2
  24. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +10 -1
  25. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +1 -0
  26. package/model-function/generate-structure/StructureFromTextGenerationModel.js +10 -1
  27. package/model-function/generate-structure/StructureFromTextPromptTemplate.d.ts +12 -1
  28. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +1 -22
  29. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +0 -5
  30. package/model-function/generate-structure/StructureFromTextStreamingModel.js +1 -22
  31. package/model-function/generate-structure/StructureGenerationEvent.d.ts +1 -1
  32. package/model-function/generate-structure/generateStructure.cjs +2 -2
  33. package/model-function/generate-structure/generateStructure.d.ts +1 -1
  34. package/model-function/generate-structure/generateStructure.js +2 -2
  35. package/model-function/generate-structure/jsonStructurePrompt.cjs +4 -12
  36. package/model-function/generate-structure/jsonStructurePrompt.js +4 -12
  37. package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +2 -2
  38. package/model-function/generate-text/PromptTemplateTextGenerationModel.cjs +6 -0
  39. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +5 -2
  40. package/model-function/generate-text/PromptTemplateTextGenerationModel.js +6 -0
  41. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +6 -0
  42. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +3 -0
  43. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +6 -0
  44. package/model-function/generate-text/TextGenerationEvent.d.ts +1 -1
  45. package/model-function/generate-text/TextGenerationModel.d.ts +7 -4
  46. package/model-function/generate-text/generateText.cjs +3 -3
  47. package/model-function/generate-text/generateText.d.ts +1 -1
  48. package/model-function/generate-text/generateText.js +3 -3
  49. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.cjs +8 -1
  50. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.d.ts +5 -0
  51. package/model-function/generate-text/prompt-template/AlpacaPromptTemplate.js +6 -0
  52. package/model-function/generate-text/prompt-template/PromptTemplateProvider.cjs +2 -0
  53. package/model-function/generate-text/prompt-template/PromptTemplateProvider.d.ts +8 -0
  54. package/model-function/generate-text/prompt-template/PromptTemplateProvider.js +1 -0
  55. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.cjs +34 -1
  56. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.d.ts +9 -0
  57. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.js +31 -0
  58. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.cjs +28 -0
  59. package/model-function/generate-text/prompt-template/VicunaPromptTemplate.test.js +29 -1
  60. package/model-function/generate-text/prompt-template/index.cjs +1 -0
  61. package/model-function/generate-text/prompt-template/index.d.ts +1 -0
  62. package/model-function/generate-text/prompt-template/index.js +1 -0
  63. package/model-function/generate-transcription/TranscriptionEvent.d.ts +1 -1
  64. package/model-function/generate-transcription/TranscriptionModel.d.ts +1 -1
  65. package/model-function/generate-transcription/generateTranscription.cjs +1 -1
  66. package/model-function/generate-transcription/generateTranscription.d.ts +1 -1
  67. package/model-function/generate-transcription/generateTranscription.js +1 -1
  68. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +3 -3
  69. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +1 -1
  70. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +3 -3
  71. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +3 -3
  72. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +1 -1
  73. package/model-provider/cohere/CohereTextEmbeddingModel.js +3 -3
  74. package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -3
  75. package/model-provider/cohere/CohereTextGenerationModel.d.ts +5 -4
  76. package/model-provider/cohere/CohereTextGenerationModel.js +6 -3
  77. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +3 -3
  78. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +1 -1
  79. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +3 -3
  80. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +6 -3
  81. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +5 -4
  82. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +6 -3
  83. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.cjs +15 -1
  84. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.d.ts +4 -0
  85. package/model-provider/llamacpp/LlamaCppBakLLaVA1PromptTemplate.js +13 -0
  86. package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +40 -33
  87. package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +20 -9
  88. package/model-provider/llamacpp/LlamaCppCompletionModel.js +40 -33
  89. package/model-provider/llamacpp/LlamaCppFacade.cjs +4 -3
  90. package/model-provider/llamacpp/LlamaCppFacade.d.ts +2 -1
  91. package/model-provider/llamacpp/LlamaCppFacade.js +2 -1
  92. package/model-provider/llamacpp/LlamaCppGrammars.cjs +3 -1
  93. package/model-provider/llamacpp/LlamaCppGrammars.d.ts +1 -0
  94. package/model-provider/llamacpp/LlamaCppGrammars.js +1 -0
  95. package/model-provider/llamacpp/LlamaCppPrompt.cjs +59 -0
  96. package/model-provider/llamacpp/LlamaCppPrompt.d.ts +14 -0
  97. package/model-provider/llamacpp/LlamaCppPrompt.js +31 -0
  98. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +3 -3
  99. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +1 -1
  100. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +3 -3
  101. package/model-provider/llamacpp/convertJsonSchemaToGBNF.cjs +113 -0
  102. package/model-provider/llamacpp/convertJsonSchemaToGBNF.d.ts +7 -0
  103. package/model-provider/llamacpp/convertJsonSchemaToGBNF.js +109 -0
  104. package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.cjs +150 -0
  105. package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.d.ts +1 -0
  106. package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.js +148 -0
  107. package/model-provider/llamacpp/index.cjs +2 -3
  108. package/model-provider/llamacpp/index.d.ts +1 -2
  109. package/model-provider/llamacpp/index.js +1 -2
  110. package/model-provider/mistral/MistralChatModel.cjs +6 -3
  111. package/model-provider/mistral/MistralChatModel.d.ts +5 -4
  112. package/model-provider/mistral/MistralChatModel.js +6 -3
  113. package/model-provider/mistral/MistralTextEmbeddingModel.cjs +3 -3
  114. package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +1 -1
  115. package/model-provider/mistral/MistralTextEmbeddingModel.js +3 -3
  116. package/model-provider/ollama/OllamaChatModel.cjs +3 -3
  117. package/model-provider/ollama/OllamaChatModel.d.ts +2 -2
  118. package/model-provider/ollama/OllamaChatModel.js +3 -3
  119. package/model-provider/ollama/OllamaCompletionModel.cjs +6 -3
  120. package/model-provider/ollama/OllamaCompletionModel.d.ts +15 -14
  121. package/model-provider/ollama/OllamaCompletionModel.js +6 -3
  122. package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +3 -3
  123. package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts +1 -1
  124. package/model-provider/ollama/OllamaTextEmbeddingModel.js +3 -3
  125. package/model-provider/openai/AbstractOpenAIChatModel.cjs +12 -12
  126. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +6 -6
  127. package/model-provider/openai/AbstractOpenAIChatModel.js +12 -12
  128. package/model-provider/openai/AbstractOpenAICompletionModel.cjs +9 -6
  129. package/model-provider/openai/AbstractOpenAICompletionModel.d.ts +3 -2
  130. package/model-provider/openai/AbstractOpenAICompletionModel.js +9 -6
  131. package/model-provider/openai/OpenAIImageGenerationModel.cjs +3 -3
  132. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +1 -1
  133. package/model-provider/openai/OpenAIImageGenerationModel.js +3 -3
  134. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +3 -3
  135. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +1 -1
  136. package/model-provider/openai/OpenAITextEmbeddingModel.js +3 -3
  137. package/model-provider/openai/OpenAITranscriptionModel.cjs +3 -3
  138. package/model-provider/openai/OpenAITranscriptionModel.d.ts +1 -1
  139. package/model-provider/openai/OpenAITranscriptionModel.js +3 -3
  140. package/model-provider/stability/StabilityImageGenerationModel.cjs +3 -3
  141. package/model-provider/stability/StabilityImageGenerationModel.d.ts +1 -1
  142. package/model-provider/stability/StabilityImageGenerationModel.js +3 -3
  143. package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +3 -3
  144. package/model-provider/whispercpp/WhisperCppTranscriptionModel.d.ts +1 -1
  145. package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +3 -3
  146. package/package.json +1 -1
  147. package/tool/generate-tool-call/TextGenerationToolCallModel.cjs +2 -2
  148. package/tool/generate-tool-call/TextGenerationToolCallModel.d.ts +1 -1
  149. package/tool/generate-tool-call/TextGenerationToolCallModel.js +2 -2
  150. package/tool/generate-tool-call/ToolCallGenerationEvent.d.ts +1 -1
  151. package/tool/generate-tool-call/ToolCallGenerationModel.d.ts +1 -1
  152. package/tool/generate-tool-call/generateToolCall.cjs +2 -2
  153. package/tool/generate-tool-call/generateToolCall.js +2 -2
  154. package/tool/generate-tool-calls/TextGenerationToolCallsModel.cjs +2 -2
  155. package/tool/generate-tool-calls/TextGenerationToolCallsModel.d.ts +1 -1
  156. package/tool/generate-tool-calls/TextGenerationToolCallsModel.js +2 -2
  157. package/tool/generate-tool-calls/ToolCallsGenerationEvent.d.ts +1 -1
  158. package/tool/generate-tool-calls/ToolCallsGenerationModel.d.ts +1 -1
  159. package/tool/generate-tool-calls/generateToolCalls.cjs +2 -2
  160. package/tool/generate-tool-calls/generateToolCalls.d.ts +1 -1
  161. package/tool/generate-tool-calls/generateToolCalls.js +2 -2
@@ -1,34 +1,13 @@
1
1
  import { streamText } from "../../model-function/generate-text/streamText.js";
2
2
  import { AsyncQueue } from "../../util/AsyncQueue.js";
3
- import { generateText } from "../generate-text/generateText.js";
4
3
  import { StructureFromTextGenerationModel } from "./StructureFromTextGenerationModel.js";
5
- import { StructureParseError } from "./StructureParseError.js";
6
4
  import { parsePartialJson } from "./parsePartialJson.js";
7
5
  export class StructureFromTextStreamingModel extends StructureFromTextGenerationModel {
8
6
  constructor(options) {
9
7
  super(options);
10
8
  }
11
- async doGenerateStructure(schema, prompt, options) {
12
- const { response, text } = await generateText(this.model, this.template.createPrompt(prompt, schema), {
13
- ...options,
14
- fullResponse: true,
15
- });
16
- try {
17
- return {
18
- response,
19
- value: this.template.extractStructure(text),
20
- valueText: text,
21
- };
22
- }
23
- catch (error) {
24
- throw new StructureParseError({
25
- valueText: text,
26
- cause: error,
27
- });
28
- }
29
- }
30
9
  async doStreamStructure(schema, prompt, options) {
31
- const textStream = await streamText(this.model, this.template.createPrompt(prompt, schema), options);
10
+ const textStream = await streamText(this.getModelWithJsonOutput(schema), this.template.createPrompt(prompt, schema), options);
32
11
  const queue = new AsyncQueue();
33
12
  // run async on purpose:
34
13
  (async () => {
@@ -4,7 +4,7 @@ export interface StructureGenerationStartedEvent extends BaseModelCallStartedEve
4
4
  }
5
5
  export type StructureGenerationFinishedEventResult = {
6
6
  status: "success";
7
- response: unknown;
7
+ rawResponse: unknown;
8
8
  value: unknown;
9
9
  usage?: {
10
10
  promptTokens: number;
@@ -29,7 +29,7 @@ async function generateStructure(model, schema, prompt, options) {
29
29
  }
30
30
  const value = parseResult.data;
31
31
  return {
32
- response: result.response,
32
+ rawResponse: result.response,
33
33
  extractedValue: value,
34
34
  usage: result.usage,
35
35
  };
@@ -38,7 +38,7 @@ async function generateStructure(model, schema, prompt, options) {
38
38
  return options?.fullResponse
39
39
  ? {
40
40
  structure: fullResponse.value,
41
- response: fullResponse.response,
41
+ rawResponse: fullResponse.rawResponse,
42
42
  metadata: fullResponse.metadata,
43
43
  }
44
44
  : fullResponse.value;
@@ -44,6 +44,6 @@ export declare function generateStructure<STRUCTURE, PROMPT, SETTINGS extends St
44
44
  fullResponse: true;
45
45
  }): Promise<{
46
46
  structure: STRUCTURE;
47
- response: unknown;
47
+ rawResponse: unknown;
48
48
  metadata: ModelCallMetadata;
49
49
  }>;
@@ -26,7 +26,7 @@ export async function generateStructure(model, schema, prompt, options) {
26
26
  }
27
27
  const value = parseResult.data;
28
28
  return {
29
- response: result.response,
29
+ rawResponse: result.response,
30
30
  extractedValue: value,
31
31
  usage: result.usage,
32
32
  };
@@ -35,7 +35,7 @@ export async function generateStructure(model, schema, prompt, options) {
35
35
  return options?.fullResponse
36
36
  ? {
37
37
  structure: fullResponse.value,
38
- response: fullResponse.response,
38
+ rawResponse: fullResponse.rawResponse,
39
39
  metadata: fullResponse.metadata,
40
40
  }
41
41
  : fullResponse.value;
@@ -15,12 +15,8 @@ exports.jsonStructurePrompt = {
15
15
  instruction: prompt,
16
16
  }),
17
17
  extractStructure,
18
- adaptModel: (model) => {
19
- if (model.withJsonOutput != null) {
20
- model = model.withJsonOutput();
21
- }
22
- return model.withInstructionPrompt();
23
- },
18
+ adaptModel: (model) => model.withInstructionPrompt(),
19
+ withJsonOutput: ({ model, schema }) => model.withJsonOutput(schema),
24
20
  };
25
21
  },
26
22
  instruction({ schemaPrefix, schemaSuffix, } = {}) {
@@ -35,12 +31,8 @@ exports.jsonStructurePrompt = {
35
31
  instruction: prompt.instruction,
36
32
  }),
37
33
  extractStructure,
38
- adaptModel: (model) => {
39
- if (model.withJsonOutput != null) {
40
- model = model.withJsonOutput();
41
- }
42
- return model.withInstructionPrompt();
43
- },
34
+ adaptModel: (model) => model.withInstructionPrompt(),
35
+ withJsonOutput: ({ model, schema }) => model.withJsonOutput(schema),
44
36
  };
45
37
  },
46
38
  };
@@ -12,12 +12,8 @@ export const jsonStructurePrompt = {
12
12
  instruction: prompt,
13
13
  }),
14
14
  extractStructure,
15
- adaptModel: (model) => {
16
- if (model.withJsonOutput != null) {
17
- model = model.withJsonOutput();
18
- }
19
- return model.withInstructionPrompt();
20
- },
15
+ adaptModel: (model) => model.withInstructionPrompt(),
16
+ withJsonOutput: ({ model, schema }) => model.withJsonOutput(schema),
21
17
  };
22
18
  },
23
19
  instruction({ schemaPrefix, schemaSuffix, } = {}) {
@@ -32,12 +28,8 @@ export const jsonStructurePrompt = {
32
28
  instruction: prompt.instruction,
33
29
  }),
34
30
  extractStructure,
35
- adaptModel: (model) => {
36
- if (model.withJsonOutput != null) {
37
- model = model.withJsonOutput();
38
- }
39
- return model.withInstructionPrompt();
40
- },
31
+ adaptModel: (model) => model.withInstructionPrompt(),
32
+ withJsonOutput: ({ model, schema }) => model.withJsonOutput(schema),
41
33
  };
42
34
  },
43
35
  };
@@ -11,7 +11,7 @@ export declare class PromptTemplateFullTextModel<PROMPT, MODEL_PROMPT, SETTINGS
11
11
  promptTemplate: TextGenerationPromptTemplate<PROMPT, MODEL_PROMPT>;
12
12
  });
13
13
  doGenerateToolCall(tool: ToolDefinition<string, unknown>, prompt: PROMPT, options?: FunctionOptions | undefined): PromiseLike<{
14
- response: unknown;
14
+ rawResponse: unknown;
15
15
  toolCall: {
16
16
  id: string;
17
17
  args: unknown;
@@ -23,7 +23,7 @@ export declare class PromptTemplateFullTextModel<PROMPT, MODEL_PROMPT, SETTINGS
23
23
  } | undefined;
24
24
  }>;
25
25
  doGenerateToolCalls(tools: ToolDefinition<string, unknown>[], prompt: PROMPT, options?: FunctionOptions | undefined): PromiseLike<{
26
- response: unknown;
26
+ rawResponse: unknown;
27
27
  text: string | null;
28
28
  toolCalls: {
29
29
  id: string;
@@ -68,6 +68,12 @@ class PromptTemplateTextGenerationModel {
68
68
  template: promptTemplate,
69
69
  });
70
70
  }
71
+ withJsonOutput(schema) {
72
+ return new PromptTemplateTextGenerationModel({
73
+ model: this.model.withJsonOutput(schema),
74
+ promptTemplate: this.promptTemplate,
75
+ });
76
+ }
71
77
  withPromptTemplate(promptTemplate) {
72
78
  return new PromptTemplateTextGenerationModel({
73
79
  model: this.withSettings({
@@ -1,4 +1,6 @@
1
1
  import { FunctionCallOptions } from "../../core/FunctionOptions.js";
2
+ import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
3
+ import { Schema } from "../../core/schema/Schema.js";
2
4
  import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
3
5
  import { TextGenerationToolCallsModel } from "../../tool/generate-tool-calls/TextGenerationToolCallsModel.js";
4
6
  import { ToolCallsPromptTemplate } from "../../tool/generate-tool-calls/ToolCallsPromptTemplate.js";
@@ -19,7 +21,7 @@ export declare class PromptTemplateTextGenerationModel<PROMPT, MODEL_PROMPT, SET
19
21
  get contextWindowSize(): MODEL["contextWindowSize"];
20
22
  get countPromptTokens(): MODEL["countPromptTokens"] extends undefined ? undefined : (prompt: PROMPT) => PromiseLike<number>;
21
23
  doGenerateTexts(prompt: PROMPT, options?: FunctionCallOptions): PromiseLike<{
22
- response: unknown;
24
+ rawResponse: unknown;
23
25
  textGenerationResults: import("./TextGenerationResult.js").TextGenerationResult[];
24
26
  usage?: {
25
27
  promptTokens: number;
@@ -28,7 +30,7 @@ export declare class PromptTemplateTextGenerationModel<PROMPT, MODEL_PROMPT, SET
28
30
  } | undefined;
29
31
  }>;
30
32
  restoreGeneratedTexts(rawResponse: unknown): {
31
- response: unknown;
33
+ rawResponse: unknown;
32
34
  textGenerationResults: import("./TextGenerationResult.js").TextGenerationResult[];
33
35
  usage?: {
34
36
  promptTokens: number;
@@ -40,6 +42,7 @@ export declare class PromptTemplateTextGenerationModel<PROMPT, MODEL_PROMPT, SET
40
42
  asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallPromptTemplate<INPUT_PROMPT, PROMPT>): TextGenerationToolCallModel<INPUT_PROMPT, PROMPT, this>;
41
43
  asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallsPromptTemplate<INPUT_PROMPT, PROMPT>): TextGenerationToolCallsModel<INPUT_PROMPT, PROMPT, this>;
42
44
  asStructureGenerationModel<INPUT_PROMPT>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, PROMPT>): StructureFromTextGenerationModel<INPUT_PROMPT, PROMPT, this>;
45
+ withJsonOutput(schema: Schema<unknown> & JsonSchemaProducer): this;
43
46
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): PromptTemplateTextGenerationModel<INPUT_PROMPT, PROMPT, SETTINGS, this>;
44
47
  withSettings(additionalSettings: Partial<SETTINGS>): this;
45
48
  }
@@ -65,6 +65,12 @@ export class PromptTemplateTextGenerationModel {
65
65
  template: promptTemplate,
66
66
  });
67
67
  }
68
+ withJsonOutput(schema) {
69
+ return new PromptTemplateTextGenerationModel({
70
+ model: this.model.withJsonOutput(schema),
71
+ promptTemplate: this.promptTemplate,
72
+ });
73
+ }
68
74
  withPromptTemplate(promptTemplate) {
69
75
  return new PromptTemplateTextGenerationModel({
70
76
  model: this.withSettings({
@@ -20,6 +20,12 @@ class PromptTemplateTextStreamingModel extends PromptTemplateTextGenerationModel
20
20
  template: promptTemplate,
21
21
  });
22
22
  }
23
+ withJsonOutput(schema) {
24
+ return new PromptTemplateTextStreamingModel({
25
+ model: this.model.withJsonOutput(schema),
26
+ promptTemplate: this.promptTemplate,
27
+ });
28
+ }
23
29
  withPromptTemplate(promptTemplate) {
24
30
  return new PromptTemplateTextStreamingModel({
25
31
  model: this.withSettings({
@@ -1,4 +1,6 @@
1
1
  import { FunctionCallOptions } from "../../core/FunctionOptions.js";
2
+ import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
3
+ import { Schema } from "../../core/schema/Schema.js";
2
4
  import { StructureFromTextPromptTemplate } from "../generate-structure/StructureFromTextPromptTemplate.js";
3
5
  import { StructureFromTextStreamingModel } from "../generate-structure/StructureFromTextStreamingModel.js";
4
6
  import { PromptTemplateTextGenerationModel } from "./PromptTemplateTextGenerationModel.js";
@@ -12,6 +14,7 @@ export declare class PromptTemplateTextStreamingModel<PROMPT, MODEL_PROMPT, SETT
12
14
  doStreamText(prompt: PROMPT, options?: FunctionCallOptions): PromiseLike<AsyncIterable<import("../Delta.js").Delta<unknown>>>;
13
15
  extractTextDelta(delta: unknown): string | undefined;
14
16
  asStructureGenerationModel<INPUT_PROMPT>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, PROMPT>): StructureFromTextStreamingModel<INPUT_PROMPT, PROMPT, this>;
17
+ withJsonOutput(schema: Schema<unknown> & JsonSchemaProducer): this;
15
18
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): PromptTemplateTextStreamingModel<INPUT_PROMPT, PROMPT, SETTINGS, this>;
16
19
  withSettings(additionalSettings: Partial<SETTINGS>): this;
17
20
  }
@@ -17,6 +17,12 @@ export class PromptTemplateTextStreamingModel extends PromptTemplateTextGenerati
17
17
  template: promptTemplate,
18
18
  });
19
19
  }
20
+ withJsonOutput(schema) {
21
+ return new PromptTemplateTextStreamingModel({
22
+ model: this.model.withJsonOutput(schema),
23
+ promptTemplate: this.promptTemplate,
24
+ });
25
+ }
20
26
  withPromptTemplate(promptTemplate) {
21
27
  return new PromptTemplateTextStreamingModel({
22
28
  model: this.withSettings({
@@ -4,7 +4,7 @@ export interface TextGenerationStartedEvent extends BaseModelCallStartedEvent {
4
4
  }
5
5
  export type TextGenerationFinishedEventResult = {
6
6
  status: "success";
7
- response: unknown;
7
+ rawResponse: unknown;
8
8
  value: string;
9
9
  usage?: {
10
10
  promptTokens: number;
@@ -1,4 +1,6 @@
1
1
  import { FunctionCallOptions } from "../../core/FunctionOptions.js";
2
+ import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
3
+ import { Schema } from "../../core/schema/Schema.js";
2
4
  import { Delta } from "../Delta.js";
3
5
  import { Model, ModelSettings } from "../Model.js";
4
6
  import { BasicTokenizer, FullTokenizer } from "../tokenize-text/Tokenizer.js";
@@ -63,7 +65,7 @@ export interface TextGenerationModel<PROMPT, SETTINGS extends TextGenerationMode
63
65
  */
64
66
  readonly countPromptTokens: ((prompt: PROMPT) => PromiseLike<number>) | undefined;
65
67
  doGenerateTexts(prompt: PROMPT, options?: FunctionCallOptions): PromiseLike<{
66
- response: unknown;
68
+ rawResponse: unknown;
67
69
  textGenerationResults: TextGenerationResult[];
68
70
  usage?: {
69
71
  promptTokens: number;
@@ -72,7 +74,7 @@ export interface TextGenerationModel<PROMPT, SETTINGS extends TextGenerationMode
72
74
  };
73
75
  }>;
74
76
  restoreGeneratedTexts(rawResponse: unknown): {
75
- response: unknown;
77
+ rawResponse: unknown;
76
78
  textGenerationResults: TextGenerationResult[];
77
79
  usage?: {
78
80
  promptTokens: number;
@@ -82,9 +84,10 @@ export interface TextGenerationModel<PROMPT, SETTINGS extends TextGenerationMode
82
84
  };
83
85
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): TextGenerationModel<INPUT_PROMPT, SETTINGS>;
84
86
  /**
85
- * Optional. When available, forces the model to return JSON as the text output.
87
+ * When possible, limit the output generation to the specified JSON schema,
88
+ * or super sets of it (e.g. JSON in general).
86
89
  */
87
- withJsonOutput?(): this;
90
+ withJsonOutput(schema: Schema<unknown> & JsonSchemaProducer): this;
88
91
  }
89
92
  export interface TextStreamingModel<PROMPT, SETTINGS extends TextGenerationModelSettings = TextGenerationModelSettings> extends TextGenerationModel<PROMPT, SETTINGS> {
90
93
  doStreamText(prompt: PROMPT, options?: FunctionCallOptions): PromiseLike<AsyncIterable<Delta<unknown>>>;
@@ -40,7 +40,7 @@ async function generateText(model, prompt, options) {
40
40
  }
41
41
  const result = await model.doGenerateTexts(prompt, options);
42
42
  try {
43
- await options.cache.storeValue(cacheKey, result.response);
43
+ await options.cache.storeValue(cacheKey, result.rawResponse);
44
44
  }
45
45
  catch (err) {
46
46
  cacheErrors = [...(cacheErrors ?? []), err];
@@ -60,7 +60,7 @@ async function generateText(model, prompt, options) {
60
60
  : result.textGenerationResults;
61
61
  // TODO add cache information
62
62
  return {
63
- response: result.response,
63
+ rawResponse: result.rawResponse,
64
64
  extractedValue: textGenerationResults,
65
65
  usage: result.usage,
66
66
  };
@@ -74,7 +74,7 @@ async function generateText(model, prompt, options) {
74
74
  finishReason: firstResult.finishReason,
75
75
  texts: textGenerationResults.map((textGeneration) => textGeneration.text),
76
76
  textGenerationResults,
77
- response: fullResponse.response,
77
+ rawResponse: fullResponse.rawResponse,
78
78
  metadata: fullResponse.metadata,
79
79
  }
80
80
  : firstResult.text;
@@ -33,6 +33,6 @@ export declare function generateText<PROMPT>(model: TextGenerationModel<PROMPT,
33
33
  finishReason: TextGenerationFinishReason;
34
34
  texts: string[];
35
35
  textGenerationResults: TextGenerationResult[];
36
- response: unknown;
36
+ rawResponse: unknown;
37
37
  metadata: ModelCallMetadata;
38
38
  }>;
@@ -37,7 +37,7 @@ export async function generateText(model, prompt, options) {
37
37
  }
38
38
  const result = await model.doGenerateTexts(prompt, options);
39
39
  try {
40
- await options.cache.storeValue(cacheKey, result.response);
40
+ await options.cache.storeValue(cacheKey, result.rawResponse);
41
41
  }
42
42
  catch (err) {
43
43
  cacheErrors = [...(cacheErrors ?? []), err];
@@ -57,7 +57,7 @@ export async function generateText(model, prompt, options) {
57
57
  : result.textGenerationResults;
58
58
  // TODO add cache information
59
59
  return {
60
- response: result.response,
60
+ rawResponse: result.rawResponse,
61
61
  extractedValue: textGenerationResults,
62
62
  usage: result.usage,
63
63
  };
@@ -71,7 +71,7 @@ export async function generateText(model, prompt, options) {
71
71
  finishReason: firstResult.finishReason,
72
72
  texts: textGenerationResults.map((textGeneration) => textGeneration.text),
73
73
  textGenerationResults,
74
- response: fullResponse.response,
74
+ rawResponse: fullResponse.rawResponse,
75
75
  metadata: fullResponse.metadata,
76
76
  }
77
77
  : firstResult.text;
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.instruction = exports.text = void 0;
3
+ exports.chat = exports.instruction = exports.text = void 0;
4
4
  const ContentPart_js_1 = require("./ContentPart.cjs");
5
5
  const DEFAULT_SYSTEM_PROMPT_INPUT = "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.";
6
6
  const DEFAULT_SYSTEM_PROMPT_NO_INPUT = "Below is an instruction that describes a task. Write a response that appropriately completes the request.";
@@ -81,3 +81,10 @@ function instruction() {
81
81
  };
82
82
  }
83
83
  exports.instruction = instruction;
84
+ /**
85
+ * Not supported by Alpaca.
86
+ */
87
+ function chat() {
88
+ throw new Error("Chat prompts are not supported by the Alpaca format.");
89
+ }
90
+ exports.chat = chat;
@@ -1,4 +1,5 @@
1
1
  import { TextGenerationPromptTemplate } from "../TextGenerationPromptTemplate.js";
2
+ import { ChatPrompt } from "./ChatPrompt.js";
2
3
  import { InstructionPrompt } from "./InstructionPrompt.js";
3
4
  /**
4
5
  * Formats a text prompt as an Alpaca prompt.
@@ -44,3 +45,7 @@ export declare function instruction(): TextGenerationPromptTemplate<InstructionP
44
45
  input?: string;
45
46
  }, // optional input supported by Alpaca
46
47
  string>;
48
+ /**
49
+ * Not supported by Alpaca.
50
+ */
51
+ export declare function chat(): TextGenerationPromptTemplate<ChatPrompt, string>;
@@ -76,3 +76,9 @@ export function instruction() {
76
76
  },
77
77
  };
78
78
  }
79
+ /**
80
+ * Not supported by Alpaca.
81
+ */
82
+ export function chat() {
83
+ throw new Error("Chat prompts are not supported by the Alpaca format.");
84
+ }
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -0,0 +1,8 @@
1
+ import { TextGenerationPromptTemplate } from "../TextGenerationPromptTemplate.js";
2
+ import { ChatPrompt } from "./ChatPrompt.js";
3
+ import { InstructionPrompt } from "./InstructionPrompt.js";
4
+ export interface TextGenerationPromptTemplateProvider<TARGET_PROMPT> {
5
+ text(): TextGenerationPromptTemplate<string, TARGET_PROMPT>;
6
+ instruction(): TextGenerationPromptTemplate<InstructionPrompt, TARGET_PROMPT>;
7
+ chat(): TextGenerationPromptTemplate<ChatPrompt, TARGET_PROMPT>;
8
+ }
@@ -1,11 +1,44 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.chat = void 0;
3
+ exports.chat = exports.instruction = exports.text = void 0;
4
4
  const ContentPart_js_1 = require("./ContentPart.cjs");
5
5
  const InvalidPromptError_js_1 = require("./InvalidPromptError.cjs");
6
6
  // default Vicuna 1 system message
7
7
  const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial intelligence assistant. " +
8
8
  "The assistant gives helpful, detailed, and polite answers to the user's questions.";
9
+ /**
10
+ * Formats a text prompt as a Vicuna prompt.
11
+ */
12
+ function text() {
13
+ return {
14
+ stopSequences: [],
15
+ format(prompt) {
16
+ let text = DEFAULT_SYSTEM_MESSAGE;
17
+ text += "\n\nUSER: ";
18
+ text += prompt;
19
+ text += "\n\nASSISTANT: ";
20
+ return text;
21
+ },
22
+ };
23
+ }
24
+ exports.text = text;
25
+ /**
26
+ * Formats an instruction prompt as a Vicuna prompt.
27
+ */
28
+ function instruction() {
29
+ return {
30
+ format(prompt) {
31
+ let text = prompt.system != null
32
+ ? `${prompt.system}\n\n`
33
+ : `${DEFAULT_SYSTEM_MESSAGE}\n\n`;
34
+ text += `USER: ${(0, ContentPart_js_1.validateContentIsString)(prompt.instruction, prompt)}\n`;
35
+ text += `ASSISTANT: `;
36
+ return text;
37
+ },
38
+ stopSequences: [`\nUSER:`],
39
+ };
40
+ }
41
+ exports.instruction = instruction;
9
42
  /**
10
43
  * Formats a chat prompt as a Vicuna prompt.
11
44
  *
@@ -1,5 +1,14 @@
1
1
  import { TextGenerationPromptTemplate } from "../TextGenerationPromptTemplate.js";
2
2
  import { ChatPrompt } from "./ChatPrompt.js";
3
+ import { InstructionPrompt } from "./InstructionPrompt.js";
4
+ /**
5
+ * Formats a text prompt as a Vicuna prompt.
6
+ */
7
+ export declare function text(): TextGenerationPromptTemplate<string, string>;
8
+ /**
9
+ * Formats an instruction prompt as a Vicuna prompt.
10
+ */
11
+ export declare function instruction(): TextGenerationPromptTemplate<InstructionPrompt, string>;
3
12
  /**
4
13
  * Formats a chat prompt as a Vicuna prompt.
5
14
  *
@@ -3,6 +3,37 @@ import { InvalidPromptError } from "./InvalidPromptError.js";
3
3
  // default Vicuna 1 system message
4
4
  const DEFAULT_SYSTEM_MESSAGE = "A chat between a curious user and an artificial intelligence assistant. " +
5
5
  "The assistant gives helpful, detailed, and polite answers to the user's questions.";
6
+ /**
7
+ * Formats a text prompt as a Vicuna prompt.
8
+ */
9
+ export function text() {
10
+ return {
11
+ stopSequences: [],
12
+ format(prompt) {
13
+ let text = DEFAULT_SYSTEM_MESSAGE;
14
+ text += "\n\nUSER: ";
15
+ text += prompt;
16
+ text += "\n\nASSISTANT: ";
17
+ return text;
18
+ },
19
+ };
20
+ }
21
+ /**
22
+ * Formats an instruction prompt as a Vicuna prompt.
23
+ */
24
+ export function instruction() {
25
+ return {
26
+ format(prompt) {
27
+ let text = prompt.system != null
28
+ ? `${prompt.system}\n\n`
29
+ : `${DEFAULT_SYSTEM_MESSAGE}\n\n`;
30
+ text += `USER: ${validateContentIsString(prompt.instruction, prompt)}\n`;
31
+ text += `ASSISTANT: `;
32
+ return text;
33
+ },
34
+ stopSequences: [`\nUSER:`],
35
+ };
36
+ }
6
37
  /**
7
38
  * Formats a chat prompt as a Vicuna prompt.
8
39
  *
@@ -1,6 +1,34 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  const VicunaPromptTemplate_js_1 = require("./VicunaPromptTemplate.cjs");
4
+ describe("text prompt", () => {
5
+ it("should format prompt", () => {
6
+ const prompt = (0, VicunaPromptTemplate_js_1.text)().format("prompt");
7
+ expect(prompt).toMatchSnapshot();
8
+ });
9
+ });
10
+ describe("instruction prompt", () => {
11
+ it("should format prompt with instruction", () => {
12
+ const prompt = (0, VicunaPromptTemplate_js_1.instruction)().format({
13
+ instruction: "instruction",
14
+ });
15
+ expect(prompt).toMatchSnapshot();
16
+ });
17
+ it("should format prompt with system and instruction", () => {
18
+ const prompt = (0, VicunaPromptTemplate_js_1.instruction)().format({
19
+ system: "system",
20
+ instruction: "instruction",
21
+ });
22
+ expect(prompt).toMatchSnapshot();
23
+ });
24
+ it("should format prompt with instruction and response prefix", () => {
25
+ const prompt = (0, VicunaPromptTemplate_js_1.instruction)().format({
26
+ instruction: "instruction",
27
+ responsePrefix: "response prefix",
28
+ });
29
+ expect(prompt).toMatchSnapshot();
30
+ });
31
+ });
4
32
  describe("chat prompt", () => {
5
33
  it("should format prompt with user message", () => {
6
34
  const prompt = (0, VicunaPromptTemplate_js_1.chat)().format({
@@ -1,4 +1,32 @@
1
- import { chat } from "./VicunaPromptTemplate.js";
1
+ import { chat, instruction, text } from "./VicunaPromptTemplate.js";
2
+ describe("text prompt", () => {
3
+ it("should format prompt", () => {
4
+ const prompt = text().format("prompt");
5
+ expect(prompt).toMatchSnapshot();
6
+ });
7
+ });
8
+ describe("instruction prompt", () => {
9
+ it("should format prompt with instruction", () => {
10
+ const prompt = instruction().format({
11
+ instruction: "instruction",
12
+ });
13
+ expect(prompt).toMatchSnapshot();
14
+ });
15
+ it("should format prompt with system and instruction", () => {
16
+ const prompt = instruction().format({
17
+ system: "system",
18
+ instruction: "instruction",
19
+ });
20
+ expect(prompt).toMatchSnapshot();
21
+ });
22
+ it("should format prompt with instruction and response prefix", () => {
23
+ const prompt = instruction().format({
24
+ instruction: "instruction",
25
+ responsePrefix: "response prefix",
26
+ });
27
+ expect(prompt).toMatchSnapshot();
28
+ });
29
+ });
2
30
  describe("chat prompt", () => {
3
31
  it("should format prompt with user message", () => {
4
32
  const prompt = chat().format({
@@ -36,6 +36,7 @@ __exportStar(require("./InvalidPromptError.cjs"), exports);
36
36
  exports.Llama2Prompt = __importStar(require("./Llama2PromptTemplate.cjs"));
37
37
  exports.MistralInstructPrompt = __importStar(require("./MistralInstructPromptTemplate.cjs"));
38
38
  exports.NeuralChatPrompt = __importStar(require("./NeuralChatPromptTemplate.cjs"));
39
+ __exportStar(require("./PromptTemplateProvider.cjs"), exports);
39
40
  exports.TextPrompt = __importStar(require("./TextPromptTemplate.cjs"));
40
41
  exports.VicunaPrompt = __importStar(require("./VicunaPromptTemplate.cjs"));
41
42
  __exportStar(require("./trimChatPrompt.cjs"), exports);
@@ -7,6 +7,7 @@ export * from "./InvalidPromptError.js";
7
7
  export * as Llama2Prompt from "./Llama2PromptTemplate.js";
8
8
  export * as MistralInstructPrompt from "./MistralInstructPromptTemplate.js";
9
9
  export * as NeuralChatPrompt from "./NeuralChatPromptTemplate.js";
10
+ export * from "./PromptTemplateProvider.js";
10
11
  export * as TextPrompt from "./TextPromptTemplate.js";
11
12
  export * as VicunaPrompt from "./VicunaPromptTemplate.js";
12
13
  export * from "./trimChatPrompt.js";