modelfusion 0.118.0 → 0.119.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (45)
  1. package/CHANGELOG.md +34 -0
  2. package/model-function/generate-structure/StructureFromTextGenerationModel.cjs +10 -1
  3. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +1 -0
  4. package/model-function/generate-structure/StructureFromTextGenerationModel.js +10 -1
  5. package/model-function/generate-structure/StructureFromTextPromptTemplate.d.ts +12 -1
  6. package/model-function/generate-structure/StructureFromTextStreamingModel.cjs +1 -22
  7. package/model-function/generate-structure/StructureFromTextStreamingModel.d.ts +0 -5
  8. package/model-function/generate-structure/StructureFromTextStreamingModel.js +1 -22
  9. package/model-function/generate-structure/jsonStructurePrompt.cjs +4 -12
  10. package/model-function/generate-structure/jsonStructurePrompt.js +4 -12
  11. package/model-function/generate-text/PromptTemplateTextGenerationModel.cjs +6 -0
  12. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +3 -0
  13. package/model-function/generate-text/PromptTemplateTextGenerationModel.js +6 -0
  14. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +6 -0
  15. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +3 -0
  16. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +6 -0
  17. package/model-function/generate-text/TextGenerationModel.d.ts +5 -2
  18. package/model-provider/cohere/CohereTextGenerationModel.cjs +3 -0
  19. package/model-provider/cohere/CohereTextGenerationModel.d.ts +1 -0
  20. package/model-provider/cohere/CohereTextGenerationModel.js +3 -0
  21. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +3 -0
  22. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +1 -0
  23. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +3 -0
  24. package/model-provider/llamacpp/LlamaCppCompletionModel.cjs +10 -6
  25. package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +3 -1
  26. package/model-provider/llamacpp/LlamaCppCompletionModel.js +10 -6
  27. package/model-provider/llamacpp/LlamaCppGrammars.cjs +3 -1
  28. package/model-provider/llamacpp/LlamaCppGrammars.d.ts +1 -0
  29. package/model-provider/llamacpp/LlamaCppGrammars.js +1 -0
  30. package/model-provider/llamacpp/convertJsonSchemaToGBNF.cjs +113 -0
  31. package/model-provider/llamacpp/convertJsonSchemaToGBNF.d.ts +7 -0
  32. package/model-provider/llamacpp/convertJsonSchemaToGBNF.js +109 -0
  33. package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.cjs +150 -0
  34. package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.d.ts +1 -0
  35. package/model-provider/llamacpp/convertJsonSchemaToGBNF.test.js +148 -0
  36. package/model-provider/mistral/MistralChatModel.cjs +3 -0
  37. package/model-provider/mistral/MistralChatModel.d.ts +1 -0
  38. package/model-provider/mistral/MistralChatModel.js +3 -0
  39. package/model-provider/ollama/OllamaCompletionModel.cjs +3 -0
  40. package/model-provider/ollama/OllamaCompletionModel.d.ts +1 -0
  41. package/model-provider/ollama/OllamaCompletionModel.js +3 -0
  42. package/model-provider/openai/AbstractOpenAICompletionModel.cjs +3 -0
  43. package/model-provider/openai/AbstractOpenAICompletionModel.d.ts +1 -0
  44. package/model-provider/openai/AbstractOpenAICompletionModel.js +3 -0
  45. package/package.json +1 -1
package/CHANGELOG.md CHANGED
@@ -1,5 +1,39 @@
  # Changelog

+ ## v0.119.0 - 2024-01-07
+
+ ### Added
+
+ - Schema-specific GBNF grammar generator for `LlamaCppCompletionModel`. When using `jsonStructurePrompt`, it automatically uses a GBNF grammar for the JSON schema that you provide. Example:
+
+ ```ts
+ const structure = await generateStructure(
+   llamacpp
+     .CompletionTextGenerator({
+       // run openhermes-2.5-mistral-7b.Q4_K_M.gguf in llama.cpp
+       promptTemplate: llamacpp.prompt.ChatML,
+       maxGenerationTokens: 1024,
+       temperature: 0,
+     })
+     // automatically restrict the output to your schema using GBNF:
+     .asStructureGenerationModel(jsonStructurePrompt.text()),
+
+   zodSchema(
+     z.array(
+       z.object({
+         name: z.string(),
+         class: z
+           .string()
+           .describe("Character class, e.g. warrior, mage, or thief."),
+         description: z.string(),
+       })
+     )
+   ),
+
+   "Generate 3 character descriptions for a fantasy role playing game. "
+ );
+ ```
+
  ## v0.118.0 - 2024-01-07

  ### Added
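A note on the new GBNF generator described in the v0.119.0 entry above: the generated grammar is applied through the llama.cpp `grammar` setting, and `withJsonOutput` does not replace a grammar that is already set, so an explicitly configured grammar still wins. A minimal sketch of that precedence, assuming the bundled grammar constants remain exposed as `llamacpp.grammar.*`:

```ts
import { llamacpp } from "modelfusion";

// Explicitly configured grammars take precedence: per the new withJsonOutput
// implementation, an already-set grammar is kept, so a later
// withJsonOutput(schema) call (e.g. triggered by jsonStructurePrompt)
// will not overwrite it.
const model = llamacpp.CompletionTextGenerator({
  promptTemplate: llamacpp.prompt.ChatML,
  maxGenerationTokens: 1024,
  grammar: llamacpp.grammar.jsonArray, // assumed export path for the bundled grammars
});
```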
@@ -29,8 +29,17 @@ class StructureFromTextGenerationModel {
      get settingsForEvent() {
          return this.model.settingsForEvent;
      }
+     getModelWithJsonOutput(schema) {
+         if (this.template.withJsonOutput != null) {
+             return this.template.withJsonOutput?.({
+                 model: this.model,
+                 schema,
+             });
+         }
+         return this.model;
+     }
      async doGenerateStructure(schema, prompt, options) {
-         const { rawResponse: response, text } = await (0, generateText_js_1.generateText)(this.model, this.template.createPrompt(prompt, schema), {
+         const { rawResponse: response, text } = await (0, generateText_js_1.generateText)(this.getModelWithJsonOutput(schema), this.template.createPrompt(prompt, schema), {
              ...options,
              fullResponse: true,
          });
@@ -14,6 +14,7 @@ export declare class StructureFromTextGenerationModel<SOURCE_PROMPT, TARGET_PROM
      get modelInformation(): import("../ModelInformation.js").ModelInformation;
      get settings(): TextGenerationModelSettings;
      get settingsForEvent(): Partial<MODEL["settings"]>;
+     getModelWithJsonOutput(schema: Schema<unknown> & JsonSchemaProducer): MODEL;
      doGenerateStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: SOURCE_PROMPT, options?: FunctionOptions): Promise<{
          response: unknown;
          value: unknown;
@@ -26,8 +26,17 @@ export class StructureFromTextGenerationModel {
      get settingsForEvent() {
          return this.model.settingsForEvent;
      }
+     getModelWithJsonOutput(schema) {
+         if (this.template.withJsonOutput != null) {
+             return this.template.withJsonOutput?.({
+                 model: this.model,
+                 schema,
+             });
+         }
+         return this.model;
+     }
      async doGenerateStructure(schema, prompt, options) {
-         const { rawResponse: response, text } = await generateText(this.model, this.template.createPrompt(prompt, schema), {
+         const { rawResponse: response, text } = await generateText(this.getModelWithJsonOutput(schema), this.template.createPrompt(prompt, schema), {
              ...options,
              fullResponse: true,
          });
@@ -6,6 +6,12 @@ import { InstructionPrompt } from "../generate-text/prompt-template/InstructionP
  export type StructureFromTextPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT> = {
      createPrompt: (prompt: SOURCE_PROMPT, schema: Schema<unknown> & JsonSchemaProducer) => TARGET_PROMPT;
      extractStructure: (response: string) => unknown;
+     withJsonOutput?({ model, schema, }: {
+         model: {
+             withJsonOutput(schema: Schema<unknown> & JsonSchemaProducer): typeof model;
+         };
+         schema: Schema<unknown> & JsonSchemaProducer;
+     }): typeof model;
  };
  export type FlexibleStructureFromTextPromptTemplate<SOURCE_PROMPT, INTERMEDIATE_PROMPT> = {
      createPrompt: (prompt: SOURCE_PROMPT, schema: Schema<unknown> & JsonSchemaProducer) => INTERMEDIATE_PROMPT;
@@ -14,6 +20,11 @@ export type FlexibleStructureFromTextPromptTemplate<SOURCE_PROMPT, INTERMEDIATE_
          withTextPrompt(): TextStreamingModel<string>;
          withInstructionPrompt(): TextStreamingModel<InstructionPrompt>;
          withChatPrompt(): TextStreamingModel<ChatPrompt>;
-         withJsonOutput?: () => typeof model;
      }) => TextStreamingModel<INTERMEDIATE_PROMPT>;
+     withJsonOutput?({ model, schema, }: {
+         model: {
+             withJsonOutput(schema: Schema<unknown> & JsonSchemaProducer): typeof model;
+         };
+         schema: Schema<unknown> & JsonSchemaProducer;
+     }): typeof model;
  };
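The new optional `withJsonOutput` hook lets a structure prompt template hand the JSON schema to the underlying model so the model can constrain decoding. A minimal custom template might look like the following sketch; it assumes `StructureFromTextPromptTemplate` is importable from the package root and simply parses the response with `JSON.parse`.

```ts
import type { StructureFromTextPromptTemplate } from "modelfusion";

// Sketch of a text-to-text template: embed the JSON schema in the prompt,
// parse the raw response as JSON, and delegate schema enforcement to the model.
const myJsonTemplate: StructureFromTextPromptTemplate<string, string> = {
  createPrompt: (prompt, schema) =>
    `${prompt}\n\nRespond with JSON that matches this schema:\n` +
    JSON.stringify(schema.getJsonSchema()),
  extractStructure: (response) => JSON.parse(response),
  // new in 0.119.0: pass the schema through to the model
  withJsonOutput: ({ model, schema }) => model.withJsonOutput(schema),
};
```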
@@ -3,35 +3,14 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.StructureFromTextStreamingModel = void 0;
4
4
  const streamText_js_1 = require("../../model-function/generate-text/streamText.cjs");
5
5
  const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
6
- const generateText_js_1 = require("../generate-text/generateText.cjs");
7
6
  const StructureFromTextGenerationModel_js_1 = require("./StructureFromTextGenerationModel.cjs");
8
- const StructureParseError_js_1 = require("./StructureParseError.cjs");
9
7
  const parsePartialJson_js_1 = require("./parsePartialJson.cjs");
10
8
  class StructureFromTextStreamingModel extends StructureFromTextGenerationModel_js_1.StructureFromTextGenerationModel {
11
9
  constructor(options) {
12
10
  super(options);
13
11
  }
14
- async doGenerateStructure(schema, prompt, options) {
15
- const { rawResponse: response, text } = await (0, generateText_js_1.generateText)(this.model, this.template.createPrompt(prompt, schema), {
16
- ...options,
17
- fullResponse: true,
18
- });
19
- try {
20
- return {
21
- response,
22
- value: this.template.extractStructure(text),
23
- valueText: text,
24
- };
25
- }
26
- catch (error) {
27
- throw new StructureParseError_js_1.StructureParseError({
28
- valueText: text,
29
- cause: error,
30
- });
31
- }
32
- }
33
12
  async doStreamStructure(schema, prompt, options) {
34
- const textStream = await (0, streamText_js_1.streamText)(this.model, this.template.createPrompt(prompt, schema), options);
13
+ const textStream = await (0, streamText_js_1.streamText)(this.getModelWithJsonOutput(schema), this.template.createPrompt(prompt, schema), options);
35
14
  const queue = new AsyncQueue_js_1.AsyncQueue();
36
15
  // run async on purpose:
37
16
  (async () => {
@@ -12,11 +12,6 @@ export declare class StructureFromTextStreamingModel<SOURCE_PROMPT, TARGET_PROMP
12
12
  model: MODEL;
13
13
  template: StructureFromTextPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT>;
14
14
  });
15
- doGenerateStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: SOURCE_PROMPT, options?: FunctionOptions): Promise<{
16
- response: unknown;
17
- value: unknown;
18
- valueText: string;
19
- }>;
20
15
  doStreamStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: SOURCE_PROMPT, options?: FunctionOptions): Promise<AsyncQueue<Delta<string>>>;
21
16
  extractStructureTextDelta(delta: unknown): string;
22
17
  parseAccumulatedStructureText(accumulatedText: string): unknown;
@@ -1,34 +1,13 @@
1
1
  import { streamText } from "../../model-function/generate-text/streamText.js";
2
2
  import { AsyncQueue } from "../../util/AsyncQueue.js";
3
- import { generateText } from "../generate-text/generateText.js";
4
3
  import { StructureFromTextGenerationModel } from "./StructureFromTextGenerationModel.js";
5
- import { StructureParseError } from "./StructureParseError.js";
6
4
  import { parsePartialJson } from "./parsePartialJson.js";
7
5
  export class StructureFromTextStreamingModel extends StructureFromTextGenerationModel {
8
6
  constructor(options) {
9
7
  super(options);
10
8
  }
11
- async doGenerateStructure(schema, prompt, options) {
12
- const { rawResponse: response, text } = await generateText(this.model, this.template.createPrompt(prompt, schema), {
13
- ...options,
14
- fullResponse: true,
15
- });
16
- try {
17
- return {
18
- response,
19
- value: this.template.extractStructure(text),
20
- valueText: text,
21
- };
22
- }
23
- catch (error) {
24
- throw new StructureParseError({
25
- valueText: text,
26
- cause: error,
27
- });
28
- }
29
- }
30
9
  async doStreamStructure(schema, prompt, options) {
31
- const textStream = await streamText(this.model, this.template.createPrompt(prompt, schema), options);
10
+ const textStream = await streamText(this.getModelWithJsonOutput(schema), this.template.createPrompt(prompt, schema), options);
32
11
  const queue = new AsyncQueue();
33
12
  // run async on purpose:
34
13
  (async () => {
@@ -15,12 +15,8 @@ exports.jsonStructurePrompt = {
              instruction: prompt,
          }),
          extractStructure,
-         adaptModel: (model) => {
-             if (model.withJsonOutput != null) {
-                 model = model.withJsonOutput();
-             }
-             return model.withInstructionPrompt();
-         },
+         adaptModel: (model) => model.withInstructionPrompt(),
+         withJsonOutput: ({ model, schema }) => model.withJsonOutput(schema),
      };
  },
  instruction({ schemaPrefix, schemaSuffix, } = {}) {
@@ -35,12 +31,8 @@ exports.jsonStructurePrompt = {
              instruction: prompt.instruction,
          }),
          extractStructure,
-         adaptModel: (model) => {
-             if (model.withJsonOutput != null) {
-                 model = model.withJsonOutput();
-             }
-             return model.withInstructionPrompt();
-         },
+         adaptModel: (model) => model.withInstructionPrompt(),
+         withJsonOutput: ({ model, schema }) => model.withJsonOutput(schema),
      };
  },
  };
@@ -12,12 +12,8 @@ export const jsonStructurePrompt = {
12
12
  instruction: prompt,
13
13
  }),
14
14
  extractStructure,
15
- adaptModel: (model) => {
16
- if (model.withJsonOutput != null) {
17
- model = model.withJsonOutput();
18
- }
19
- return model.withInstructionPrompt();
20
- },
15
+ adaptModel: (model) => model.withInstructionPrompt(),
16
+ withJsonOutput: ({ model, schema }) => model.withJsonOutput(schema),
21
17
  };
22
18
  },
23
19
  instruction({ schemaPrefix, schemaSuffix, } = {}) {
@@ -32,12 +28,8 @@ export const jsonStructurePrompt = {
32
28
  instruction: prompt.instruction,
33
29
  }),
34
30
  extractStructure,
35
- adaptModel: (model) => {
36
- if (model.withJsonOutput != null) {
37
- model = model.withJsonOutput();
38
- }
39
- return model.withInstructionPrompt();
40
- },
31
+ adaptModel: (model) => model.withInstructionPrompt(),
32
+ withJsonOutput: ({ model, schema }) => model.withJsonOutput(schema),
41
33
  };
42
34
  },
43
35
  };
@@ -68,6 +68,12 @@ class PromptTemplateTextGenerationModel {
68
68
  template: promptTemplate,
69
69
  });
70
70
  }
71
+ withJsonOutput(schema) {
72
+ return new PromptTemplateTextGenerationModel({
73
+ model: this.model.withJsonOutput(schema),
74
+ promptTemplate: this.promptTemplate,
75
+ });
76
+ }
71
77
  withPromptTemplate(promptTemplate) {
72
78
  return new PromptTemplateTextGenerationModel({
73
79
  model: this.withSettings({
@@ -1,4 +1,6 @@
1
1
  import { FunctionCallOptions } from "../../core/FunctionOptions.js";
2
+ import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
3
+ import { Schema } from "../../core/schema/Schema.js";
2
4
  import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
3
5
  import { TextGenerationToolCallsModel } from "../../tool/generate-tool-calls/TextGenerationToolCallsModel.js";
4
6
  import { ToolCallsPromptTemplate } from "../../tool/generate-tool-calls/ToolCallsPromptTemplate.js";
@@ -40,6 +42,7 @@ export declare class PromptTemplateTextGenerationModel<PROMPT, MODEL_PROMPT, SET
40
42
  asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallPromptTemplate<INPUT_PROMPT, PROMPT>): TextGenerationToolCallModel<INPUT_PROMPT, PROMPT, this>;
41
43
  asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallsPromptTemplate<INPUT_PROMPT, PROMPT>): TextGenerationToolCallsModel<INPUT_PROMPT, PROMPT, this>;
42
44
  asStructureGenerationModel<INPUT_PROMPT>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, PROMPT>): StructureFromTextGenerationModel<INPUT_PROMPT, PROMPT, this>;
45
+ withJsonOutput(schema: Schema<unknown> & JsonSchemaProducer): this;
43
46
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): PromptTemplateTextGenerationModel<INPUT_PROMPT, PROMPT, SETTINGS, this>;
44
47
  withSettings(additionalSettings: Partial<SETTINGS>): this;
45
48
  }
@@ -65,6 +65,12 @@ export class PromptTemplateTextGenerationModel {
65
65
  template: promptTemplate,
66
66
  });
67
67
  }
68
+ withJsonOutput(schema) {
69
+ return new PromptTemplateTextGenerationModel({
70
+ model: this.model.withJsonOutput(schema),
71
+ promptTemplate: this.promptTemplate,
72
+ });
73
+ }
68
74
  withPromptTemplate(promptTemplate) {
69
75
  return new PromptTemplateTextGenerationModel({
70
76
  model: this.withSettings({
@@ -20,6 +20,12 @@ class PromptTemplateTextStreamingModel extends PromptTemplateTextGenerationModel
20
20
  template: promptTemplate,
21
21
  });
22
22
  }
23
+ withJsonOutput(schema) {
24
+ return new PromptTemplateTextStreamingModel({
25
+ model: this.model.withJsonOutput(schema),
26
+ promptTemplate: this.promptTemplate,
27
+ });
28
+ }
23
29
  withPromptTemplate(promptTemplate) {
24
30
  return new PromptTemplateTextStreamingModel({
25
31
  model: this.withSettings({
@@ -1,4 +1,6 @@
1
1
  import { FunctionCallOptions } from "../../core/FunctionOptions.js";
2
+ import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
3
+ import { Schema } from "../../core/schema/Schema.js";
2
4
  import { StructureFromTextPromptTemplate } from "../generate-structure/StructureFromTextPromptTemplate.js";
3
5
  import { StructureFromTextStreamingModel } from "../generate-structure/StructureFromTextStreamingModel.js";
4
6
  import { PromptTemplateTextGenerationModel } from "./PromptTemplateTextGenerationModel.js";
@@ -12,6 +14,7 @@ export declare class PromptTemplateTextStreamingModel<PROMPT, MODEL_PROMPT, SETT
12
14
  doStreamText(prompt: PROMPT, options?: FunctionCallOptions): PromiseLike<AsyncIterable<import("../Delta.js").Delta<unknown>>>;
13
15
  extractTextDelta(delta: unknown): string | undefined;
14
16
  asStructureGenerationModel<INPUT_PROMPT>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, PROMPT>): StructureFromTextStreamingModel<INPUT_PROMPT, PROMPT, this>;
17
+ withJsonOutput(schema: Schema<unknown> & JsonSchemaProducer): this;
15
18
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): PromptTemplateTextStreamingModel<INPUT_PROMPT, PROMPT, SETTINGS, this>;
16
19
  withSettings(additionalSettings: Partial<SETTINGS>): this;
17
20
  }
@@ -17,6 +17,12 @@ export class PromptTemplateTextStreamingModel extends PromptTemplateTextGenerati
17
17
  template: promptTemplate,
18
18
  });
19
19
  }
20
+ withJsonOutput(schema) {
21
+ return new PromptTemplateTextStreamingModel({
22
+ model: this.model.withJsonOutput(schema),
23
+ promptTemplate: this.promptTemplate,
24
+ });
25
+ }
20
26
  withPromptTemplate(promptTemplate) {
21
27
  return new PromptTemplateTextStreamingModel({
22
28
  model: this.withSettings({
@@ -1,4 +1,6 @@
  import { FunctionCallOptions } from "../../core/FunctionOptions.js";
+ import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
+ import { Schema } from "../../core/schema/Schema.js";
  import { Delta } from "../Delta.js";
  import { Model, ModelSettings } from "../Model.js";
  import { BasicTokenizer, FullTokenizer } from "../tokenize-text/Tokenizer.js";
@@ -82,9 +84,10 @@ export interface TextGenerationModel<PROMPT, SETTINGS extends TextGenerationMode
      };
      withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): TextGenerationModel<INPUT_PROMPT, SETTINGS>;
      /**
-      * Optional. When available, forces the model to return JSON as the text output.
+      * When possible, limit the output generation to the specified JSON schema,
+      * or super sets of it (e.g. JSON in general).
       */
-     withJsonOutput?(): this;
+     withJsonOutput(schema: Schema<unknown> & JsonSchemaProducer): this;
  }
  export interface TextStreamingModel<PROMPT, SETTINGS extends TextGenerationModelSettings = TextGenerationModelSettings> extends TextGenerationModel<PROMPT, SETTINGS> {
      doStreamText(prompt: PROMPT, options?: FunctionCallOptions): PromiseLike<AsyncIterable<Delta<unknown>>>;
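`withJsonOutput` changes from an optional, parameterless method into a required method that receives the schema. Providers without schema-constrained decoding implement it as a no-op, as the provider diffs below show; a hypothetical custom model would follow the same pattern (class name and import paths here are assumptions):

```ts
import type { JsonSchemaProducer, Schema } from "modelfusion";

class MyCompletionModel /* implements TextGenerationModel<string, MySettings> */ {
  // No native way to constrain decoding to a schema:
  // accept the schema, ignore it, and return the model unchanged.
  withJsonOutput(_schema: Schema<unknown> & JsonSchemaProducer): this {
    return this;
  }
}
```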
@@ -182,6 +182,9 @@ class CohereTextGenerationModel extends AbstractModel_js_1.AbstractModel {
182
182
  withChatPrompt(options) {
183
183
  return this.withPromptTemplate((0, TextPromptTemplate_js_1.chat)(options));
184
184
  }
185
+ withJsonOutput() {
186
+ return this;
187
+ }
185
188
  withPromptTemplate(promptTemplate) {
186
189
  return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
187
190
  model: this.withSettings({
@@ -152,6 +152,7 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextG
152
152
  user?: string;
153
153
  assistant?: string;
154
154
  }): PromptTemplateTextStreamingModel<import("../../index.js").ChatPrompt, string, CohereTextGenerationModelSettings, this>;
155
+ withJsonOutput(): this;
155
156
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, CohereTextGenerationModelSettings, this>;
156
157
  withSettings(additionalSettings: Partial<CohereTextGenerationModelSettings>): this;
157
158
  }
@@ -179,6 +179,9 @@ export class CohereTextGenerationModel extends AbstractModel {
179
179
  withChatPrompt(options) {
180
180
  return this.withPromptTemplate(chat(options));
181
181
  }
182
+ withJsonOutput() {
183
+ return this;
184
+ }
182
185
  withPromptTemplate(promptTemplate) {
183
186
  return new PromptTemplateTextStreamingModel({
184
187
  model: this.withSettings({
@@ -125,6 +125,9 @@ class HuggingFaceTextGenerationModel extends AbstractModel_js_1.AbstractModel {
125
125
  })),
126
126
  };
127
127
  }
128
+ withJsonOutput() {
129
+ return this;
130
+ }
128
131
  withPromptTemplate(promptTemplate) {
129
132
  return new PromptTemplateTextGenerationModel_js_1.PromptTemplateTextGenerationModel({
130
133
  model: this, // stop tokens are not supported by this model
@@ -69,6 +69,7 @@ export declare class HuggingFaceTextGenerationModel extends AbstractModel<Huggin
69
69
  finishReason: "unknown";
70
70
  }[];
71
71
  };
72
+ withJsonOutput(): this;
72
73
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextGenerationModel<INPUT_PROMPT, string, HuggingFaceTextGenerationModelSettings, this>;
73
74
  withSettings(additionalSettings: Partial<HuggingFaceTextGenerationModelSettings>): this;
74
75
  }
@@ -122,6 +122,9 @@ export class HuggingFaceTextGenerationModel extends AbstractModel {
122
122
  })),
123
123
  };
124
124
  }
125
+ withJsonOutput() {
126
+ return this;
127
+ }
125
128
  withPromptTemplate(promptTemplate) {
126
129
  return new PromptTemplateTextGenerationModel({
127
130
  model: this, // stop tokens are not supported by this model
@@ -15,9 +15,9 @@ const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
  const parseEventSourceStream_js_1 = require("../../util/streaming/parseEventSourceStream.cjs");
  const LlamaCppApiConfiguration_js_1 = require("./LlamaCppApiConfiguration.cjs");
  const LlamaCppError_js_1 = require("./LlamaCppError.cjs");
- const LlamaCppGrammars_js_1 = require("./LlamaCppGrammars.cjs");
  const LlamaCppPrompt_js_1 = require("./LlamaCppPrompt.cjs");
  const LlamaCppTokenizer_js_1 = require("./LlamaCppTokenizer.cjs");
+ const convertJsonSchemaToGBNF_js_1 = require("./convertJsonSchemaToGBNF.cjs");
  class LlamaCppCompletionModel extends AbstractModel_js_1.AbstractModel {
      constructor(settings = {}) {
          super({ settings });
@@ -181,11 +181,15 @@ class LlamaCppCompletionModel extends AbstractModel_js_1.AbstractModel {
              template: promptTemplate,
          });
      }
-     withJsonOutput() {
-         // don't override the grammar if it's already set (to support more restrictive grammars)
-         return this.settings.grammar == null
-             ? this.withSettings({ grammar: LlamaCppGrammars_js_1.json })
-             : this;
+     withJsonOutput(schema) {
+         // don't override the grammar if it's already set (to allow user to override)
+         if (this.settings.grammar != null) {
+             return this;
+         }
+         const grammar = (0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)(schema.getJsonSchema());
+         return this.withSettings({
+             grammar: grammar,
+         });
      }
      get promptTemplateProvider() {
          return this.settings.promptTemplate ?? LlamaCppPrompt_js_1.Text;
@@ -2,6 +2,8 @@ import { z } from "zod";
2
2
  import { FunctionCallOptions } from "../../core/FunctionOptions.js";
3
3
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
4
  import { ResponseHandler } from "../../core/api/postToApi.js";
5
+ import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
6
+ import { Schema } from "../../core/schema/Schema.js";
5
7
  import { AbstractModel } from "../../model-function/AbstractModel.js";
6
8
  import { Delta } from "../../model-function/Delta.js";
7
9
  import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
@@ -386,7 +388,7 @@ export declare class LlamaCppCompletionModel<CONTEXT_WINDOW_SIZE extends number
386
388
  }>>>;
387
389
  extractTextDelta(delta: unknown): string;
388
390
  asStructureGenerationModel<INPUT_PROMPT, LlamaCppPrompt>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, LlamaCppPrompt> | FlexibleStructureFromTextPromptTemplate<INPUT_PROMPT, unknown>): StructureFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, TextGenerationModelSettings>> | StructureFromTextStreamingModel<INPUT_PROMPT, LlamaCppPrompt, TextStreamingModel<LlamaCppPrompt, TextGenerationModelSettings>>;
389
- withJsonOutput(): this;
391
+ withJsonOutput(schema: Schema<unknown> & JsonSchemaProducer): this;
390
392
  private get promptTemplateProvider();
391
393
  withTextPrompt(): PromptTemplateTextStreamingModel<string, LlamaCppCompletionPrompt, LlamaCppCompletionModelSettings<CONTEXT_WINDOW_SIZE>, this>;
392
394
  withInstructionPrompt(): PromptTemplateTextStreamingModel<InstructionPrompt, LlamaCppCompletionPrompt, LlamaCppCompletionModelSettings<CONTEXT_WINDOW_SIZE>, this>;
@@ -12,9 +12,9 @@ import { AsyncQueue } from "../../util/AsyncQueue.js";
12
12
  import { parseEventSourceStream } from "../../util/streaming/parseEventSourceStream.js";
13
13
  import { LlamaCppApiConfiguration } from "./LlamaCppApiConfiguration.js";
14
14
  import { failedLlamaCppCallResponseHandler } from "./LlamaCppError.js";
15
- import { json } from "./LlamaCppGrammars.js";
16
15
  import { Text } from "./LlamaCppPrompt.js";
17
16
  import { LlamaCppTokenizer } from "./LlamaCppTokenizer.js";
17
+ import { convertJsonSchemaToGBNF } from "./convertJsonSchemaToGBNF.js";
18
18
  export class LlamaCppCompletionModel extends AbstractModel {
19
19
  constructor(settings = {}) {
20
20
  super({ settings });
@@ -178,11 +178,15 @@ export class LlamaCppCompletionModel extends AbstractModel {
178
178
  template: promptTemplate,
179
179
  });
180
180
  }
181
- withJsonOutput() {
182
- // don't override the grammar if it's already set (to support more restrictive grammars)
183
- return this.settings.grammar == null
184
- ? this.withSettings({ grammar: json })
185
- : this;
181
+ withJsonOutput(schema) {
182
+ // don't override the grammar if it's already set (to allow user to override)
183
+ if (this.settings.grammar != null) {
184
+ return this;
185
+ }
186
+ const grammar = convertJsonSchemaToGBNF(schema.getJsonSchema());
187
+ return this.withSettings({
188
+ grammar: grammar,
189
+ });
186
190
  }
187
191
  get promptTemplateProvider() {
188
192
  return this.settings.promptTemplate ?? Text;
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.list = exports.jsonArray = exports.json = void 0;
+ exports.fromJsonSchema = exports.list = exports.jsonArray = exports.json = void 0;
  /**
   * GBNF grammar for JSON.
   *
@@ -82,3 +82,5 @@ root ::= item+
  # Excludes various line break characters
  item ::= "- " [^\r\n\x0b\x0c\x85\u2028\u2029]+ "\n"
  `;
+ var convertJsonSchemaToGBNF_1 = require("./convertJsonSchemaToGBNF");
+ Object.defineProperty(exports, "fromJsonSchema", { enumerable: true, get: function () { return convertJsonSchemaToGBNF_1.convertJsonSchemaToGBNF; } });
@@ -16,3 +16,4 @@ export declare const jsonArray: string;
   * @see https://github.com/ggerganov/llama.cpp/blob/master/grammars/list.gbnf
   */
  export declare const list: string;
+ export { convertJsonSchemaToGBNF as fromJsonSchema } from "./convertJsonSchemaToGBNF";
@@ -79,3 +79,4 @@ root ::= item+
  # Excludes various line break characters
  item ::= "- " [^\r\n\x0b\x0c\x85\u2028\u2029]+ "\n"
  `;
+ export { convertJsonSchemaToGBNF as fromJsonSchema } from "./convertJsonSchemaToGBNF";
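Alongside the existing `json`, `jsonArray`, and `list` grammars, the converter is now re-exported as `fromJsonSchema`, so a schema-derived grammar can also be built and passed explicitly. A sketch, assuming the grammar helpers stay exposed as `llamacpp.grammar.*`:

```ts
import { llamacpp, zodSchema } from "modelfusion";
import { z } from "zod";

// Build a GBNF grammar from a Zod-derived JSON schema and set it explicitly.
const grammar = llamacpp.grammar.fromJsonSchema(
  zodSchema(z.object({ name: z.string(), age: z.number() })).getJsonSchema()
);

const model = llamacpp.CompletionTextGenerator({
  promptTemplate: llamacpp.prompt.ChatML,
  maxGenerationTokens: 512,
  grammar,
});
```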
@@ -0,0 +1,113 @@
1
+ "use strict";
2
+ /* eslint-disable @typescript-eslint/no-explicit-any */
3
+ Object.defineProperty(exports, "__esModule", { value: true });
4
+ exports.convertJsonSchemaToGBNF = void 0;
5
+ /**
6
+ * Convert JSON Schema to a GBNF grammar.
7
+ *
8
+ * This is a modified version of
9
+ * https://github.com/ggerganov/llama.cpp/blob/master/examples/server/public/json-schema-to-grammar.mjs
10
+ */
11
+ function convertJsonSchemaToGBNF(schema) {
12
+ const rules = new RuleMap();
13
+ rules.add("space", SPACE_RULE);
14
+ visit(schema, undefined, rules);
15
+ return rules.toGBNF();
16
+ }
17
+ exports.convertJsonSchemaToGBNF = convertJsonSchemaToGBNF;
18
+ const SPACE_RULE = '" "?';
19
+ const PRIMITIVE_RULES = {
20
+ boolean: '("true" | "false") space',
21
+ number: '("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? space',
22
+ integer: '("-"? ([0-9] | [1-9] [0-9]*)) space',
23
+ string: ` "\\"" ( [^"\\\\] | "\\\\" (["\\\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) )* "\\"" space`,
24
+ null: '"null" space',
25
+ };
26
+ class RuleMap {
27
+ constructor() {
28
+ Object.defineProperty(this, "rules", {
29
+ enumerable: true,
30
+ configurable: true,
31
+ writable: true,
32
+ value: new Map()
33
+ });
34
+ }
35
+ add(name, rule) {
36
+ const escapedName = this.escapeRuleName(name, rule);
37
+ this.rules.set(escapedName, rule);
38
+ return escapedName;
39
+ }
40
+ /**
41
+ * Replace invalid characters in rule name with hyphens.
42
+ * Disambiguate the name if it already exists.
43
+ */
44
+ escapeRuleName(name, rule) {
45
+ const baseName = name.replace(/[^\dA-Za-z-]+/g, "-");
46
+ if (!this.rules.has(baseName) || this.rules.get(baseName) === rule) {
47
+ return baseName;
48
+ }
49
+ let i = 0;
50
+ while (this.rules.has(`${baseName}${i}`)) {
51
+ if (this.rules.get(`${baseName}${i}`) === rule) {
52
+ return `${baseName}${i}`;
53
+ }
54
+ i++;
55
+ }
56
+ return `${baseName}${i}`;
57
+ }
58
+ toGBNF() {
59
+ return Array.from(this.rules)
60
+ .map(([name, rule]) => `${name} ::= ${rule}`)
61
+ .join("\n");
62
+ }
63
+ }
64
+ const GRAMMAR_LITERAL_ESCAPES = {
65
+ "\r": "\\r",
66
+ "\n": "\\n",
67
+ '"': '\\"',
68
+ };
69
+ function formatLiteral(literal) {
70
+ const escaped = JSON.stringify(literal).replace(/[\n\r"]/g, (m) => GRAMMAR_LITERAL_ESCAPES[m]);
71
+ return `"${escaped}"`;
72
+ }
73
+ function visit(schema, name, rules) {
74
+ const schemaType = schema.type;
75
+ const ruleName = name || "root";
76
+ if (schema.oneOf || schema.anyOf) {
77
+ const rule = (schema.oneOf || schema.anyOf)
78
+ .map((altSchema, i) => visit(altSchema, `${name}${name ? "-" : ""}${i}`, rules))
79
+ .join(" | ");
80
+ return rules.add(ruleName, rule);
81
+ }
82
+ else if ("const" in schema) {
83
+ return rules.add(ruleName, formatLiteral(schema.const));
84
+ }
85
+ else if ("enum" in schema) {
86
+ const rule = schema.enum.map(formatLiteral).join(" | ");
87
+ return rules.add(ruleName, rule);
88
+ }
89
+ else if (schemaType === "object" && "properties" in schema) {
90
+ const propPairs = Object.entries(schema.properties);
91
+ let rule = '"{" space';
92
+ propPairs.forEach(([propName, propSchema], i) => {
93
+ const propRuleName = visit(propSchema, `${name ?? ""}${name ? "-" : ""}${propName}`, rules);
94
+ if (i > 0) {
95
+ rule += ' "," space';
96
+ }
97
+ rule += ` ${formatLiteral(propName)} space ":" space ${propRuleName}`;
98
+ });
99
+ rule += ' "}" space';
100
+ return rules.add(ruleName, rule);
101
+ }
102
+ else if (schemaType === "array" && "items" in schema) {
103
+ const itemRuleName = visit(schema.items, `${name ?? ""}${name ? "-" : ""}item`, rules);
104
+ const rule = `"[" space (${itemRuleName} ("," space ${itemRuleName})*)? "]" space`;
105
+ return rules.add(ruleName, rule);
106
+ }
107
+ else {
108
+ if (!PRIMITIVE_RULES[schemaType]) {
109
+ throw new Error(`Unrecognized schema: ${JSON.stringify(schema)}`);
110
+ }
111
+ return rules.add(ruleName === "root" ? "root" : schemaType, PRIMITIVE_RULES[schemaType]);
112
+ }
113
+ }
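For a rough sense of what the converter above emits, here is a small schema and the approximate grammar it maps to. This is illustrative only: exact spacing and rule order follow the implementation, and the snapshot tests added further down cover these cases.

```ts
// Using the convertJsonSchemaToGBNF function defined above (sketch).
const grammar = convertJsonSchemaToGBNF({
  type: "object",
  properties: { name: { type: "string" } },
});

// grammar is a GBNF string along the lines of:
//   space ::= " "?
//   string ::= "\"" ( [^"\\] | "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] ...) )* "\"" space
//   root ::= "{" space "\"name\"" space ":" space string "}" space
```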
@@ -0,0 +1,7 @@
1
+ /**
2
+ * Convert JSON Schema to a GBNF grammar.
3
+ *
4
+ * This is a modified version of
5
+ * https://github.com/ggerganov/llama.cpp/blob/master/examples/server/public/json-schema-to-grammar.mjs
6
+ */
7
+ export declare function convertJsonSchemaToGBNF(schema: unknown): string;
@@ -0,0 +1,109 @@
1
+ /* eslint-disable @typescript-eslint/no-explicit-any */
2
+ /**
3
+ * Convert JSON Schema to a GBNF grammar.
4
+ *
5
+ * This is a modified version of
6
+ * https://github.com/ggerganov/llama.cpp/blob/master/examples/server/public/json-schema-to-grammar.mjs
7
+ */
8
+ export function convertJsonSchemaToGBNF(schema) {
9
+ const rules = new RuleMap();
10
+ rules.add("space", SPACE_RULE);
11
+ visit(schema, undefined, rules);
12
+ return rules.toGBNF();
13
+ }
14
+ const SPACE_RULE = '" "?';
15
+ const PRIMITIVE_RULES = {
16
+ boolean: '("true" | "false") space',
17
+ number: '("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? space',
18
+ integer: '("-"? ([0-9] | [1-9] [0-9]*)) space',
19
+ string: ` "\\"" ( [^"\\\\] | "\\\\" (["\\\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) )* "\\"" space`,
20
+ null: '"null" space',
21
+ };
22
+ class RuleMap {
23
+ constructor() {
24
+ Object.defineProperty(this, "rules", {
25
+ enumerable: true,
26
+ configurable: true,
27
+ writable: true,
28
+ value: new Map()
29
+ });
30
+ }
31
+ add(name, rule) {
32
+ const escapedName = this.escapeRuleName(name, rule);
33
+ this.rules.set(escapedName, rule);
34
+ return escapedName;
35
+ }
36
+ /**
37
+ * Replace invalid characters in rule name with hyphens.
38
+ * Disambiguate the name if it already exists.
39
+ */
40
+ escapeRuleName(name, rule) {
41
+ const baseName = name.replace(/[^\dA-Za-z-]+/g, "-");
42
+ if (!this.rules.has(baseName) || this.rules.get(baseName) === rule) {
43
+ return baseName;
44
+ }
45
+ let i = 0;
46
+ while (this.rules.has(`${baseName}${i}`)) {
47
+ if (this.rules.get(`${baseName}${i}`) === rule) {
48
+ return `${baseName}${i}`;
49
+ }
50
+ i++;
51
+ }
52
+ return `${baseName}${i}`;
53
+ }
54
+ toGBNF() {
55
+ return Array.from(this.rules)
56
+ .map(([name, rule]) => `${name} ::= ${rule}`)
57
+ .join("\n");
58
+ }
59
+ }
60
+ const GRAMMAR_LITERAL_ESCAPES = {
61
+ "\r": "\\r",
62
+ "\n": "\\n",
63
+ '"': '\\"',
64
+ };
65
+ function formatLiteral(literal) {
66
+ const escaped = JSON.stringify(literal).replace(/[\n\r"]/g, (m) => GRAMMAR_LITERAL_ESCAPES[m]);
67
+ return `"${escaped}"`;
68
+ }
69
+ function visit(schema, name, rules) {
70
+ const schemaType = schema.type;
71
+ const ruleName = name || "root";
72
+ if (schema.oneOf || schema.anyOf) {
73
+ const rule = (schema.oneOf || schema.anyOf)
74
+ .map((altSchema, i) => visit(altSchema, `${name}${name ? "-" : ""}${i}`, rules))
75
+ .join(" | ");
76
+ return rules.add(ruleName, rule);
77
+ }
78
+ else if ("const" in schema) {
79
+ return rules.add(ruleName, formatLiteral(schema.const));
80
+ }
81
+ else if ("enum" in schema) {
82
+ const rule = schema.enum.map(formatLiteral).join(" | ");
83
+ return rules.add(ruleName, rule);
84
+ }
85
+ else if (schemaType === "object" && "properties" in schema) {
86
+ const propPairs = Object.entries(schema.properties);
87
+ let rule = '"{" space';
88
+ propPairs.forEach(([propName, propSchema], i) => {
89
+ const propRuleName = visit(propSchema, `${name ?? ""}${name ? "-" : ""}${propName}`, rules);
90
+ if (i > 0) {
91
+ rule += ' "," space';
92
+ }
93
+ rule += ` ${formatLiteral(propName)} space ":" space ${propRuleName}`;
94
+ });
95
+ rule += ' "}" space';
96
+ return rules.add(ruleName, rule);
97
+ }
98
+ else if (schemaType === "array" && "items" in schema) {
99
+ const itemRuleName = visit(schema.items, `${name ?? ""}${name ? "-" : ""}item`, rules);
100
+ const rule = `"[" space (${itemRuleName} ("," space ${itemRuleName})*)? "]" space`;
101
+ return rules.add(ruleName, rule);
102
+ }
103
+ else {
104
+ if (!PRIMITIVE_RULES[schemaType]) {
105
+ throw new Error(`Unrecognized schema: ${JSON.stringify(schema)}`);
106
+ }
107
+ return rules.add(ruleName === "root" ? "root" : schemaType, PRIMITIVE_RULES[schemaType]);
108
+ }
109
+ }
@@ -0,0 +1,150 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const convertJsonSchemaToGBNF_js_1 = require("./convertJsonSchemaToGBNF.cjs");
4
+ describe("primitives", () => {
5
+ it("should convert string", () => {
6
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
7
+ type: "string",
8
+ })).toMatchSnapshot();
9
+ });
10
+ it("should convert number", () => {
11
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
12
+ type: "number",
13
+ })).toMatchSnapshot();
14
+ });
15
+ it("should convert integer", () => {
16
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
17
+ type: "integer",
18
+ })).toMatchSnapshot();
19
+ });
20
+ it("should convert boolean", () => {
21
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
22
+ type: "boolean",
23
+ })).toMatchSnapshot();
24
+ });
25
+ it("should convert null", () => {
26
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
27
+ type: "null",
28
+ })).toMatchSnapshot();
29
+ });
30
+ });
31
+ describe("array", () => {
32
+ it("should convert array of string", () => {
33
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
34
+ type: "array",
35
+ items: {
36
+ type: "string",
37
+ },
38
+ })).toMatchSnapshot();
39
+ });
40
+ it("should convert array of array of string", () => {
41
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
42
+ type: "array",
43
+ items: {
44
+ type: "array",
45
+ items: {
46
+ type: "string",
47
+ },
48
+ },
49
+ })).toMatchSnapshot();
50
+ });
51
+ it("should convert array of object", () => {
52
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
53
+ type: "array",
54
+ items: {
55
+ type: "object",
56
+ properties: {
57
+ name: {
58
+ type: "string",
59
+ },
60
+ age: {
61
+ type: "number",
62
+ },
63
+ },
64
+ },
65
+ })).toMatchSnapshot();
66
+ });
67
+ });
68
+ describe("object", () => {
69
+ it("should convert object", () => {
70
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
71
+ type: "object",
72
+ properties: {
73
+ name: {
74
+ type: "string",
75
+ },
76
+ age: {
77
+ type: "number",
78
+ },
79
+ },
80
+ })).toMatchSnapshot();
81
+ });
82
+ it("should convert object with required properties", () => {
83
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
84
+ type: "object",
85
+ properties: {
86
+ name: {
87
+ type: "string",
88
+ },
89
+ age: {
90
+ type: "number",
91
+ },
92
+ },
93
+ required: ["name"],
94
+ })).toMatchSnapshot();
95
+ });
96
+ it("should convert object with additional properties", () => {
97
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
98
+ type: "object",
99
+ properties: {
100
+ name: {
101
+ type: "string",
102
+ },
103
+ age: {
104
+ type: "number",
105
+ },
106
+ },
107
+ additionalProperties: true,
108
+ })).toMatchSnapshot();
109
+ });
110
+ it("should convert object with additional properties of string", () => {
111
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
112
+ type: "object",
113
+ properties: {
114
+ name: {
115
+ type: "string",
116
+ },
117
+ age: {
118
+ type: "number",
119
+ },
120
+ },
121
+ additionalProperties: {
122
+ type: "string",
123
+ },
124
+ })).toMatchSnapshot();
125
+ });
126
+ it("should convert object with additional properties of object", () => {
127
+ expect((0, convertJsonSchemaToGBNF_js_1.convertJsonSchemaToGBNF)({
128
+ type: "object",
129
+ properties: {
130
+ name: {
131
+ type: "string",
132
+ },
133
+ age: {
134
+ type: "number",
135
+ },
136
+ },
137
+ additionalProperties: {
138
+ type: "object",
139
+ properties: {
140
+ name: {
141
+ type: "string",
142
+ },
143
+ age: {
144
+ type: "number",
145
+ },
146
+ },
147
+ },
148
+ })).toMatchSnapshot();
149
+ });
150
+ });
@@ -0,0 +1,148 @@
1
+ import { convertJsonSchemaToGBNF } from "./convertJsonSchemaToGBNF.js";
2
+ describe("primitives", () => {
3
+ it("should convert string", () => {
4
+ expect(convertJsonSchemaToGBNF({
5
+ type: "string",
6
+ })).toMatchSnapshot();
7
+ });
8
+ it("should convert number", () => {
9
+ expect(convertJsonSchemaToGBNF({
10
+ type: "number",
11
+ })).toMatchSnapshot();
12
+ });
13
+ it("should convert integer", () => {
14
+ expect(convertJsonSchemaToGBNF({
15
+ type: "integer",
16
+ })).toMatchSnapshot();
17
+ });
18
+ it("should convert boolean", () => {
19
+ expect(convertJsonSchemaToGBNF({
20
+ type: "boolean",
21
+ })).toMatchSnapshot();
22
+ });
23
+ it("should convert null", () => {
24
+ expect(convertJsonSchemaToGBNF({
25
+ type: "null",
26
+ })).toMatchSnapshot();
27
+ });
28
+ });
29
+ describe("array", () => {
30
+ it("should convert array of string", () => {
31
+ expect(convertJsonSchemaToGBNF({
32
+ type: "array",
33
+ items: {
34
+ type: "string",
35
+ },
36
+ })).toMatchSnapshot();
37
+ });
38
+ it("should convert array of array of string", () => {
39
+ expect(convertJsonSchemaToGBNF({
40
+ type: "array",
41
+ items: {
42
+ type: "array",
43
+ items: {
44
+ type: "string",
45
+ },
46
+ },
47
+ })).toMatchSnapshot();
48
+ });
49
+ it("should convert array of object", () => {
50
+ expect(convertJsonSchemaToGBNF({
51
+ type: "array",
52
+ items: {
53
+ type: "object",
54
+ properties: {
55
+ name: {
56
+ type: "string",
57
+ },
58
+ age: {
59
+ type: "number",
60
+ },
61
+ },
62
+ },
63
+ })).toMatchSnapshot();
64
+ });
65
+ });
66
+ describe("object", () => {
67
+ it("should convert object", () => {
68
+ expect(convertJsonSchemaToGBNF({
69
+ type: "object",
70
+ properties: {
71
+ name: {
72
+ type: "string",
73
+ },
74
+ age: {
75
+ type: "number",
76
+ },
77
+ },
78
+ })).toMatchSnapshot();
79
+ });
80
+ it("should convert object with required properties", () => {
81
+ expect(convertJsonSchemaToGBNF({
82
+ type: "object",
83
+ properties: {
84
+ name: {
85
+ type: "string",
86
+ },
87
+ age: {
88
+ type: "number",
89
+ },
90
+ },
91
+ required: ["name"],
92
+ })).toMatchSnapshot();
93
+ });
94
+ it("should convert object with additional properties", () => {
95
+ expect(convertJsonSchemaToGBNF({
96
+ type: "object",
97
+ properties: {
98
+ name: {
99
+ type: "string",
100
+ },
101
+ age: {
102
+ type: "number",
103
+ },
104
+ },
105
+ additionalProperties: true,
106
+ })).toMatchSnapshot();
107
+ });
108
+ it("should convert object with additional properties of string", () => {
109
+ expect(convertJsonSchemaToGBNF({
110
+ type: "object",
111
+ properties: {
112
+ name: {
113
+ type: "string",
114
+ },
115
+ age: {
116
+ type: "number",
117
+ },
118
+ },
119
+ additionalProperties: {
120
+ type: "string",
121
+ },
122
+ })).toMatchSnapshot();
123
+ });
124
+ it("should convert object with additional properties of object", () => {
125
+ expect(convertJsonSchemaToGBNF({
126
+ type: "object",
127
+ properties: {
128
+ name: {
129
+ type: "string",
130
+ },
131
+ age: {
132
+ type: "number",
133
+ },
134
+ },
135
+ additionalProperties: {
136
+ type: "object",
137
+ properties: {
138
+ name: {
139
+ type: "string",
140
+ },
141
+ age: {
142
+ type: "number",
143
+ },
144
+ },
145
+ },
146
+ })).toMatchSnapshot();
147
+ });
148
+ });
@@ -144,6 +144,9 @@ class MistralChatModel extends AbstractModel_js_1.AbstractModel {
144
144
  withChatPrompt() {
145
145
  return this.withPromptTemplate((0, MistralChatPromptTemplate_js_1.chat)());
146
146
  }
147
+ withJsonOutput() {
148
+ return this;
149
+ }
147
150
  withPromptTemplate(promptTemplate) {
148
151
  return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
149
152
  model: this, // stop tokens are not supported by this model
@@ -161,6 +161,7 @@ export declare class MistralChatModel extends AbstractModel<MistralChatModelSett
161
161
  * Returns this model with a chat prompt template.
162
162
  */
163
163
  withChatPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").ChatPrompt, MistralChatPrompt, MistralChatModelSettings, this>;
164
+ withJsonOutput(): this;
164
165
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, MistralChatPrompt>): PromptTemplateTextStreamingModel<INPUT_PROMPT, MistralChatPrompt, MistralChatModelSettings, this>;
165
166
  withSettings(additionalSettings: Partial<MistralChatModelSettings>): this;
166
167
  }
@@ -141,6 +141,9 @@ export class MistralChatModel extends AbstractModel {
141
141
  withChatPrompt() {
142
142
  return this.withPromptTemplate(chat());
143
143
  }
144
+ withJsonOutput() {
145
+ return this;
146
+ }
144
147
  withPromptTemplate(promptTemplate) {
145
148
  return new PromptTemplateTextStreamingModel({
146
149
  model: this, // stop tokens are not supported by this model
@@ -163,6 +163,9 @@ class OllamaCompletionModel extends AbstractModel_js_1.AbstractModel {
163
163
  template: promptTemplate,
164
164
  });
165
165
  }
166
+ withJsonOutput() {
167
+ return this;
168
+ }
166
169
  withTextPrompt() {
167
170
  return this.withPromptTemplate({
168
171
  format(prompt) {
@@ -130,6 +130,7 @@ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number |
130
130
  extractTextDelta(delta: unknown): string | undefined;
131
131
  asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt>): TextGenerationToolCallModel<INPUT_PROMPT, OllamaCompletionPrompt, this>;
132
132
  asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallsPromptTemplate<INPUT_PROMPT, OllamaCompletionPrompt>): TextGenerationToolCallsModel<INPUT_PROMPT, OllamaCompletionPrompt, this>;
133
+ withJsonOutput(): this;
133
134
  withTextPrompt(): PromptTemplateTextStreamingModel<string, OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>, this>;
134
135
  /**
135
136
  * Maps the prompt for a text version of the Ollama completion prompt template (without image support).
@@ -160,6 +160,9 @@ export class OllamaCompletionModel extends AbstractModel {
160
160
  template: promptTemplate,
161
161
  });
162
162
  }
163
+ withJsonOutput() {
164
+ return this;
165
+ }
163
166
  withTextPrompt() {
164
167
  return this.withPromptTemplate({
165
168
  format(prompt) {
@@ -120,6 +120,9 @@ class AbstractOpenAICompletionModel extends AbstractModel_js_1.AbstractModel {
120
120
  }
121
121
  return chunk.choices[0].text;
122
122
  }
123
+ withJsonOutput() {
124
+ return this;
125
+ }
123
126
  }
124
127
  exports.AbstractOpenAICompletionModel = AbstractOpenAICompletionModel;
125
128
  const OpenAICompletionResponseSchema = zod_1.z.object({
@@ -103,6 +103,7 @@ export declare abstract class AbstractOpenAICompletionModel<SETTINGS extends Abs
103
103
  system_fingerprint?: string | undefined;
104
104
  }>>>;
105
105
  extractTextDelta(delta: unknown): string | undefined;
106
+ withJsonOutput(): this;
106
107
  }
107
108
  declare const OpenAICompletionResponseSchema: z.ZodObject<{
108
109
  id: z.ZodString;
@@ -117,6 +117,9 @@ export class AbstractOpenAICompletionModel extends AbstractModel {
117
117
  }
118
118
  return chunk.choices[0].text;
119
119
  }
120
+ withJsonOutput() {
121
+ return this;
122
+ }
120
123
  }
121
124
  const OpenAICompletionResponseSchema = z.object({
122
125
  id: z.string(),
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "modelfusion",
    "description": "The TypeScript library for building AI applications.",
-   "version": "0.118.0",
+   "version": "0.119.0",
    "author": "Lars Grammel",
    "license": "MIT",
    "keywords": [