modelfusion 0.112.0 → 0.113.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,51 @@
1
1
  # Changelog
2
2
 
3
+ ## v0.113.0 - 2024-01-03
4
+
5
+ [Structure generation](https://modelfusion.dev/guide/function/generate-structure) improvements.
6
+
7
+ ### Added
8
+
9
+ - `.asStructureGenerationModel(...)` function to `OpenAIChatModel` and `OllamaChatModel` to create structure generation models from chat models.
10
+ - `jsonStructurePrompt` helper function to create structure generation prompts.
11
+
12
+ ### Example
13
+
14
+ ```ts
15
+ import {
16
+ generateStructure,
17
+ jsonStructurePrompt,
18
+ ollama,
19
+ zodSchema,
20
+ } from "modelfusion";
21
+
22
+ const structure = await generateStructure(
23
+ ollama
24
+ .ChatTextGenerator({
25
+ model: "openhermes2.5-mistral",
26
+ maxGenerationTokens: 1024,
27
+ temperature: 0,
28
+ })
29
+ .asStructureGenerationModel(jsonStructurePrompt.text()),
30
+
31
+ zodSchema(
32
+ z.object({
33
+ characters: z.array(
34
+ z.object({
35
+ name: z.string(),
36
+ class: z
37
+ .string()
38
+ .describe("Character class, e.g. warrior, mage, or thief."),
39
+ description: z.string(),
40
+ })
41
+ ),
42
+ })
43
+ ),
44
+
45
+ "Generate 3 character descriptions for a fantasy role playing game. "
46
+ );
47
+ ```
48
+
3
49
  ## v0.112.0 - 2024-01-02
4
50
 
5
51
  ### Changed
package/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # ModelFusion
2
2
 
3
- > ### The TypeScript library for building multi-modal AI applications.
3
+ > ### The TypeScript library for building AI applications.
4
4
 
5
5
  [![NPM Version](https://img.shields.io/npm/v/modelfusion?color=33cd56&logo=npm)](https://www.npmjs.com/package/modelfusion)
6
6
  [![MIT License](https://img.shields.io/github/license/lgrammel/modelfusion)](https://opensource.org/licenses/MIT)
@@ -12,7 +12,7 @@
12
12
 
13
13
  ## Introduction
14
14
 
15
- **ModelFusion** is an abstraction layer for integrating AI models into JavaScript and TypeScript applications, unifying the API for common operations such as text streaming, structure generation, and tool usage. It provides features to support production environments, including observability hooks, logging, and automatic retries. You can use ModelFusion to build AI applications, chatbots, and agents.
15
+ **ModelFusion** is an abstraction layer for integrating AI models into JavaScript and TypeScript applications, unifying the API for common operations such as **text streaming**, **structure generation**, and **tool usage**. It provides features to support production environments, including observability hooks, logging, and automatic retries. You can use ModelFusion to build AI applications, chatbots, and agents.
16
16
 
17
17
  - **Vendor-neutral**: ModelFusion is a non-commercial open source project that is community-driven. You can use it with any supported provider.
18
18
  - **Multi-modal**: ModelFusion supports a wide range of models including text generation, image generation, vision, text-to-speech, speech-to-text, and embedding models.
@@ -201,18 +201,22 @@ Generate typed objects using a language model and a schema.
201
201
  Generate a structure that matches a schema.
202
202
 
203
203
  ```ts
204
- import { zodSchema, generateStructure, openai } from "modelfusion";
204
+ import {
205
+ ollama,
206
+ zodSchema,
207
+ generateStructure,
208
+ jsonStructurePrompt,
209
+ } from "modelfusion";
205
210
 
206
211
  const sentiment = await generateStructure(
207
212
  // model:
208
- openai
213
+ ollama
209
214
  .ChatTextGenerator({
210
- model: "gpt-3.5-turbo",
215
+ model: "openhermes2.5-mistral",
216
+ maxGenerationTokens: 1024,
211
217
  temperature: 0,
212
- maxGenerationTokens: 50,
213
218
  })
214
- .asFunctionCallStructureGenerationModel({ fnName: "sentiment" })
215
- .withInstructionPrompt(),
219
+ .asStructureGenerationModel(jsonStructurePrompt.instruction()),
216
220
 
217
221
  // schema:
218
222
  zodSchema(
@@ -326,25 +330,13 @@ Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai),
326
330
 
327
331
  ### [Tools](https://modelfusion.dev/guide/tools)
328
332
 
329
- Tools are functions that can be executed by an AI model. They are useful for building chatbots and agents.
333
+ Tools are functions (and associated metadata) that can be executed by an AI model. They are useful for building chatbots and agents.
330
334
 
331
- Predefined tools: [Math.js](https://modelfusion.dev/guide/tools/predefined-tools/mathjs), [MediaWiki Search](https://modelfusion.dev/guide/tools/predefined-tools/mediawiki-search), [SerpAPI](https://modelfusion.dev/guide/tools/predefined-tools/serpapi), [Google Custom Search](https://modelfusion.dev/guide/tools/predefined-tools/google-custom-search)
332
-
333
- #### [executeTool](https://modelfusion.dev/guide/tools/execute-tool)
334
-
335
- You can directly invoke a tool with `executeTool`:
336
-
337
- ```ts
338
- const result = await executeTool(calculator, {
339
- a: 14,
340
- b: 12,
341
- operator: "*",
342
- });
343
- ```
335
+ ModelFusion offers several tools out-of-the-box: [Math.js](https://modelfusion.dev/guide/tools/available-tools/mathjs), [MediaWiki Search](https://modelfusion.dev/guide/tools/available-tools/mediawiki-search), [SerpAPI](https://modelfusion.dev/guide/tools/available-tools/serpapi), [Google Custom Search](https://modelfusion.dev/guide/tools/available-tools/google-custom-search). You can also create [custom tools](https://modelfusion.dev/guide/tools).
344
336
 
345
337
  #### [useTool](https://modelfusion.dev/guide/tools/use-tool)
346
338
 
347
- With `useTool`, you can use a tool with a language model that supports tools calls (e.g. OpenAI Chat). `useTool` first generates a tool call and then executes the tool with the arguments.
339
+ With `useTool`, you can ask a tool-compatible language model (e.g. OpenAI chat) to invoke a single tool. `useTool` first generates a tool call and then executes the tool with the arguments.
348
340
 
349
341
  ```ts
350
342
  const { tool, toolCall, args, ok, result } = await useTool(
@@ -360,7 +352,7 @@ console.log(`Ok:`, ok);
360
352
  console.log(`Result or Error:`, result);
361
353
  ```
362
354
 
363
- #### [useTools](https://modelfusion.dev/guide/tools/use-tools-or-generate-text)
355
+ #### [useTools](https://modelfusion.dev/guide/tools/use-tools)
364
356
 
365
357
  With `useTools`, you can ask a language model to generate several tool calls as well as text. The model will choose which tools (if any) should be called with which arguments. Both the text and the tool calls are optional. This function executes the tools.
366
358
 
@@ -372,42 +364,6 @@ const { text, toolResults } = await useTools(
372
364
  );
373
365
  ```
374
366
 
375
- #### [Creating Tools](https://modelfusion.dev/guide/tools/create-tools)
376
-
377
- A tool is comprised of an async execute function, a name, a description, and a schema for the input parameters.
378
-
379
- ```ts
380
- const calculator = new Tool({
381
- name: "calculator",
382
- description: "Execute a calculation",
383
-
384
- parameters: zodSchema(
385
- z.object({
386
- a: z.number().describe("The first number."),
387
- b: z.number().describe("The second number."),
388
- operator: z
389
- .enum(["+", "-", "*", "/"])
390
- .describe("The operator (+, -, *, /)."),
391
- })
392
- ),
393
-
394
- execute: async ({ a, b, operator }) => {
395
- switch (operator) {
396
- case "+":
397
- return a + b;
398
- case "-":
399
- return a - b;
400
- case "*":
401
- return a * b;
402
- case "/":
403
- return a / b;
404
- default:
405
- throw new Error(`Unknown operator: ${operator}`);
406
- }
407
- },
408
- });
409
- ```
410
-
411
367
  #### [Agent Loop](https://modelfusion.dev/guide/tools/agent-loop)
412
368
 
413
369
  You can use `useTools` to implement an agent loop that responds to user messages and executes tools. [Learn more](https://modelfusion.dev/guide/tools/agent-loop).
@@ -582,85 +538,25 @@ import { modelfusion } from "modelfusion";
582
538
  modelfusion.setLogFormat("detailed-object"); // log full events
583
539
  ```
584
540
 
585
- ### [Server](https://modelfusion.dev/guide/experimental/server/)
586
-
587
- > [!WARNING]
588
- > ModelFusion Server is in its initial development phase and not feature-complete. The API is experimental and breaking changes are likely. Feedback and suggestions are welcome.
589
-
590
- ModelFusion Server is designed for running multi-modal generative AI flows that take up to several minutes to complete. It provides the following benefits:
591
-
592
- - 🔄 Real-time progress updates via custom server-sent events
593
- - 🔒Type-safety with Zod-schema for inputs/events
594
- - 📦 Efficient handling of dynamically created binary assets (images, audio)
595
- - 📜 Auto-logging for AI model interactions within flows
596
-
597
- ModelFusion provides a [Fastify](https://fastify.dev/) plugin that allows you to set up a server that exposes your ModelFusion flows as REST endpoints using server-sent events.
598
-
599
- ```ts
600
- import {
601
- FileSystemAssetStorage,
602
- FileSystemLogger,
603
- modelFusionFastifyPlugin,
604
- } from "modelfusion-experimental/fastify-server"; // '/fastify-server' import path
605
-
606
- // configurable logging for all runs using ModelFusion observability:
607
- const logger = new FileSystemLogger({
608
- path: (run) => path.join(fsBasePath, run.runId, "logs"),
609
- });
610
-
611
- // configurable storage for large files like images and audio files:
612
- const assetStorage = new FileSystemAssetStorage({
613
- path: (run) => path.join(fsBasePath, run.runId, "assets"),
614
- logger,
615
- });
616
-
617
- fastify.register(modelFusionFastifyPlugin, {
618
- baseUrl,
619
- basePath: "/myFlow",
620
- logger,
621
- assetStorage,
622
- flow: exampleFlow,
623
- });
624
- ```
625
-
626
- Using `invokeFlow`, you can easily connect your client to a ModelFusion flow endpoint:
627
-
628
- ```ts
629
- import { invokeFlow } from "modelfusion-experimental/browser"; // '/browser' import path
630
-
631
- invokeFlow({
632
- url: `${BASE_URL}/myFlow`,
633
- schema: myFlowSchema,
634
- input: { prompt },
635
- onEvent(event) {
636
- switch (event.type) {
637
- case "my-event": {
638
- // do something with the event
639
- break;
640
- }
641
- // more events...
642
- }
643
- },
644
- onStop() {
645
- // flow finished
646
- },
647
- });
648
- ```
649
-
650
541
  ## Documentation
651
542
 
652
543
  ### [Guide](https://modelfusion.dev/guide)
653
544
 
654
545
  - [Model Functions](https://modelfusion.dev/guide/function/)
655
546
  - [Generate text](https://modelfusion.dev/guide/function/generate-text)
547
+ - [Generate structure](https://modelfusion.dev/guide/function/generate-structure)
656
548
  - [Generate image](https://modelfusion.dev/guide/function/generate-image)
657
549
  - [Generate speech](https://modelfusion.dev/guide/function/generate-speech)
658
550
  - [Generate transcription](https://modelfusion.dev/guide/function/generation-transcription)
659
- - [Generate structure](https://modelfusion.dev/guide/function/generate-structure)
660
- - [Generate structure or text](https://modelfusion.dev/guide/function/generate-structure-or-text)
661
551
  - [Tokenize Text](https://modelfusion.dev/guide/function/tokenize-text)
662
552
  - [Embed Value](https://modelfusion.dev/guide/function/embed)
663
553
  - [Tools](https://modelfusion.dev/guide/tools)
554
+ - [Use Tool](https://modelfusion.dev/guide/tools/use-tool)
555
+ - [Use Tools](https://modelfusion.dev/guide/tools/use-tools)
556
+ - [Agent Loop](https://modelfusion.dev/guide/tools/agent-loop)
557
+ - [Available Tools](https://modelfusion.dev/guide/tools/available-tools/)
558
+ - [Custom Tools](https://modelfusion.dev/guide/tools/custom-tools)
559
+ - [Advanced](https://modelfusion.dev/guide/tools/advanced)
664
560
  - [Vector Indices](https://modelfusion.dev/guide/vector-index)
665
561
  - [Upsert](https://modelfusion.dev/guide/vector-index/upsert)
666
562
  - [Retrieve](https://modelfusion.dev/guide/vector-index/retrieve)
@@ -1,6 +1,19 @@
1
1
  import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
2
2
  import { Schema } from "../../core/schema/Schema.js";
3
+ import { TextStreamingModel } from "../generate-text/TextGenerationModel.js";
4
+ import { ChatPrompt } from "../generate-text/prompt-template/ChatPrompt.js";
5
+ import { InstructionPrompt } from "../generate-text/prompt-template/InstructionPrompt.js";
3
6
  export type StructureFromTextPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT> = {
4
7
  createPrompt: (prompt: SOURCE_PROMPT, schema: Schema<unknown> & JsonSchemaProducer) => TARGET_PROMPT;
5
8
  extractStructure: (response: string) => unknown;
6
9
  };
10
+ export type FlexibleStructureFromTextPromptTemplate<SOURCE_PROMPT, INTERMEDIATE_PROMPT> = {
11
+ createPrompt: (prompt: SOURCE_PROMPT, schema: Schema<unknown> & JsonSchemaProducer) => INTERMEDIATE_PROMPT;
12
+ extractStructure: (response: string) => unknown;
13
+ adaptModel: (model: TextStreamingModel<never> & {
14
+ withTextPrompt(): TextStreamingModel<string>;
15
+ withInstructionPrompt(): TextStreamingModel<InstructionPrompt>;
16
+ withChatPrompt(): TextStreamingModel<ChatPrompt>;
17
+ withJsonOutput?: () => typeof model;
18
+ }) => TextStreamingModel<INTERMEDIATE_PROMPT>;
19
+ };
@@ -15,6 +15,12 @@ exports.jsonStructurePrompt = {
15
15
  instruction: prompt,
16
16
  }),
17
17
  extractStructure,
18
+ adaptModel: (model) => {
19
+ if (model.withJsonOutput != null) {
20
+ model = model.withJsonOutput();
21
+ }
22
+ return model.withInstructionPrompt();
23
+ },
18
24
  };
19
25
  },
20
26
  instruction({ schemaPrefix, schemaSuffix, } = {}) {
@@ -29,6 +35,12 @@ exports.jsonStructurePrompt = {
29
35
  instruction: prompt.instruction,
30
36
  }),
31
37
  extractStructure,
38
+ adaptModel: (model) => {
39
+ if (model.withJsonOutput != null) {
40
+ model = model.withJsonOutput();
41
+ }
42
+ return model.withInstructionPrompt();
43
+ },
32
44
  };
33
45
  },
34
46
  };
@@ -1,15 +1,15 @@
1
1
  import { JsonSchemaProducer } from "../../core/schema/JsonSchemaProducer.js";
2
2
  import { Schema } from "../../core/schema/Schema.js";
3
3
  import { InstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
4
- import { StructureFromTextPromptTemplate } from "./StructureFromTextPromptTemplate.js";
4
+ import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "./StructureFromTextPromptTemplate.js";
5
5
  export declare const jsonStructurePrompt: {
6
6
  custom<SOURCE_PROMPT, TARGET_PROMPT>(createPrompt: (prompt: SOURCE_PROMPT, schema: Schema<unknown> & JsonSchemaProducer) => TARGET_PROMPT): StructureFromTextPromptTemplate<SOURCE_PROMPT, TARGET_PROMPT>;
7
7
  text({ schemaPrefix, schemaSuffix, }?: {
8
8
  schemaPrefix?: string | undefined;
9
9
  schemaSuffix?: string | undefined;
10
- }): StructureFromTextPromptTemplate<string, InstructionPrompt>;
10
+ }): FlexibleStructureFromTextPromptTemplate<string, InstructionPrompt>;
11
11
  instruction({ schemaPrefix, schemaSuffix, }?: {
12
12
  schemaPrefix?: string | undefined;
13
13
  schemaSuffix?: string | undefined;
14
- }): StructureFromTextPromptTemplate<InstructionPrompt, InstructionPrompt>;
14
+ }): FlexibleStructureFromTextPromptTemplate<InstructionPrompt, InstructionPrompt>;
15
15
  };
@@ -12,6 +12,12 @@ export const jsonStructurePrompt = {
12
12
  instruction: prompt,
13
13
  }),
14
14
  extractStructure,
15
+ adaptModel: (model) => {
16
+ if (model.withJsonOutput != null) {
17
+ model = model.withJsonOutput();
18
+ }
19
+ return model.withInstructionPrompt();
20
+ },
15
21
  };
16
22
  },
17
23
  instruction({ schemaPrefix, schemaSuffix, } = {}) {
@@ -26,6 +32,12 @@ export const jsonStructurePrompt = {
26
32
  instruction: prompt.instruction,
27
33
  }),
28
34
  extractStructure,
35
+ adaptModel: (model) => {
36
+ if (model.withJsonOutput != null) {
37
+ model = model.withJsonOutput();
38
+ }
39
+ return model.withInstructionPrompt();
40
+ },
29
41
  };
30
42
  },
31
43
  };
@@ -72,6 +72,10 @@ export interface TextGenerationModel<PROMPT, SETTINGS extends TextGenerationMode
72
72
  };
73
73
  }>;
74
74
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, PROMPT>): TextGenerationModel<INPUT_PROMPT, SETTINGS>;
75
+ /**
76
+ * Optional. When available, forces the model to return JSON as the text output.
77
+ */
78
+ withJsonOutput?(): this;
75
79
  }
76
80
  export interface TextStreamingModel<PROMPT, SETTINGS extends TextGenerationModelSettings = TextGenerationModelSettings> extends TextGenerationModel<PROMPT, SETTINGS> {
77
81
  doStreamText(prompt: PROMPT, options?: FunctionOptions): PromiseLike<AsyncIterable<Delta<unknown>>>;
@@ -8,6 +8,7 @@ const postToApi_js_1 = require("../../core/api/postToApi.cjs");
8
8
  const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
9
9
  const parseJSON_js_1 = require("../../core/schema/parseJSON.cjs");
10
10
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
11
+ const StructureFromTextStreamingModel_js_1 = require("../../model-function/generate-structure/StructureFromTextStreamingModel.cjs");
11
12
  const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
12
13
  const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
13
14
  const TextGenerationToolCallModel_js_1 = require("../../tool/generate-tool-call/TextGenerationToolCallModel.cjs");
@@ -148,6 +149,17 @@ class OllamaChatModel extends AbstractModel_js_1.AbstractModel {
148
149
  template: promptTemplate,
149
150
  });
150
151
  }
152
+ asStructureGenerationModel(promptTemplate) {
153
+ return "adaptModel" in promptTemplate
154
+ ? new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
155
+ model: promptTemplate.adaptModel(this),
156
+ template: promptTemplate,
157
+ })
158
+ : new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
159
+ model: this,
160
+ template: promptTemplate,
161
+ });
162
+ }
151
163
  /**
152
164
  * Returns this model with a text prompt template.
153
165
  */
@@ -177,6 +189,9 @@ class OllamaChatModel extends AbstractModel_js_1.AbstractModel {
177
189
  promptTemplate,
178
190
  });
179
191
  }
192
+ withJsonOutput() {
193
+ return this.withSettings({ format: "json" });
194
+ }
180
195
  withSettings(additionalSettings) {
181
196
  return new OllamaChatModel(Object.assign({}, this.settings, additionalSettings));
182
197
  }
@@ -3,6 +3,8 @@ import { FunctionOptions } from "../../core/FunctionOptions.js";
3
3
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
4
  import { ResponseHandler } from "../../core/api/postToApi.js";
5
5
  import { AbstractModel } from "../../model-function/AbstractModel.js";
6
+ import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
7
+ import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
6
8
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
7
9
  import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
8
10
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
@@ -79,6 +81,7 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
79
81
  extractTextDelta(delta: unknown): string | undefined;
80
82
  asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallPromptTemplate<INPUT_PROMPT, OllamaChatPrompt>): TextGenerationToolCallModel<INPUT_PROMPT, OllamaChatPrompt, this>;
81
83
  asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallsPromptTemplate<INPUT_PROMPT, OllamaChatPrompt>): TextGenerationToolCallsModel<INPUT_PROMPT, OllamaChatPrompt, this>;
84
+ asStructureGenerationModel<INPUT_PROMPT, OllamaChatPrompt>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, OllamaChatPrompt> | FlexibleStructureFromTextPromptTemplate<INPUT_PROMPT, unknown>): StructureFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>> | StructureFromTextStreamingModel<INPUT_PROMPT, OllamaChatPrompt, TextStreamingModel<OllamaChatPrompt, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>>;
82
85
  /**
83
86
  * Returns this model with a text prompt template.
84
87
  */
@@ -92,6 +95,7 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
92
95
  */
93
96
  withChatPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").ChatPrompt, OllamaChatPrompt, OllamaChatModelSettings, this>;
94
97
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OllamaChatPrompt>): PromptTemplateTextStreamingModel<INPUT_PROMPT, OllamaChatPrompt, OllamaChatModelSettings, this>;
98
+ withJsonOutput(): this;
95
99
  withSettings(additionalSettings: Partial<OllamaChatModelSettings>): this;
96
100
  }
97
101
  declare const ollamaChatResponseSchema: z.ZodObject<{
@@ -5,6 +5,7 @@ import { postJsonToApi } from "../../core/api/postToApi.js";
5
5
  import { zodSchema } from "../../core/schema/ZodSchema.js";
6
6
  import { safeParseJSON } from "../../core/schema/parseJSON.js";
7
7
  import { AbstractModel } from "../../model-function/AbstractModel.js";
8
+ import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
8
9
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
9
10
  import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
10
11
  import { TextGenerationToolCallModel, } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
@@ -145,6 +146,17 @@ export class OllamaChatModel extends AbstractModel {
145
146
  template: promptTemplate,
146
147
  });
147
148
  }
149
+ asStructureGenerationModel(promptTemplate) {
150
+ return "adaptModel" in promptTemplate
151
+ ? new StructureFromTextStreamingModel({
152
+ model: promptTemplate.adaptModel(this),
153
+ template: promptTemplate,
154
+ })
155
+ : new StructureFromTextStreamingModel({
156
+ model: this,
157
+ template: promptTemplate,
158
+ });
159
+ }
148
160
  /**
149
161
  * Returns this model with a text prompt template.
150
162
  */
@@ -174,6 +186,9 @@ export class OllamaChatModel extends AbstractModel {
174
186
  promptTemplate,
175
187
  });
176
188
  }
189
+ withJsonOutput() {
190
+ return this.withSettings({ format: "json" });
191
+ }
177
192
  withSettings(additionalSettings) {
178
193
  return new OllamaChatModel(Object.assign({}, this.settings, additionalSettings));
179
194
  }
@@ -219,10 +219,15 @@ class OpenAIChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpenAIChatMod
219
219
  });
220
220
  }
221
221
  asStructureGenerationModel(promptTemplate) {
222
- return new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
223
- model: this,
224
- template: promptTemplate,
225
- });
222
+ return "adaptModel" in promptTemplate
223
+ ? new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
224
+ model: promptTemplate.adaptModel(this),
225
+ template: promptTemplate,
226
+ })
227
+ : new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
228
+ model: this,
229
+ template: promptTemplate,
230
+ });
226
231
  }
227
232
  /**
228
233
  * Returns this model with a text prompt template.
@@ -253,6 +258,9 @@ class OpenAIChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpenAIChatMod
253
258
  promptTemplate,
254
259
  });
255
260
  }
261
+ withJsonOutput() {
262
+ return this.withSettings({ responseFormat: { type: "json_object" } });
263
+ }
256
264
  withSettings(additionalSettings) {
257
265
  return new OpenAIChatModel(Object.assign({}, this.settings, additionalSettings));
258
266
  }
@@ -1,4 +1,4 @@
1
- import { StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
1
+ import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
2
2
  import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
3
3
  import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
4
4
  import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
@@ -140,7 +140,7 @@ export declare class OpenAIChatModel extends AbstractOpenAIChatModel<OpenAIChatS
140
140
  fnName: string;
141
141
  fnDescription?: string;
142
142
  }): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptTemplate<OpenAIChatPrompt, OpenAIChatPrompt>>;
143
- asStructureGenerationModel<INPUT_PROMPT>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): StructureFromTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, this>;
143
+ asStructureGenerationModel<INPUT_PROMPT, OpenAIChatPrompt>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt> | FlexibleStructureFromTextPromptTemplate<INPUT_PROMPT, unknown>): StructureFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>> | StructureFromTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, TextStreamingModel<OpenAIChatPrompt, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>>;
144
144
  /**
145
145
  * Returns this model with a text prompt template.
146
146
  */
@@ -154,6 +154,7 @@ export declare class OpenAIChatModel extends AbstractOpenAIChatModel<OpenAIChatS
154
154
  */
155
155
  withChatPrompt(): PromptTemplateFullTextModel<import("../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAIChatSettings, this>;
156
156
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): PromptTemplateFullTextModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAIChatSettings, this>;
157
+ withJsonOutput(): this;
157
158
  withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
158
159
  }
159
160
  export {};
@@ -213,10 +213,15 @@ export class OpenAIChatModel extends AbstractOpenAIChatModel {
213
213
  });
214
214
  }
215
215
  asStructureGenerationModel(promptTemplate) {
216
- return new StructureFromTextStreamingModel({
217
- model: this,
218
- template: promptTemplate,
219
- });
216
+ return "adaptModel" in promptTemplate
217
+ ? new StructureFromTextStreamingModel({
218
+ model: promptTemplate.adaptModel(this),
219
+ template: promptTemplate,
220
+ })
221
+ : new StructureFromTextStreamingModel({
222
+ model: this,
223
+ template: promptTemplate,
224
+ });
220
225
  }
221
226
  /**
222
227
  * Returns this model with a text prompt template.
@@ -247,6 +252,9 @@ export class OpenAIChatModel extends AbstractOpenAIChatModel {
247
252
  promptTemplate,
248
253
  });
249
254
  }
255
+ withJsonOutput() {
256
+ return this.withSettings({ responseFormat: { type: "json_object" } });
257
+ }
250
258
  withSettings(additionalSettings) {
251
259
  return new OpenAIChatModel(Object.assign({}, this.settings, additionalSettings));
252
260
  }
@@ -58,10 +58,15 @@ class OpenAICompatibleChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpe
58
58
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
59
59
  }
60
60
  asStructureGenerationModel(promptTemplate) {
61
- return new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
62
- model: this,
63
- template: promptTemplate,
64
- });
61
+ return "adaptModel" in promptTemplate
62
+ ? new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
63
+ model: promptTemplate.adaptModel(this),
64
+ template: promptTemplate,
65
+ })
66
+ : new StructureFromTextStreamingModel_js_1.StructureFromTextStreamingModel({
67
+ model: this,
68
+ template: promptTemplate,
69
+ });
65
70
  }
66
71
  /**
67
72
  * Returns this model with a text prompt template.
@@ -92,6 +97,9 @@ class OpenAICompatibleChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpe
92
97
  promptTemplate,
93
98
  });
94
99
  }
100
+ withJsonOutput() {
101
+ return this.withSettings({ responseFormat: { type: "json_object" } });
102
+ }
95
103
  withSettings(additionalSettings) {
96
104
  return new OpenAICompatibleChatModel(Object.assign({}, this.settings, additionalSettings));
97
105
  }
@@ -1,4 +1,4 @@
1
- import { StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
1
+ import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
2
2
  import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
3
3
  import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
4
4
  import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
@@ -26,7 +26,7 @@ export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<O
26
26
  readonly tokenizer: undefined;
27
27
  readonly countPromptTokens: undefined;
28
28
  get settingsForEvent(): Partial<OpenAICompatibleChatSettings>;
29
- asStructureGenerationModel<INPUT_PROMPT>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): StructureFromTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, this>;
29
+ asStructureGenerationModel<INPUT_PROMPT, OpenAIChatPrompt>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt> | FlexibleStructureFromTextPromptTemplate<INPUT_PROMPT, unknown>): StructureFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>> | StructureFromTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, TextStreamingModel<OpenAIChatPrompt, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>>;
30
30
  /**
31
31
  * Returns this model with a text prompt template.
32
32
  */
@@ -40,5 +40,6 @@ export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<O
40
40
  */
41
41
  withChatPrompt(): PromptTemplateFullTextModel<import("../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
42
42
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): PromptTemplateFullTextModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
43
+ withJsonOutput(): this;
43
44
  withSettings(additionalSettings: Partial<OpenAICompatibleChatSettings>): this;
44
45
  }
@@ -55,10 +55,15 @@ export class OpenAICompatibleChatModel extends AbstractOpenAIChatModel {
55
55
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
56
56
  }
57
57
  asStructureGenerationModel(promptTemplate) {
58
- return new StructureFromTextStreamingModel({
59
- model: this,
60
- template: promptTemplate,
61
- });
58
+ return "adaptModel" in promptTemplate
59
+ ? new StructureFromTextStreamingModel({
60
+ model: promptTemplate.adaptModel(this),
61
+ template: promptTemplate,
62
+ })
63
+ : new StructureFromTextStreamingModel({
64
+ model: this,
65
+ template: promptTemplate,
66
+ });
62
67
  }
63
68
  /**
64
69
  * Returns this model with a text prompt template.
@@ -89,6 +94,9 @@ export class OpenAICompatibleChatModel extends AbstractOpenAIChatModel {
89
94
  promptTemplate,
90
95
  });
91
96
  }
97
+ withJsonOutput() {
98
+ return this.withSettings({ responseFormat: { type: "json_object" } });
99
+ }
92
100
  withSettings(additionalSettings) {
93
101
  return new OpenAICompatibleChatModel(Object.assign({}, this.settings, additionalSettings));
94
102
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "modelfusion",
3
- "description": "The TypeScript library for building multi-modal AI applications.",
4
- "version": "0.112.0",
3
+ "description": "The TypeScript library for building AI applications.",
4
+ "version": "0.113.0",
5
5
  "author": "Lars Grammel",
6
6
  "license": "MIT",
7
7
  "keywords": [