modelfusion 0.99.0 → 0.101.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/CHANGELOG.md +1411 -0
  2. package/README.md +9 -15
  3. package/core/api/BaseUrlApiConfiguration.d.ts +7 -6
  4. package/core/api/BaseUrlPartsApiConfiguration.cjs +53 -0
  5. package/core/api/BaseUrlPartsApiConfiguration.d.ts +26 -0
  6. package/core/api/BaseUrlPartsApiConfiguration.js +49 -0
  7. package/core/api/index.cjs +1 -0
  8. package/core/api/index.d.ts +1 -0
  9. package/core/api/index.js +1 -0
  10. package/guard/fixStructure.cjs +3 -3
  11. package/guard/fixStructure.d.ts +3 -3
  12. package/guard/fixStructure.js +3 -3
  13. package/model-function/generate-structure/generateStructure.d.ts +2 -2
  14. package/model-function/generate-structure/streamStructure.d.ts +1 -1
  15. package/model-provider/automatic1111/Automatic1111ApiConfiguration.cjs +8 -9
  16. package/model-provider/automatic1111/Automatic1111ApiConfiguration.d.ts +7 -9
  17. package/model-provider/automatic1111/Automatic1111ApiConfiguration.js +8 -9
  18. package/model-provider/automatic1111/Automatic1111Error.cjs +7 -31
  19. package/model-provider/automatic1111/Automatic1111Error.d.ts +2 -11
  20. package/model-provider/automatic1111/Automatic1111Error.js +6 -28
  21. package/model-provider/automatic1111/Automatic1111Facade.cjs +10 -1
  22. package/model-provider/automatic1111/Automatic1111Facade.d.ts +7 -0
  23. package/model-provider/automatic1111/Automatic1111Facade.js +8 -0
  24. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +22 -27
  25. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +8 -8
  26. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +22 -27
  27. package/model-provider/automatic1111/index.cjs +1 -3
  28. package/model-provider/automatic1111/index.d.ts +1 -1
  29. package/model-provider/automatic1111/index.js +0 -1
  30. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +6 -6
  31. package/model-provider/mistral/{MistralTextGenerationModel.cjs → MistralChatModel.cjs} +13 -13
  32. package/model-provider/mistral/{MistralTextGenerationModel.d.ts → MistralChatModel.d.ts} +21 -20
  33. package/model-provider/mistral/{MistralTextGenerationModel.js → MistralChatModel.js} +11 -11
  34. package/model-provider/mistral/MistralFacade.cjs +5 -5
  35. package/model-provider/mistral/MistralFacade.d.ts +3 -2
  36. package/model-provider/mistral/MistralFacade.js +3 -3
  37. package/model-provider/mistral/MistralPromptTemplate.d.ts +4 -4
  38. package/model-provider/mistral/index.cjs +1 -1
  39. package/model-provider/mistral/index.d.ts +1 -1
  40. package/model-provider/mistral/index.js +1 -1
  41. package/model-provider/ollama/OllamaApiConfiguration.d.ts +6 -5
  42. package/model-provider/ollama/OllamaChatModel.cjs +303 -0
  43. package/model-provider/ollama/OllamaChatModel.d.ts +171 -0
  44. package/model-provider/ollama/OllamaChatModel.js +299 -0
  45. package/model-provider/ollama/OllamaChatPromptTemplate.cjs +76 -0
  46. package/model-provider/ollama/OllamaChatPromptTemplate.d.ts +20 -0
  47. package/model-provider/ollama/OllamaChatPromptTemplate.js +69 -0
  48. package/model-provider/ollama/{OllamaTextGenerationModel.cjs → OllamaCompletionModel.cjs} +13 -11
  49. package/model-provider/ollama/OllamaCompletionModel.d.ts +159 -0
  50. package/model-provider/ollama/{OllamaTextGenerationModel.js → OllamaCompletionModel.js} +11 -9
  51. package/model-provider/ollama/{OllamaTextGenerationModel.test.cjs → OllamaCompletionModel.test.cjs} +3 -3
  52. package/model-provider/ollama/{OllamaTextGenerationModel.test.js → OllamaCompletionModel.test.js} +3 -3
  53. package/model-provider/ollama/OllamaFacade.cjs +15 -5
  54. package/model-provider/ollama/OllamaFacade.d.ts +7 -2
  55. package/model-provider/ollama/OllamaFacade.js +11 -3
  56. package/model-provider/ollama/OllamaTextGenerationSettings.cjs +2 -0
  57. package/model-provider/ollama/OllamaTextGenerationSettings.d.ts +87 -0
  58. package/model-provider/ollama/OllamaTextGenerationSettings.js +1 -0
  59. package/model-provider/ollama/index.cjs +4 -1
  60. package/model-provider/ollama/index.d.ts +4 -1
  61. package/model-provider/ollama/index.js +4 -1
  62. package/model-provider/openai/OpenAIFacade.cjs +4 -2
  63. package/model-provider/openai/OpenAIFacade.d.ts +3 -1
  64. package/model-provider/openai/OpenAIFacade.js +2 -1
  65. package/model-provider/openai/chat/AbstractOpenAIChatModel.d.ts +1 -1
  66. package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +3 -3
  67. package/model-provider/openai/chat/OpenAIChatModel.cjs +1 -1
  68. package/model-provider/openai/chat/OpenAIChatModel.d.ts +2 -2
  69. package/model-provider/openai/chat/OpenAIChatModel.js +1 -1
  70. package/model-provider/openai/chat/OpenAIChatPromptTemplate.d.ts +5 -5
  71. package/model-provider/openai/chat/OpenAIChatPromptTemplate.js +1 -1
  72. package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs +1 -1
  73. package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts +1 -1
  74. package/model-provider/openai-compatible/OpenAICompatibleFacade.js +1 -1
  75. package/package.json +15 -15
  76. package/model-provider/ollama/OllamaTextGenerationModel.d.ts +0 -230
  77. /package/model-provider/ollama/{OllamaTextGenerationModel.test.d.ts → OllamaCompletionModel.test.d.ts} +0 -0
@@ -6,6 +6,14 @@ import { PromptTemplate } from "../../model-function/PromptTemplate.js";
6
6
  import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
7
7
  import { PromptTemplateImageGenerationModel } from "../../model-function/generate-image/PromptTemplateImageGenerationModel.js";
8
8
  import { Automatic1111ImageGenerationPrompt } from "./Automatic1111ImageGenerationPrompt.js";
9
+ export interface Automatic1111ImageGenerationSettings extends ImageGenerationModelSettings {
10
+ api?: ApiConfiguration;
11
+ model: string;
12
+ height?: number;
13
+ width?: number;
14
+ sampler?: string;
15
+ steps?: number;
16
+ }
9
17
  /**
10
18
  * Create an image generation model that calls the AUTOMATIC1111 Stable Diffusion Web UI API.
11
19
  *
@@ -29,14 +37,6 @@ export declare class Automatic1111ImageGenerationModel extends AbstractModel<Aut
29
37
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: PromptTemplate<INPUT_PROMPT, Automatic1111ImageGenerationPrompt>): PromptTemplateImageGenerationModel<INPUT_PROMPT, Automatic1111ImageGenerationPrompt, Automatic1111ImageGenerationSettings, this>;
30
38
  withSettings(additionalSettings: Automatic1111ImageGenerationSettings): this;
31
39
  }
32
- export interface Automatic1111ImageGenerationSettings extends ImageGenerationModelSettings {
33
- api?: ApiConfiguration;
34
- model: string;
35
- height?: number;
36
- width?: number;
37
- sampler?: string;
38
- steps?: number;
39
- }
40
40
  declare const Automatic1111ImageGenerationResponseSchema: z.ZodObject<{
41
41
  images: z.ZodArray<z.ZodString, "many">;
42
42
  parameters: z.ZodObject<{}, "strip", z.ZodTypeAny, {}, {}>;
@@ -25,13 +25,29 @@ export class Automatic1111ImageGenerationModel extends AbstractModel {
25
25
  return this.settings.model;
26
26
  }
27
27
  async callAPI(input, options) {
28
+ const api = this.settings.api ?? new Automatic1111ApiConfiguration();
29
+ const abortSignal = options?.run?.abortSignal;
28
30
  return callWithRetryAndThrottle({
29
- retry: this.settings.api?.retry,
30
- throttle: this.settings.api?.throttle,
31
- call: async () => callAutomatic1111ImageGenerationAPI({
32
- ...this.settings,
33
- abortSignal: options?.run?.abortSignal,
34
- prompt: input.prompt,
31
+ retry: api.retry,
32
+ throttle: api.throttle,
33
+ call: async () => postJsonToApi({
34
+ url: api.assembleUrl(`/txt2img`),
35
+ headers: api.headers,
36
+ body: {
37
+ height: this.settings.height,
38
+ width: this.settings.width,
39
+ prompt: input.prompt,
40
+ negative_prompt: input.negativePrompt,
41
+ sampler_index: this.settings.sampler,
42
+ steps: this.settings.steps,
43
+ seed: input.seed,
44
+ override_settings: {
45
+ sd_model_checkpoint: this.settings.model,
46
+ },
47
+ },
48
+ failedResponseHandler: failedAutomatic1111CallResponseHandler,
49
+ successfulResponseHandler: createJsonResponseHandler(Automatic1111ImageGenerationResponseSchema),
50
+ abortSignal,
35
51
  }),
36
52
  });
37
53
  }
@@ -68,24 +84,3 @@ const Automatic1111ImageGenerationResponseSchema = z.object({
68
84
  parameters: z.object({}),
69
85
  info: z.string(),
70
86
  });
71
- async function callAutomatic1111ImageGenerationAPI({ api = new Automatic1111ApiConfiguration(), abortSignal, height, width, prompt, negativePrompt, sampler, steps, seed, model, }) {
72
- return postJsonToApi({
73
- url: api.assembleUrl(`/txt2img`),
74
- headers: api.headers,
75
- body: {
76
- height,
77
- width,
78
- prompt,
79
- negative_prompt: negativePrompt,
80
- sampler_index: sampler,
81
- steps,
82
- seed,
83
- override_settings: {
84
- sd_model_checkpoint: model,
85
- },
86
- },
87
- failedResponseHandler: failedAutomatic1111CallResponseHandler,
88
- successfulResponseHandler: createJsonResponseHandler(Automatic1111ImageGenerationResponseSchema),
89
- abortSignal,
90
- });
91
- }
@@ -26,10 +26,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
26
26
  return result;
27
27
  };
28
28
  Object.defineProperty(exports, "__esModule", { value: true });
29
- exports.automatic1111 = exports.Automatic1111Error = void 0;
29
+ exports.automatic1111 = void 0;
30
30
  __exportStar(require("./Automatic1111ApiConfiguration.cjs"), exports);
31
- var Automatic1111Error_js_1 = require("./Automatic1111Error.cjs");
32
- Object.defineProperty(exports, "Automatic1111Error", { enumerable: true, get: function () { return Automatic1111Error_js_1.Automatic1111Error; } });
33
31
  exports.automatic1111 = __importStar(require("./Automatic1111Facade.cjs"));
34
32
  __exportStar(require("./Automatic1111ImageGenerationModel.cjs"), exports);
35
33
  __exportStar(require("./Automatic1111ImageGenerationPrompt.cjs"), exports);
@@ -1,5 +1,5 @@
1
1
  export * from "./Automatic1111ApiConfiguration.js";
2
- export { Automatic1111Error, Automatic1111ErrorData, } from "./Automatic1111Error.js";
2
+ export { Automatic1111ErrorData } from "./Automatic1111Error.js";
3
3
  export * as automatic1111 from "./Automatic1111Facade.js";
4
4
  export * from "./Automatic1111ImageGenerationModel.js";
5
5
  export * from "./Automatic1111ImageGenerationPrompt.js";
@@ -1,5 +1,4 @@
1
1
  export * from "./Automatic1111ApiConfiguration.js";
2
- export { Automatic1111Error, } from "./Automatic1111Error.js";
3
2
  export * as automatic1111 from "./Automatic1111Facade.js";
4
3
  export * from "./Automatic1111ImageGenerationModel.js";
5
4
  export * from "./Automatic1111ImageGenerationPrompt.js";
@@ -65,8 +65,8 @@ export declare class LlamaCppTextGenerationModel<CONTEXT_WINDOW_SIZE extends num
65
65
  generation_settings: {
66
66
  model: string;
67
67
  stream: boolean;
68
- seed: number;
69
68
  mirostat: number;
69
+ seed: number;
70
70
  stop: string[];
71
71
  frequency_penalty: number;
72
72
  ignore_eos: boolean;
@@ -155,8 +155,8 @@ declare const llamaCppTextGenerationResponseSchema: z.ZodObject<{
155
155
  }, "strip", z.ZodTypeAny, {
156
156
  model: string;
157
157
  stream: boolean;
158
- seed: number;
159
158
  mirostat: number;
159
+ seed: number;
160
160
  stop: string[];
161
161
  frequency_penalty: number;
162
162
  ignore_eos: boolean;
@@ -179,8 +179,8 @@ declare const llamaCppTextGenerationResponseSchema: z.ZodObject<{
179
179
  }, {
180
180
  model: string;
181
181
  stream: boolean;
182
- seed: number;
183
182
  mirostat: number;
183
+ seed: number;
184
184
  stop: string[];
185
185
  frequency_penalty: number;
186
186
  ignore_eos: boolean;
@@ -247,8 +247,8 @@ declare const llamaCppTextGenerationResponseSchema: z.ZodObject<{
247
247
  generation_settings: {
248
248
  model: string;
249
249
  stream: boolean;
250
- seed: number;
251
250
  mirostat: number;
251
+ seed: number;
252
252
  stop: string[];
253
253
  frequency_penalty: number;
254
254
  ignore_eos: boolean;
@@ -295,8 +295,8 @@ declare const llamaCppTextGenerationResponseSchema: z.ZodObject<{
295
295
  generation_settings: {
296
296
  model: string;
297
297
  stream: boolean;
298
- seed: number;
299
298
  mirostat: number;
299
+ seed: number;
300
300
  stop: string[];
301
301
  frequency_penalty: number;
302
302
  ignore_eos: boolean;
@@ -360,8 +360,8 @@ export declare const LlamaCppTextGenerationResponseFormat: {
360
360
  generation_settings: {
361
361
  model: string;
362
362
  stream: boolean;
363
- seed: number;
364
363
  mirostat: number;
364
+ seed: number;
365
365
  stop: string[];
366
366
  frequency_penalty: number;
367
367
  ignore_eos: boolean;
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.MistralTextGenerationResponseFormat = exports.MistralTextGenerationModel = void 0;
3
+ exports.MistralChatResponseFormat = exports.MistralChatModel = void 0;
4
4
  const zod_1 = require("zod");
5
5
  const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
6
6
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
@@ -13,7 +13,7 @@ const parseEventSourceStream_js_1 = require("../../util/streaming/parseEventSour
13
13
  const MistralApiConfiguration_js_1 = require("./MistralApiConfiguration.cjs");
14
14
  const MistralError_js_1 = require("./MistralError.cjs");
15
15
  const MistralPromptTemplate_js_1 = require("./MistralPromptTemplate.cjs");
16
- class MistralTextGenerationModel extends AbstractModel_js_1.AbstractModel {
16
+ class MistralChatModel extends AbstractModel_js_1.AbstractModel {
17
17
  constructor(settings) {
18
18
  super({ settings });
19
19
  Object.defineProperty(this, "provider", {
@@ -85,7 +85,7 @@ class MistralTextGenerationModel extends AbstractModel_js_1.AbstractModel {
85
85
  async doGenerateTexts(prompt, options) {
86
86
  const response = await this.callAPI(prompt, {
87
87
  ...options,
88
- responseFormat: exports.MistralTextGenerationResponseFormat.json,
88
+ responseFormat: exports.MistralChatResponseFormat.json,
89
89
  });
90
90
  return {
91
91
  response,
@@ -95,7 +95,7 @@ class MistralTextGenerationModel extends AbstractModel_js_1.AbstractModel {
95
95
  doStreamText(prompt, options) {
96
96
  return this.callAPI(prompt, {
97
97
  ...options,
98
- responseFormat: exports.MistralTextGenerationResponseFormat.textDeltaIterable,
98
+ responseFormat: exports.MistralChatResponseFormat.textDeltaIterable,
99
99
  });
100
100
  }
101
101
  /**
@@ -123,11 +123,11 @@ class MistralTextGenerationModel extends AbstractModel_js_1.AbstractModel {
123
123
  });
124
124
  }
125
125
  withSettings(additionalSettings) {
126
- return new MistralTextGenerationModel(Object.assign({}, this.settings, additionalSettings));
126
+ return new MistralChatModel(Object.assign({}, this.settings, additionalSettings));
127
127
  }
128
128
  }
129
- exports.MistralTextGenerationModel = MistralTextGenerationModel;
130
- const mistralTextGenerationResponseSchema = zod_1.z.object({
129
+ exports.MistralChatModel = MistralChatModel;
130
+ const mistralChatResponseSchema = zod_1.z.object({
131
131
  id: zod_1.z.string(),
132
132
  object: zod_1.z.string(),
133
133
  created: zod_1.z.number(),
@@ -146,23 +146,23 @@ const mistralTextGenerationResponseSchema = zod_1.z.object({
146
146
  total_tokens: zod_1.z.number(),
147
147
  }),
148
148
  });
149
- exports.MistralTextGenerationResponseFormat = {
149
+ exports.MistralChatResponseFormat = {
150
150
  /**
151
151
  * Returns the response as a JSON object.
152
152
  */
153
153
  json: {
154
154
  stream: false,
155
- handler: (0, postToApi_js_1.createJsonResponseHandler)(mistralTextGenerationResponseSchema),
155
+ handler: (0, postToApi_js_1.createJsonResponseHandler)(mistralChatResponseSchema),
156
156
  },
157
157
  /**
158
158
  * Returns an async iterable over the text deltas (only the tex different of the first choice).
159
159
  */
160
160
  textDeltaIterable: {
161
161
  stream: true,
162
- handler: async ({ response }) => createMistralTextGenerationDeltaIterableQueue(response.body, (delta) => delta[0]?.delta.content ?? ""),
162
+ handler: async ({ response }) => createMistralChatDeltaIterableQueue(response.body, (delta) => delta[0]?.delta.content ?? ""),
163
163
  },
164
164
  };
165
- const mistralTextGenerationChunkSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.object({
165
+ const mistralChatChunkSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.object({
166
166
  id: zod_1.z.string(),
167
167
  object: zod_1.z.string().optional(),
168
168
  created: zod_1.z.number().optional(),
@@ -179,7 +179,7 @@ const mistralTextGenerationChunkSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.ob
179
179
  .optional(),
180
180
  })),
181
181
  }));
182
- async function createMistralTextGenerationDeltaIterableQueue(stream, extractDeltaValue) {
182
+ async function createMistralChatDeltaIterableQueue(stream, extractDeltaValue) {
183
183
  const queue = new AsyncQueue_js_1.AsyncQueue();
184
184
  const streamDelta = [];
185
185
  // process the stream asynchonously (no 'await' on purpose):
@@ -194,7 +194,7 @@ async function createMistralTextGenerationDeltaIterableQueue(stream, extractDelt
194
194
  }
195
195
  const parseResult = (0, parseJSON_js_1.safeParseJSON)({
196
196
  text: data,
197
- schema: mistralTextGenerationChunkSchema,
197
+ schema: mistralChatChunkSchema,
198
198
  });
199
199
  if (!parseResult.success) {
200
200
  queue.push({
@@ -7,11 +7,12 @@ import { Delta } from "../../model-function/Delta.js";
7
7
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
8
8
  import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
9
9
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
10
- export type MistralTextGenerationPrompt = Array<{
10
+ export type MistralChatMessage = {
11
11
  role: "system" | "user" | "assistant";
12
12
  content: string;
13
- }>;
14
- export interface MistralTextGenerationModelSettings extends TextGenerationModelSettings {
13
+ };
14
+ export type MistralChatPrompt = Array<MistralChatMessage>;
15
+ export interface MistralChatModelSettings extends TextGenerationModelSettings {
15
16
  api?: ApiConfiguration;
16
17
  model: "mistral-tiny" | "mistral-small" | "mistral-medium";
17
18
  /**
@@ -46,18 +47,18 @@ export interface MistralTextGenerationModelSettings extends TextGenerationModelS
46
47
  */
47
48
  randomSeed?: number | null;
48
49
  }
49
- export declare class MistralTextGenerationModel extends AbstractModel<MistralTextGenerationModelSettings> implements TextStreamingModel<MistralTextGenerationPrompt, MistralTextGenerationModelSettings> {
50
- constructor(settings: MistralTextGenerationModelSettings);
50
+ export declare class MistralChatModel extends AbstractModel<MistralChatModelSettings> implements TextStreamingModel<MistralChatPrompt, MistralChatModelSettings> {
51
+ constructor(settings: MistralChatModelSettings);
51
52
  readonly provider = "mistral";
52
53
  get modelName(): "mistral-tiny" | "mistral-small" | "mistral-medium";
53
54
  readonly contextWindowSize: undefined;
54
55
  readonly tokenizer: undefined;
55
56
  readonly countPromptTokens: undefined;
56
- callAPI<RESULT>(prompt: MistralTextGenerationPrompt, options: {
57
- responseFormat: MistralTextGenerationResponseFormatType<RESULT>;
57
+ callAPI<RESULT>(prompt: MistralChatPrompt, options: {
58
+ responseFormat: MistralChatResponseFormatType<RESULT>;
58
59
  } & FunctionOptions): Promise<RESULT>;
59
- get settingsForEvent(): Partial<MistralTextGenerationModelSettings>;
60
- doGenerateTexts(prompt: MistralTextGenerationPrompt, options?: FunctionOptions): Promise<{
60
+ get settingsForEvent(): Partial<MistralChatModelSettings>;
61
+ doGenerateTexts(prompt: MistralChatPrompt, options?: FunctionOptions): Promise<{
61
62
  response: {
62
63
  object: string;
63
64
  usage: {
@@ -79,23 +80,23 @@ export declare class MistralTextGenerationModel extends AbstractModel<MistralTex
79
80
  };
80
81
  texts: string[];
81
82
  }>;
82
- doStreamText(prompt: MistralTextGenerationPrompt, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
83
+ doStreamText(prompt: MistralChatPrompt, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
83
84
  /**
84
85
  * Returns this model with a text prompt template.
85
86
  */
86
- withTextPrompt(): PromptTemplateTextStreamingModel<string, MistralTextGenerationPrompt, MistralTextGenerationModelSettings, this>;
87
+ withTextPrompt(): PromptTemplateTextStreamingModel<string, MistralChatPrompt, MistralChatModelSettings, this>;
87
88
  /**
88
89
  * Returns this model with an instruction prompt template.
89
90
  */
90
- withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").TextInstructionPrompt, MistralTextGenerationPrompt, MistralTextGenerationModelSettings, this>;
91
+ withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").TextInstructionPrompt, MistralChatPrompt, MistralChatModelSettings, this>;
91
92
  /**
92
93
  * Returns this model with a chat prompt template.
93
94
  */
94
- withChatPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").TextChatPrompt, MistralTextGenerationPrompt, MistralTextGenerationModelSettings, this>;
95
- withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, MistralTextGenerationPrompt>): PromptTemplateTextStreamingModel<INPUT_PROMPT, MistralTextGenerationPrompt, MistralTextGenerationModelSettings, this>;
96
- withSettings(additionalSettings: Partial<MistralTextGenerationModelSettings>): this;
95
+ withChatPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").TextChatPrompt, MistralChatPrompt, MistralChatModelSettings, this>;
96
+ withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, MistralChatPrompt>): PromptTemplateTextStreamingModel<INPUT_PROMPT, MistralChatPrompt, MistralChatModelSettings, this>;
97
+ withSettings(additionalSettings: Partial<MistralChatModelSettings>): this;
97
98
  }
98
- declare const mistralTextGenerationResponseSchema: z.ZodObject<{
99
+ declare const mistralChatResponseSchema: z.ZodObject<{
99
100
  id: z.ZodString;
100
101
  object: z.ZodString;
101
102
  created: z.ZodNumber;
@@ -178,12 +179,12 @@ declare const mistralTextGenerationResponseSchema: z.ZodObject<{
178
179
  index: number;
179
180
  }[];
180
181
  }>;
181
- export type MistralTextGenerationResponse = z.infer<typeof mistralTextGenerationResponseSchema>;
182
- export type MistralTextGenerationResponseFormatType<T> = {
182
+ export type MistralChatResponse = z.infer<typeof mistralChatResponseSchema>;
183
+ export type MistralChatResponseFormatType<T> = {
183
184
  stream: boolean;
184
185
  handler: ResponseHandler<T>;
185
186
  };
186
- export declare const MistralTextGenerationResponseFormat: {
187
+ export declare const MistralChatResponseFormat: {
187
188
  /**
188
189
  * Returns the response as a JSON object.
189
190
  */
@@ -219,7 +220,7 @@ export declare const MistralTextGenerationResponseFormat: {
219
220
  }) => Promise<AsyncIterable<Delta<string>>>;
220
221
  };
221
222
  };
222
- export type MistralTextGenerationDelta = Array<{
223
+ export type MistralChatDelta = Array<{
223
224
  role: "assistant" | "user" | undefined;
224
225
  content: string;
225
226
  isComplete: boolean;
@@ -10,7 +10,7 @@ import { parseEventSourceStream } from "../../util/streaming/parseEventSourceStr
10
10
  import { MistralApiConfiguration } from "./MistralApiConfiguration.js";
11
11
  import { failedMistralCallResponseHandler } from "./MistralError.js";
12
12
  import { chat, instruction, text } from "./MistralPromptTemplate.js";
13
- export class MistralTextGenerationModel extends AbstractModel {
13
+ export class MistralChatModel extends AbstractModel {
14
14
  constructor(settings) {
15
15
  super({ settings });
16
16
  Object.defineProperty(this, "provider", {
@@ -82,7 +82,7 @@ export class MistralTextGenerationModel extends AbstractModel {
82
82
  async doGenerateTexts(prompt, options) {
83
83
  const response = await this.callAPI(prompt, {
84
84
  ...options,
85
- responseFormat: MistralTextGenerationResponseFormat.json,
85
+ responseFormat: MistralChatResponseFormat.json,
86
86
  });
87
87
  return {
88
88
  response,
@@ -92,7 +92,7 @@ export class MistralTextGenerationModel extends AbstractModel {
92
92
  doStreamText(prompt, options) {
93
93
  return this.callAPI(prompt, {
94
94
  ...options,
95
- responseFormat: MistralTextGenerationResponseFormat.textDeltaIterable,
95
+ responseFormat: MistralChatResponseFormat.textDeltaIterable,
96
96
  });
97
97
  }
98
98
  /**
@@ -120,10 +120,10 @@ export class MistralTextGenerationModel extends AbstractModel {
120
120
  });
121
121
  }
122
122
  withSettings(additionalSettings) {
123
- return new MistralTextGenerationModel(Object.assign({}, this.settings, additionalSettings));
123
+ return new MistralChatModel(Object.assign({}, this.settings, additionalSettings));
124
124
  }
125
125
  }
126
- const mistralTextGenerationResponseSchema = z.object({
126
+ const mistralChatResponseSchema = z.object({
127
127
  id: z.string(),
128
128
  object: z.string(),
129
129
  created: z.number(),
@@ -142,23 +142,23 @@ const mistralTextGenerationResponseSchema = z.object({
142
142
  total_tokens: z.number(),
143
143
  }),
144
144
  });
145
- export const MistralTextGenerationResponseFormat = {
145
+ export const MistralChatResponseFormat = {
146
146
  /**
147
147
  * Returns the response as a JSON object.
148
148
  */
149
149
  json: {
150
150
  stream: false,
151
- handler: createJsonResponseHandler(mistralTextGenerationResponseSchema),
151
+ handler: createJsonResponseHandler(mistralChatResponseSchema),
152
152
  },
153
153
  /**
154
154
  * Returns an async iterable over the text deltas (only the tex different of the first choice).
155
155
  */
156
156
  textDeltaIterable: {
157
157
  stream: true,
158
- handler: async ({ response }) => createMistralTextGenerationDeltaIterableQueue(response.body, (delta) => delta[0]?.delta.content ?? ""),
158
+ handler: async ({ response }) => createMistralChatDeltaIterableQueue(response.body, (delta) => delta[0]?.delta.content ?? ""),
159
159
  },
160
160
  };
161
- const mistralTextGenerationChunkSchema = new ZodSchema(z.object({
161
+ const mistralChatChunkSchema = new ZodSchema(z.object({
162
162
  id: z.string(),
163
163
  object: z.string().optional(),
164
164
  created: z.number().optional(),
@@ -175,7 +175,7 @@ const mistralTextGenerationChunkSchema = new ZodSchema(z.object({
175
175
  .optional(),
176
176
  })),
177
177
  }));
178
- async function createMistralTextGenerationDeltaIterableQueue(stream, extractDeltaValue) {
178
+ async function createMistralChatDeltaIterableQueue(stream, extractDeltaValue) {
179
179
  const queue = new AsyncQueue();
180
180
  const streamDelta = [];
181
181
  // process the stream asynchonously (no 'await' on purpose):
@@ -190,7 +190,7 @@ async function createMistralTextGenerationDeltaIterableQueue(stream, extractDelt
190
190
  }
191
191
  const parseResult = safeParseJSON({
192
192
  text: data,
193
- schema: mistralTextGenerationChunkSchema,
193
+ schema: mistralChatChunkSchema,
194
194
  });
195
195
  if (!parseResult.success) {
196
196
  queue.push({
@@ -1,17 +1,17 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.TextEmbedder = exports.TextGenerator = exports.Api = void 0;
3
+ exports.TextEmbedder = exports.ChatTextGenerator = exports.Api = void 0;
4
4
  const MistralApiConfiguration_js_1 = require("./MistralApiConfiguration.cjs");
5
5
  const MistralTextEmbeddingModel_js_1 = require("./MistralTextEmbeddingModel.cjs");
6
- const MistralTextGenerationModel_js_1 = require("./MistralTextGenerationModel.cjs");
6
+ const MistralChatModel_js_1 = require("./MistralChatModel.cjs");
7
7
  function Api(settings) {
8
8
  return new MistralApiConfiguration_js_1.MistralApiConfiguration(settings);
9
9
  }
10
10
  exports.Api = Api;
11
- function TextGenerator(settings) {
12
- return new MistralTextGenerationModel_js_1.MistralTextGenerationModel(settings);
11
+ function ChatTextGenerator(settings) {
12
+ return new MistralChatModel_js_1.MistralChatModel(settings);
13
13
  }
14
- exports.TextGenerator = TextGenerator;
14
+ exports.ChatTextGenerator = ChatTextGenerator;
15
15
  function TextEmbedder(settings) {
16
16
  return new MistralTextEmbeddingModel_js_1.MistralTextEmbeddingModel(settings);
17
17
  }
@@ -1,6 +1,7 @@
1
1
  import { MistralApiConfiguration, MistralApiConfigurationSettings } from "./MistralApiConfiguration.js";
2
2
  import { MistralTextEmbeddingModel, MistralTextEmbeddingModelSettings } from "./MistralTextEmbeddingModel.js";
3
- import { MistralTextGenerationModel, MistralTextGenerationModelSettings } from "./MistralTextGenerationModel.js";
3
+ import { MistralChatModel, MistralChatModelSettings } from "./MistralChatModel.js";
4
4
  export declare function Api(settings: MistralApiConfigurationSettings): MistralApiConfiguration;
5
- export declare function TextGenerator(settings: MistralTextGenerationModelSettings): MistralTextGenerationModel;
5
+ export declare function ChatTextGenerator(settings: MistralChatModelSettings): MistralChatModel;
6
6
  export declare function TextEmbedder(settings: MistralTextEmbeddingModelSettings): MistralTextEmbeddingModel;
7
+ export { MistralChatMessage as ChatMessage, MistralChatPrompt as ChatPrompt, } from "./MistralChatModel.js";
@@ -1,11 +1,11 @@
1
1
  import { MistralApiConfiguration, } from "./MistralApiConfiguration.js";
2
2
  import { MistralTextEmbeddingModel, } from "./MistralTextEmbeddingModel.js";
3
- import { MistralTextGenerationModel, } from "./MistralTextGenerationModel.js";
3
+ import { MistralChatModel, } from "./MistralChatModel.js";
4
4
  export function Api(settings) {
5
5
  return new MistralApiConfiguration(settings);
6
6
  }
7
- export function TextGenerator(settings) {
8
- return new MistralTextGenerationModel(settings);
7
+ export function ChatTextGenerator(settings) {
8
+ return new MistralChatModel(settings);
9
9
  }
10
10
  export function TextEmbedder(settings) {
11
11
  return new MistralTextEmbeddingModel(settings);
@@ -1,16 +1,16 @@
1
1
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
2
2
  import { TextChatPrompt } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
3
3
  import { TextInstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
4
- import { MistralTextGenerationPrompt } from "./MistralTextGenerationModel.js";
4
+ import { MistralChatPrompt } from "./MistralChatModel.js";
5
5
  /**
6
6
  * Formats a text prompt as a Mistral prompt.
7
7
  */
8
- export declare function text(): TextGenerationPromptTemplate<string, MistralTextGenerationPrompt>;
8
+ export declare function text(): TextGenerationPromptTemplate<string, MistralChatPrompt>;
9
9
  /**
10
10
  * Formats an instruction prompt as a Mistral prompt.
11
11
  */
12
- export declare function instruction(): TextGenerationPromptTemplate<TextInstructionPrompt, MistralTextGenerationPrompt>;
12
+ export declare function instruction(): TextGenerationPromptTemplate<TextInstructionPrompt, MistralChatPrompt>;
13
13
  /**
14
14
  * Formats a chat prompt as a Mistral prompt.
15
15
  */
16
- export declare function chat(): TextGenerationPromptTemplate<TextChatPrompt, MistralTextGenerationPrompt>;
16
+ export declare function chat(): TextGenerationPromptTemplate<TextChatPrompt, MistralChatPrompt>;
@@ -31,4 +31,4 @@ __exportStar(require("./MistralApiConfiguration.cjs"), exports);
31
31
  exports.mistral = __importStar(require("./MistralFacade.cjs"));
32
32
  exports.MistralPrompt = __importStar(require("./MistralPromptTemplate.cjs"));
33
33
  __exportStar(require("./MistralTextEmbeddingModel.cjs"), exports);
34
- __exportStar(require("./MistralTextGenerationModel.cjs"), exports);
34
+ __exportStar(require("./MistralChatModel.cjs"), exports);
@@ -3,4 +3,4 @@ export { MistralErrorData } from "./MistralError.js";
3
3
  export * as mistral from "./MistralFacade.js";
4
4
  export * as MistralPrompt from "./MistralPromptTemplate.js";
5
5
  export * from "./MistralTextEmbeddingModel.js";
6
- export * from "./MistralTextGenerationModel.js";
6
+ export * from "./MistralChatModel.js";
@@ -2,4 +2,4 @@ export * from "./MistralApiConfiguration.js";
2
2
  export * as mistral from "./MistralFacade.js";
3
3
  export * as MistralPrompt from "./MistralPromptTemplate.js";
4
4
  export * from "./MistralTextEmbeddingModel.js";
5
- export * from "./MistralTextGenerationModel.js";
5
+ export * from "./MistralChatModel.js";
@@ -1,10 +1,11 @@
1
1
  import { BaseUrlApiConfiguration } from "../../core/api/BaseUrlApiConfiguration.js";
2
2
  import { RetryFunction } from "../../core/api/RetryFunction.js";
3
3
  import { ThrottleFunction } from "../../core/api/ThrottleFunction.js";
4
+ export type OllamaApiConfigurationSettings = {
5
+ baseUrl?: string;
6
+ retry?: RetryFunction;
7
+ throttle?: ThrottleFunction;
8
+ };
4
9
  export declare class OllamaApiConfiguration extends BaseUrlApiConfiguration {
5
- constructor({ baseUrl, retry, throttle, }?: {
6
- baseUrl?: string;
7
- retry?: RetryFunction;
8
- throttle?: ThrottleFunction;
9
- });
10
+ constructor({ baseUrl, retry, throttle, }?: OllamaApiConfigurationSettings);
10
11
  }