modelfusion 0.124.0 → 0.126.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. package/CHANGELOG.md +34 -1
  2. package/README.md +17 -30
  3. package/core/FunctionEvent.d.ts +3 -3
  4. package/model-function/generate-text/PromptTemplateFullTextModel.cjs +0 -11
  5. package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +0 -1
  6. package/model-function/generate-text/PromptTemplateFullTextModel.js +0 -11
  7. package/model-function/generate-text/PromptTemplateTextGenerationModel.cjs +0 -11
  8. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +0 -1
  9. package/model-function/generate-text/PromptTemplateTextGenerationModel.js +0 -11
  10. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +0 -11
  11. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +0 -1
  12. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +0 -11
  13. package/model-function/generate-text/TextGenerationModel.d.ts +31 -1
  14. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
  15. package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -9
  16. package/model-provider/cohere/CohereTextGenerationModel.d.ts +4 -9
  17. package/model-provider/cohere/CohereTextGenerationModel.js +7 -10
  18. package/model-provider/cohere/CohereTokenizer.d.ts +3 -3
  19. package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +2 -2
  20. package/model-provider/mistral/MistralChatModel.cjs +0 -9
  21. package/model-provider/mistral/MistralChatModel.d.ts +2 -11
  22. package/model-provider/mistral/MistralChatModel.js +0 -9
  23. package/model-provider/mistral/index.cjs +1 -2
  24. package/model-provider/mistral/index.d.ts +0 -1
  25. package/model-provider/mistral/index.js +0 -1
  26. package/model-provider/ollama/OllamaChatModel.cjs +0 -9
  27. package/model-provider/ollama/OllamaChatModel.d.ts +2 -11
  28. package/model-provider/ollama/OllamaChatModel.js +0 -9
  29. package/model-provider/ollama/OllamaCompletionModel.d.ts +2 -2
  30. package/model-provider/ollama/index.cjs +0 -1
  31. package/model-provider/ollama/index.d.ts +0 -1
  32. package/model-provider/ollama/index.js +0 -1
  33. package/model-provider/openai/AbstractOpenAIChatModel.cjs +5 -3
  34. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +5 -5
  35. package/model-provider/openai/AbstractOpenAIChatModel.js +5 -3
  36. package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.d.ts +1 -1
  37. package/model-provider/openai/OpenAIChatModel.cjs +0 -9
  38. package/model-provider/openai/OpenAIChatModel.d.ts +2 -11
  39. package/model-provider/openai/OpenAIChatModel.js +0 -9
  40. package/model-provider/openai/OpenAICompletionModel.cjs +3 -6
  41. package/model-provider/openai/OpenAICompletionModel.d.ts +3 -8
  42. package/model-provider/openai/OpenAICompletionModel.js +4 -7
  43. package/model-provider/openai/index.cjs +1 -2
  44. package/model-provider/openai/index.d.ts +0 -1
  45. package/model-provider/openai/index.js +0 -1
  46. package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +0 -9
  47. package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +2 -11
  48. package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +0 -9
  49. package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.cjs +10 -0
  50. package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.d.ts +8 -2
  51. package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.js +10 -0
  52. package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs +13 -1
  53. package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts +11 -0
  54. package/model-provider/openai-compatible/OpenAICompatibleFacade.js +11 -0
  55. package/model-provider/openai-compatible/PerplexityApiConfiguration.cjs +33 -0
  56. package/model-provider/openai-compatible/PerplexityApiConfiguration.d.ts +13 -0
  57. package/model-provider/openai-compatible/PerplexityApiConfiguration.js +29 -0
  58. package/model-provider/openai-compatible/index.cjs +2 -1
  59. package/model-provider/openai-compatible/index.d.ts +2 -1
  60. package/model-provider/openai-compatible/index.js +2 -1
  61. package/package.json +1 -1
  62. package/tool/index.cjs +2 -2
  63. package/tool/index.d.ts +2 -2
  64. package/tool/index.js +2 -2
  65. package/tool/run-tool/RunToolEvent.d.ts +7 -0
  66. package/tool/{use-tool → run-tool}/index.cjs +2 -2
  67. package/tool/run-tool/index.d.ts +2 -0
  68. package/tool/run-tool/index.js +2 -0
  69. package/tool/{use-tool/useTool.cjs → run-tool/runTool.cjs} +5 -5
  70. package/tool/{use-tool/useTool.d.ts → run-tool/runTool.d.ts} +2 -2
  71. package/tool/{use-tool/useTool.js → run-tool/runTool.js} +3 -3
  72. package/tool/run-tools/RunToolsEvent.d.ts +7 -0
  73. package/tool/{use-tools → run-tools}/index.cjs +2 -2
  74. package/tool/run-tools/index.d.ts +2 -0
  75. package/tool/run-tools/index.js +2 -0
  76. package/tool/{use-tools/useTools.cjs → run-tools/runTools.cjs} +4 -4
  77. package/tool/{use-tools/useTools.d.ts → run-tools/runTools.d.ts} +1 -1
  78. package/tool/{use-tools/useTools.js → run-tools/runTools.js} +2 -2
  79. package/tool/use-tool/UseToolEvent.d.ts +0 -7
  80. package/tool/use-tool/index.d.ts +0 -2
  81. package/tool/use-tool/index.js +0 -2
  82. package/tool/use-tools/UseToolsEvent.d.ts +0 -7
  83. package/tool/use-tools/index.d.ts +0 -2
  84. package/tool/use-tools/index.js +0 -2
  85. /package/tool/{use-tool/UseToolEvent.cjs → run-tool/RunToolEvent.cjs} +0 -0
  86. /package/tool/{use-tool/UseToolEvent.js → run-tool/RunToolEvent.js} +0 -0
  87. /package/tool/{use-tools/UseToolsEvent.cjs → run-tools/RunToolsEvent.cjs} +0 -0
  88. /package/tool/{use-tools/UseToolsEvent.js → run-tools/RunToolsEvent.js} +0 -0
@@ -123,21 +123,12 @@ export class MistralChatModel extends AbstractModel {
123
123
  const chunk = delta;
124
124
  return chunk.choices[0].delta.content ?? undefined;
125
125
  }
126
- /**
127
- * Returns this model with a text prompt template.
128
- */
129
126
  withTextPrompt() {
130
127
  return this.withPromptTemplate(text());
131
128
  }
132
- /**
133
- * Returns this model with an instruction prompt template.
134
- */
135
129
  withInstructionPrompt() {
136
130
  return this.withPromptTemplate(instruction());
137
131
  }
138
- /**
139
- * Returns this model with a chat prompt template.
140
- */
141
132
  withChatPrompt() {
142
133
  return this.withPromptTemplate(chat());
143
134
  }
@@ -26,9 +26,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
26
26
  return result;
27
27
  };
28
28
  Object.defineProperty(exports, "__esModule", { value: true });
29
- exports.mistral = exports.MistralChatPrompt = void 0;
29
+ exports.mistral = void 0;
30
30
  __exportStar(require("./MistralApiConfiguration.cjs"), exports);
31
31
  __exportStar(require("./MistralChatModel.cjs"), exports);
32
- exports.MistralChatPrompt = __importStar(require("./MistralChatPromptTemplate.cjs"));
33
32
  exports.mistral = __importStar(require("./MistralFacade.cjs"));
34
33
  __exportStar(require("./MistralTextEmbeddingModel.cjs"), exports);
@@ -1,6 +1,5 @@
1
1
  export * from "./MistralApiConfiguration.js";
2
2
  export * from "./MistralChatModel.js";
3
- export * as MistralChatPrompt from "./MistralChatPromptTemplate.js";
4
3
  export { MistralErrorData } from "./MistralError.js";
5
4
  export * as mistral from "./MistralFacade.js";
6
5
  export * from "./MistralTextEmbeddingModel.js";
@@ -1,5 +1,4 @@
1
1
  export * from "./MistralApiConfiguration.js";
2
2
  export * from "./MistralChatModel.js";
3
- export * as MistralChatPrompt from "./MistralChatPromptTemplate.js";
4
3
  export * as mistral from "./MistralFacade.js";
5
4
  export * from "./MistralTextEmbeddingModel.js";
@@ -172,21 +172,12 @@ class OllamaChatModel extends AbstractModel_js_1.AbstractModel {
172
172
  template: promptTemplate,
173
173
  });
174
174
  }
175
- /**
176
- * Returns this model with a text prompt template.
177
- */
178
175
  withTextPrompt() {
179
176
  return this.withPromptTemplate((0, OllamaChatPromptTemplate_js_1.text)());
180
177
  }
181
- /**
182
- * Returns this model with an instruction prompt template.
183
- */
184
178
  withInstructionPrompt() {
185
179
  return this.withPromptTemplate((0, OllamaChatPromptTemplate_js_1.instruction)());
186
180
  }
187
- /**
188
- * Returns this model with a chat prompt template.
189
- */
190
181
  withChatPrompt() {
191
182
  return this.withPromptTemplate((0, OllamaChatPromptTemplate_js_1.chat)());
192
183
  }
@@ -6,7 +6,7 @@ import { AbstractModel } from "../../model-function/AbstractModel.js";
6
6
  import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
7
7
  import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
8
8
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
9
- import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
9
+ import { TextStreamingBaseModel, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
10
10
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
11
11
  import { TextGenerationToolCallModel, ToolCallPromptTemplate } from "../../tool/generate-tool-call/TextGenerationToolCallModel.js";
12
12
  import { TextGenerationToolCallsModel } from "../../tool/generate-tool-calls/TextGenerationToolCallsModel.js";
@@ -27,7 +27,7 @@ export interface OllamaChatModelSettings extends OllamaTextGenerationSettings {
27
27
  /**
28
28
  * Text generation model that uses the Ollama chat API.
29
29
  */
30
- export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettings> implements TextStreamingModel<OllamaChatPrompt, OllamaChatModelSettings> {
30
+ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettings> implements TextStreamingBaseModel<OllamaChatPrompt, OllamaChatModelSettings> {
31
31
  constructor(settings: OllamaChatModelSettings);
32
32
  readonly provider = "ollama";
33
33
  get modelName(): string;
@@ -104,17 +104,8 @@ export declare class OllamaChatModel extends AbstractModel<OllamaChatModelSettin
104
104
  asToolCallGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallPromptTemplate<INPUT_PROMPT, OllamaChatPrompt>): TextGenerationToolCallModel<INPUT_PROMPT, OllamaChatPrompt, this>;
105
105
  asToolCallsOrTextGenerationModel<INPUT_PROMPT>(promptTemplate: ToolCallsPromptTemplate<INPUT_PROMPT, OllamaChatPrompt>): TextGenerationToolCallsModel<INPUT_PROMPT, OllamaChatPrompt, this>;
106
106
  asStructureGenerationModel<INPUT_PROMPT, OllamaChatPrompt>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, OllamaChatPrompt> | FlexibleStructureFromTextPromptTemplate<INPUT_PROMPT, unknown>): StructureFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>> | StructureFromTextStreamingModel<INPUT_PROMPT, OllamaChatPrompt, TextStreamingModel<OllamaChatPrompt, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>>;
107
- /**
108
- * Returns this model with a text prompt template.
109
- */
110
107
  withTextPrompt(): PromptTemplateTextStreamingModel<string, OllamaChatPrompt, OllamaChatModelSettings, this>;
111
- /**
112
- * Returns this model with an instruction prompt template.
113
- */
114
108
  withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").InstructionPrompt, OllamaChatPrompt, OllamaChatModelSettings, this>;
115
- /**
116
- * Returns this model with a chat prompt template.
117
- */
118
109
  withChatPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").ChatPrompt, OllamaChatPrompt, OllamaChatModelSettings, this>;
119
110
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OllamaChatPrompt>): PromptTemplateTextStreamingModel<INPUT_PROMPT, OllamaChatPrompt, OllamaChatModelSettings, this>;
120
111
  withJsonOutput(): this;
@@ -169,21 +169,12 @@ export class OllamaChatModel extends AbstractModel {
169
169
  template: promptTemplate,
170
170
  });
171
171
  }
172
- /**
173
- * Returns this model with a text prompt template.
174
- */
175
172
  withTextPrompt() {
176
173
  return this.withPromptTemplate(text());
177
174
  }
178
- /**
179
- * Returns this model with an instruction prompt template.
180
- */
181
175
  withInstructionPrompt() {
182
176
  return this.withPromptTemplate(instruction());
183
177
  }
184
- /**
185
- * Returns this model with a chat prompt template.
186
- */
187
178
  withChatPrompt() {
188
179
  return this.withPromptTemplate(chat());
189
180
  }
@@ -6,7 +6,7 @@ import { AbstractModel } from "../../model-function/AbstractModel.js";
6
6
  import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
7
7
  import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
8
8
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
9
- import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
9
+ import { TextStreamingBaseModel, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
10
10
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
11
11
  import { ChatPrompt } from "../../model-function/generate-text/prompt-template/ChatPrompt.js";
12
12
  import { InstructionPrompt } from "../../model-function/generate-text/prompt-template/InstructionPrompt.js";
@@ -49,7 +49,7 @@ export interface OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE extends numbe
49
49
  */
50
50
  promptTemplate?: TextGenerationPromptTemplateProvider<OllamaCompletionPrompt>;
51
51
  }
52
- export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number | undefined> extends AbstractModel<OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>> implements TextStreamingModel<OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>> {
52
+ export declare class OllamaCompletionModel<CONTEXT_WINDOW_SIZE extends number | undefined> extends AbstractModel<OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>> implements TextStreamingBaseModel<OllamaCompletionPrompt, OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>> {
53
53
  constructor(settings: OllamaCompletionModelSettings<CONTEXT_WINDOW_SIZE>);
54
54
  readonly provider = "ollama";
55
55
  get modelName(): string;
@@ -29,7 +29,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
29
29
  exports.ollama = void 0;
30
30
  __exportStar(require("./OllamaApiConfiguration.cjs"), exports);
31
31
  __exportStar(require("./OllamaChatModel.cjs"), exports);
32
- __exportStar(require("./OllamaChatPromptTemplate.cjs"), exports);
33
32
  __exportStar(require("./OllamaCompletionModel.cjs"), exports);
34
33
  exports.ollama = __importStar(require("./OllamaFacade.cjs"));
35
34
  __exportStar(require("./OllamaTextEmbeddingModel.cjs"), exports);
@@ -1,6 +1,5 @@
1
1
  export * from "./OllamaApiConfiguration.js";
2
2
  export * from "./OllamaChatModel.js";
3
- export * from "./OllamaChatPromptTemplate.js";
4
3
  export * from "./OllamaCompletionModel.js";
5
4
  export { OllamaErrorData } from "./OllamaError.js";
6
5
  export * as ollama from "./OllamaFacade.js";
@@ -1,6 +1,5 @@
1
1
  export * from "./OllamaApiConfiguration.js";
2
2
  export * from "./OllamaChatModel.js";
3
- export * from "./OllamaChatPromptTemplate.js";
4
3
  export * from "./OllamaCompletionModel.js";
5
4
  export * as ollama from "./OllamaFacade.js";
6
5
  export * from "./OllamaTextEmbeddingModel.js";
@@ -63,7 +63,7 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
63
63
  temperature: this.settings.temperature,
64
64
  top_p: this.settings.topP,
65
65
  n: this.settings.numberOfGenerations,
66
- stop: this.settings.stopSequences,
66
+ stop: stopSequences,
67
67
  max_tokens: this.settings.maxGenerationTokens,
68
68
  presence_penalty: this.settings.presencePenalty,
69
69
  frequency_penalty: this.settings.frequencyPenalty,
@@ -122,7 +122,9 @@ class AbstractOpenAIChatModel extends AbstractModel_js_1.AbstractModel {
122
122
  }
123
123
  extractTextDelta(delta) {
124
124
  const chunk = delta;
125
- if (chunk.object !== "chat.completion.chunk") {
125
+ if (chunk.object !== "chat.completion.chunk" &&
126
+ chunk.object !== "chat.completion" // for OpenAI-compatible models
127
+ ) {
126
128
  return undefined;
127
129
  }
128
130
  const chatChunk = chunk;
@@ -243,7 +245,7 @@ const openAIChatResponseSchema = zod_1.z.object({
243
245
  }),
244
246
  });
245
247
  const openaiChatChunkSchema = zod_1.z.object({
246
- object: zod_1.z.literal("chat.completion.chunk"),
248
+ object: zod_1.z.string(), // generalized for openai compatible providers, z.literal("chat.completion.chunk")
247
249
  id: zod_1.z.string(),
248
250
  choices: zod_1.z.array(zod_1.z.object({
249
251
  delta: zod_1.z.object({
@@ -223,7 +223,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
223
223
  };
224
224
  private translateFinishReason;
225
225
  doStreamText(prompt: OpenAIChatPrompt, options: FunctionCallOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
226
- object: "chat.completion.chunk";
226
+ object: string;
227
227
  id: string;
228
228
  created: number;
229
229
  choices: {
@@ -549,7 +549,7 @@ declare const openAIChatResponseSchema: z.ZodObject<{
549
549
  }>;
550
550
  export type OpenAIChatResponse = z.infer<typeof openAIChatResponseSchema>;
551
551
  declare const openaiChatChunkSchema: z.ZodObject<{
552
- object: z.ZodLiteral<"chat.completion.chunk">;
552
+ object: z.ZodString;
553
553
  id: z.ZodString;
554
554
  choices: z.ZodArray<z.ZodObject<{
555
555
  delta: z.ZodObject<{
@@ -669,7 +669,7 @@ declare const openaiChatChunkSchema: z.ZodObject<{
669
669
  model: z.ZodOptional<z.ZodString>;
670
670
  system_fingerprint: z.ZodNullable<z.ZodOptional<z.ZodString>>;
671
671
  }, "strip", z.ZodTypeAny, {
672
- object: "chat.completion.chunk";
672
+ object: string;
673
673
  id: string;
674
674
  created: number;
675
675
  choices: {
@@ -695,7 +695,7 @@ declare const openaiChatChunkSchema: z.ZodObject<{
695
695
  model?: string | undefined;
696
696
  system_fingerprint?: string | null | undefined;
697
697
  }, {
698
- object: "chat.completion.chunk";
698
+ object: string;
699
699
  id: string;
700
700
  created: number;
701
701
  choices: {
@@ -774,7 +774,7 @@ export declare const OpenAIChatResponseFormat: {
774
774
  handler: ({ response }: {
775
775
  response: Response;
776
776
  }) => Promise<AsyncIterable<import("../../index.js").Delta<{
777
- object: "chat.completion.chunk";
777
+ object: string;
778
778
  id: string;
779
779
  created: number;
780
780
  choices: {
@@ -60,7 +60,7 @@ export class AbstractOpenAIChatModel extends AbstractModel {
60
60
  temperature: this.settings.temperature,
61
61
  top_p: this.settings.topP,
62
62
  n: this.settings.numberOfGenerations,
63
- stop: this.settings.stopSequences,
63
+ stop: stopSequences,
64
64
  max_tokens: this.settings.maxGenerationTokens,
65
65
  presence_penalty: this.settings.presencePenalty,
66
66
  frequency_penalty: this.settings.frequencyPenalty,
@@ -119,7 +119,9 @@ export class AbstractOpenAIChatModel extends AbstractModel {
119
119
  }
120
120
  extractTextDelta(delta) {
121
121
  const chunk = delta;
122
- if (chunk.object !== "chat.completion.chunk") {
122
+ if (chunk.object !== "chat.completion.chunk" &&
123
+ chunk.object !== "chat.completion" // for OpenAI-compatible models
124
+ ) {
123
125
  return undefined;
124
126
  }
125
127
  const chatChunk = chunk;
@@ -239,7 +241,7 @@ const openAIChatResponseSchema = z.object({
239
241
  }),
240
242
  });
241
243
  const openaiChatChunkSchema = z.object({
242
- object: z.literal("chat.completion.chunk"),
244
+ object: z.string(), // generalized for openai compatible providers, z.literal("chat.completion.chunk")
243
245
  id: z.string(),
244
246
  choices: z.array(z.object({
245
247
  delta: z.object({
@@ -170,7 +170,7 @@ OpenAIChatSettings> {
170
170
  }>;
171
171
  doStreamStructure(schema: Schema<unknown> & JsonSchemaProducer, prompt: Parameters<PROMPT_TEMPLATE["format"]>[0], // first argument of the function
172
172
  options: FunctionCallOptions): Promise<AsyncIterable<import("../../index.js").Delta<{
173
- object: "chat.completion.chunk";
173
+ object: string;
174
174
  id: string;
175
175
  created: number;
176
176
  choices: {
@@ -229,21 +229,12 @@ class OpenAIChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpenAIChatMod
229
229
  template: promptTemplate,
230
230
  });
231
231
  }
232
- /**
233
- * Returns this model with a text prompt template.
234
- */
235
232
  withTextPrompt() {
236
233
  return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.text)());
237
234
  }
238
- /**
239
- * Returns this model with an instruction prompt template.
240
- */
241
235
  withInstructionPrompt() {
242
236
  return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.instruction)());
243
237
  }
244
- /**
245
- * Returns this model with a chat prompt template.
246
- */
247
238
  withChatPrompt() {
248
239
  return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.chat)());
249
240
  }
@@ -1,7 +1,7 @@
1
1
  import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
2
2
  import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
3
3
  import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
4
- import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
4
+ import { TextStreamingBaseModel, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
5
5
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
6
6
  import { ToolCallGenerationModel } from "../../tool/generate-tool-call/ToolCallGenerationModel.js";
7
7
  import { ToolCallsGenerationModel } from "../../tool/generate-tool-calls/ToolCallsGenerationModel.js";
@@ -124,7 +124,7 @@ export interface OpenAIChatSettings extends AbstractOpenAIChatSettings {
124
124
  * ),
125
125
  * ]);
126
126
  */
127
- export declare class OpenAIChatModel extends AbstractOpenAIChatModel<OpenAIChatSettings> implements TextStreamingModel<OpenAIChatPrompt, OpenAIChatSettings>, ToolCallGenerationModel<OpenAIChatPrompt, OpenAIChatSettings>, ToolCallsGenerationModel<OpenAIChatPrompt, OpenAIChatSettings> {
127
+ export declare class OpenAIChatModel extends AbstractOpenAIChatModel<OpenAIChatSettings> implements TextStreamingBaseModel<OpenAIChatPrompt, OpenAIChatSettings>, ToolCallGenerationModel<OpenAIChatPrompt, OpenAIChatSettings>, ToolCallsGenerationModel<OpenAIChatPrompt, OpenAIChatSettings> {
128
128
  constructor(settings: OpenAIChatSettings);
129
129
  readonly provider: "openai";
130
130
  get modelName(): OpenAIChatModelType;
@@ -141,17 +141,8 @@ export declare class OpenAIChatModel extends AbstractOpenAIChatModel<OpenAIChatS
141
141
  fnDescription?: string;
142
142
  }): OpenAIChatFunctionCallStructureGenerationModel<TextGenerationPromptTemplate<OpenAIChatPrompt, OpenAIChatPrompt>>;
143
143
  asStructureGenerationModel<INPUT_PROMPT, OpenAIChatPrompt>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt> | FlexibleStructureFromTextPromptTemplate<INPUT_PROMPT, unknown>): StructureFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>> | StructureFromTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, TextStreamingModel<OpenAIChatPrompt, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>>;
144
- /**
145
- * Returns this model with a text prompt template.
146
- */
147
144
  withTextPrompt(): PromptTemplateFullTextModel<string, OpenAIChatPrompt, OpenAIChatSettings, this>;
148
- /**
149
- * Returns this model with an instruction prompt template.
150
- */
151
145
  withInstructionPrompt(): PromptTemplateFullTextModel<import("../../index.js").InstructionPrompt, OpenAIChatPrompt, OpenAIChatSettings, this>;
152
- /**
153
- * Returns this model with a chat prompt template.
154
- */
155
146
  withChatPrompt(): PromptTemplateFullTextModel<import("../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAIChatSettings, this>;
156
147
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): PromptTemplateFullTextModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAIChatSettings, this>;
157
148
  withJsonOutput(): this;
@@ -223,21 +223,12 @@ export class OpenAIChatModel extends AbstractOpenAIChatModel {
223
223
  template: promptTemplate,
224
224
  });
225
225
  }
226
- /**
227
- * Returns this model with a text prompt template.
228
- */
229
226
  withTextPrompt() {
230
227
  return this.withPromptTemplate(text());
231
228
  }
232
- /**
233
- * Returns this model with an instruction prompt template.
234
- */
235
229
  withInstructionPrompt() {
236
230
  return this.withPromptTemplate(instruction());
237
231
  }
238
- /**
239
- * Returns this model with a chat prompt template.
240
- */
241
232
  withChatPrompt() {
242
233
  return this.withPromptTemplate(chat());
243
234
  }
@@ -99,15 +99,12 @@ class OpenAICompletionModel extends AbstractOpenAICompletionModel_js_1.AbstractO
99
99
  ];
100
100
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
101
101
  }
102
- /**
103
- * Returns this model with an instruction prompt template.
104
- */
102
+ withTextPrompt() {
103
+ return this.withPromptTemplate((0, TextPromptTemplate_js_1.text)());
104
+ }
105
105
  withInstructionPrompt() {
106
106
  return this.withPromptTemplate((0, TextPromptTemplate_js_1.instruction)());
107
107
  }
108
- /**
109
- * Returns this model with a chat prompt template.
110
- */
111
108
  withChatPrompt(options) {
112
109
  return this.withPromptTemplate((0, TextPromptTemplate_js_1.chat)(options));
113
110
  }
@@ -1,5 +1,5 @@
1
1
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
2
- import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
2
+ import { TextStreamingBaseModel } from "../../model-function/generate-text/TextGenerationModel.js";
3
3
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
4
4
  import { AbstractOpenAICompletionModel, AbstractOpenAICompletionModelSettings, OpenAICompletionResponse } from "./AbstractOpenAICompletionModel.js";
5
5
  import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
@@ -46,7 +46,7 @@ export interface OpenAICompletionModelSettings extends AbstractOpenAICompletionM
46
46
  * "Write a short story about a robot learning to love:\n\n"
47
47
  * );
48
48
  */
49
- export declare class OpenAICompletionModel extends AbstractOpenAICompletionModel<OpenAICompletionModelSettings> implements TextStreamingModel<string, OpenAICompletionModelSettings> {
49
+ export declare class OpenAICompletionModel extends AbstractOpenAICompletionModel<OpenAICompletionModelSettings> implements TextStreamingBaseModel<string, OpenAICompletionModelSettings> {
50
50
  constructor(settings: OpenAICompletionModelSettings);
51
51
  readonly provider: "openai";
52
52
  get modelName(): "gpt-3.5-turbo-instruct";
@@ -54,13 +54,8 @@ export declare class OpenAICompletionModel extends AbstractOpenAICompletionModel
54
54
  readonly tokenizer: TikTokenTokenizer;
55
55
  countPromptTokens(input: string): Promise<number>;
56
56
  get settingsForEvent(): Partial<OpenAICompletionModelSettings>;
57
- /**
58
- * Returns this model with an instruction prompt template.
59
- */
57
+ withTextPrompt(): PromptTemplateTextStreamingModel<string, string, OpenAICompletionModelSettings, this>;
60
58
  withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").InstructionPrompt, string, OpenAICompletionModelSettings, this>;
61
- /**
62
- * Returns this model with a chat prompt template.
63
- */
64
59
  withChatPrompt(options?: {
65
60
  user?: string;
66
61
  assistant?: string;
@@ -1,6 +1,6 @@
1
1
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
2
2
  import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
3
- import { chat, instruction, } from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
3
+ import { chat, instruction, text, } from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
4
4
  import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
5
5
  import { AbstractOpenAICompletionModel, } from "./AbstractOpenAICompletionModel.js";
6
6
  import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
@@ -93,15 +93,12 @@ export class OpenAICompletionModel extends AbstractOpenAICompletionModel {
93
93
  ];
94
94
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
95
95
  }
96
- /**
97
- * Returns this model with an instruction prompt template.
98
- */
96
+ withTextPrompt() {
97
+ return this.withPromptTemplate(text());
98
+ }
99
99
  withInstructionPrompt() {
100
100
  return this.withPromptTemplate(instruction());
101
101
  }
102
- /**
103
- * Returns this model with a chat prompt template.
104
- */
105
102
  withChatPrompt(options) {
106
103
  return this.withPromptTemplate(chat(options));
107
104
  }
@@ -26,7 +26,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
26
26
  return result;
27
27
  };
28
28
  Object.defineProperty(exports, "__esModule", { value: true });
29
- exports.openai = exports.OpenAIChatPrompt = void 0;
29
+ exports.openai = void 0;
30
30
  __exportStar(require("./AbstractOpenAIChatModel.cjs"), exports);
31
31
  __exportStar(require("./AbstractOpenAICompletionModel.cjs"), exports);
32
32
  __exportStar(require("./AbstractOpenAITextEmbeddingModel.cjs"), exports);
@@ -34,7 +34,6 @@ __exportStar(require("./AzureOpenAIApiConfiguration.cjs"), exports);
34
34
  __exportStar(require("./OpenAIApiConfiguration.cjs"), exports);
35
35
  __exportStar(require("./OpenAIChatMessage.cjs"), exports);
36
36
  __exportStar(require("./OpenAIChatModel.cjs"), exports);
37
- exports.OpenAIChatPrompt = __importStar(require("./OpenAIChatPromptTemplate.cjs"));
38
37
  __exportStar(require("./OpenAICompletionModel.cjs"), exports);
39
38
  exports.openai = __importStar(require("./OpenAIFacade.cjs"));
40
39
  __exportStar(require("./OpenAIImageGenerationModel.cjs"), exports);
@@ -5,7 +5,6 @@ export * from "./AzureOpenAIApiConfiguration.js";
5
5
  export * from "./OpenAIApiConfiguration.js";
6
6
  export * from "./OpenAIChatMessage.js";
7
7
  export * from "./OpenAIChatModel.js";
8
- export * as OpenAIChatPrompt from "./OpenAIChatPromptTemplate.js";
9
8
  export * from "./OpenAICompletionModel.js";
10
9
  export { OpenAIErrorData } from "./OpenAIError.js";
11
10
  export * as openai from "./OpenAIFacade.js";
@@ -5,7 +5,6 @@ export * from "./AzureOpenAIApiConfiguration.js";
5
5
  export * from "./OpenAIApiConfiguration.js";
6
6
  export * from "./OpenAIChatMessage.js";
7
7
  export * from "./OpenAIChatModel.js";
8
- export * as OpenAIChatPrompt from "./OpenAIChatPromptTemplate.js";
9
8
  export * from "./OpenAICompletionModel.js";
10
9
  export * as openai from "./OpenAIFacade.js";
11
10
  export * from "./OpenAIImageGenerationModel.js";
@@ -68,21 +68,12 @@ class OpenAICompatibleChatModel extends AbstractOpenAIChatModel_js_1.AbstractOpe
68
68
  template: promptTemplate,
69
69
  });
70
70
  }
71
- /**
72
- * Returns this model with a text prompt template.
73
- */
74
71
  withTextPrompt() {
75
72
  return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.text)());
76
73
  }
77
- /**
78
- * Returns this model with an instruction prompt template.
79
- */
80
74
  withInstructionPrompt() {
81
75
  return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.instruction)());
82
76
  }
83
- /**
84
- * Returns this model with a chat prompt template.
85
- */
86
77
  withChatPrompt() {
87
78
  return this.withPromptTemplate((0, OpenAIChatPromptTemplate_js_1.chat)());
88
79
  }
@@ -2,7 +2,7 @@ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
2
2
  import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
3
3
  import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
4
4
  import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
5
- import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
5
+ import { TextStreamingBaseModel, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
6
6
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
7
7
  import { ToolCallGenerationModel } from "../../tool/generate-tool-call/ToolCallGenerationModel.js";
8
8
  import { ToolCallsGenerationModel } from "../../tool/generate-tool-calls/ToolCallsGenerationModel.js";
@@ -20,7 +20,7 @@ export interface OpenAICompatibleChatSettings extends AbstractOpenAIChatSettings
20
20
  *
21
21
  * @see https://platform.openai.com/docs/api-reference/chat/create
22
22
  */
23
- export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<OpenAICompatibleChatSettings> implements TextStreamingModel<OpenAIChatPrompt, OpenAICompatibleChatSettings>, ToolCallGenerationModel<OpenAIChatPrompt, OpenAICompatibleChatSettings>, ToolCallsGenerationModel<OpenAIChatPrompt, OpenAICompatibleChatSettings> {
23
+ export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<OpenAICompatibleChatSettings> implements TextStreamingBaseModel<OpenAIChatPrompt, OpenAICompatibleChatSettings>, ToolCallGenerationModel<OpenAIChatPrompt, OpenAICompatibleChatSettings>, ToolCallsGenerationModel<OpenAIChatPrompt, OpenAICompatibleChatSettings> {
24
24
  constructor(settings: OpenAICompatibleChatSettings);
25
25
  get provider(): OpenAICompatibleProviderName;
26
26
  get modelName(): string;
@@ -29,17 +29,8 @@ export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<O
29
29
  readonly countPromptTokens: undefined;
30
30
  get settingsForEvent(): Partial<OpenAICompatibleChatSettings>;
31
31
  asStructureGenerationModel<INPUT_PROMPT, OpenAIChatPrompt>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt> | FlexibleStructureFromTextPromptTemplate<INPUT_PROMPT, unknown>): StructureFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>> | StructureFromTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, TextStreamingModel<OpenAIChatPrompt, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>>;
32
- /**
33
- * Returns this model with a text prompt template.
34
- */
35
32
  withTextPrompt(): PromptTemplateFullTextModel<string, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
36
- /**
37
- * Returns this model with an instruction prompt template.
38
- */
39
33
  withInstructionPrompt(): PromptTemplateFullTextModel<import("../../index.js").InstructionPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
40
- /**
41
- * Returns this model with a chat prompt template.
42
- */
43
34
  withChatPrompt(): PromptTemplateFullTextModel<import("../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
44
35
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): PromptTemplateFullTextModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
45
36
  withJsonOutput(): this;
@@ -65,21 +65,12 @@ export class OpenAICompatibleChatModel extends AbstractOpenAIChatModel {
65
65
  template: promptTemplate,
66
66
  });
67
67
  }
68
- /**
69
- * Returns this model with a text prompt template.
70
- */
71
68
  withTextPrompt() {
72
69
  return this.withPromptTemplate(text());
73
70
  }
74
- /**
75
- * Returns this model with an instruction prompt template.
76
- */
77
71
  withInstructionPrompt() {
78
72
  return this.withPromptTemplate(instruction());
79
73
  }
80
- /**
81
- * Returns this model with a chat prompt template.
82
- */
83
74
  withChatPrompt() {
84
75
  return this.withPromptTemplate(chat());
85
76
  }
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.OpenAICompatibleCompletionModel = void 0;
4
4
  const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
5
5
  const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
6
+ const TextPromptTemplate_js_1 = require("../../model-function/generate-text/prompt-template/TextPromptTemplate.cjs");
6
7
  const AbstractOpenAICompletionModel_js_1 = require("../openai/AbstractOpenAICompletionModel.cjs");
7
8
  /**
8
9
  * Create a text generation model that calls an API that is compatible with OpenAI's completion API.
@@ -56,6 +57,15 @@ class OpenAICompatibleCompletionModel extends AbstractOpenAICompletionModel_js_1
56
57
  ];
57
58
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
58
59
  }
60
+ withTextPrompt() {
61
+ return this.withPromptTemplate((0, TextPromptTemplate_js_1.text)());
62
+ }
63
+ withInstructionPrompt() {
64
+ return this.withPromptTemplate((0, TextPromptTemplate_js_1.instruction)());
65
+ }
66
+ withChatPrompt(options) {
67
+ return this.withPromptTemplate((0, TextPromptTemplate_js_1.chat)(options));
68
+ }
59
69
  withPromptTemplate(promptTemplate) {
60
70
  return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
61
71
  model: this.withSettings({
@@ -1,6 +1,6 @@
1
1
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
2
2
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
3
- import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
3
+ import { TextStreamingBaseModel } from "../../model-function/generate-text/TextGenerationModel.js";
4
4
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
5
5
  import { AbstractOpenAICompletionModel, AbstractOpenAICompletionModelSettings } from "../openai/AbstractOpenAICompletionModel.js";
6
6
  import { OpenAICompatibleProviderName } from "./OpenAICompatibleProviderName.js";
@@ -16,7 +16,7 @@ export interface OpenAICompatibleCompletionModelSettings extends AbstractOpenAIC
16
16
  *
17
17
  * @see https://platform.openai.com/docs/api-reference/completions/create
18
18
  */
19
- export declare class OpenAICompatibleCompletionModel extends AbstractOpenAICompletionModel<OpenAICompatibleCompletionModelSettings> implements TextStreamingModel<string, OpenAICompatibleCompletionModelSettings> {
19
+ export declare class OpenAICompatibleCompletionModel extends AbstractOpenAICompletionModel<OpenAICompatibleCompletionModelSettings> implements TextStreamingBaseModel<string, OpenAICompatibleCompletionModelSettings> {
20
20
  constructor(settings: OpenAICompatibleCompletionModelSettings);
21
21
  get provider(): OpenAICompatibleProviderName;
22
22
  get modelName(): string;
@@ -24,6 +24,12 @@ export declare class OpenAICompatibleCompletionModel extends AbstractOpenAICompl
24
24
  readonly tokenizer: undefined;
25
25
  readonly countPromptTokens: undefined;
26
26
  get settingsForEvent(): Partial<OpenAICompatibleCompletionModelSettings>;
27
+ withTextPrompt(): PromptTemplateTextStreamingModel<string, string, OpenAICompatibleCompletionModelSettings, this>;
28
+ withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").InstructionPrompt, string, OpenAICompatibleCompletionModelSettings, this>;
29
+ withChatPrompt(options?: {
30
+ user?: string;
31
+ assistant?: string;
32
+ }): PromptTemplateTextStreamingModel<import("../../index.js").ChatPrompt, string, OpenAICompatibleCompletionModelSettings, this>;
27
33
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, OpenAICompatibleCompletionModelSettings, this>;
28
34
  withSettings(additionalSettings: Partial<OpenAICompatibleCompletionModelSettings>): this;
29
35
  }