modelfusion 0.45.3 → 0.47.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (134)
  1. package/README.md +92 -65
  2. package/index.cjs +0 -1
  3. package/index.d.ts +0 -1
  4. package/index.js +0 -1
  5. package/model-function/ModelFunctionPromise.cjs +37 -0
  6. package/model-function/ModelFunctionPromise.d.ts +18 -0
  7. package/model-function/ModelFunctionPromise.js +33 -0
  8. package/{prompt → model-function}/PromptFormat.d.ts +0 -5
  9. package/model-function/describe-image/describeImage.cjs +3 -2
  10. package/model-function/describe-image/describeImage.d.ts +1 -1
  11. package/model-function/describe-image/describeImage.js +3 -2
  12. package/model-function/embed/embed.cjs +5 -4
  13. package/model-function/embed/embed.d.ts +1 -1
  14. package/model-function/embed/embed.js +5 -4
  15. package/model-function/executeCall.cjs +3 -46
  16. package/model-function/executeCall.d.ts +5 -18
  17. package/model-function/executeCall.js +1 -43
  18. package/model-function/generate-image/ImageGenerationModel.d.ts +2 -0
  19. package/model-function/generate-image/ImageGenerationPromise.cjs +50 -0
  20. package/model-function/generate-image/ImageGenerationPromise.d.ts +22 -0
  21. package/model-function/generate-image/ImageGenerationPromise.js +46 -0
  22. package/model-function/generate-image/PromptFormatImageGenerationModel.cjs +44 -0
  23. package/model-function/generate-image/PromptFormatImageGenerationModel.d.ts +20 -0
  24. package/model-function/generate-image/PromptFormatImageGenerationModel.js +40 -0
  25. package/model-function/generate-image/generateImage.cjs +3 -2
  26. package/model-function/generate-image/generateImage.d.ts +2 -2
  27. package/model-function/generate-image/generateImage.js +3 -2
  28. package/model-function/generate-structure/generateStructure.cjs +3 -2
  29. package/model-function/generate-structure/generateStructure.d.ts +1 -1
  30. package/model-function/generate-structure/generateStructure.js +3 -2
  31. package/model-function/generate-structure/generateStructureOrText.cjs +3 -2
  32. package/model-function/generate-structure/generateStructureOrText.d.ts +1 -1
  33. package/model-function/generate-structure/generateStructureOrText.js +3 -2
  34. package/{prompt → model-function/generate-text}/AlpacaPromptFormat.d.ts +2 -2
  35. package/{prompt → model-function/generate-text}/Llama2PromptFormat.cjs +1 -1
  36. package/model-function/generate-text/Llama2PromptFormat.d.ts +13 -0
  37. package/{prompt → model-function/generate-text}/Llama2PromptFormat.js +1 -1
  38. package/{prompt → model-function/generate-text}/PromptFormatTextGenerationModel.d.ts +7 -7
  39. package/{prompt → model-function/generate-text}/PromptFormatTextStreamingModel.d.ts +6 -6
  40. package/model-function/generate-text/TextGenerationModel.d.ts +10 -3
  41. package/model-function/generate-text/TextGenerationPromptFormat.cjs +2 -0
  42. package/model-function/generate-text/TextGenerationPromptFormat.d.ts +11 -0
  43. package/model-function/generate-text/TextGenerationPromptFormat.js +1 -0
  44. package/{prompt → model-function/generate-text}/TextPromptFormat.cjs +2 -2
  45. package/model-function/generate-text/TextPromptFormat.d.ts +17 -0
  46. package/{prompt → model-function/generate-text}/TextPromptFormat.js +2 -2
  47. package/{prompt → model-function/generate-text}/VicunaPromptFormat.cjs +1 -1
  48. package/{prompt → model-function/generate-text}/VicunaPromptFormat.d.ts +3 -3
  49. package/{prompt → model-function/generate-text}/VicunaPromptFormat.js +1 -1
  50. package/model-function/generate-text/generateText.cjs +6 -3
  51. package/model-function/generate-text/generateText.d.ts +1 -1
  52. package/model-function/generate-text/generateText.js +6 -3
  53. package/{prompt → model-function/generate-text}/index.cjs +9 -4
  54. package/model-function/generate-text/index.d.ts +16 -0
  55. package/model-function/generate-text/index.js +16 -0
  56. package/{prompt/chat → model-function/generate-text}/trimChatPrompt.d.ts +2 -5
  57. package/model-function/index.cjs +3 -5
  58. package/model-function/index.d.ts +3 -5
  59. package/model-function/index.js +3 -5
  60. package/model-function/synthesize-speech/synthesizeSpeech.cjs +3 -2
  61. package/model-function/synthesize-speech/synthesizeSpeech.d.ts +1 -1
  62. package/model-function/synthesize-speech/synthesizeSpeech.js +3 -2
  63. package/model-function/transcribe-speech/transcribe.cjs +3 -2
  64. package/model-function/transcribe-speech/transcribe.d.ts +1 -1
  65. package/model-function/transcribe-speech/transcribe.js +3 -2
  66. package/model-provider/anthropic/AnthropicPromptFormat.cjs +1 -1
  67. package/model-provider/anthropic/AnthropicPromptFormat.d.ts +5 -5
  68. package/model-provider/anthropic/AnthropicPromptFormat.js +1 -1
  69. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +14 -1
  70. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +11 -3
  71. package/model-provider/anthropic/AnthropicTextGenerationModel.js +14 -1
  72. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.cjs +11 -0
  73. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.d.ts +12 -12
  74. package/model-provider/automatic1111/Automatic1111ImageGenerationModel.js +11 -0
  75. package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.cjs +12 -0
  76. package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.d.ts +10 -0
  77. package/model-provider/automatic1111/Automatic1111ImageGenerationPrompt.js +8 -0
  78. package/model-provider/automatic1111/index.cjs +1 -0
  79. package/model-provider/automatic1111/index.d.ts +1 -0
  80. package/model-provider/automatic1111/index.js +1 -0
  81. package/model-provider/cohere/CohereTextGenerationModel.cjs +14 -1
  82. package/model-provider/cohere/CohereTextGenerationModel.d.ts +14 -3
  83. package/model-provider/cohere/CohereTextGenerationModel.js +14 -1
  84. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +2 -2
  85. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +5 -5
  86. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +2 -2
  87. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +1 -1
  88. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +3 -3
  89. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +1 -1
  90. package/model-provider/openai/OpenAIImageGenerationModel.cjs +8 -1
  91. package/model-provider/openai/OpenAIImageGenerationModel.d.ts +6 -3
  92. package/model-provider/openai/OpenAIImageGenerationModel.js +8 -1
  93. package/model-provider/openai/OpenAITextGenerationModel.cjs +14 -1
  94. package/model-provider/openai/OpenAITextGenerationModel.d.ts +14 -3
  95. package/model-provider/openai/OpenAITextGenerationModel.js +14 -1
  96. package/model-provider/openai/chat/OpenAIChatModel.cjs +14 -1
  97. package/model-provider/openai/chat/OpenAIChatModel.d.ts +11 -3
  98. package/model-provider/openai/chat/OpenAIChatModel.js +14 -1
  99. package/model-provider/openai/chat/OpenAIChatPromptFormat.cjs +1 -1
  100. package/model-provider/openai/chat/OpenAIChatPromptFormat.d.ts +5 -5
  101. package/model-provider/openai/chat/OpenAIChatPromptFormat.js +1 -1
  102. package/model-provider/stability/StabilityImageGenerationModel.cjs +11 -0
  103. package/model-provider/stability/StabilityImageGenerationModel.d.ts +15 -14
  104. package/model-provider/stability/StabilityImageGenerationModel.js +11 -0
  105. package/model-provider/stability/StabilityImageGenerationPrompt.cjs +12 -0
  106. package/model-provider/stability/StabilityImageGenerationPrompt.d.ts +9 -0
  107. package/model-provider/stability/StabilityImageGenerationPrompt.js +8 -0
  108. package/model-provider/stability/index.cjs +1 -0
  109. package/model-provider/stability/index.d.ts +1 -0
  110. package/model-provider/stability/index.js +1 -0
  111. package/package.json +1 -1
  112. package/prompt/Llama2PromptFormat.d.ts +0 -13
  113. package/prompt/TextPromptFormat.d.ts +0 -17
  114. package/prompt/index.d.ts +0 -11
  115. package/prompt/index.js +0 -11
  116. /package/{prompt → model-function}/PromptFormat.cjs +0 -0
  117. /package/{prompt → model-function}/PromptFormat.js +0 -0
  118. /package/{prompt → model-function/generate-text}/AlpacaPromptFormat.cjs +0 -0
  119. /package/{prompt → model-function/generate-text}/AlpacaPromptFormat.js +0 -0
  120. /package/{prompt/chat → model-function/generate-text}/ChatPrompt.cjs +0 -0
  121. /package/{prompt/chat → model-function/generate-text}/ChatPrompt.d.ts +0 -0
  122. /package/{prompt/chat → model-function/generate-text}/ChatPrompt.js +0 -0
  123. /package/{prompt → model-function/generate-text}/InstructionPrompt.cjs +0 -0
  124. /package/{prompt → model-function/generate-text}/InstructionPrompt.d.ts +0 -0
  125. /package/{prompt → model-function/generate-text}/InstructionPrompt.js +0 -0
  126. /package/{prompt → model-function/generate-text}/PromptFormatTextGenerationModel.cjs +0 -0
  127. /package/{prompt → model-function/generate-text}/PromptFormatTextGenerationModel.js +0 -0
  128. /package/{prompt → model-function/generate-text}/PromptFormatTextStreamingModel.cjs +0 -0
  129. /package/{prompt → model-function/generate-text}/PromptFormatTextStreamingModel.js +0 -0
  130. /package/{prompt/chat → model-function/generate-text}/trimChatPrompt.cjs +0 -0
  131. /package/{prompt/chat → model-function/generate-text}/trimChatPrompt.js +0 -0
  132. /package/{prompt/chat → model-function/generate-text}/validateChatPrompt.cjs +0 -0
  133. /package/{prompt/chat → model-function/generate-text}/validateChatPrompt.d.ts +0 -0
  134. /package/{prompt/chat → model-function/generate-text}/validateChatPrompt.js +0 -0
@@ -10,8 +10,9 @@ const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndTh
10
10
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
11
11
  const AsyncQueue_js_1 = require("../../event-source/AsyncQueue.cjs");
12
12
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
13
+ const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
14
+ const TextPromptFormat_js_1 = require("../../model-function/generate-text/TextPromptFormat.cjs");
13
15
  const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
14
- const PromptFormatTextStreamingModel_js_1 = require("../../prompt/PromptFormatTextStreamingModel.cjs");
15
16
  const CohereApiConfiguration_js_1 = require("./CohereApiConfiguration.cjs");
16
17
  const CohereError_js_1 = require("./CohereError.cjs");
17
18
  const CohereTokenizer_js_1 = require("./CohereTokenizer.cjs");
@@ -134,6 +135,18 @@ class CohereTextGenerationModel extends AbstractModel_js_1.AbstractModel {
134
135
  extractTextDelta(fullDelta) {
135
136
  return fullDelta.delta;
136
137
  }
138
+ /**
139
+ * Returns this model with an instruction prompt format.
140
+ */
141
+ withInstructionPrompt() {
142
+ return this.withPromptFormat((0, TextPromptFormat_js_1.mapInstructionPromptToTextFormat)());
143
+ }
144
+ /**
145
+ * Returns this model with a chat prompt format.
146
+ */
147
+ withChatPrompt(options) {
148
+ return this.withPromptFormat((0, TextPromptFormat_js_1.mapChatPromptToTextFormat)(options));
149
+ }
137
150
  withPromptFormat(promptFormat) {
138
151
  return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
139
152
  model: this.withSettings({
@@ -4,9 +4,9 @@ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
4
  import { ResponseHandler } from "../../core/api/postToApi.js";
5
5
  import { AbstractModel } from "../../model-function/AbstractModel.js";
6
6
  import { Delta } from "../../model-function/Delta.js";
7
+ import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
7
8
  import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
8
- import { PromptFormat } from "../../prompt/PromptFormat.js";
9
- import { PromptFormatTextStreamingModel } from "../../prompt/PromptFormatTextStreamingModel.js";
9
+ import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
10
10
  import { CohereTokenizer } from "./CohereTokenizer.js";
11
11
  export declare const COHERE_TEXT_GENERATION_MODELS: {
12
12
  command: {
@@ -84,7 +84,18 @@ export declare class CohereTextGenerationModel extends AbstractModel<CohereTextG
84
84
  }>;
85
85
  doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
86
86
  extractTextDelta(fullDelta: CohereTextGenerationDelta): string | undefined;
87
- withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, CohereTextGenerationModelSettings, this>;
87
+ /**
88
+ * Returns this model with an instruction prompt format.
89
+ */
90
+ withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../index.js").InstructionPrompt, string, CohereTextGenerationModelSettings, this>;
91
+ /**
92
+ * Returns this model with a chat prompt format.
93
+ */
94
+ withChatPrompt(options?: {
95
+ user?: string;
96
+ ai?: string;
97
+ }): PromptFormatTextStreamingModel<import("../../index.js").ChatPrompt, string, CohereTextGenerationModelSettings, this>;
98
+ withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, CohereTextGenerationModelSettings, this>;
88
99
  withSettings(additionalSettings: Partial<CohereTextGenerationModelSettings>): this;
89
100
  }
90
101
  declare const cohereTextGenerationResponseSchema: z.ZodObject<{
@@ -4,8 +4,9 @@ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottl
4
4
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
5
5
  import { AsyncQueue } from "../../event-source/AsyncQueue.js";
6
6
  import { AbstractModel } from "../../model-function/AbstractModel.js";
7
+ import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
8
+ import { mapChatPromptToTextFormat, mapInstructionPromptToTextFormat, } from "../../model-function/generate-text/TextPromptFormat.js";
7
9
  import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
8
- import { PromptFormatTextStreamingModel } from "../../prompt/PromptFormatTextStreamingModel.js";
9
10
  import { CohereApiConfiguration } from "./CohereApiConfiguration.js";
10
11
  import { failedCohereCallResponseHandler } from "./CohereError.js";
11
12
  import { CohereTokenizer } from "./CohereTokenizer.js";
@@ -128,6 +129,18 @@ export class CohereTextGenerationModel extends AbstractModel {
128
129
  extractTextDelta(fullDelta) {
129
130
  return fullDelta.delta;
130
131
  }
132
+ /**
133
+ * Returns this model with an instruction prompt format.
134
+ */
135
+ withInstructionPrompt() {
136
+ return this.withPromptFormat(mapInstructionPromptToTextFormat());
137
+ }
138
+ /**
139
+ * Returns this model with a chat prompt format.
140
+ */
141
+ withChatPrompt(options) {
142
+ return this.withPromptFormat(mapChatPromptToTextFormat(options));
143
+ }
131
144
  withPromptFormat(promptFormat) {
132
145
  return new PromptFormatTextStreamingModel({
133
146
  model: this.withSettings({
@@ -5,10 +5,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
5
5
  Object.defineProperty(exports, "__esModule", { value: true });
6
6
  exports.HuggingFaceTextGenerationModel = void 0;
7
7
  const zod_1 = __importDefault(require("zod"));
8
- const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
9
- const PromptFormatTextGenerationModel_js_1 = require("../../prompt/PromptFormatTextGenerationModel.cjs");
10
8
  const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
11
9
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
10
+ const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
11
+ const PromptFormatTextGenerationModel_js_1 = require("../../model-function/generate-text/PromptFormatTextGenerationModel.cjs");
12
12
  const HuggingFaceApiConfiguration_js_1 = require("./HuggingFaceApiConfiguration.cjs");
13
13
  const HuggingFaceError_js_1 = require("./HuggingFaceError.cjs");
14
14
  /**
@@ -1,10 +1,10 @@
1
1
  import z from "zod";
2
- import { AbstractModel } from "../../model-function/AbstractModel.js";
3
- import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
2
  import { FunctionOptions } from "../../core/FunctionOptions.js";
3
+ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
+ import { AbstractModel } from "../../model-function/AbstractModel.js";
5
+ import { PromptFormatTextGenerationModel } from "../../model-function/generate-text/PromptFormatTextGenerationModel.js";
5
6
  import { TextGenerationModel, TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
6
- import { PromptFormat } from "../../prompt/PromptFormat.js";
7
- import { PromptFormatTextGenerationModel } from "../../prompt/PromptFormatTextGenerationModel.js";
7
+ import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
8
8
  export interface HuggingFaceTextGenerationModelSettings extends TextGenerationModelSettings {
9
9
  api?: ApiConfiguration;
10
10
  model: string;
@@ -53,7 +53,7 @@ export declare class HuggingFaceTextGenerationModel extends AbstractModel<Huggin
53
53
  }[];
54
54
  text: string;
55
55
  }>;
56
- withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatTextGenerationModel<INPUT_PROMPT, string, HuggingFaceTextGenerationModelSettings, this>;
56
+ withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextGenerationModel<INPUT_PROMPT, string, HuggingFaceTextGenerationModelSettings, this>;
57
57
  withSettings(additionalSettings: Partial<HuggingFaceTextGenerationModelSettings>): this;
58
58
  }
59
59
  declare const huggingFaceTextGenerationResponseSchema: z.ZodArray<z.ZodObject<{
@@ -1,8 +1,8 @@
1
1
  import z from "zod";
2
- import { AbstractModel } from "../../model-function/AbstractModel.js";
3
- import { PromptFormatTextGenerationModel } from "../../prompt/PromptFormatTextGenerationModel.js";
4
2
  import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
5
3
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
4
+ import { AbstractModel } from "../../model-function/AbstractModel.js";
5
+ import { PromptFormatTextGenerationModel } from "../../model-function/generate-text/PromptFormatTextGenerationModel.js";
6
6
  import { HuggingFaceApiConfiguration } from "./HuggingFaceApiConfiguration.js";
7
7
  import { failedHuggingFaceCallResponseHandler } from "./HuggingFaceError.js";
8
8
  /**
@@ -11,7 +11,7 @@ const postToApi_js_1 = require("../../core/api/postToApi.cjs");
11
11
  const AsyncQueue_js_1 = require("../../event-source/AsyncQueue.cjs");
12
12
  const parseEventSourceStream_js_1 = require("../../event-source/parseEventSourceStream.cjs");
13
13
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
14
- const PromptFormatTextStreamingModel_js_1 = require("../../prompt/PromptFormatTextStreamingModel.cjs");
14
+ const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
15
15
  const LlamaCppApiConfiguration_js_1 = require("./LlamaCppApiConfiguration.cjs");
16
16
  const LlamaCppError_js_1 = require("./LlamaCppError.cjs");
17
17
  const LlamaCppTokenizer_js_1 = require("./LlamaCppTokenizer.cjs");
@@ -4,9 +4,9 @@ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
4
  import { ResponseHandler } from "../../core/api/postToApi.js";
5
5
  import { AbstractModel } from "../../model-function/AbstractModel.js";
6
6
  import { Delta } from "../../model-function/Delta.js";
7
+ import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
7
8
  import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
8
- import { PromptFormat } from "../../prompt/PromptFormat.js";
9
- import { PromptFormatTextStreamingModel } from "../../prompt/PromptFormatTextStreamingModel.js";
9
+ import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
10
10
  import { LlamaCppTokenizer } from "./LlamaCppTokenizer.js";
11
11
  export interface LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE extends number | undefined> extends TextGenerationModelSettings {
12
12
  api?: ApiConfiguration;
@@ -100,7 +100,7 @@ export declare class LlamaCppTextGenerationModel<CONTEXT_WINDOW_SIZE extends num
100
100
  };
101
101
  }>;
102
102
  doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
103
- withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
103
+ withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>, this>;
104
104
  withSettings(additionalSettings: Partial<LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>>): this;
105
105
  }
106
106
  declare const llamaCppTextGenerationResponseSchema: z.ZodObject<{
@@ -5,7 +5,7 @@ import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postTo
5
5
  import { AsyncQueue } from "../../event-source/AsyncQueue.js";
6
6
  import { parseEventSourceStream } from "../../event-source/parseEventSourceStream.js";
7
7
  import { AbstractModel } from "../../model-function/AbstractModel.js";
8
- import { PromptFormatTextStreamingModel } from "../../prompt/PromptFormatTextStreamingModel.js";
8
+ import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
9
9
  import { LlamaCppApiConfiguration } from "./LlamaCppApiConfiguration.js";
10
10
  import { failedLlamaCppCallResponseHandler } from "./LlamaCppError.js";
11
11
  import { LlamaCppTokenizer } from "./LlamaCppTokenizer.js";
@@ -2,9 +2,10 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.OpenAIImageGenerationResponseFormat = exports.OpenAIImageGenerationModel = exports.calculateOpenAIImageGenerationCostInMillicents = void 0;
4
4
  const zod_1 = require("zod");
5
- const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
6
5
  const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
7
6
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
7
+ const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
8
+ const PromptFormatImageGenerationModel_js_1 = require("../../model-function/generate-image/PromptFormatImageGenerationModel.cjs");
8
9
  const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
9
10
  const OpenAIError_js_1 = require("./OpenAIError.cjs");
10
11
  /**
@@ -77,6 +78,12 @@ class OpenAIImageGenerationModel extends AbstractModel_js_1.AbstractModel {
77
78
  base64Image: response.data[0].b64_json,
78
79
  };
79
80
  }
81
+ withPromptFormat(promptFormat) {
82
+ return new PromptFormatImageGenerationModel_js_1.PromptFormatImageGenerationModel({
83
+ model: this,
84
+ promptFormat,
85
+ });
86
+ }
80
87
  withSettings(additionalSettings) {
81
88
  return new OpenAIImageGenerationModel(Object.assign({}, this.settings, additionalSettings));
82
89
  }
@@ -1,9 +1,11 @@
1
1
  import { z } from "zod";
2
- import { AbstractModel } from "../../model-function/AbstractModel.js";
2
+ import { FunctionOptions } from "../../core/FunctionOptions.js";
3
3
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
- import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
5
4
  import { ResponseHandler } from "../../core/api/postToApi.js";
6
- import { FunctionOptions } from "../../core/FunctionOptions.js";
5
+ import { AbstractModel } from "../../model-function/AbstractModel.js";
6
+ import { PromptFormat } from "../../model-function/PromptFormat.js";
7
+ import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
8
+ import { PromptFormatImageGenerationModel } from "../../model-function/generate-image/PromptFormatImageGenerationModel.js";
7
9
  export interface OpenAIImageGenerationCallSettings {
8
10
  n?: number;
9
11
  size?: "256x256" | "512x512" | "1024x1024";
@@ -43,6 +45,7 @@ export declare class OpenAIImageGenerationModel extends AbstractModel<OpenAIImag
43
45
  };
44
46
  base64Image: string;
45
47
  }>;
48
+ withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatImageGenerationModel<INPUT_PROMPT, string, OpenAIImageGenerationSettings, this>;
46
49
  withSettings(additionalSettings: Partial<OpenAIImageGenerationSettings>): this;
47
50
  }
48
51
  export type OpenAIImageGenerationResponseFormatType<T> = {
@@ -1,7 +1,8 @@
1
1
  import { z } from "zod";
2
- import { AbstractModel } from "../../model-function/AbstractModel.js";
3
2
  import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
4
3
  import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
4
+ import { AbstractModel } from "../../model-function/AbstractModel.js";
5
+ import { PromptFormatImageGenerationModel } from "../../model-function/generate-image/PromptFormatImageGenerationModel.js";
5
6
  import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
6
7
  import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
7
8
  /**
@@ -73,6 +74,12 @@ export class OpenAIImageGenerationModel extends AbstractModel {
73
74
  base64Image: response.data[0].b64_json,
74
75
  };
75
76
  }
77
+ withPromptFormat(promptFormat) {
78
+ return new PromptFormatImageGenerationModel({
79
+ model: this,
80
+ promptFormat,
81
+ });
82
+ }
76
83
  withSettings(additionalSettings) {
77
84
  return new OpenAIImageGenerationModel(Object.assign({}, this.settings, additionalSettings));
78
85
  }
@@ -11,8 +11,9 @@ const postToApi_js_1 = require("../../core/api/postToApi.cjs");
11
11
  const AsyncQueue_js_1 = require("../../event-source/AsyncQueue.cjs");
12
12
  const parseEventSourceStream_js_1 = require("../../event-source/parseEventSourceStream.cjs");
13
13
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
14
+ const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
15
+ const TextPromptFormat_js_1 = require("../../model-function/generate-text/TextPromptFormat.cjs");
14
16
  const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
15
- const PromptFormatTextStreamingModel_js_1 = require("../../prompt/PromptFormatTextStreamingModel.cjs");
16
17
  const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
17
18
  const OpenAIError_js_1 = require("./OpenAIError.cjs");
18
19
  const TikTokenTokenizer_js_1 = require("./TikTokenTokenizer.cjs");
@@ -238,6 +239,18 @@ class OpenAITextGenerationModel extends AbstractModel_js_1.AbstractModel {
238
239
  responseFormat: exports.OpenAITextResponseFormat.deltaIterable,
239
240
  });
240
241
  }
242
+ /**
243
+ * Returns this model with an instruction prompt format.
244
+ */
245
+ withInstructionPrompt() {
246
+ return this.withPromptFormat((0, TextPromptFormat_js_1.mapInstructionPromptToTextFormat)());
247
+ }
248
+ /**
249
+ * Returns this model with a chat prompt format.
250
+ */
251
+ withChatPrompt(options) {
252
+ return this.withPromptFormat((0, TextPromptFormat_js_1.mapChatPromptToTextFormat)(options));
253
+ }
241
254
  withPromptFormat(promptFormat) {
242
255
  return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
243
256
  model: this.withSettings({
@@ -4,9 +4,9 @@ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
4
  import { ResponseHandler } from "../../core/api/postToApi.js";
5
5
  import { AbstractModel } from "../../model-function/AbstractModel.js";
6
6
  import { Delta } from "../../model-function/Delta.js";
7
+ import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
7
8
  import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
8
- import { PromptFormat } from "../../prompt/PromptFormat.js";
9
- import { PromptFormatTextStreamingModel } from "../../prompt/PromptFormatTextStreamingModel.js";
9
+ import { TextGenerationPromptFormat } from "../../model-function/generate-text/TextGenerationPromptFormat.js";
10
10
  import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
11
11
  /**
12
12
  * @see https://platform.openai.com/docs/models/
@@ -171,7 +171,18 @@ export declare class OpenAITextGenerationModel extends AbstractModel<OpenAITextG
171
171
  };
172
172
  }>;
173
173
  doStreamText(prompt: string, options?: FunctionOptions): Promise<AsyncIterable<Delta<string>>>;
174
- withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, OpenAITextGenerationModelSettings, this>;
174
+ /**
175
+ * Returns this model with an instruction prompt format.
176
+ */
177
+ withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../index.js").InstructionPrompt, string, OpenAITextGenerationModelSettings, this>;
178
+ /**
179
+ * Returns this model with a chat prompt format.
180
+ */
181
+ withChatPrompt(options?: {
182
+ user?: string;
183
+ ai?: string;
184
+ }): PromptFormatTextStreamingModel<import("../../index.js").ChatPrompt, string, OpenAITextGenerationModelSettings, this>;
185
+ withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, string>): PromptFormatTextStreamingModel<INPUT_PROMPT, string, OpenAITextGenerationModelSettings, this>;
175
186
  withSettings(additionalSettings: Partial<OpenAITextGenerationModelSettings>): this;
176
187
  }
177
188
  declare const openAITextGenerationResponseSchema: z.ZodObject<{
@@ -5,8 +5,9 @@ import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postTo
5
5
  import { AsyncQueue } from "../../event-source/AsyncQueue.js";
6
6
  import { parseEventSourceStream } from "../../event-source/parseEventSourceStream.js";
7
7
  import { AbstractModel } from "../../model-function/AbstractModel.js";
8
+ import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
9
+ import { mapChatPromptToTextFormat, mapInstructionPromptToTextFormat, } from "../../model-function/generate-text/TextPromptFormat.js";
8
10
  import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
9
- import { PromptFormatTextStreamingModel } from "../../prompt/PromptFormatTextStreamingModel.js";
10
11
  import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
11
12
  import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
12
13
  import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
@@ -229,6 +230,18 @@ export class OpenAITextGenerationModel extends AbstractModel {
229
230
  responseFormat: OpenAITextResponseFormat.deltaIterable,
230
231
  });
231
232
  }
233
+ /**
234
+ * Returns this model with an instruction prompt format.
235
+ */
236
+ withInstructionPrompt() {
237
+ return this.withPromptFormat(mapInstructionPromptToTextFormat());
238
+ }
239
+ /**
240
+ * Returns this model with a chat prompt format.
241
+ */
242
+ withChatPrompt(options) {
243
+ return this.withPromptFormat(mapChatPromptToTextFormat(options));
244
+ }
232
245
  withPromptFormat(promptFormat) {
233
246
  return new PromptFormatTextStreamingModel({
234
247
  model: this.withSettings({
@@ -11,10 +11,11 @@ const postToApi_js_1 = require("../../../core/api/postToApi.cjs");
11
11
  const AbstractModel_js_1 = require("../../../model-function/AbstractModel.cjs");
12
12
  const StructureParseError_js_1 = require("../../../model-function/generate-structure/StructureParseError.cjs");
13
13
  const parsePartialJson_js_1 = require("../../../model-function/generate-structure/parsePartialJson.cjs");
14
- const PromptFormatTextStreamingModel_js_1 = require("../../../prompt/PromptFormatTextStreamingModel.cjs");
14
+ const PromptFormatTextStreamingModel_js_1 = require("../../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
15
15
  const OpenAIApiConfiguration_js_1 = require("../OpenAIApiConfiguration.cjs");
16
16
  const OpenAIError_js_1 = require("../OpenAIError.cjs");
17
17
  const TikTokenTokenizer_js_1 = require("../TikTokenTokenizer.cjs");
18
+ const OpenAIChatPromptFormat_js_1 = require("./OpenAIChatPromptFormat.cjs");
18
19
  const OpenAIChatStreamIterable_js_1 = require("./OpenAIChatStreamIterable.cjs");
19
20
  const countOpenAIChatMessageTokens_js_1 = require("./countOpenAIChatMessageTokens.cjs");
20
21
  /*
@@ -341,6 +342,18 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
341
342
  totalTokens: response.usage.total_tokens,
342
343
  };
343
344
  }
345
+ /**
346
+ * Returns this model with an instruction prompt format.
347
+ */
348
+ withInstructionPrompt() {
349
+ return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.mapInstructionPromptToOpenAIChatFormat)());
350
+ }
351
+ /**
352
+ * Returns this model with a chat prompt format.
353
+ */
354
+ withChatPrompt() {
355
+ return this.withPromptFormat((0, OpenAIChatPromptFormat_js_1.mapChatPromptToOpenAIChatFormat)());
356
+ }
344
357
  withPromptFormat(promptFormat) {
345
358
  return new PromptFormatTextStreamingModel_js_1.PromptFormatTextStreamingModel({
346
359
  model: this.withSettings({
@@ -7,9 +7,9 @@ import { AbstractModel } from "../../../model-function/AbstractModel.js";
7
7
  import { Delta } from "../../../model-function/Delta.js";
8
8
  import { StructureGenerationModel } from "../../../model-function/generate-structure/StructureGenerationModel.js";
9
9
  import { StructureOrTextGenerationModel } from "../../../model-function/generate-structure/StructureOrTextGenerationModel.js";
10
+ import { PromptFormatTextStreamingModel } from "../../../model-function/generate-text/PromptFormatTextStreamingModel.js";
10
11
  import { TextGenerationModelSettings, TextStreamingModel } from "../../../model-function/generate-text/TextGenerationModel.js";
11
- import { PromptFormat } from "../../../prompt/PromptFormat.js";
12
- import { PromptFormatTextStreamingModel } from "../../../prompt/PromptFormatTextStreamingModel.js";
12
+ import { TextGenerationPromptFormat } from "../../../model-function/generate-text/TextGenerationPromptFormat.js";
13
13
  import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
14
14
  import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
15
15
  export declare const OPENAI_CHAT_MODELS: {
@@ -307,7 +307,15 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
307
307
  completionTokens: number;
308
308
  totalTokens: number;
309
309
  };
310
- withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, OpenAIChatMessage[]>): PromptFormatTextStreamingModel<INPUT_PROMPT, OpenAIChatMessage[], OpenAIChatSettings, this>;
310
+ /**
311
+ * Returns this model with an instruction prompt format.
312
+ */
313
+ withInstructionPrompt(): PromptFormatTextStreamingModel<import("../../../index.js").InstructionPrompt, OpenAIChatMessage[], OpenAIChatSettings, this>;
314
+ /**
315
+ * Returns this model with a chat prompt format.
316
+ */
317
+ withChatPrompt(): PromptFormatTextStreamingModel<import("../../../index.js").ChatPrompt, OpenAIChatMessage[], OpenAIChatSettings, this>;
318
+ withPromptFormat<INPUT_PROMPT>(promptFormat: TextGenerationPromptFormat<INPUT_PROMPT, OpenAIChatMessage[]>): PromptFormatTextStreamingModel<INPUT_PROMPT, OpenAIChatMessage[], OpenAIChatSettings, this>;
311
319
  withSettings(additionalSettings: Partial<OpenAIChatSettings>): this;
312
320
  }
313
321
  declare const openAIChatResponseSchema: z.ZodObject<{
@@ -5,10 +5,11 @@ import { createJsonResponseHandler, postJsonToApi, } from "../../../core/api/pos
5
5
  import { AbstractModel } from "../../../model-function/AbstractModel.js";
6
6
  import { StructureParseError } from "../../../model-function/generate-structure/StructureParseError.js";
7
7
  import { parsePartialJson } from "../../../model-function/generate-structure/parsePartialJson.js";
8
- import { PromptFormatTextStreamingModel } from "../../../prompt/PromptFormatTextStreamingModel.js";
8
+ import { PromptFormatTextStreamingModel } from "../../../model-function/generate-text/PromptFormatTextStreamingModel.js";
9
9
  import { OpenAIApiConfiguration } from "../OpenAIApiConfiguration.js";
10
10
  import { failedOpenAICallResponseHandler } from "../OpenAIError.js";
11
11
  import { TikTokenTokenizer } from "../TikTokenTokenizer.js";
12
+ import { mapChatPromptToOpenAIChatFormat, mapInstructionPromptToOpenAIChatFormat, } from "./OpenAIChatPromptFormat.js";
12
13
  import { createOpenAIChatDeltaIterableQueue } from "./OpenAIChatStreamIterable.js";
13
14
  import { countOpenAIChatPromptTokens } from "./countOpenAIChatMessageTokens.js";
14
15
  /*
@@ -332,6 +333,18 @@ export class OpenAIChatModel extends AbstractModel {
332
333
  totalTokens: response.usage.total_tokens,
333
334
  };
334
335
  }
336
+ /**
337
+ * Returns this model with an instruction prompt format.
338
+ */
339
+ withInstructionPrompt() {
340
+ return this.withPromptFormat(mapInstructionPromptToOpenAIChatFormat());
341
+ }
342
+ /**
343
+ * Returns this model with a chat prompt format.
344
+ */
345
+ withChatPrompt() {
346
+ return this.withPromptFormat(mapChatPromptToOpenAIChatFormat());
347
+ }
335
348
  withPromptFormat(promptFormat) {
336
349
  return new PromptFormatTextStreamingModel({
337
350
  model: this.withSettings({
@@ -1,7 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.mapChatPromptToOpenAIChatFormat = exports.mapInstructionPromptToOpenAIChatFormat = void 0;
4
- const validateChatPrompt_js_1 = require("../../../prompt/chat/validateChatPrompt.cjs");
4
+ const validateChatPrompt_js_1 = require("../../../model-function/generate-text/validateChatPrompt.cjs");
5
5
  /**
6
6
  * Formats an instruction prompt as an OpenAI chat prompt.
7
7
  */
@@ -1,12 +1,12 @@
1
+ import { ChatPrompt } from "../../../model-function/generate-text/ChatPrompt.js";
2
+ import { InstructionPrompt } from "../../../model-function/generate-text/InstructionPrompt.js";
3
+ import { TextGenerationPromptFormat } from "../../../model-function/generate-text/TextGenerationPromptFormat.js";
1
4
  import { OpenAIChatMessage } from "./OpenAIChatMessage.js";
2
- import { ChatPrompt } from "../../../prompt/chat/ChatPrompt.js";
3
- import { InstructionPrompt } from "../../../prompt/InstructionPrompt.js";
4
- import { PromptFormat } from "../../../prompt/PromptFormat.js";
5
5
  /**
6
6
  * Formats an instruction prompt as an OpenAI chat prompt.
7
7
  */
8
- export declare function mapInstructionPromptToOpenAIChatFormat(): PromptFormat<InstructionPrompt, Array<OpenAIChatMessage>>;
8
+ export declare function mapInstructionPromptToOpenAIChatFormat(): TextGenerationPromptFormat<InstructionPrompt, Array<OpenAIChatMessage>>;
9
9
  /**
10
10
  * Formats a chat prompt as an OpenAI chat prompt.
11
11
  */
12
- export declare function mapChatPromptToOpenAIChatFormat(): PromptFormat<ChatPrompt, Array<OpenAIChatMessage>>;
12
+ export declare function mapChatPromptToOpenAIChatFormat(): TextGenerationPromptFormat<ChatPrompt, Array<OpenAIChatMessage>>;
@@ -1,4 +1,4 @@
1
- import { validateChatPrompt } from "../../../prompt/chat/validateChatPrompt.js";
1
+ import { validateChatPrompt } from "../../../model-function/generate-text/validateChatPrompt.js";
2
2
  /**
3
3
  * Formats an instruction prompt as an OpenAI chat prompt.
4
4
  */
@@ -5,8 +5,10 @@ const zod_1 = require("zod");
5
5
  const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
6
6
  const postToApi_js_1 = require("../../core/api/postToApi.cjs");
7
7
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
8
+ const PromptFormatImageGenerationModel_js_1 = require("../../model-function/generate-image/PromptFormatImageGenerationModel.cjs");
8
9
  const StabilityApiConfiguration_js_1 = require("./StabilityApiConfiguration.cjs");
9
10
  const StabilityError_js_1 = require("./StabilityError.cjs");
11
+ const StabilityImageGenerationPrompt_js_1 = require("./StabilityImageGenerationPrompt.cjs");
10
12
  /**
11
13
  * Create an image generation model that calls the Stability AI image generation API.
12
14
  *
@@ -76,6 +78,15 @@ class StabilityImageGenerationModel extends AbstractModel_js_1.AbstractModel {
76
78
  base64Image: response.artifacts[0].base64,
77
79
  };
78
80
  }
81
+ withBasicPrompt() {
82
+ return this.withPromptFormat((0, StabilityImageGenerationPrompt_js_1.mapBasicPromptToStabilityFormat)());
83
+ }
84
+ withPromptFormat(promptFormat) {
85
+ return new PromptFormatImageGenerationModel_js_1.PromptFormatImageGenerationModel({
86
+ model: this,
87
+ promptFormat,
88
+ });
89
+ }
79
90
  withSettings(additionalSettings) {
80
91
  return new StabilityImageGenerationModel(Object.assign({}, this.settings, additionalSettings));
81
92
  }
@@ -2,7 +2,10 @@ import { z } from "zod";
2
2
  import { FunctionOptions } from "../../core/FunctionOptions.js";
3
3
  import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
4
  import { AbstractModel } from "../../model-function/AbstractModel.js";
5
+ import { PromptFormat } from "../../model-function/PromptFormat.js";
5
6
  import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-function/generate-image/ImageGenerationModel.js";
7
+ import { PromptFormatImageGenerationModel } from "../../model-function/generate-image/PromptFormatImageGenerationModel.js";
8
+ import { StabilityImageGenerationPrompt } from "./StabilityImageGenerationPrompt.js";
6
9
  /**
7
10
  * Create an image generation model that calls the Stability AI image generation API.
8
11
  *
@@ -25,27 +28,29 @@ import { ImageGenerationModel, ImageGenerationModelSettings } from "../../model-
25
28
  * ]
26
29
  * );
27
30
  */
28
- export declare class StabilityImageGenerationModel extends AbstractModel<StabilityImageGenerationModelSettings> implements ImageGenerationModel<StabilityImageGenerationPrompt, StabilityImageGenerationModelSettings> {
29
- constructor(settings: StabilityImageGenerationModelSettings);
31
+ export declare class StabilityImageGenerationModel extends AbstractModel<StabilityImageGenerationSettings> implements ImageGenerationModel<StabilityImageGenerationPrompt, StabilityImageGenerationSettings> {
32
+ constructor(settings: StabilityImageGenerationSettings);
30
33
  readonly provider: "stability";
31
34
  get modelName(): StabilityImageGenerationModelType;
32
35
  callAPI(input: StabilityImageGenerationPrompt, options?: FunctionOptions): Promise<StabilityImageGenerationResponse>;
33
- get settingsForEvent(): Partial<StabilityImageGenerationModelSettings>;
36
+ get settingsForEvent(): Partial<StabilityImageGenerationSettings>;
34
37
  doGenerateImage(prompt: StabilityImageGenerationPrompt, options?: FunctionOptions): Promise<{
35
38
  response: {
36
39
  artifacts: {
37
- seed: number;
38
40
  base64: string;
41
+ seed: number;
39
42
  finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
40
43
  }[];
41
44
  };
42
45
  base64Image: string;
43
46
  }>;
44
- withSettings(additionalSettings: StabilityImageGenerationModelSettings): this;
47
+ withBasicPrompt(): PromptFormatImageGenerationModel<string, StabilityImageGenerationPrompt, StabilityImageGenerationSettings, this>;
48
+ withPromptFormat<INPUT_PROMPT>(promptFormat: PromptFormat<INPUT_PROMPT, StabilityImageGenerationPrompt>): PromptFormatImageGenerationModel<INPUT_PROMPT, StabilityImageGenerationPrompt, StabilityImageGenerationSettings, this>;
49
+ withSettings(additionalSettings: StabilityImageGenerationSettings): this;
45
50
  }
46
51
  declare const stabilityImageGenerationModels: readonly ["stable-diffusion-v1-5", "stable-diffusion-512-v2-1", "stable-diffusion-xl-1024-v0-9", "stable-diffusion-xl-1024-v1-0"];
47
52
  export type StabilityImageGenerationModelType = (typeof stabilityImageGenerationModels)[number] | (string & {});
48
- export interface StabilityImageGenerationModelSettings extends ImageGenerationModelSettings {
53
+ export interface StabilityImageGenerationSettings extends ImageGenerationModelSettings {
49
54
  api?: ApiConfiguration;
50
55
  model: StabilityImageGenerationModelType;
51
56
  height?: number;
@@ -64,32 +69,28 @@ declare const stabilityImageGenerationResponseSchema: z.ZodObject<{
64
69
  seed: z.ZodNumber;
65
70
  finishReason: z.ZodEnum<["SUCCESS", "ERROR", "CONTENT_FILTERED"]>;
66
71
  }, "strip", z.ZodTypeAny, {
67
- seed: number;
68
72
  base64: string;
73
+ seed: number;
69
74
  finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
70
75
  }, {
71
- seed: number;
72
76
  base64: string;
77
+ seed: number;
73
78
  finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
74
79
  }>, "many">;
75
80
  }, "strip", z.ZodTypeAny, {
76
81
  artifacts: {
77
- seed: number;
78
82
  base64: string;
83
+ seed: number;
79
84
  finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
80
85
  }[];
81
86
  }, {
82
87
  artifacts: {
83
- seed: number;
84
88
  base64: string;
89
+ seed: number;
85
90
  finishReason: "SUCCESS" | "ERROR" | "CONTENT_FILTERED";
86
91
  }[];
87
92
  }>;
88
93
  export type StabilityImageGenerationResponse = z.infer<typeof stabilityImageGenerationResponseSchema>;
89
94
  export type StabilityImageGenerationStylePreset = "enhance" | "anime" | "photographic" | "digital-art" | "comic-book" | "fantasy-art" | "line-art" | "analog-film" | "neon-punk" | "isometric" | "low-poly" | "origami" | "modeling-compound" | "cinematic" | "3d-model" | "pixel-art" | "tile-texture";
90
95
  export type StabilityImageGenerationSampler = "DDIM" | "DDPM" | "K_DPMPP_2M" | "K_DPMPP_2S_ANCESTRAL" | "K_DPM_2" | "K_DPM_2_ANCESTRAL" | "K_EULER" | "K_EULER_ANCESTRAL" | "K_HEUN" | "K_LMS";
91
- export type StabilityImageGenerationPrompt = Array<{
92
- text: string;
93
- weight?: number;
94
- }>;
95
96
  export {};