modelfusion 0.88.0 → 0.89.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/model-provider/openai/OpenAIFacade.cjs +7 -5
- package/model-provider/openai/OpenAIFacade.d.ts +7 -5
- package/model-provider/openai/OpenAIFacade.js +7 -5
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +3 -6
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +2 -1
- package/model-provider/openai/OpenAITextEmbeddingModel.js +3 -6
- package/model-provider/openai/chat/AbstractOpenAIChatModel.cjs +1 -1
- package/model-provider/openai/chat/AbstractOpenAIChatModel.d.ts +7 -7
- package/model-provider/openai/chat/AbstractOpenAIChatModel.js +1 -1
- package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +1 -1
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +1 -1
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +1 -1
- package/package.json +2 -2
@@ -44,12 +44,14 @@ exports.CompletionTextGenerator = CompletionTextGenerator;
|
|
44
44
|
* maxCompletionTokens: 500,
|
45
45
|
* });
|
46
46
|
*
|
47
|
-
* const text = await generateText(
|
47
|
+
* const text = await generateText(
|
48
48
|
* model,
|
49
|
-
*
|
50
|
-
*
|
51
|
-
*
|
52
|
-
*
|
49
|
+
* [
|
50
|
+
* OpenAIChatMessage.system(
|
51
|
+
* "Write a short story about a robot learning to love:"
|
52
|
+
* ),
|
53
|
+
* ]
|
54
|
+
* );
|
53
55
|
*/
|
54
56
|
function ChatTextGenerator(settings) {
|
55
57
|
return new OpenAIChatModel_js_1.OpenAIChatModel(settings);
|
@@ -38,12 +38,14 @@ export declare function CompletionTextGenerator(settings: OpenAICompletionModelS
|
|
38
38
|
* maxCompletionTokens: 500,
|
39
39
|
* });
|
40
40
|
*
|
41
|
-
* const text = await generateText(
|
41
|
+
* const text = await generateText(
|
42
42
|
* model,
|
43
|
-
*
|
44
|
-
*
|
45
|
-
*
|
46
|
-
*
|
43
|
+
* [
|
44
|
+
* OpenAIChatMessage.system(
|
45
|
+
* "Write a short story about a robot learning to love:"
|
46
|
+
* ),
|
47
|
+
* ]
|
48
|
+
* );
|
47
49
|
*/
|
48
50
|
export declare function ChatTextGenerator(settings: OpenAIChatSettings): OpenAIChatModel;
|
49
51
|
/**
|
@@ -40,12 +40,14 @@ export function CompletionTextGenerator(settings) {
|
|
40
40
|
* maxCompletionTokens: 500,
|
41
41
|
* });
|
42
42
|
*
|
43
|
-
* const text = await generateText(
|
43
|
+
* const text = await generateText(
|
44
44
|
* model,
|
45
|
-
*
|
46
|
-
*
|
47
|
-
*
|
48
|
-
*
|
45
|
+
* [
|
46
|
+
* OpenAIChatMessage.system(
|
47
|
+
* "Write a short story about a robot learning to love:"
|
48
|
+
* ),
|
49
|
+
* ]
|
50
|
+
* );
|
49
51
|
*/
|
50
52
|
export function ChatTextGenerator(settings) {
|
51
53
|
return new OpenAIChatModel(settings);
|
@@ -51,12 +51,6 @@ class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
|
|
51
51
|
writable: true,
|
52
52
|
value: "openai"
|
53
53
|
});
|
54
|
-
Object.defineProperty(this, "maxValuesPerCall", {
|
55
|
-
enumerable: true,
|
56
|
-
configurable: true,
|
57
|
-
writable: true,
|
58
|
-
value: 2048
|
59
|
-
});
|
60
54
|
Object.defineProperty(this, "isParallelizable", {
|
61
55
|
enumerable: true,
|
62
56
|
configurable: true,
|
@@ -90,6 +84,9 @@ class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
|
|
90
84
|
get modelName() {
|
91
85
|
return this.settings.model;
|
92
86
|
}
|
87
|
+
get maxValuesPerCall() {
|
88
|
+
return this.settings.maxValuesPerCall ?? 2048;
|
89
|
+
}
|
93
90
|
async countTokens(input) {
|
94
91
|
return (0, countTokens_js_1.countTokens)(this.tokenizer, input);
|
95
92
|
}
|
@@ -19,6 +19,7 @@ export declare const calculateOpenAIEmbeddingCostInMillicents: ({ model, respons
|
|
19
19
|
}) => number;
|
20
20
|
export interface OpenAITextEmbeddingModelSettings extends EmbeddingModelSettings {
|
21
21
|
api?: ApiConfiguration;
|
22
|
+
maxValuesPerCall?: number | undefined;
|
22
23
|
model: OpenAITextEmbeddingModelType;
|
23
24
|
isUserIdForwardingEnabled?: boolean;
|
24
25
|
}
|
@@ -40,7 +41,7 @@ export declare class OpenAITextEmbeddingModel extends AbstractModel<OpenAITextEm
|
|
40
41
|
constructor(settings: OpenAITextEmbeddingModelSettings);
|
41
42
|
readonly provider: "openai";
|
42
43
|
get modelName(): "text-embedding-ada-002";
|
43
|
-
|
44
|
+
get maxValuesPerCall(): number;
|
44
45
|
readonly isParallelizable = true;
|
45
46
|
readonly embeddingDimensions: number;
|
46
47
|
readonly tokenizer: TikTokenTokenizer;
|
@@ -46,12 +46,6 @@ export class OpenAITextEmbeddingModel extends AbstractModel {
|
|
46
46
|
writable: true,
|
47
47
|
value: "openai"
|
48
48
|
});
|
49
|
-
Object.defineProperty(this, "maxValuesPerCall", {
|
50
|
-
enumerable: true,
|
51
|
-
configurable: true,
|
52
|
-
writable: true,
|
53
|
-
value: 2048
|
54
|
-
});
|
55
49
|
Object.defineProperty(this, "isParallelizable", {
|
56
50
|
enumerable: true,
|
57
51
|
configurable: true,
|
@@ -85,6 +79,9 @@ export class OpenAITextEmbeddingModel extends AbstractModel {
|
|
85
79
|
get modelName() {
|
86
80
|
return this.settings.model;
|
87
81
|
}
|
82
|
+
get maxValuesPerCall() {
|
83
|
+
return this.settings.maxValuesPerCall ?? 2048;
|
84
|
+
}
|
88
85
|
async countTokens(input) {
|
89
86
|
return countTokens(this.tokenizer, input);
|
90
87
|
}
|
@@ -165,7 +165,7 @@ const openAIChatResponseSchema = zod_1.z.object({
|
|
165
165
|
})),
|
166
166
|
created: zod_1.z.number(),
|
167
167
|
model: zod_1.z.string(),
|
168
|
-
system_fingerprint: zod_1.z.string().optional(),
|
168
|
+
system_fingerprint: zod_1.z.string().optional().nullable(),
|
169
169
|
object: zod_1.z.literal("chat.completion"),
|
170
170
|
usage: zod_1.z.object({
|
171
171
|
prompt_tokens: zod_1.z.number(),
|
@@ -96,7 +96,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
|
|
96
96
|
finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
|
97
97
|
}[];
|
98
98
|
created: number;
|
99
|
-
system_fingerprint?: string | undefined;
|
99
|
+
system_fingerprint?: string | null | undefined;
|
100
100
|
};
|
101
101
|
text: string;
|
102
102
|
usage: {
|
@@ -138,7 +138,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
|
|
138
138
|
finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
|
139
139
|
}[];
|
140
140
|
created: number;
|
141
|
-
system_fingerprint?: string | undefined;
|
141
|
+
system_fingerprint?: string | null | undefined;
|
142
142
|
};
|
143
143
|
toolCall: {
|
144
144
|
id: string;
|
@@ -182,7 +182,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
|
|
182
182
|
finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
|
183
183
|
}[];
|
184
184
|
created: number;
|
185
|
-
system_fingerprint?: string | undefined;
|
185
|
+
system_fingerprint?: string | null | undefined;
|
186
186
|
};
|
187
187
|
text: string | null;
|
188
188
|
toolCalls: {
|
@@ -323,7 +323,7 @@ declare const openAIChatResponseSchema: z.ZodObject<{
|
|
323
323
|
}>, "many">;
|
324
324
|
created: z.ZodNumber;
|
325
325
|
model: z.ZodString;
|
326
|
-
system_fingerprint: z.ZodOptional<z.ZodString>;
|
326
|
+
system_fingerprint: z.ZodNullable<z.ZodOptional<z.ZodString>>;
|
327
327
|
object: z.ZodLiteral<"chat.completion">;
|
328
328
|
usage: z.ZodObject<{
|
329
329
|
prompt_tokens: z.ZodNumber;
|
@@ -369,7 +369,7 @@ declare const openAIChatResponseSchema: z.ZodObject<{
|
|
369
369
|
finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
|
370
370
|
}[];
|
371
371
|
created: number;
|
372
|
-
system_fingerprint?: string | undefined;
|
372
|
+
system_fingerprint?: string | null | undefined;
|
373
373
|
}, {
|
374
374
|
object: "chat.completion";
|
375
375
|
usage: {
|
@@ -401,7 +401,7 @@ declare const openAIChatResponseSchema: z.ZodObject<{
|
|
401
401
|
finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
|
402
402
|
}[];
|
403
403
|
created: number;
|
404
|
-
system_fingerprint?: string | undefined;
|
404
|
+
system_fingerprint?: string | null | undefined;
|
405
405
|
}>;
|
406
406
|
export type OpenAIChatResponse = z.infer<typeof openAIChatResponseSchema>;
|
407
407
|
export type OpenAIChatResponseFormatType<T> = {
|
@@ -445,7 +445,7 @@ export declare const OpenAIChatResponseFormat: {
|
|
445
445
|
finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
|
446
446
|
}[];
|
447
447
|
created: number;
|
448
|
-
system_fingerprint?: string | undefined;
|
448
|
+
system_fingerprint?: string | null | undefined;
|
449
449
|
}>;
|
450
450
|
};
|
451
451
|
/**
|
@@ -161,7 +161,7 @@ const openAIChatResponseSchema = z.object({
|
|
161
161
|
})),
|
162
162
|
created: z.number(),
|
163
163
|
model: z.string(),
|
164
|
-
system_fingerprint: z.string().optional(),
|
164
|
+
system_fingerprint: z.string().optional().nullable(),
|
165
165
|
object: z.literal("chat.completion"),
|
166
166
|
usage: z.object({
|
167
167
|
prompt_tokens: z.number(),
|
@@ -74,7 +74,7 @@ OpenAIChatSettings> {
|
|
74
74
|
finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
|
75
75
|
}[];
|
76
76
|
created: number;
|
77
|
-
system_fingerprint?: string | undefined;
|
77
|
+
system_fingerprint?: string | null | undefined;
|
78
78
|
};
|
79
79
|
valueText: string;
|
80
80
|
value: any;
|
@@ -44,7 +44,7 @@ const chatCompletionChunkSchema = zod_1.z.object({
|
|
44
44
|
})),
|
45
45
|
created: zod_1.z.number(),
|
46
46
|
model: zod_1.z.string(),
|
47
|
-
system_fingerprint: zod_1.z.string().optional(),
|
47
|
+
system_fingerprint: zod_1.z.string().optional().nullable(),
|
48
48
|
});
|
49
49
|
const chatResponseStreamEventSchema = new ZodSchema_js_1.ZodSchema(zod_1.z.union([
|
50
50
|
chatCompletionChunkSchema,
|
@@ -41,7 +41,7 @@ const chatCompletionChunkSchema = z.object({
|
|
41
41
|
})),
|
42
42
|
created: z.number(),
|
43
43
|
model: z.string(),
|
44
|
-
system_fingerprint: z.string().optional(),
|
44
|
+
system_fingerprint: z.string().optional().nullable(),
|
45
45
|
});
|
46
46
|
const chatResponseStreamEventSchema = new ZodSchema(z.union([
|
47
47
|
chatCompletionChunkSchema,
|
package/package.json
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
{
|
2
2
|
"name": "modelfusion",
|
3
3
|
"description": "The TypeScript library for building multi-modal AI applications.",
|
4
|
-
"version": "0.88.0",
|
4
|
+
"version": "0.89.1",
|
5
5
|
"author": "Lars Grammel",
|
6
6
|
"license": "MIT",
|
7
7
|
"keywords": [
|
@@ -74,7 +74,7 @@
|
|
74
74
|
"secure-json-parse": "2.7.0",
|
75
75
|
"ws": "8.14.2",
|
76
76
|
"zod": "3.22.4",
|
77
|
-
"zod-to-json-schema": "3.22.2"
|
77
|
+
"zod-to-json-schema": "3.22.1"
|
78
78
|
},
|
79
79
|
"devDependencies": {
|
80
80
|
"@types/node": "18.11.9",
|