modelfusion 0.95.0 → 0.96.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -3
- package/core/api/postToApi.cjs +30 -1
- package/core/api/postToApi.d.ts +7 -1
- package/core/api/postToApi.js +29 -1
- package/model-provider/index.cjs +1 -0
- package/model-provider/index.d.ts +1 -0
- package/model-provider/index.js +1 -0
- package/model-provider/mistral/MistralApiConfiguration.cjs +22 -0
- package/model-provider/mistral/MistralApiConfiguration.d.ts +12 -0
- package/model-provider/mistral/MistralApiConfiguration.js +18 -0
- package/model-provider/mistral/MistralError.cjs +17 -0
- package/model-provider/mistral/MistralError.d.ts +13 -0
- package/model-provider/mistral/MistralError.js +14 -0
- package/model-provider/mistral/MistralFacade.cjs +18 -0
- package/model-provider/mistral/MistralFacade.d.ts +6 -0
- package/model-provider/mistral/MistralFacade.js +12 -0
- package/model-provider/mistral/MistralPromptTemplate.cjs +64 -0
- package/model-provider/mistral/MistralPromptTemplate.d.ts +16 -0
- package/model-provider/mistral/MistralPromptTemplate.js +58 -0
- package/model-provider/mistral/MistralTextEmbeddingModel.cjs +100 -0
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +106 -0
- package/model-provider/mistral/MistralTextEmbeddingModel.js +96 -0
- package/model-provider/mistral/MistralTextGenerationModel.cjs +254 -0
- package/model-provider/mistral/MistralTextGenerationModel.d.ts +231 -0
- package/model-provider/mistral/MistralTextGenerationModel.js +250 -0
- package/model-provider/mistral/index.cjs +34 -0
- package/model-provider/mistral/index.d.ts +6 -0
- package/model-provider/mistral/index.js +5 -0
- package/model-provider/ollama/OllamaError.cjs +5 -30
- package/model-provider/ollama/OllamaError.js +5 -29
- package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +1 -7
- package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts +0 -1
- package/model-provider/ollama/OllamaTextEmbeddingModel.js +1 -7
- package/model-provider/openai/OpenAICompletionModel.d.ts +4 -4
- package/model-provider/openai/OpenAIError.cjs +9 -34
- package/model-provider/openai/OpenAIError.d.ts +1 -3
- package/model-provider/openai/OpenAIError.js +9 -33
- package/model-provider/openai/chat/AbstractOpenAIChatModel.d.ts +6 -6
- package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts +1 -1
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs +2 -1
- package/model-provider/whispercpp/WhisperCppTranscriptionModel.js +2 -1
- package/package.json +1 -1
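The bulk of the new code is a Mistral model provider (`MistralApiConfiguration`, `MistralFacade`, `MistralPromptTemplate`, `MistralTextGenerationModel`, `MistralTextEmbeddingModel`, `MistralError`), exported through `model-provider/index.*`. A minimal usage sketch, following the facade pattern the other providers in this package use — the `mistral` facade name, the `TextGenerator` factory, the `MISTRAL_API_KEY` environment variable, and the model id are assumptions, not taken from this diff:

```ts
// Hypothetical sketch only: exact facade/function names are assumed,
// based on the MistralFacade / MistralTextGenerationModel files added here.
import { generateText, mistral } from "modelfusion";

async function main() {
  const text = await generateText(
    // Assumed factory; presumably reads MISTRAL_API_KEY from the environment
    // by default, as other provider facades in modelfusion do.
    mistral.TextGenerator({
      model: "mistral-tiny", // assumed model id
      temperature: 0.7,
    }),
    "Explain what a text embedding is in one sentence."
  );
  console.log(text);
}

main().catch(console.error);
```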
package/model-provider/openai/chat/AbstractOpenAIChatModel.d.ts

```diff
@@ -74,6 +74,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 };
 model: string;
 id: string;
+created: number;
 choices: {
 message: {
 role: "assistant";
@@ -95,7 +96,6 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
-created: number;
 system_fingerprint?: string | null | undefined;
 };
 text: string;
@@ -116,6 +116,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 };
 model: string;
 id: string;
+created: number;
 choices: {
 message: {
 role: "assistant";
@@ -137,7 +138,6 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
-created: number;
 system_fingerprint?: string | null | undefined;
 };
 toolCall: {
@@ -160,6 +160,7 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 };
 model: string;
 id: string;
+created: number;
 choices: {
 message: {
 role: "assistant";
@@ -181,7 +182,6 @@ export declare abstract class AbstractOpenAIChatModel<SETTINGS extends AbstractO
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
-created: number;
 system_fingerprint?: string | null | undefined;
 };
 text: string | null;
@@ -347,6 +347,7 @@ declare const openAIChatResponseSchema: z.ZodObject<{
 };
 model: string;
 id: string;
+created: number;
 choices: {
 message: {
 role: "assistant";
@@ -368,7 +369,6 @@ declare const openAIChatResponseSchema: z.ZodObject<{
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
-created: number;
 system_fingerprint?: string | null | undefined;
 }, {
 object: "chat.completion";
@@ -379,6 +379,7 @@ declare const openAIChatResponseSchema: z.ZodObject<{
 };
 model: string;
 id: string;
+created: number;
 choices: {
 message: {
 role: "assistant";
@@ -400,7 +401,6 @@ declare const openAIChatResponseSchema: z.ZodObject<{
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
-created: number;
 system_fingerprint?: string | null | undefined;
 }>;
 export type OpenAIChatResponse = z.infer<typeof openAIChatResponseSchema>;
@@ -423,6 +423,7 @@ export declare const OpenAIChatResponseFormat: {
 };
 model: string;
 id: string;
+created: number;
 choices: {
 message: {
 role: "assistant";
@@ -444,7 +445,6 @@ export declare const OpenAIChatResponseFormat: {
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
-created: number;
 system_fingerprint?: string | null | undefined;
 }>;
 };
```
package/model-provider/openai/chat/OpenAIChatFunctionCallStructureGenerationModel.d.ts

```diff
@@ -52,6 +52,7 @@ OpenAIChatSettings> {
 };
 model: string;
 id: string;
+created: number;
 choices: {
 message: {
 role: "assistant";
@@ -73,7 +74,6 @@ OpenAIChatSettings> {
 logprobs?: any;
 finish_reason?: "length" | "stop" | "function_call" | "tool_calls" | "content_filter" | null | undefined;
 }[];
-created: number;
 system_fingerprint?: string | null | undefined;
 };
 valueText: string;
```
package/model-provider/whispercpp/WhisperCppTranscriptionModel.cjs

```diff
@@ -38,6 +38,7 @@ class WhisperCppTranscriptionModel extends AbstractModel_js_1.AbstractModel {
 async callAPI(data, options) {
 const { temperature } = this.settings;
 const api = this.settings.api ?? new WhisperCppApiConfiguration_js_1.WhisperCppApiConfiguration();
+const abortSignal = options.run?.abortSignal;
 return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
 retry: api.retry,
 throttle: api.throttle,
@@ -57,7 +58,7 @@ class WhisperCppTranscriptionModel extends AbstractModel_js_1.AbstractModel {
 },
 failedResponseHandler,
 successfulResponseHandler,
-abortSignal
+abortSignal,
 });
 },
 });
```
package/model-provider/whispercpp/WhisperCppTranscriptionModel.js

```diff
@@ -35,6 +35,7 @@ export class WhisperCppTranscriptionModel extends AbstractModel {
 async callAPI(data, options) {
 const { temperature } = this.settings;
 const api = this.settings.api ?? new WhisperCppApiConfiguration();
+const abortSignal = options.run?.abortSignal;
 return callWithRetryAndThrottle({
 retry: api.retry,
 throttle: api.throttle,
@@ -54,7 +55,7 @@ export class WhisperCppTranscriptionModel extends AbstractModel {
 },
 failedResponseHandler,
 successfulResponseHandler,
-abortSignal
+abortSignal,
 });
 },
 });
```
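The WhisperCpp change above threads the active run's abort signal into the transcription request, so an in-flight whisper.cpp call can be cancelled. A minimal sketch of that plumbing, assuming a plain `fetch` call in place of the package's `postToApi`/`callWithRetryAndThrottle` machinery — only `options.run?.abortSignal` is taken from the diff; the endpoint URL and request shape are illustrative:

```ts
// Simplified sketch of the abort-signal wiring added in 0.96.0.
async function transcribeWithAbort(
  audio: Buffer,
  options: { run?: { abortSignal?: AbortSignal } }
): Promise<unknown> {
  // New in this release: read the abort signal from the active run...
  const abortSignal = options.run?.abortSignal;

  // ...and forward it to the HTTP request, so aborting the run aborts
  // the in-flight whisper.cpp server call. URL and body are placeholders.
  const response = await fetch("http://127.0.0.1:8080/inference", {
    method: "POST",
    body: audio,
    signal: abortSignal,
  });
  return response.json();
}

// Caller side: tie the transcription to an AbortController.
const controller = new AbortController();
setTimeout(() => controller.abort(), 10_000); // cancel after 10 seconds

transcribeWithAbort(Buffer.alloc(0), {
  run: { abortSignal: controller.signal },
}).catch((err) => console.error("aborted or failed:", err));
```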