modelfusion 0.122.0 → 0.124.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +56 -0
- package/README.md +42 -2
- package/index.cjs +0 -1
- package/index.d.ts +0 -1
- package/index.js +0 -1
- package/model-function/ModelCallEvent.d.ts +3 -2
- package/model-function/classify/Classifier.cjs +2 -0
- package/model-function/classify/Classifier.d.ts +10 -0
- package/model-function/classify/Classifier.js +1 -0
- package/model-function/classify/ClassifyEvent.cjs +2 -0
- package/model-function/classify/ClassifyEvent.d.ts +20 -0
- package/model-function/classify/ClassifyEvent.js +1 -0
- package/model-function/classify/EmbeddingSimilarityClassifier.cjs +97 -0
- package/model-function/classify/EmbeddingSimilarityClassifier.d.ts +40 -0
- package/model-function/classify/EmbeddingSimilarityClassifier.js +93 -0
- package/model-function/classify/classify.cjs +27 -0
- package/model-function/classify/classify.d.ts +17 -0
- package/model-function/classify/classify.js +23 -0
- package/{classifier → model-function/classify}/index.cjs +4 -1
- package/model-function/classify/index.d.ts +4 -0
- package/model-function/classify/index.js +4 -0
- package/model-function/index.cjs +1 -0
- package/model-function/index.d.ts +1 -0
- package/model-function/index.js +1 -0
- package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +13 -13
- package/model-provider/ollama/OllamaChatModel.d.ts +9 -9
- package/model-provider/openai/AbstractOpenAITextEmbeddingModel.cjs +82 -0
- package/model-provider/openai/AbstractOpenAITextEmbeddingModel.d.ts +91 -0
- package/model-provider/openai/AbstractOpenAITextEmbeddingModel.js +78 -0
- package/model-provider/openai/OpenAIFacade.cjs +18 -18
- package/model-provider/openai/OpenAIFacade.d.ts +18 -18
- package/model-provider/openai/OpenAIFacade.js +18 -18
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +3 -68
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +4 -82
- package/model-provider/openai/OpenAITextEmbeddingModel.js +3 -68
- package/model-provider/openai/index.cjs +1 -0
- package/model-provider/openai/index.d.ts +1 -0
- package/model-provider/openai/index.js +1 -0
- package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +2 -0
- package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.d.ts +2 -0
- package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs +28 -7
- package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts +24 -6
- package/model-provider/openai-compatible/OpenAICompatibleFacade.js +26 -6
- package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.cjs +27 -0
- package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.d.ts +18 -0
- package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.js +23 -0
- package/model-provider/openai-compatible/index.cjs +1 -0
- package/model-provider/openai-compatible/index.d.ts +1 -0
- package/model-provider/openai-compatible/index.js +1 -0
- package/package.json +1 -1
- package/classifier/SemanticClassifier.cjs +0 -81
- package/classifier/SemanticClassifier.d.ts +0 -25
- package/classifier/SemanticClassifier.js +0 -77
- package/classifier/index.d.ts +0 -1
- package/classifier/index.js +0 -1
package/model-provider/openai/OpenAITextEmbeddingModel.cjs
@@ -1,14 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.OpenAITextEmbeddingModel = exports.calculateOpenAIEmbeddingCostInMillicents = exports.isOpenAIEmbeddingModel = exports.OPENAI_TEXT_EMBEDDING_MODELS = void 0;
-const zod_1 = require("zod");
-const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
-const postToApi_js_1 = require("../../core/api/postToApi.cjs");
-const ZodSchema_js_1 = require("../../core/schema/ZodSchema.cjs");
-const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
-const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
-const OpenAIError_js_1 = require("./OpenAIError.cjs");
+const AbstractOpenAITextEmbeddingModel_js_1 = require("./AbstractOpenAITextEmbeddingModel.cjs");
 const TikTokenTokenizer_js_1 = require("./TikTokenTokenizer.cjs");
 exports.OPENAI_TEXT_EMBEDDING_MODELS = {
     "text-embedding-ada-002": {
@@ -43,21 +37,15 @@ exports.calculateOpenAIEmbeddingCostInMillicents = calculateOpenAIEmbeddingCostI
  * ]
  * );
  */
-class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
+class OpenAITextEmbeddingModel extends AbstractOpenAITextEmbeddingModel_js_1.AbstractOpenAITextEmbeddingModel {
     constructor(settings) {
-        super(
+        super(settings);
         Object.defineProperty(this, "provider", {
             enumerable: true,
             configurable: true,
             writable: true,
             value: "openai"
         });
-        Object.defineProperty(this, "isParallelizable", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: true
-        });
         Object.defineProperty(this, "embeddingDimensions", {
             enumerable: true,
             configurable: true,
@@ -85,67 +73,14 @@ class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
     get modelName() {
         return this.settings.model;
     }
-    get maxValuesPerCall() {
-        return this.settings.maxValuesPerCall ?? 2048;
-    }
     async countTokens(input) {
         return (0, countTokens_js_1.countTokens)(this.tokenizer, input);
     }
-    async callAPI(texts, callOptions) {
-        const api = this.settings.api ?? new OpenAIApiConfiguration_js_1.OpenAIApiConfiguration();
-        const abortSignal = callOptions.run?.abortSignal;
-        return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
-            retry: api.retry,
-            throttle: api.throttle,
-            call: async () => (0, postToApi_js_1.postJsonToApi)({
-                url: api.assembleUrl("/embeddings"),
-                headers: api.headers({
-                    functionType: callOptions.functionType,
-                    functionId: callOptions.functionId,
-                    run: callOptions.run,
-                    callId: callOptions.callId,
-                }),
-                body: {
-                    model: this.modelName,
-                    input: texts,
-                    user: this.settings.isUserIdForwardingEnabled
-                        ? callOptions.run?.userId
-                        : undefined,
-                },
-                failedResponseHandler: OpenAIError_js_1.failedOpenAICallResponseHandler,
-                successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)((0, ZodSchema_js_1.zodSchema)(openAITextEmbeddingResponseSchema)),
-                abortSignal,
-            }),
-        });
-    }
     get settingsForEvent() {
         return {};
     }
-    async doEmbedValues(texts, callOptions) {
-        if (texts.length > this.maxValuesPerCall) {
-            throw new Error(`The OpenAI embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
-        }
-        const rawResponse = await this.callAPI(texts, callOptions);
-        return {
-            rawResponse,
-            embeddings: rawResponse.data.map((data) => data.embedding),
-        };
-    }
     withSettings(additionalSettings) {
         return new OpenAITextEmbeddingModel(Object.assign({}, this.settings, additionalSettings));
     }
 }
 exports.OpenAITextEmbeddingModel = OpenAITextEmbeddingModel;
-const openAITextEmbeddingResponseSchema = zod_1.z.object({
-    object: zod_1.z.literal("list"),
-    data: zod_1.z.array(zod_1.z.object({
-        object: zod_1.z.literal("embedding"),
-        embedding: zod_1.z.array(zod_1.z.number()),
-        index: zod_1.z.number(),
-    })),
-    model: zod_1.z.string(),
-    usage: zod_1.z.object({
-        prompt_tokens: zod_1.z.number(),
-        total_tokens: zod_1.z.number(),
-    }),
-});

package/model-provider/openai/OpenAITextEmbeddingModel.d.ts
@@ -1,8 +1,5 @@
-import {
-import {
-import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
-import { AbstractModel } from "../../model-function/AbstractModel.js";
-import { EmbeddingModel, EmbeddingModelSettings } from "../../model-function/embed/EmbeddingModel.js";
+import { EmbeddingModel } from "../../model-function/embed/EmbeddingModel.js";
+import { AbstractOpenAITextEmbeddingModel, AbstractOpenAITextEmbeddingModelSettings, OpenAITextEmbeddingResponse } from "./AbstractOpenAITextEmbeddingModel.js";
 import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
 export declare const OPENAI_TEXT_EMBEDDING_MODELS: {
     "text-embedding-ada-002": {
@@ -17,11 +14,8 @@ export declare const calculateOpenAIEmbeddingCostInMillicents: ({ model, respons
     model: OpenAITextEmbeddingModelType;
     responses: OpenAITextEmbeddingResponse[];
 }) => number;
-export interface OpenAITextEmbeddingModelSettings extends EmbeddingModelSettings {
-    api?: ApiConfiguration;
-    maxValuesPerCall?: number | undefined;
+export interface OpenAITextEmbeddingModelSettings extends AbstractOpenAITextEmbeddingModelSettings {
     model: OpenAITextEmbeddingModelType;
-    isUserIdForwardingEnabled?: boolean;
 }
 /**
  * Create a text embedding model that calls the OpenAI embedding API.
@@ -37,86 +31,14 @@ export interface OpenAITextEmbeddingModelSettings extends EmbeddingModelSettings
  * ]
  * );
  */
-export declare class OpenAITextEmbeddingModel extends
+export declare class OpenAITextEmbeddingModel extends AbstractOpenAITextEmbeddingModel<OpenAITextEmbeddingModelSettings> implements EmbeddingModel<string, OpenAITextEmbeddingModelSettings> {
     constructor(settings: OpenAITextEmbeddingModelSettings);
     readonly provider: "openai";
     get modelName(): "text-embedding-ada-002";
-    get maxValuesPerCall(): number;
-    readonly isParallelizable = true;
     readonly embeddingDimensions: number;
     readonly tokenizer: TikTokenTokenizer;
     readonly contextWindowSize: number;
     countTokens(input: string): Promise<number>;
-    callAPI(texts: Array<string>, callOptions: FunctionCallOptions): Promise<OpenAITextEmbeddingResponse>;
     get settingsForEvent(): Partial<OpenAITextEmbeddingModelSettings>;
-    doEmbedValues(texts: string[], callOptions: FunctionCallOptions): Promise<{
-        rawResponse: {
-            object: "list";
-            model: string;
-            usage: {
-                prompt_tokens: number;
-                total_tokens: number;
-            };
-            data: {
-                object: "embedding";
-                embedding: number[];
-                index: number;
-            }[];
-        };
-        embeddings: number[][];
-    }>;
     withSettings(additionalSettings: OpenAITextEmbeddingModelSettings): this;
 }
-declare const openAITextEmbeddingResponseSchema: z.ZodObject<{
-    object: z.ZodLiteral<"list">;
-    data: z.ZodArray<z.ZodObject<{
-        object: z.ZodLiteral<"embedding">;
-        embedding: z.ZodArray<z.ZodNumber, "many">;
-        index: z.ZodNumber;
-    }, "strip", z.ZodTypeAny, {
-        object: "embedding";
-        embedding: number[];
-        index: number;
-    }, {
-        object: "embedding";
-        embedding: number[];
-        index: number;
-    }>, "many">;
-    model: z.ZodString;
-    usage: z.ZodObject<{
-        prompt_tokens: z.ZodNumber;
-        total_tokens: z.ZodNumber;
-    }, "strip", z.ZodTypeAny, {
-        prompt_tokens: number;
-        total_tokens: number;
-    }, {
-        prompt_tokens: number;
-        total_tokens: number;
-    }>;
-}, "strip", z.ZodTypeAny, {
-    object: "list";
-    model: string;
-    usage: {
-        prompt_tokens: number;
-        total_tokens: number;
-    };
-    data: {
-        object: "embedding";
-        embedding: number[];
-        index: number;
-    }[];
-}, {
-    object: "list";
-    model: string;
-    usage: {
-        prompt_tokens: number;
-        total_tokens: number;
-    };
-    data: {
-        object: "embedding";
-        embedding: number[];
-        index: number;
-    }[];
-}>;
-export type OpenAITextEmbeddingResponse = z.infer<typeof openAITextEmbeddingResponseSchema>;
-export {};

package/model-provider/openai/OpenAITextEmbeddingModel.js
@@ -1,11 +1,5 @@
-import { z } from "zod";
-import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
-import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
-import { zodSchema } from "../../core/schema/ZodSchema.js";
-import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
-import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
-import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
+import { AbstractOpenAITextEmbeddingModel, } from "./AbstractOpenAITextEmbeddingModel.js";
 import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
 export const OPENAI_TEXT_EMBEDDING_MODELS = {
     "text-embedding-ada-002": {
@@ -38,21 +32,15 @@ export const calculateOpenAIEmbeddingCostInMillicents = ({ model, responses, })
  * ]
  * );
  */
-export class OpenAITextEmbeddingModel extends AbstractModel {
+export class OpenAITextEmbeddingModel extends AbstractOpenAITextEmbeddingModel {
     constructor(settings) {
-        super(
+        super(settings);
         Object.defineProperty(this, "provider", {
             enumerable: true,
             configurable: true,
             writable: true,
             value: "openai"
         });
-        Object.defineProperty(this, "isParallelizable", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: true
-        });
         Object.defineProperty(this, "embeddingDimensions", {
             enumerable: true,
             configurable: true,
@@ -80,66 +68,13 @@ export class OpenAITextEmbeddingModel extends AbstractModel {
     get modelName() {
         return this.settings.model;
     }
-    get maxValuesPerCall() {
-        return this.settings.maxValuesPerCall ?? 2048;
-    }
     async countTokens(input) {
         return countTokens(this.tokenizer, input);
     }
-    async callAPI(texts, callOptions) {
-        const api = this.settings.api ?? new OpenAIApiConfiguration();
-        const abortSignal = callOptions.run?.abortSignal;
-        return callWithRetryAndThrottle({
-            retry: api.retry,
-            throttle: api.throttle,
-            call: async () => postJsonToApi({
-                url: api.assembleUrl("/embeddings"),
-                headers: api.headers({
-                    functionType: callOptions.functionType,
-                    functionId: callOptions.functionId,
-                    run: callOptions.run,
-                    callId: callOptions.callId,
-                }),
-                body: {
-                    model: this.modelName,
-                    input: texts,
-                    user: this.settings.isUserIdForwardingEnabled
-                        ? callOptions.run?.userId
-                        : undefined,
-                },
-                failedResponseHandler: failedOpenAICallResponseHandler,
-                successfulResponseHandler: createJsonResponseHandler(zodSchema(openAITextEmbeddingResponseSchema)),
-                abortSignal,
-            }),
-        });
-    }
     get settingsForEvent() {
         return {};
     }
-    async doEmbedValues(texts, callOptions) {
-        if (texts.length > this.maxValuesPerCall) {
-            throw new Error(`The OpenAI embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
-        }
-        const rawResponse = await this.callAPI(texts, callOptions);
-        return {
-            rawResponse,
-            embeddings: rawResponse.data.map((data) => data.embedding),
-        };
-    }
     withSettings(additionalSettings) {
         return new OpenAITextEmbeddingModel(Object.assign({}, this.settings, additionalSettings));
     }
 }
-const openAITextEmbeddingResponseSchema = z.object({
-    object: z.literal("list"),
-    data: z.array(z.object({
-        object: z.literal("embedding"),
-        embedding: z.array(z.number()),
-        index: z.number(),
-    })),
-    model: z.string(),
-    usage: z.object({
-        prompt_tokens: z.number(),
-        total_tokens: z.number(),
-    }),
-});

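Net effect of these three files: the OpenAI-specific request and response handling (callAPI, doEmbedValues, the zod response schema) moves into the shared AbstractOpenAITextEmbeddingModel, while the public surface of OpenAITextEmbeddingModel (provider, embeddingDimensions, tokenizer, countTokens, withSettings) stays the same. A minimal usage sketch, assuming the root "modelfusion" entry point re-exports these symbols and that the default OpenAIApiConfiguration picks up OPENAI_API_KEY from the environment; the embedMany({ model, values }) call style mirrors the updated JSDoc examples later in this diff:

```ts
import { embedMany, OpenAITextEmbeddingModel } from "modelfusion";

// Constructed the same way as before the refactor; the HTTP call and the
// response parsing now live in AbstractOpenAITextEmbeddingModel.
const model = new OpenAITextEmbeddingModel({
  model: "text-embedding-ada-002", // the model listed in OPENAI_TEXT_EMBEDDING_MODELS
});

const embeddings = await embedMany({
  model,
  values: [
    "At first, Nox didn't know what to do with the pup.",
    "He keenly observed and absorbed everything around him.",
  ],
});
```
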
package/model-provider/openai/index.cjs
@@ -29,6 +29,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.openai = exports.OpenAIChatPrompt = void 0;
 __exportStar(require("./AbstractOpenAIChatModel.cjs"), exports);
 __exportStar(require("./AbstractOpenAICompletionModel.cjs"), exports);
+__exportStar(require("./AbstractOpenAITextEmbeddingModel.cjs"), exports);
 __exportStar(require("./AzureOpenAIApiConfiguration.cjs"), exports);
 __exportStar(require("./OpenAIApiConfiguration.cjs"), exports);
 __exportStar(require("./OpenAIChatMessage.cjs"), exports);

package/model-provider/openai/index.d.ts
@@ -1,5 +1,6 @@
 export * from "./AbstractOpenAIChatModel.js";
 export * from "./AbstractOpenAICompletionModel.js";
+export * from "./AbstractOpenAITextEmbeddingModel.js";
 export * from "./AzureOpenAIApiConfiguration.js";
 export * from "./OpenAIApiConfiguration.js";
 export * from "./OpenAIChatMessage.js";

package/model-provider/openai/index.js
@@ -1,5 +1,6 @@
 export * from "./AbstractOpenAIChatModel.js";
 export * from "./AbstractOpenAICompletionModel.js";
+export * from "./AbstractOpenAITextEmbeddingModel.js";
 export * from "./AzureOpenAIApiConfiguration.js";
 export * from "./OpenAIApiConfiguration.js";
 export * from "./OpenAIChatMessage.js";

package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts
@@ -1,3 +1,4 @@
+import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
 import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
 import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
@@ -8,6 +9,7 @@ import { ToolCallsGenerationModel } from "../../tool/generate-tool-calls/ToolCal
 import { AbstractOpenAIChatModel, AbstractOpenAIChatSettings, OpenAIChatPrompt } from "../openai/AbstractOpenAIChatModel.js";
 import { OpenAICompatibleProviderName } from "./OpenAICompatibleProviderName.js";
 export interface OpenAICompatibleChatSettings extends AbstractOpenAIChatSettings {
+    api: ApiConfiguration;
     provider?: OpenAICompatibleProviderName;
 }
 /**

package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.d.ts
@@ -1,9 +1,11 @@
+import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
 import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
 import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
 import { AbstractOpenAICompletionModel, AbstractOpenAICompletionModelSettings } from "../openai/AbstractOpenAICompletionModel.js";
 import { OpenAICompatibleProviderName } from "./OpenAICompatibleProviderName.js";
 export interface OpenAICompatibleCompletionModelSettings extends AbstractOpenAICompletionModelSettings {
+    api: ApiConfiguration;
     provider?: OpenAICompatibleProviderName;
 }
 /**

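The two declaration changes above make `api: ApiConfiguration` a required member of both `OpenAICompatibleChatSettings` and `OpenAICompatibleCompletionModelSettings`, so openai-compatible chat and completion models now need an explicit API configuration. A hedged sketch (the model id is a placeholder; the assumptions are that `TogetherAIApi()` reads its API key from the environment and that `model` and `maxGenerationTokens` come from the inherited abstract settings, as in the facade JSDoc examples further down):

```ts
import { generateText, openai, openaicompatible } from "modelfusion";

const chatModel = openaicompatible.ChatTextGenerator({
  api: openaicompatible.TogetherAIApi(), // now required: any OpenAI-compatible ApiConfiguration
  model: "provider-specific-model-name", // placeholder, not a real model id
  maxGenerationTokens: 500,
});

// Object-style generateText call, matching the updated JSDoc examples in this release.
const text = await generateText({
  model: chatModel,
  prompt: [
    openai.ChatMessage.user("Write a short story about a robot learning to love:"),
  ],
});
```
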
package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs
@@ -1,9 +1,10 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ChatTextGenerator = exports.CompletionTextGenerator = exports.TogetherAIApi = exports.FireworksAIApi = void 0;
+exports.TextEmbedder = exports.ChatTextGenerator = exports.CompletionTextGenerator = exports.TogetherAIApi = exports.FireworksAIApi = void 0;
 const FireworksAIApiConfiguration_js_1 = require("./FireworksAIApiConfiguration.cjs");
 const OpenAICompatibleChatModel_js_1 = require("./OpenAICompatibleChatModel.cjs");
 const OpenAICompatibleCompletionModel_js_1 = require("./OpenAICompatibleCompletionModel.cjs");
+const OpenAICompatibleTextEmbeddingModel_js_1 = require("./OpenAICompatibleTextEmbeddingModel.cjs");
 const TogetherAIApiConfiguration_js_1 = require("./TogetherAIApiConfiguration.cjs");
 /**
  * Configuration for the Fireworks.ai API.
@@ -43,10 +44,10 @@ exports.TogetherAIApi = TogetherAIApi;
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * "Write a short story about a robot learning to love:"
- * );
+ * prompt: "Write a short story about a robot learning to love:"
+ * });
  * ```
  */
 function CompletionTextGenerator(settings) {
@@ -69,17 +70,37 @@ exports.CompletionTextGenerator = CompletionTextGenerator;
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * [
+ * prompt: [
  * openai.ChatMessage.user(
  * "Write a short story about a robot learning to love:"
  * ),
  * ]
- * );
+ * });
  * ```
 */
 function ChatTextGenerator(settings) {
     return new OpenAICompatibleChatModel_js_1.OpenAICompatibleChatModel(settings);
 }
 exports.ChatTextGenerator = ChatTextGenerator;
+/**
+ * Create a text embedding model that calls the OpenAI embedding API.
+ *
+ * @see https://platform.openai.com/docs/api-reference/embeddings
+ *
+ * @example
+ * const embeddings = await embedMany({
+ * model: openaicompatible.TextEmbedder({ model: "provider-specific-model-name" }),
+ * values: [
+ * "At first, Nox didn't know what to do with the pup.",
+ * "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ * ]
+ * });
+ *
+ * @returns A new instance of {@link OpenAITextEmbeddingModel}.
+ */
+function TextEmbedder(settings) {
+    return new OpenAICompatibleTextEmbeddingModel_js_1.OpenAICompatibleTextEmbeddingModel(settings);
+}
+exports.TextEmbedder = TextEmbedder;

package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts
@@ -2,6 +2,7 @@ import { PartialBaseUrlPartsApiConfigurationOptions } from "../../core/api/BaseU
 import { FireworksAIApiConfiguration } from "./FireworksAIApiConfiguration.js";
 import { OpenAICompatibleChatModel, OpenAICompatibleChatSettings } from "./OpenAICompatibleChatModel.js";
 import { OpenAICompatibleCompletionModel } from "./OpenAICompatibleCompletionModel.js";
+import { OpenAICompatibleTextEmbeddingModel, OpenAICompatibleTextEmbeddingModelSettings } from "./OpenAICompatibleTextEmbeddingModel.js";
 import { TogetherAIApiConfiguration } from "./TogetherAIApiConfiguration.js";
 /**
  * Configuration for the Fireworks.ai API.
@@ -39,10 +40,10 @@ export declare function TogetherAIApi(settings?: PartialBaseUrlPartsApiConfigura
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * "Write a short story about a robot learning to love:"
- * );
+ * prompt: "Write a short story about a robot learning to love:"
+ * });
  * ```
 */
 export declare function CompletionTextGenerator(settings: OpenAICompatibleChatSettings): OpenAICompatibleCompletionModel;
@@ -62,14 +63,31 @@ export declare function CompletionTextGenerator(settings: OpenAICompatibleChatSe
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * [
+ * prompt: [
  * openai.ChatMessage.user(
 * "Write a short story about a robot learning to love:"
 * ),
 * ]
- * );
+ * });
 * ```
 */
 export declare function ChatTextGenerator(settings: OpenAICompatibleChatSettings): OpenAICompatibleChatModel;
+/**
+ * Create a text embedding model that calls the OpenAI embedding API.
+ *
+ * @see https://platform.openai.com/docs/api-reference/embeddings
+ *
+ * @example
+ * const embeddings = await embedMany({
+ * model: openaicompatible.TextEmbedder({ model: "provider-specific-model-name" }),
+ * values: [
+ * "At first, Nox didn't know what to do with the pup.",
+ * "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ * ]
+ * });
+ *
+ * @returns A new instance of {@link OpenAITextEmbeddingModel}.
+ */
+export declare function TextEmbedder(settings: OpenAICompatibleTextEmbeddingModelSettings): OpenAICompatibleTextEmbeddingModel;

package/model-provider/openai-compatible/OpenAICompatibleFacade.js
@@ -1,6 +1,7 @@
 import { FireworksAIApiConfiguration } from "./FireworksAIApiConfiguration.js";
 import { OpenAICompatibleChatModel, } from "./OpenAICompatibleChatModel.js";
 import { OpenAICompatibleCompletionModel } from "./OpenAICompatibleCompletionModel.js";
+import { OpenAICompatibleTextEmbeddingModel, } from "./OpenAICompatibleTextEmbeddingModel.js";
 import { TogetherAIApiConfiguration } from "./TogetherAIApiConfiguration.js";
 /**
  * Configuration for the Fireworks.ai API.
@@ -38,10 +39,10 @@ export function TogetherAIApi(settings = {}) {
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * "Write a short story about a robot learning to love:"
- * );
+ * prompt: "Write a short story about a robot learning to love:"
+ * });
  * ```
 */
 export function CompletionTextGenerator(settings) {
@@ -63,16 +64,35 @@ export function CompletionTextGenerator(settings) {
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * [
+ * prompt: [
  * openai.ChatMessage.user(
 * "Write a short story about a robot learning to love:"
 * ),
 * ]
- * );
+ * });
 * ```
 */
 export function ChatTextGenerator(settings) {
     return new OpenAICompatibleChatModel(settings);
 }
+/**
+ * Create a text embedding model that calls the OpenAI embedding API.
+ *
+ * @see https://platform.openai.com/docs/api-reference/embeddings
+ *
+ * @example
+ * const embeddings = await embedMany({
+ * model: openaicompatible.TextEmbedder({ model: "provider-specific-model-name" }),
+ * values: [
+ * "At first, Nox didn't know what to do with the pup.",
+ * "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ * ]
+ * });
+ *
+ * @returns A new instance of {@link OpenAITextEmbeddingModel}.
+ */
+export function TextEmbedder(settings) {
+    return new OpenAICompatibleTextEmbeddingModel(settings);
+}

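The facade now exposes `TextEmbedder`, which wraps the new `OpenAICompatibleTextEmbeddingModel`. A sketch derived from the JSDoc above; note that `OpenAICompatibleTextEmbeddingModelSettings` (next hunks) declares `api` as required even though the JSDoc example omits it, and whether a given provider actually serves an OpenAI-compatible `/embeddings` endpoint is an assumption:

```ts
import { embedMany, openaicompatible } from "modelfusion";

const embeddings = await embedMany({
  model: openaicompatible.TextEmbedder({
    api: openaicompatible.FireworksAIApi(), // any ApiConfiguration pointing at an OpenAI-compatible /embeddings route
    model: "provider-specific-model-name",  // placeholder taken from the JSDoc
  }),
  values: [
    "At first, Nox didn't know what to do with the pup.",
    "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
  ],
});
```
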
package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.cjs
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.OpenAICompatibleTextEmbeddingModel = void 0;
+const AbstractOpenAITextEmbeddingModel_js_1 = require("../openai/AbstractOpenAITextEmbeddingModel.cjs");
+class OpenAICompatibleTextEmbeddingModel extends AbstractOpenAITextEmbeddingModel_js_1.AbstractOpenAITextEmbeddingModel {
+    constructor(settings) {
+        super(settings);
+    }
+    get provider() {
+        return this.settings.provider ?? "openaicompatible";
+    }
+    get modelName() {
+        return this.settings.model;
+    }
+    get embeddingDimensions() {
+        return this.settings.embeddingDimensions;
+    }
+    get settingsForEvent() {
+        return {
+            embeddingDimensions: this.settings.embeddingDimensions,
+        };
+    }
+    withSettings(additionalSettings) {
+        return new OpenAICompatibleTextEmbeddingModel(Object.assign({}, this.settings, additionalSettings));
+    }
+}
+exports.OpenAICompatibleTextEmbeddingModel = OpenAICompatibleTextEmbeddingModel;

package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.d.ts
@@ -0,0 +1,18 @@
+import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
+import { EmbeddingModel } from "../../model-function/embed/EmbeddingModel.js";
+import { AbstractOpenAITextEmbeddingModel, AbstractOpenAITextEmbeddingModelSettings } from "../openai/AbstractOpenAITextEmbeddingModel.js";
+import { OpenAICompatibleProviderName } from "./OpenAICompatibleProviderName.js";
+export interface OpenAICompatibleTextEmbeddingModelSettings extends AbstractOpenAITextEmbeddingModelSettings {
+    api: ApiConfiguration;
+    provider?: OpenAICompatibleProviderName;
+    model: string;
+    embeddingDimensions?: number;
+}
+export declare class OpenAICompatibleTextEmbeddingModel extends AbstractOpenAITextEmbeddingModel<OpenAICompatibleTextEmbeddingModelSettings> implements EmbeddingModel<string, OpenAICompatibleTextEmbeddingModelSettings> {
+    constructor(settings: OpenAICompatibleTextEmbeddingModelSettings);
+    get provider(): OpenAICompatibleProviderName;
+    get modelName(): string;
+    get embeddingDimensions(): number | undefined;
+    get settingsForEvent(): Partial<OpenAICompatibleTextEmbeddingModelSettings>;
+    withSettings(additionalSettings: OpenAICompatibleTextEmbeddingModelSettings): this;
+}

package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.js
@@ -0,0 +1,23 @@
+import { AbstractOpenAITextEmbeddingModel, } from "../openai/AbstractOpenAITextEmbeddingModel.js";
+export class OpenAICompatibleTextEmbeddingModel extends AbstractOpenAITextEmbeddingModel {
+    constructor(settings) {
+        super(settings);
+    }
+    get provider() {
+        return this.settings.provider ?? "openaicompatible";
+    }
+    get modelName() {
+        return this.settings.model;
+    }
+    get embeddingDimensions() {
+        return this.settings.embeddingDimensions;
+    }
+    get settingsForEvent() {
+        return {
+            embeddingDimensions: this.settings.embeddingDimensions,
+        };
+    }
+    withSettings(additionalSettings) {
+        return new OpenAICompatibleTextEmbeddingModel(Object.assign({}, this.settings, additionalSettings));
+    }
+}

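Unlike the OpenAI model, which assigns `embeddingDimensions` as a readonly instance property in its constructor, the compatible model exposes it as an optional, provider-specific setting, and it is the only value reported through `settingsForEvent`. A small sketch of the resulting getters (direct construction; the dimension value and model id are illustrative only, and the root-package imports are an assumption):

```ts
import { OpenAICompatibleTextEmbeddingModel, openaicompatible } from "modelfusion";

const model = new OpenAICompatibleTextEmbeddingModel({
  api: openaicompatible.TogetherAIApi(), // required ApiConfiguration
  model: "provider-specific-model-name", // placeholder
  embeddingDimensions: 768,              // illustrative, depends on the provider's model
});

console.log(model.provider);            // "openaicompatible" unless settings.provider overrides it
console.log(model.embeddingDimensions); // 768
console.log(model.settingsForEvent);    // { embeddingDimensions: 768 }
```
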
package/model-provider/openai-compatible/index.cjs
@@ -30,6 +30,7 @@ exports.openaicompatible = void 0;
 __exportStar(require("./FireworksAIApiConfiguration.cjs"), exports);
 __exportStar(require("./OpenAICompatibleChatModel.cjs"), exports);
 __exportStar(require("./OpenAICompatibleCompletionModel.cjs"), exports);
+__exportStar(require("./OpenAICompatibleTextEmbeddingModel.cjs"), exports);
 exports.openaicompatible = __importStar(require("./OpenAICompatibleFacade.cjs"));
 __exportStar(require("./OpenAICompatibleProviderName.cjs"), exports);
 __exportStar(require("./TogetherAIApiConfiguration.cjs"), exports);

package/model-provider/openai-compatible/index.d.ts
@@ -1,6 +1,7 @@
 export * from "./FireworksAIApiConfiguration.js";
 export * from "./OpenAICompatibleChatModel.js";
 export * from "./OpenAICompatibleCompletionModel.js";
+export * from "./OpenAICompatibleTextEmbeddingModel.js";
 export * as openaicompatible from "./OpenAICompatibleFacade.js";
 export * from "./OpenAICompatibleProviderName.js";
 export * from "./TogetherAIApiConfiguration.js";

package/model-provider/openai-compatible/index.js
@@ -1,6 +1,7 @@
 export * from "./FireworksAIApiConfiguration.js";
 export * from "./OpenAICompatibleChatModel.js";
 export * from "./OpenAICompatibleCompletionModel.js";
+export * from "./OpenAICompatibleTextEmbeddingModel.js";
 export * as openaicompatible from "./OpenAICompatibleFacade.js";
 export * from "./OpenAICompatibleProviderName.js";
 export * from "./TogetherAIApiConfiguration.js";