vecbox 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +377 -0
- package/dist/index.cjs +917 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +111 -0
- package/dist/index.d.ts +111 -0
- package/dist/index.js +874 -0
- package/dist/index.js.map +1 -0
- package/package.json +79 -0
- package/src/factory/EmbeddingFactory.ts +37 -0
- package/src/images/embed-kit.png +0 -0
- package/src/providers/base/EmbeddingProvider.ts +32 -0
- package/src/providers/claude.ts +78 -0
- package/src/providers/deepseek.ts +115 -0
- package/src/providers/gemini.ts +105 -0
- package/src/providers/llamacpp.ts +292 -0
- package/src/providers/mistral.ts +113 -0
- package/src/providers/openai.ts +108 -0
- package/src/types/deepseek.d.ts +15 -0
- package/src/types/index.d.ts +43 -0
- package/src/types/index.ts +43 -0
- package/src/types/package.json +1 -0
- package/src/types/transformers.d.ts +7 -0
- package/src/util/logger.ts +125 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,917 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
|
9
|
+
var __export = (target, all) => {
|
|
10
|
+
for (var name in all)
|
|
11
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
12
|
+
};
|
|
13
|
+
var __copyProps = (to, from, except, desc) => {
|
|
14
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
15
|
+
for (let key of __getOwnPropNames(from))
|
|
16
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
17
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
18
|
+
}
|
|
19
|
+
return to;
|
|
20
|
+
};
|
|
21
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
22
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
23
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
24
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
25
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
26
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
27
|
+
mod
|
|
28
|
+
));
|
|
29
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
30
|
+
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
|
|
31
|
+
|
|
32
|
+
// index.ts
// Public export surface of the bundle; __export installs each entry as a
// lazy getter so the underlying bindings resolve at access time.
var index_exports = {};
__export(index_exports, {
  EmbeddingFactory: () => EmbeddingFactory,
  EmbeddingProvider: () => EmbeddingProvider,
  LIB_INFO: () => LIB_INFO,
  VERSION: () => VERSION,
  autoEmbed: () => autoEmbed,
  createProvider: () => createProvider,
  embed: () => embed,
  getSupportedProviders: () => getSupportedProviders,
  getVersion: () => getVersion
});
module.exports = __toCommonJS(index_exports);

// main.ts
// dotenv is loaded at module scope; presumably .env parsing happens later in
// the bundle (not visible in this chunk).
var dotenv = __toESM(require("dotenv"), 1);

// src/providers/openai.ts
var import_openai = __toESM(require("openai"), 1);
|
|
52
|
+
|
|
53
|
+
// src/providers/base/EmbeddingProvider.ts
|
|
54
|
+
/**
 * Abstract base class shared by every embedding provider. Holds the provider
 * configuration and offers helpers for resolving the model name and for
 * turning an input descriptor ({ text } or { filePath }) into raw text.
 */
var EmbeddingProvider = class {
  constructor(config2) {
    __publicField(this, "config");
    this.config = config2;
  }
  /** Configured model identifier, falling back to "default". */
  getModel() {
    return this.config.model || "default";
  }
  /**
   * Resolve an input to text. Inline `text` wins; otherwise the file at
   * `filePath` is read as UTF-8. Throws when neither field is supplied.
   */
  async readInput(input) {
    if (input.text) return input.text;
    if (!input.filePath) {
      throw new Error("Either text or filePath must be provided");
    }
    const fsPromises = await import("fs/promises");
    return fsPromises.readFile(input.filePath, "utf-8");
  }
};
|
|
73
|
+
|
|
74
|
+
// src/util/logger.ts
|
|
75
|
+
// Leveled stdout logger. Numeric levels were inlined from a TS enum:
// 0=DEBUG, 1=INFO, 2=WARN, 3=ERROR.
// NOTE(review): default module name is "embedbox" although the package is
// published as "vecbox" — presumably leftover branding; confirm.
var _Logger = class _Logger {
  constructor(moduleName = "embedbox", level = 1 /* INFO */) {
    __publicField(this, "currentLevel");
    __publicField(this, "moduleName");
    this.moduleName = moduleName;
    this.currentLevel = level;
  }
  // Process-wide singleton accessor. moduleName/level are honored only on
  // the first call; later calls return the existing instance unchanged.
  static getInstance(moduleName, level) {
    if (!_Logger.instance) {
      _Logger.instance = new _Logger(moduleName || "embedbox", level);
    }
    return _Logger.instance;
  }
  setLevel(level) {
    this.currentLevel = level;
  }
  getLevel() {
    return this.currentLevel;
  }
  // Renders "[LEVEL(module)] message" with ANSI color; the template literal
  // intentionally spans lines, so the output ends with a blank line.
  formatMessage(level, message) {
    const levelName = _Logger.LEVEL_NAMES[level];
    const color = _Logger.COLORS[levelName];
    const reset = _Logger.COLORS.RESET;
    return `${color}[${levelName}(${this.moduleName})]${reset} ${message}

`;
  }
  // Writes directly to stdout when `level` meets the threshold.
  log(level, message) {
    if (level < this.currentLevel) {
      return;
    }
    const formattedMessage = this.formatMessage(level, message);
    process.stdout.write(formattedMessage);
  }
  debug(message) {
    this.log(0 /* DEBUG */, message);
  }
  info(message) {
    this.log(1 /* INFO */, message);
  }
  warn(message) {
    this.log(2 /* WARN */, message);
  }
  error(message) {
    this.log(3 /* ERROR */, message);
  }
  // Static methods for quick access
  // Each static call builds a throwaway instance at the default INFO level,
  // so static debug() output is suppressed unless the level default changes.
  static debug(message, moduleName) {
    const logger9 = new _Logger(moduleName || "embedbox");
    logger9.debug(message);
  }
  static info(message, moduleName) {
    const logger9 = new _Logger(moduleName || "embedbox");
    logger9.info(message);
  }
  static warn(message, moduleName) {
    const logger9 = new _Logger(moduleName || "embedbox");
    logger9.warn(message);
  }
  static error(message, moduleName) {
    const logger9 = new _Logger(moduleName || "embedbox");
    logger9.error(message);
  }
  // Method to create a logger instance for a specific module
  static createModuleLogger(moduleName, level) {
    return new _Logger(`embedbox:${moduleName}`, level);
  }
};
// Static fields, lowered by esbuild to post-class __publicField calls.
__publicField(_Logger, "instance");
// ANSI color codes - simplified for better readability
__publicField(_Logger, "COLORS", {
  RESET: "\x1B[0m",
  DEBUG: "\x1B[36m",
  // Cyan
  INFO: "\x1B[32m",
  // Green
  WARN: "\x1B[33m",
  // Yellow
  ERROR: "\x1B[31m"
  // Red
});
__publicField(_Logger, "LEVEL_NAMES", {
  [0 /* DEBUG */]: "DEBUG",
  [1 /* INFO */]: "INFO",
  [2 /* WARN */]: "WARN",
  [3 /* ERROR */]: "ERROR"
});
var Logger = _Logger;
// Shared default logger instance (module name "embedbox", level INFO).
var logger = Logger.getInstance();
|
|
164
|
+
|
|
165
|
+
// src/providers/openai.ts
|
|
166
|
+
var logger2 = Logger.createModuleLogger("openai");
// OpenAI embeddings provider backed by the official `openai` SDK client.
var OpenAIProvider = class extends EmbeddingProvider {
  // Requires config2.apiKey; baseUrl/timeout are optional (timeout default 30s).
  constructor(config2) {
    super(config2);
    __publicField(this, "client");
    if (!config2.apiKey) {
      throw new Error("OpenAI API key is required");
    }
    this.client = new import_openai.default({
      apiKey: config2.apiKey,
      baseURL: config2.baseUrl,
      timeout: config2.timeout || 3e4
    });
    logger2.info("OpenAI provider initialized");
  }
  // Embed a single input; returns { embedding, dimensions, model, provider, usage? }.
  // Errors are logged and rethrown.
  async embed(input) {
    try {
      const text = await this.readInput(input);
      logger2.debug(`Embedding text with model: ${this.getModel()}`);
      const response = await this.client.embeddings.create({
        model: this.getModel(),
        input: text
      });
      const embedding = response.data[0];
      if (!embedding) {
        throw new Error("No embedding returned from OpenAI API");
      }
      return {
        embedding: embedding.embedding || [],
        dimensions: embedding.embedding?.length || 0,
        model: response.model,
        provider: "openai",
        usage: response.usage ? {
          promptTokens: response.usage.prompt_tokens,
          totalTokens: response.usage.total_tokens
        } : void 0
      };
    } catch (error) {
      logger2.error(`OpenAI embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // Embed several inputs in one API call (the API accepts an array input).
  async embedBatch(inputs) {
    try {
      const texts = await Promise.all(inputs.map((input) => this.readInput(input)));
      logger2.debug(`Batch embedding ${texts.length} texts with model: ${this.getModel()}`);
      const response = await this.client.embeddings.create({
        model: this.getModel(),
        input: texts
      });
      const embeddings = response.data.map((item) => item.embedding);
      return {
        embeddings,
        dimensions: embeddings[0]?.length || 0,
        model: response.model,
        provider: "openai",
        usage: response.usage ? {
          promptTokens: response.usage.prompt_tokens,
          totalTokens: response.usage.total_tokens
        } : void 0
      };
    } catch (error) {
      logger2.error(`OpenAI batch embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // Static lookup table of known model dimensionalities; defaults to 1536.
  getDimensions() {
    const model = this.getModel();
    if (model.includes("text-embedding-3-large")) return 3072;
    if (model.includes("text-embedding-3-small")) return 1536;
    if (model.includes("text-embedding-ada-002")) return 1536;
    return 1536;
  }
  getProviderName() {
    return "OpenAI";
  }
  // Liveness/credentials probe: lists models, returns false on any failure.
  async isReady() {
    try {
      await this.client.models.list();
      return true;
    } catch (error) {
      logger2.error(`OpenAI readiness check failed: ${error instanceof Error ? error.message : String(error)}`);
      return false;
    }
  }
};
|
|
252
|
+
|
|
253
|
+
// src/providers/gemini.ts
|
|
254
|
+
var import_generative_ai = require("@google/generative-ai");
var logger3 = Logger.createModuleLogger("gemini");
// Google Gemini embeddings provider via @google/generative-ai.
var GeminiProvider = class extends EmbeddingProvider {
  // Requires config2.apiKey; note baseUrl/timeout from the config are not
  // forwarded to the Google client (the SDK constructor takes only the key).
  constructor(config2) {
    super(config2);
    __publicField(this, "client");
    if (!config2.apiKey) {
      throw new Error("Google API key is required");
    }
    this.client = new import_generative_ai.GoogleGenerativeAI(config2.apiKey);
    logger3.info("Gemini provider initialized");
  }
  // Embed a single input via model.embedContent; errors are logged and rethrown.
  async embed(input) {
    try {
      const text = await this.readInput(input);
      logger3.debug(`Embedding text with model: ${this.getModel()}`);
      const model = this.client.getGenerativeModel({
        model: this.getModel()
      });
      const result = await model.embedContent(text);
      const embedding = result.embedding;
      return {
        embedding: embedding.values,
        dimensions: embedding.values.length,
        model: this.getModel(),
        provider: "gemini"
      };
    } catch (error) {
      logger3.error(`Gemini embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // No batch endpoint is used; each text is embedded with its own
  // embedContent call, all in parallel (fail-fast via Promise.all).
  async embedBatch(inputs) {
    try {
      const texts = await Promise.all(inputs.map((input) => this.readInput(input)));
      logger3.debug(`Batch embedding ${texts.length} texts with model: ${this.getModel()}`);
      const model = this.client.getGenerativeModel({
        model: this.getModel()
      });
      const results = await Promise.all(
        texts.map((text) => model.embedContent(text))
      );
      const embeddings = results.map((result) => result.embedding.values);
      return {
        embeddings,
        dimensions: embeddings[0]?.length || 0,
        model: this.getModel(),
        provider: "gemini"
      };
    } catch (error) {
      logger3.error(`Gemini batch embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // NOTE(review): every branch returns 768; confirm against current Google
  // model documentation (newer embedding models may differ).
  getDimensions() {
    const model = this.getModel();
    if (model.includes("gemini-embedding-001")) return 768;
    if (model.includes("text-embedding-004")) return 768;
    if (model.includes("embedding-001")) return 768;
    if (model.includes("multimodalembedding")) return 768;
    return 768;
  }
  getProviderName() {
    return "Google Gemini";
  }
  // Overrides the base default ("default") with a Gemini-specific model name.
  getModel() {
    return this.config.model || "gemini-embedding-001";
  }
  // Readiness probe issues a real (billable) embedding of "test".
  async isReady() {
    try {
      const model = this.client.getGenerativeModel({
        model: this.getModel()
      });
      await model.embedContent("test");
      return true;
    } catch (error) {
      logger3.error(`Gemini readiness check failed: ${error instanceof Error ? error.message : String(error)}`);
      return false;
    }
  }
};
|
|
335
|
+
|
|
336
|
+
// src/providers/claude.ts
|
|
337
|
+
var import_sdk = __toESM(require("@anthropic-ai/sdk"), 1);
var logger4 = Logger.createModuleLogger("claude");
/**
 * Anthropic Claude provider stub. Anthropic exposes no embeddings endpoint,
 * so embed/embedBatch always throw a descriptive error; isReady() only
 * verifies credentials with a minimal messages call.
 */
var ClaudeProvider = class extends EmbeddingProvider {
  // Requires config2.apiKey; baseUrl/timeout optional (timeout default 30s).
  constructor(config2) {
    super(config2);
    __publicField(this, "client");
    if (!config2.apiKey) {
      throw new Error("Anthropic API key is required");
    }
    this.client = new import_sdk.default({
      apiKey: config2.apiKey,
      baseURL: config2.baseUrl,
      timeout: config2.timeout || 3e4
    });
    logger4.info("Claude provider initialized");
  }
  // Always throws: embeddings are not available on this provider.
  async embed() {
    try {
      logger4.debug(`Embedding text with model: ${this.getModel()}`);
      throw new Error("Claude embeddings API not yet available. Please use another provider.");
    } catch (error) {
      // Fixed: original had a redundant nested `error instanceof Error`
      // ternary; formatting now matches the other providers.
      logger4.error(`Claude embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // Always throws: embeddings are not available on this provider.
  async embedBatch() {
    try {
      throw new Error("Claude embeddings API not yet available. Please use another provider.");
    } catch (error) {
      logger4.error(`Claude batch embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // No embeddings API, so there is no meaningful dimensionality.
  getDimensions() {
    return 0;
  }
  getProviderName() {
    return "Anthropic Claude";
  }
  // Credentials probe: issues a tiny messages request; false on any failure.
  async isReady() {
    try {
      await this.client.messages.create({
        model: "claude-3-haiku-20240307",
        max_tokens: 10,
        messages: [{ role: "user", content: "test" }]
      });
      return true;
    } catch (error) {
      logger4.error(`Claude readiness check failed: ${error instanceof Error ? error.message : String(error)}`);
      return false;
    }
  }
};
|
|
393
|
+
|
|
394
|
+
// src/providers/mistral.ts
|
|
395
|
+
var import_mistralai = require("@mistralai/mistralai");
var logger5 = Logger.createModuleLogger("mistral");
// Mistral AI embeddings provider via @mistralai/mistralai.
var MistralProvider = class extends EmbeddingProvider {
  // Requires config2.apiKey; serverURL/timeoutMs optional (timeout default 30s).
  constructor(config2) {
    super(config2);
    __publicField(this, "client");
    if (!config2.apiKey) {
      throw new Error("Mistral API key is required");
    }
    this.client = new import_mistralai.Mistral({
      apiKey: config2.apiKey,
      serverURL: config2.baseUrl,
      timeoutMs: config2.timeout || 3e4
    });
    logger5.info("Mistral provider initialized");
  }
  // Embed a single input (sent as a one-element `inputs` array).
  async embed(input) {
    try {
      const text = await this.readInput(input);
      logger5.debug(`Embedding text with model: ${this.getModel()}`);
      const response = await this.client.embeddings.create({
        model: this.getModel(),
        inputs: [text]
      });
      const embedding = response.data[0];
      if (!embedding) {
        throw new Error("No embedding returned from Mistral API");
      }
      return {
        embedding: embedding.embedding || [],
        dimensions: embedding.embedding?.length || 0,
        model: response.model,
        provider: "mistral",
        // NOTE(review): truthiness guard drops usage if either count is 0;
        // probably harmless in practice but a nullish check would be stricter.
        usage: response.usage?.promptTokens && response.usage?.totalTokens ? {
          promptTokens: response.usage.promptTokens,
          totalTokens: response.usage.totalTokens
        } : void 0
      };
    } catch (error) {
      logger5.error(`Mistral embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // Embed several inputs in one API call; throws if any item lacks an embedding.
  async embedBatch(inputs) {
    try {
      const texts = await Promise.all(inputs.map((input) => this.readInput(input)));
      logger5.debug(`Batch embedding ${texts.length} texts with model: ${this.getModel()}`);
      const response = await this.client.embeddings.create({
        model: this.getModel(),
        inputs: texts
      });
      const embeddings = response.data.map((item) => {
        if (!item.embedding) throw new Error("No embedding returned from Mistral API");
        return item.embedding;
      });
      return {
        embeddings,
        dimensions: embeddings[0]?.length || 0,
        model: response.model,
        provider: "mistral",
        usage: response.usage?.promptTokens && response.usage?.totalTokens ? {
          promptTokens: response.usage.promptTokens,
          totalTokens: response.usage.totalTokens
        } : void 0
      };
    } catch (error) {
      logger5.error(`Mistral batch embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // mistral-embed is 1024-dimensional; default also 1024.
  getDimensions() {
    const model = this.getModel();
    if (model.includes("mistral-embed")) return 1024;
    return 1024;
  }
  getProviderName() {
    return "Mistral AI";
  }
  // Readiness probe issues a real embedding of "test".
  async isReady() {
    try {
      const response = await this.client.embeddings.create({
        model: this.getModel(),
        inputs: ["test"]
      });
      return response.data.length > 0;
    } catch (error) {
      logger5.error(`Mistral readiness check failed: ${error instanceof Error ? error.message : String(error)}`);
      return false;
    }
  }
};
|
|
486
|
+
|
|
487
|
+
// src/providers/deepseek.ts
|
|
488
|
+
// NOTE(review): depends on a `deepseek` npm package exposing an OpenAI-style
// embeddings client; verify this dependency and that DeepSeek actually offers
// an embeddings endpoint.
var import_deepseek = require("deepseek");
var logger6 = Logger.createModuleLogger("deepseek");
var DeepSeekProvider = class extends EmbeddingProvider {
  // Requires config2.apiKey; baseURL only set when config2.baseUrl is provided.
  constructor(config2) {
    super(config2);
    __publicField(this, "client");
    if (!config2.apiKey) {
      throw new Error("DeepSeek API key is required");
    }
    const clientOptions = {
      apiKey: config2.apiKey,
      timeout: config2.timeout || 3e4
    };
    if (config2.baseUrl) {
      clientOptions.baseURL = config2.baseUrl;
    }
    this.client = new import_deepseek.DeepSeek(clientOptions);
    logger6.info("DeepSeek provider initialized");
  }
  // Embed a single input; errors are logged and rethrown.
  async embed(input) {
    try {
      const text = await this.readInput(input);
      logger6.debug(`Embedding text with model: ${this.getModel()}`);
      const response = await this.client.embeddings.create({
        model: this.getModel(),
        input: text
      });
      const embedding = response.data[0];
      if (!embedding) {
        throw new Error("No embedding returned from DeepSeek API");
      }
      return {
        embedding: embedding.embedding || [],
        dimensions: embedding.embedding?.length || 0,
        model: embedding.model || this.getModel(),
        provider: "deepseek",
        usage: response.usage ? {
          promptTokens: response.usage.prompt_tokens,
          totalTokens: response.usage.total_tokens
        } : void 0
      };
    } catch (error) {
      logger6.error(`DeepSeek embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // Embed several inputs in one API call.
  async embedBatch(inputs) {
    try {
      const texts = await Promise.all(inputs.map((input) => this.readInput(input)));
      logger6.debug(`Batch embedding ${texts.length} texts with model: ${this.getModel()}`);
      const response = await this.client.embeddings.create({
        model: this.getModel(),
        input: texts
      });
      const embeddings = response.data.map((item) => item.embedding);
      return {
        embeddings,
        dimensions: embeddings[0]?.length || 0,
        model: response.model,
        provider: "deepseek",
        usage: response.usage ? {
          promptTokens: response.usage.prompt_tokens,
          totalTokens: response.usage.total_tokens
        } : void 0
      };
    } catch (error) {
      logger6.error(`DeepSeek batch embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // NOTE(review): 4096 appears to be a guess tied to deepseek-chat; confirm.
  getDimensions() {
    const model = this.getModel();
    if (model.includes("deepseek-chat")) return 4096;
    return 4096;
  }
  getProviderName() {
    return "DeepSeek";
  }
  // Readiness probe issues a real embedding of "test".
  async isReady() {
    try {
      await this.client.embeddings.create({
        model: this.getModel(),
        input: "test"
      });
      return true;
    } catch (error) {
      logger6.error(`DeepSeek readiness check failed: ${error instanceof Error ? error.message : String(error)}`);
      return false;
    }
  }
};
|
|
579
|
+
|
|
580
|
+
// src/providers/llamacpp.ts
|
|
581
|
+
var import_promises = require("fs/promises");
var import_path = require("path");
var http = __toESM(require("http"), 1);
/**
 * Local llama.cpp provider. Despite `llamaPath` pointing at a CLI binary,
 * embeddings are obtained by POSTing to a llama.cpp server's /embedding
 * endpoint on localhost:8080; isReady() additionally checks that the binary
 * and the model file exist on disk.
 */
var LlamaCppProvider = class extends EmbeddingProvider {
  constructor(config2) {
    super({ ...config2, provider: "llamacpp" });
    __publicField(this, "llamaPath");
    __publicField(this, "modelPath");
    this.modelPath = config2.model || "nomic-embed-text-v1.5.Q4_K_M.gguf";
    this.llamaPath = config2.llamaPath || "./llama.cpp/build/bin/llama-embedding";
    logger.info(`Llama.cpp provider initialized with model: ${this.modelPath}`);
  }
  // Public API methods
  getProviderName() {
    return "Llama.cpp";
  }
  // Known local model dimensionalities; defaults to 768.
  getDimensions() {
    const model = this.getModel();
    if (model.includes("nomic-embed-text-v1.5")) return 768;
    if (model.includes("nomic-embed-text-v1")) return 768;
    if (model.includes("all-MiniLM-L6-v2")) return 384;
    if (model.includes("bge-base")) return 768;
    if (model.includes("bert-base")) return 768;
    return 768;
  }
  // Checks binary existence/executability and model-file presence.
  async isReady() {
    try {
      await (0, import_promises.access)(this.llamaPath, import_promises.constants.F_OK);
      await (0, import_promises.access)(this.llamaPath, import_promises.constants.X_OK);
      const modelPath = await this.getModelPath();
      await (0, import_promises.access)(modelPath, import_promises.constants.F_OK);
      logger.debug("Llama.cpp provider is ready");
      return true;
    } catch (error) {
      logger.error(`Llama.cpp readiness check failed: ${error instanceof Error ? error.message : String(error)}`);
      return false;
    }
  }
  // Embed a single input; rejects empty/whitespace-only text.
  async embed(input) {
    try {
      logger.debug(`Embedding text with llama.cpp: ${this.getModel()}`);
      const text = await this.readInput(input);
      if (!text.trim()) {
        throw new Error("Text input cannot be empty");
      }
      const requestBody = {
        input: text,
        model: await this.getModelPath(),
        pooling: "mean",
        normalize: 2
      };
      const result = await this.executeLlamaEmbedding([JSON.stringify(requestBody)]);
      const embedding = this.parseRawOutput(result.stdout);
      return {
        embedding,
        dimensions: embedding.length,
        model: this.getModel(),
        provider: "llamacpp"
      };
    } catch (error) {
      logger.error(`Llama.cpp embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // Embed several inputs sequentially (one HTTP request per text).
  async embedBatch(inputs) {
    try {
      logger.debug(`Batch embedding ${inputs.length} texts with llama.cpp`);
      const texts = [];
      for (const input of inputs) {
        const text = await this.readInput(input);
        if (text.trim()) {
          texts.push(text);
        }
      }
      if (texts.length === 0) {
        throw new Error("No valid texts to embed");
      }
      const modelPath = await this.getModelPath();
      // Fixed: build requests from the resolved texts. The original mapped
      // over `inputs` again using `input.text || ""`, which ignored filePath
      // inputs and re-submitted the empty texts it had just filtered out.
      const requests = texts.map((text) => ({
        input: text,
        model: modelPath,
        pooling: "mean",
        normalize: 2
      }));
      const embeddings = [];
      for (const request2 of requests) {
        const result = await this.executeLlamaEmbedding([JSON.stringify(request2)]);
        const embedding = this.parseRawOutput(result.stdout);
        embeddings.push(embedding);
      }
      return {
        embeddings,
        dimensions: embeddings[0]?.length || 0,
        model: this.getModel(),
        provider: "llamacpp"
      };
    } catch (error) {
      logger.error(`Llama.cpp batch embedding failed: ${error instanceof Error ? error.message : String(error)}`);
      throw error;
    }
  }
  // Protected methods
  getModel() {
    return this.modelPath;
  }
  // Private helper methods
  // Resolve the model file by probing a few conventional locations;
  // returns the first existing path (absolute), else throws.
  async getModelPath() {
    const possiblePaths = [
      this.modelPath,
      // As provided
      (0, import_path.join)("./llama.cpp/models", this.modelPath),
      // In llama.cpp/models
      (0, import_path.join)("./llama.cpp", this.modelPath)
      // In llama.cpp root
      // (fixed: original listed this.modelPath a second time as a no-op fallback)
    ];
    for (const path of possiblePaths) {
      try {
        await (0, import_promises.access)(path, import_promises.constants.F_OK);
        return (0, import_path.resolve)(path);
      } catch {
        continue;
      }
    }
    throw new Error(`Model file not found: ${this.modelPath}`);
  }
  // POST args[0] (a JSON request body) to the local llama.cpp server's
  // /embedding endpoint; resolves with { stdout, stderr } for compatibility
  // with the old CLI-based implementation.
  async executeLlamaEmbedding(args) {
    return new Promise((resolve2, reject) => {
      const port = 8080;
      let requestBody;
      try {
        requestBody = JSON.parse(args[0] || "{}");
      } catch {
        reject(new Error("Invalid request body for HTTP API"));
        return;
      }
      const postData = JSON.stringify(requestBody);
      const options = {
        hostname: "localhost",
        port,
        path: "/embedding",
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "Content-Length": Buffer.byteLength(postData)
        }
      };
      const req = http.request(options, (res) => {
        let data = "";
        res.on("data", (chunk) => {
          data += chunk;
        });
        res.on("end", () => {
          if (res.statusCode === 200) {
            resolve2({ stdout: data, stderr: "" });
          } else {
            reject(new Error(`HTTP ${res.statusCode}: ${data}`));
          }
        });
      });
      req.on("error", (error) => {
        reject(new Error(`Failed to connect to llama.cpp server: ${error instanceof Error ? error.message : String(error)}`));
      });
      req.write(postData);
      req.end();
    });
  }
  // Parse the server's JSON response into a flat number[] embedding,
  // tolerating several response shapes ([{embedding}], {embedding}, flat array,
  // and one level of nesting inside `embedding`).
  parseRawOutput(output) {
    try {
      const response = JSON.parse(output);
      logger.debug(`PARSE DEBUG: Response type: ${typeof response}`);
      logger.debug(`PARSE DEBUG: Is Array: ${Array.isArray(response)}`);
      if (Array.isArray(response) && response.length > 0) {
        const first = response[0];
        if (first && first.embedding && Array.isArray(first.embedding)) {
          const emb = first.embedding;
          if (Array.isArray(emb[0])) {
            const flat = emb[0];
            logger.debug(`Parsed ${flat.length} dimensions (nested)`);
            return flat;
          }
          logger.debug(`Parsed ${emb.length} dimensions (direct)`);
          return emb;
        }
      }
      if (response.embedding && Array.isArray(response.embedding)) {
        const emb = response.embedding;
        if (Array.isArray(emb[0])) {
          return emb[0];
        }
        return emb;
      }
      if (Array.isArray(response) && typeof response[0] === "number") {
        logger.debug(`Parsed ${response.length} dimensions (flat array)`);
        return response;
      }
      throw new Error(`Unexpected format: ${JSON.stringify(Object.keys(response))}`);
    } catch (error) {
      // Fixed: original had a redundant nested `error instanceof Error` ternary.
      const errorMessage = error instanceof Error ? error.message : String(error);
      throw new Error(`Parse failed: ${errorMessage}`, { cause: error });
    }
  }
  // Legacy fallback parser: extracts every bracketed numeric list from raw
  // CLI-style output.
  parseArrayOutput(output) {
    const arrayPattern = /\[([^\]]+)\]/g;
    const matches = [...output.matchAll(arrayPattern)];
    if (matches.length === 0) {
      throw new Error("No array embeddings found in output");
    }
    const embeddings = matches.map((match) => {
      const values = match[1]?.split(",").map((v) => v.trim()) || [];
      return values.map((v) => parseFloat(v)).filter((v) => !isNaN(v));
    }).filter((embedding) => embedding.length > 0);
    return embeddings;
  }
};
|
|
797
|
+
|
|
798
|
+
// src/factory/EmbeddingFactory.ts
var logger7 = Logger.createModuleLogger("factory");
var EmbeddingFactory = class {
  /**
   * Look up the provider class registered under `config2.provider` and
   * construct it with the given configuration.
   * @param {object} config2 - Provider configuration ({ provider, model, apiKey? }).
   * @returns {object} A freshly constructed provider instance.
   * @throws {Error} When `config2.provider` is not a registered provider name.
   */
  static create(config2) {
    logger7.info(`Creating provider: ${config2.provider}`);
    const ProviderClass = this.providers.get(config2.provider);
    if (ProviderClass === void 0) {
      throw new Error(`Unsupported provider: ${config2.provider}`);
    }
    return new ProviderClass(config2);
  }
  /**
   * @returns {string[]} The registered provider identifiers.
   */
  static getSupportedProviders() {
    return [...this.providers.keys()];
  }
};
// Registry of provider name -> provider class. A plain static assignment is
// equivalent to the lowered `__publicField` define (enumerable, configurable,
// writable) for a key not already present on the class.
EmbeddingFactory.providers = /* @__PURE__ */ new Map([
  ["openai", OpenAIProvider],
  ["gemini", GeminiProvider],
  ["claude", ClaudeProvider],
  ["mistral", MistralProvider],
  ["deepseek", DeepSeekProvider],
  ["llamacpp", LlamaCppProvider]
  // Local embeddings with llama.cpp
]);
|
|
822
|
+
|
|
823
|
+
// main.ts
// Load .env at module load so provider API keys are available via process.env
// before any provider configuration is built.
dotenv.config();
var logger8 = Logger.createModuleLogger("main");
|
|
826
|
+
/**
 * Create the configured provider, verify it reports ready, and embed the
 * input with it. Failures are logged, then rethrown unchanged.
 *
 * @param {object} config2 - Provider configuration ({ provider, model, apiKey? }).
 * @param {string|string[]} input - One text, or a batch of texts.
 * @returns {Promise<*>} The provider's embedding result (single or batch).
 * @throws {Error} When the provider is unsupported, not ready, or embedding fails.
 */
async function embed(config2, input) {
  try {
    logger8.info(`Starting embedding with provider: ${config2.provider}`);
    const provider = EmbeddingFactory.create(config2);
    if (!await provider.isReady()) {
      throw new Error(`Provider ${config2.provider} is not ready`);
    }
    const isBatch = Array.isArray(input);
    if (isBatch) {
      logger8.debug(`Processing batch of ${input.length} items`);
    } else {
      logger8.debug(`Processing single item`);
    }
    // Await inside the try so a rejected embed is logged below before rethrow.
    return isBatch ? await provider.embedBatch(input) : await provider.embed(input);
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    logger8.error(`Embedding failed: ${errorMessage}`);
    throw error;
  }
}
|
|
847
|
+
/**
 * Try a fixed list of providers in priority order — local llama.cpp first,
 * then each hosted API whose key is present in the environment — and return
 * the first successful embedding.
 *
 * @param {string|string[]} input - One text, or a batch of texts.
 * @returns {Promise<*>} Embedding result from the first provider that works.
 * @throws {Error} When every candidate provider is unconfigured or fails.
 */
async function autoEmbed(input) {
  logger8.info("Auto-detecting best provider...");
  const candidates = [
    { provider: "llamacpp", model: "nomic-embed-text-v1.5.Q4_K_M.gguf" },
    // Local & free (llama.cpp)
    { provider: "openai", model: "text-embedding-3-small", apiKey: process.env.OPENAI_API_KEY || void 0 },
    { provider: "gemini", model: "gemini-embedding-001", apiKey: process.env.GOOGLE_GENERATIVE_AI_API_KEY || void 0 },
    { provider: "mistral", model: "mistral-embed", apiKey: process.env.MISTRAL_API_KEY || void 0 },
    { provider: "deepseek", model: "deepseek-chat", apiKey: process.env.DEEPSEEK_API_KEY || void 0 }
  ];
  for (const candidate of candidates) {
    // Skip hosted providers with no API key configured; llamacpp needs none.
    if (candidate.provider !== "llamacpp" && !candidate.apiKey) {
      continue;
    }
    try {
      logger8.info(`Trying provider: ${candidate.provider}`);
      const cleanConfig = {
        provider: candidate.provider,
        model: candidate.model
      };
      if (candidate.apiKey) {
        cleanConfig.apiKey = candidate.apiKey;
      }
      return await embed(cleanConfig, input);
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      logger8.warn(`Provider ${candidate.provider} failed: ${errorMessage}`);
    }
  }
  throw new Error("No available embedding provider found");
}
|
|
878
|
+
/**
 * List the provider identifiers that EmbeddingFactory can construct.
 * @returns {string[]} Supported provider names (e.g. "openai", "llamacpp").
 */
function getSupportedProviders() {
  const names = EmbeddingFactory.getSupportedProviders();
  return names;
}
|
|
881
|
+
/**
 * Construct a provider instance for the given configuration without any
 * readiness check (thin pass-through to EmbeddingFactory.create).
 * @param {object} config2 - Provider configuration ({ provider, model, apiKey? }).
 * @returns {object} The constructed provider instance.
 * @throws {Error} When `config2.provider` is not a registered provider name.
 */
function createProvider(config2) {
  const provider = EmbeddingFactory.create(config2);
  return provider;
}
|
|
884
|
+
|
|
885
|
+
// index.ts
// NOTE(review): this bundle reports "1.0.0" while the package registry
// publishes 0.1.0 — confirm which version string is intended.
var VERSION = "1.0.0";
|
|
887
|
+
/**
 * @returns {string} The library version string (see VERSION above).
 */
function getVersion() {
  const version = VERSION;
  return version;
}
|
|
890
|
+
// Static metadata describing this library build.
// NOTE(review): name/homepage/repository say "embedbox" while the package is
// published as "vecbox" — confirm the intended branding before relying on
// these fields.
var LIB_INFO = {
  name: "embedbox",
  version: VERSION,
  description: "A minimal and powerful embedding library",
  homepage: "https://embedbox.dev",
  repository: "https://github.com/embedbox/embedbox.git",
  supportedProviders: [
    "openai",
    "gemini",
    "claude",
    "mistral",
    "deepseek",
    "llamacpp"
  ]
};
|
|
905
|
+
// Annotate the CommonJS export names for ESM import in node:
// `0 &&` makes this assignment dead code at runtime; it exists only so that
// tooling (and Node's static CJS export detection) can discover the named
// exports of this CommonJS bundle when it is `import`ed from ESM.
0 && (module.exports = {
  EmbeddingFactory,
  EmbeddingProvider,
  LIB_INFO,
  VERSION,
  autoEmbed,
  createProvider,
  embed,
  getSupportedProviders,
  getVersion
});
//# sourceMappingURL=index.cjs.map
|