@digilogiclabs/platform-core 1.1.1 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +583 -545
- package/dist/{ConsoleEmail-CYPpn2sR.d.mts → ConsoleEmail-hUDFsKoA.d.mts} +1128 -40
- package/dist/{ConsoleEmail-CYPpn2sR.d.ts → ConsoleEmail-hUDFsKoA.d.ts} +1128 -40
- package/dist/index.d.mts +2586 -2278
- package/dist/index.d.ts +2586 -2278
- package/dist/index.js +21997 -12204
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +21769 -12012
- package/dist/index.mjs.map +1 -1
- package/dist/migrate.js +1158 -0
- package/dist/migrate.js.map +1 -0
- package/dist/migrations/index.js +33 -13
- package/dist/migrations/index.js.map +1 -1
- package/dist/migrations/index.mjs +33 -13
- package/dist/migrations/index.mjs.map +1 -1
- package/dist/testing.d.mts +2 -2
- package/dist/testing.d.ts +2 -2
- package/dist/testing.js +1196 -24
- package/dist/testing.js.map +1 -1
- package/dist/testing.mjs +1198 -24
- package/dist/testing.mjs.map +1 -1
- package/package.json +40 -9
package/dist/testing.js
CHANGED
|
@@ -3,6 +3,9 @@ var __defProp = Object.defineProperty;
|
|
|
3
3
|
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
4
|
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
5
|
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __esm = (fn, res) => function __init() {
|
|
7
|
+
return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
|
|
8
|
+
};
|
|
6
9
|
var __export = (target, all) => {
|
|
7
10
|
for (var name in all)
|
|
8
11
|
__defProp(target, name, { get: all[name], enumerable: true });
|
|
@@ -17,6 +20,1016 @@ var __copyProps = (to, from, except, desc) => {
|
|
|
17
20
|
};
|
|
18
21
|
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
22
|
|
|
23
|
+
// src/interfaces/IAI.ts
|
|
24
|
+
var MemoryAI;
|
|
25
|
+
var init_IAI = __esm({
|
|
26
|
+
"src/interfaces/IAI.ts"() {
|
|
27
|
+
"use strict";
|
|
28
|
+
MemoryAI = class {
|
|
29
|
+
constructor(config = {}) {
|
|
30
|
+
this.config = config;
|
|
31
|
+
this.models = config.models || [
|
|
32
|
+
{
|
|
33
|
+
modelId: "gpt-4",
|
|
34
|
+
provider: "openai",
|
|
35
|
+
capabilities: ["chat", "completion"],
|
|
36
|
+
maxContextTokens: 128e3,
|
|
37
|
+
maxOutputTokens: 4096,
|
|
38
|
+
inputCostPer1K: 0.03,
|
|
39
|
+
outputCostPer1K: 0.06,
|
|
40
|
+
supportsStreaming: true,
|
|
41
|
+
supportsTools: true,
|
|
42
|
+
supportsVision: true
|
|
43
|
+
},
|
|
44
|
+
{
|
|
45
|
+
modelId: "claude-3-opus",
|
|
46
|
+
provider: "anthropic",
|
|
47
|
+
capabilities: ["chat"],
|
|
48
|
+
maxContextTokens: 2e5,
|
|
49
|
+
maxOutputTokens: 4096,
|
|
50
|
+
inputCostPer1K: 0.015,
|
|
51
|
+
outputCostPer1K: 0.075,
|
|
52
|
+
supportsStreaming: true,
|
|
53
|
+
supportsTools: true,
|
|
54
|
+
supportsVision: true
|
|
55
|
+
},
|
|
56
|
+
{
|
|
57
|
+
modelId: "text-embedding-3-small",
|
|
58
|
+
provider: "openai",
|
|
59
|
+
capabilities: ["embedding"],
|
|
60
|
+
maxContextTokens: 8191,
|
|
61
|
+
maxOutputTokens: 0,
|
|
62
|
+
inputCostPer1K: 2e-5,
|
|
63
|
+
outputCostPer1K: 0,
|
|
64
|
+
supportsStreaming: false,
|
|
65
|
+
supportsTools: false,
|
|
66
|
+
supportsVision: false
|
|
67
|
+
}
|
|
68
|
+
];
|
|
69
|
+
}
|
|
70
|
+
models = [];
|
|
71
|
+
responses = /* @__PURE__ */ new Map();
|
|
72
|
+
embeddings = /* @__PURE__ */ new Map();
|
|
73
|
+
requestLog = [];
|
|
74
|
+
// ─────────────────────────────────────────────────────────────
|
|
75
|
+
// Test Helpers
|
|
76
|
+
// ─────────────────────────────────────────────────────────────
|
|
77
|
+
setResponse(key, response) {
|
|
78
|
+
this.responses.set(key, response);
|
|
79
|
+
}
|
|
80
|
+
setEmbedding(text, embedding) {
|
|
81
|
+
this.embeddings.set(text, embedding);
|
|
82
|
+
}
|
|
83
|
+
getRequestLog() {
|
|
84
|
+
return [...this.requestLog];
|
|
85
|
+
}
|
|
86
|
+
clearRequestLog() {
|
|
87
|
+
this.requestLog = [];
|
|
88
|
+
}
|
|
89
|
+
// ─────────────────────────────────────────────────────────────
|
|
90
|
+
// Chat Operations
|
|
91
|
+
// ─────────────────────────────────────────────────────────────
|
|
92
|
+
async chat(request) {
|
|
93
|
+
this.requestLog.push({ type: "chat", request, timestamp: /* @__PURE__ */ new Date() });
|
|
94
|
+
const model = request.model || this.config.defaultChatModel || "gpt-4";
|
|
95
|
+
const lastMessage = request.messages[request.messages.length - 1];
|
|
96
|
+
const key = `${model}:${lastMessage?.content}`;
|
|
97
|
+
if (this.responses.has(key)) {
|
|
98
|
+
return this.responses.get(key);
|
|
99
|
+
}
|
|
100
|
+
const response = {
|
|
101
|
+
id: `chatcmpl-${Date.now()}`,
|
|
102
|
+
model,
|
|
103
|
+
provider: "openai",
|
|
104
|
+
choices: [
|
|
105
|
+
{
|
|
106
|
+
index: 0,
|
|
107
|
+
message: {
|
|
108
|
+
role: "assistant",
|
|
109
|
+
content: `Mock response to: ${lastMessage?.content || "empty"}`
|
|
110
|
+
},
|
|
111
|
+
finishReason: "stop"
|
|
112
|
+
}
|
|
113
|
+
],
|
|
114
|
+
usage: {
|
|
115
|
+
promptTokens: this.estimateTokensSync(
|
|
116
|
+
request.messages.map((m) => m.content).join(" ")
|
|
117
|
+
),
|
|
118
|
+
completionTokens: 20,
|
|
119
|
+
totalTokens: 0,
|
|
120
|
+
estimatedCostUsd: 0
|
|
121
|
+
},
|
|
122
|
+
created: /* @__PURE__ */ new Date(),
|
|
123
|
+
finishReason: "stop"
|
|
124
|
+
};
|
|
125
|
+
response.usage.totalTokens = response.usage.promptTokens + response.usage.completionTokens;
|
|
126
|
+
response.usage.estimatedCostUsd = this.calculateCost(model, response.usage);
|
|
127
|
+
return response;
|
|
128
|
+
}
|
|
129
|
+
async *chatStream(request) {
|
|
130
|
+
this.requestLog.push({
|
|
131
|
+
type: "chatStream",
|
|
132
|
+
request,
|
|
133
|
+
timestamp: /* @__PURE__ */ new Date()
|
|
134
|
+
});
|
|
135
|
+
const model = request.model || this.config.defaultChatModel || "gpt-4";
|
|
136
|
+
const lastMessage = request.messages[request.messages.length - 1];
|
|
137
|
+
const responseText = `Mock streaming response to: ${lastMessage?.content || "empty"}`;
|
|
138
|
+
const words = responseText.split(" ");
|
|
139
|
+
for (let i = 0; i < words.length; i++) {
|
|
140
|
+
yield {
|
|
141
|
+
id: `chatcmpl-${Date.now()}`,
|
|
142
|
+
model,
|
|
143
|
+
provider: "openai",
|
|
144
|
+
delta: {
|
|
145
|
+
content: (i > 0 ? " " : "") + words[i],
|
|
146
|
+
role: i === 0 ? "assistant" : void 0
|
|
147
|
+
},
|
|
148
|
+
finishReason: i === words.length - 1 ? "stop" : void 0
|
|
149
|
+
};
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
async chatWithCallback(request, callback) {
|
|
153
|
+
let fullContent = "";
|
|
154
|
+
for await (const chunk of this.chatStream(request)) {
|
|
155
|
+
await callback(chunk);
|
|
156
|
+
if (chunk.delta.content) {
|
|
157
|
+
fullContent += chunk.delta.content;
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
const model = request.model || this.config.defaultChatModel || "gpt-4";
|
|
161
|
+
return {
|
|
162
|
+
id: `chatcmpl-${Date.now()}`,
|
|
163
|
+
model,
|
|
164
|
+
provider: "openai",
|
|
165
|
+
choices: [
|
|
166
|
+
{
|
|
167
|
+
index: 0,
|
|
168
|
+
message: { role: "assistant", content: fullContent },
|
|
169
|
+
finishReason: "stop"
|
|
170
|
+
}
|
|
171
|
+
],
|
|
172
|
+
usage: {
|
|
173
|
+
promptTokens: this.estimateTokensSync(
|
|
174
|
+
request.messages.map((m) => m.content).join(" ")
|
|
175
|
+
),
|
|
176
|
+
completionTokens: this.estimateTokensSync(fullContent),
|
|
177
|
+
totalTokens: 0,
|
|
178
|
+
estimatedCostUsd: 0
|
|
179
|
+
},
|
|
180
|
+
created: /* @__PURE__ */ new Date(),
|
|
181
|
+
finishReason: "stop"
|
|
182
|
+
};
|
|
183
|
+
}
|
|
184
|
+
// ─────────────────────────────────────────────────────────────
|
|
185
|
+
// Completion Operations
|
|
186
|
+
// ─────────────────────────────────────────────────────────────
|
|
187
|
+
async complete(request) {
|
|
188
|
+
this.requestLog.push({ type: "complete", request, timestamp: /* @__PURE__ */ new Date() });
|
|
189
|
+
const model = request.model || this.config.defaultCompletionModel || "gpt-4";
|
|
190
|
+
const key = `completion:${model}:${request.prompt}`;
|
|
191
|
+
if (this.responses.has(key)) {
|
|
192
|
+
return this.responses.get(key);
|
|
193
|
+
}
|
|
194
|
+
const response = {
|
|
195
|
+
id: `cmpl-${Date.now()}`,
|
|
196
|
+
model,
|
|
197
|
+
provider: "openai",
|
|
198
|
+
text: `Mock completion of: ${request.prompt.substring(0, 50)}...`,
|
|
199
|
+
usage: {
|
|
200
|
+
promptTokens: this.estimateTokensSync(request.prompt),
|
|
201
|
+
completionTokens: 20,
|
|
202
|
+
totalTokens: 0,
|
|
203
|
+
estimatedCostUsd: 0
|
|
204
|
+
},
|
|
205
|
+
created: /* @__PURE__ */ new Date(),
|
|
206
|
+
finishReason: "stop"
|
|
207
|
+
};
|
|
208
|
+
response.usage.totalTokens = response.usage.promptTokens + response.usage.completionTokens;
|
|
209
|
+
response.usage.estimatedCostUsd = this.calculateCost(model, response.usage);
|
|
210
|
+
return response;
|
|
211
|
+
}
|
|
212
|
+
async *completeStream(request) {
|
|
213
|
+
this.requestLog.push({
|
|
214
|
+
type: "completeStream",
|
|
215
|
+
request,
|
|
216
|
+
timestamp: /* @__PURE__ */ new Date()
|
|
217
|
+
});
|
|
218
|
+
const model = request.model || this.config.defaultCompletionModel || "gpt-4";
|
|
219
|
+
const responseText = `Mock streaming completion of: ${request.prompt.substring(0, 30)}...`;
|
|
220
|
+
const words = responseText.split(" ");
|
|
221
|
+
for (let i = 0; i < words.length; i++) {
|
|
222
|
+
yield {
|
|
223
|
+
id: `cmpl-${Date.now()}`,
|
|
224
|
+
model,
|
|
225
|
+
provider: "openai",
|
|
226
|
+
delta: {
|
|
227
|
+
content: (i > 0 ? " " : "") + words[i]
|
|
228
|
+
},
|
|
229
|
+
finishReason: i === words.length - 1 ? "stop" : void 0
|
|
230
|
+
};
|
|
231
|
+
}
|
|
232
|
+
}
|
|
233
|
+
// ─────────────────────────────────────────────────────────────
|
|
234
|
+
// Embedding Operations
|
|
235
|
+
// ─────────────────────────────────────────────────────────────
|
|
236
|
+
async embed(request) {
|
|
237
|
+
this.requestLog.push({ type: "embed", request, timestamp: /* @__PURE__ */ new Date() });
|
|
238
|
+
const model = request.model || this.config.defaultEmbeddingModel || "text-embedding-3-small";
|
|
239
|
+
const inputs = Array.isArray(request.input) ? request.input : [request.input];
|
|
240
|
+
const dimensions = request.dimensions || 1536;
|
|
241
|
+
const embeddings = inputs.map((text) => {
|
|
242
|
+
if (this.embeddings.has(text)) {
|
|
243
|
+
return this.embeddings.get(text);
|
|
244
|
+
}
|
|
245
|
+
return this.generateMockEmbedding(text, dimensions);
|
|
246
|
+
});
|
|
247
|
+
return {
|
|
248
|
+
id: `emb-${Date.now()}`,
|
|
249
|
+
model,
|
|
250
|
+
provider: "openai",
|
|
251
|
+
embeddings,
|
|
252
|
+
usage: {
|
|
253
|
+
promptTokens: inputs.reduce(
|
|
254
|
+
(sum, t) => sum + this.estimateTokensSync(t),
|
|
255
|
+
0
|
|
256
|
+
),
|
|
257
|
+
completionTokens: 0,
|
|
258
|
+
totalTokens: 0,
|
|
259
|
+
estimatedCostUsd: 0
|
|
260
|
+
},
|
|
261
|
+
created: /* @__PURE__ */ new Date()
|
|
262
|
+
};
|
|
263
|
+
}
|
|
264
|
+
async similarity(text1, text2, model) {
|
|
265
|
+
const response = await this.embed({ input: [text1, text2], model });
|
|
266
|
+
const [emb1, emb2] = response.embeddings;
|
|
267
|
+
return this.cosineSimilarity(emb1, emb2);
|
|
268
|
+
}
|
|
269
|
+
// ─────────────────────────────────────────────────────────────
|
|
270
|
+
// Model Management
|
|
271
|
+
// ─────────────────────────────────────────────────────────────
|
|
272
|
+
async listModels() {
|
|
273
|
+
return [...this.models];
|
|
274
|
+
}
|
|
275
|
+
async getModel(modelId) {
|
|
276
|
+
return this.models.find((m) => m.modelId === modelId) || null;
|
|
277
|
+
}
|
|
278
|
+
async supportsCapability(modelId, capability) {
|
|
279
|
+
const model = await this.getModel(modelId);
|
|
280
|
+
return model?.capabilities.includes(capability) ?? false;
|
|
281
|
+
}
|
|
282
|
+
async estimateTokens(text, _model) {
|
|
283
|
+
return this.estimateTokensSync(text);
|
|
284
|
+
}
|
|
285
|
+
async estimateCost(request) {
|
|
286
|
+
let model;
|
|
287
|
+
let inputTokens;
|
|
288
|
+
if ("messages" in request) {
|
|
289
|
+
model = request.model || this.config.defaultChatModel || "gpt-4";
|
|
290
|
+
inputTokens = this.estimateTokensSync(
|
|
291
|
+
request.messages.map((m) => m.content).join(" ")
|
|
292
|
+
);
|
|
293
|
+
} else if ("prompt" in request) {
|
|
294
|
+
model = request.model || this.config.defaultCompletionModel || "gpt-4";
|
|
295
|
+
inputTokens = this.estimateTokensSync(request.prompt);
|
|
296
|
+
} else {
|
|
297
|
+
model = request.model || this.config.defaultEmbeddingModel || "text-embedding-3-small";
|
|
298
|
+
const inputs = Array.isArray(request.input) ? request.input : [request.input];
|
|
299
|
+
inputTokens = inputs.reduce(
|
|
300
|
+
(sum, t) => sum + this.estimateTokensSync(t),
|
|
301
|
+
0
|
|
302
|
+
);
|
|
303
|
+
}
|
|
304
|
+
const modelConfig = await this.getModel(model);
|
|
305
|
+
if (!modelConfig) return 0;
|
|
306
|
+
const estimatedOutputTokens = "messages" in request || "prompt" in request ? 100 : 0;
|
|
307
|
+
return inputTokens / 1e3 * modelConfig.inputCostPer1K + estimatedOutputTokens / 1e3 * modelConfig.outputCostPer1K;
|
|
308
|
+
}
|
|
309
|
+
// ─────────────────────────────────────────────────────────────
|
|
310
|
+
// Health & Status
|
|
311
|
+
// ─────────────────────────────────────────────────────────────
|
|
312
|
+
async healthCheck() {
|
|
313
|
+
return {
|
|
314
|
+
healthy: true,
|
|
315
|
+
providers: {
|
|
316
|
+
openai: { available: true, latencyMs: 50 },
|
|
317
|
+
anthropic: { available: true, latencyMs: 60 },
|
|
318
|
+
google: { available: true, latencyMs: 55 },
|
|
319
|
+
azure: { available: false, error: "Not configured" },
|
|
320
|
+
bedrock: { available: false, error: "Not configured" },
|
|
321
|
+
custom: { available: false, error: "Not configured" }
|
|
322
|
+
}
|
|
323
|
+
};
|
|
324
|
+
}
|
|
325
|
+
// ─────────────────────────────────────────────────────────────
|
|
326
|
+
// Private Helpers
|
|
327
|
+
// ─────────────────────────────────────────────────────────────
|
|
328
|
+
estimateTokensSync(text) {
|
|
329
|
+
return Math.ceil(text.length / 4);
|
|
330
|
+
}
|
|
331
|
+
calculateCost(modelId, usage) {
|
|
332
|
+
const model = this.models.find((m) => m.modelId === modelId);
|
|
333
|
+
if (!model) return 0;
|
|
334
|
+
return usage.promptTokens / 1e3 * model.inputCostPer1K + usage.completionTokens / 1e3 * model.outputCostPer1K;
|
|
335
|
+
}
|
|
336
|
+
generateMockEmbedding(text, dimensions) {
|
|
337
|
+
const embedding = [];
|
|
338
|
+
let hash = 0;
|
|
339
|
+
for (let i = 0; i < text.length; i++) {
|
|
340
|
+
hash = (hash << 5) - hash + text.charCodeAt(i);
|
|
341
|
+
hash = hash & hash;
|
|
342
|
+
}
|
|
343
|
+
for (let i = 0; i < dimensions; i++) {
|
|
344
|
+
const seed = hash + i * 31;
|
|
345
|
+
embedding.push(Math.sin(seed) * 0.5);
|
|
346
|
+
}
|
|
347
|
+
const magnitude = Math.sqrt(embedding.reduce((sum, v) => sum + v * v, 0));
|
|
348
|
+
return embedding.map((v) => v / magnitude);
|
|
349
|
+
}
|
|
350
|
+
cosineSimilarity(a, b) {
|
|
351
|
+
let dotProduct = 0;
|
|
352
|
+
let normA = 0;
|
|
353
|
+
let normB = 0;
|
|
354
|
+
for (let i = 0; i < a.length; i++) {
|
|
355
|
+
dotProduct += a[i] * b[i];
|
|
356
|
+
normA += a[i] * a[i];
|
|
357
|
+
normB += b[i] * b[i];
|
|
358
|
+
}
|
|
359
|
+
return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
|
|
360
|
+
}
|
|
361
|
+
};
|
|
362
|
+
}
|
|
363
|
+
});
|
|
364
|
+
|
|
365
|
+
// src/interfaces/IRAG.ts
|
|
366
|
+
var ChunkingPresets, MemoryRAG;
|
|
367
|
+
var init_IRAG = __esm({
|
|
368
|
+
"src/interfaces/IRAG.ts"() {
|
|
369
|
+
"use strict";
|
|
370
|
+
ChunkingPresets = {
|
|
371
|
+
default: {
|
|
372
|
+
strategy: "recursive",
|
|
373
|
+
chunkSize: 512,
|
|
374
|
+
chunkOverlap: 50,
|
|
375
|
+
minChunkSize: 100,
|
|
376
|
+
separators: ["\n\n", "\n", ". ", " "]
|
|
377
|
+
},
|
|
378
|
+
small: {
|
|
379
|
+
strategy: "sentence",
|
|
380
|
+
chunkSize: 256,
|
|
381
|
+
chunkOverlap: 25,
|
|
382
|
+
minChunkSize: 50
|
|
383
|
+
},
|
|
384
|
+
large: {
|
|
385
|
+
strategy: "paragraph",
|
|
386
|
+
chunkSize: 1024,
|
|
387
|
+
chunkOverlap: 100,
|
|
388
|
+
minChunkSize: 200
|
|
389
|
+
},
|
|
390
|
+
code: {
|
|
391
|
+
strategy: "recursive",
|
|
392
|
+
chunkSize: 1e3,
|
|
393
|
+
chunkOverlap: 100,
|
|
394
|
+
separators: ["\n\nclass ", "\n\nfunction ", "\n\ndef ", "\n\n", "\n"]
|
|
395
|
+
}
|
|
396
|
+
};
|
|
397
|
+
MemoryRAG = class {
|
|
398
|
+
constructor(config = {}) {
|
|
399
|
+
this.config = config;
|
|
400
|
+
}
|
|
401
|
+
collections = /* @__PURE__ */ new Map();
|
|
402
|
+
documents = /* @__PURE__ */ new Map();
|
|
403
|
+
chunks = /* @__PURE__ */ new Map();
|
|
404
|
+
pipelines = /* @__PURE__ */ new Map();
|
|
405
|
+
embeddings = /* @__PURE__ */ new Map();
|
|
406
|
+
// ─────────────────────────────────────────────────────────────
|
|
407
|
+
// Collection Management
|
|
408
|
+
// ─────────────────────────────────────────────────────────────
|
|
409
|
+
async createCollection(options) {
|
|
410
|
+
const now = /* @__PURE__ */ new Date();
|
|
411
|
+
const collection = {
|
|
412
|
+
name: options.name,
|
|
413
|
+
description: options.description,
|
|
414
|
+
embeddingModel: options.embeddingModel || this.config.defaultEmbeddingModel || "text-embedding-3-small",
|
|
415
|
+
dimensions: options.dimensions || 1536,
|
|
416
|
+
distanceMetric: options.distanceMetric || "cosine",
|
|
417
|
+
chunkingConfig: {
|
|
418
|
+
...ChunkingPresets.default,
|
|
419
|
+
...this.config.defaultChunkingConfig,
|
|
420
|
+
...options.chunkingConfig
|
|
421
|
+
},
|
|
422
|
+
documentCount: 0,
|
|
423
|
+
chunkCount: 0,
|
|
424
|
+
totalTokens: 0,
|
|
425
|
+
createdAt: now,
|
|
426
|
+
updatedAt: now
|
|
427
|
+
};
|
|
428
|
+
this.collections.set(options.name, collection);
|
|
429
|
+
return collection;
|
|
430
|
+
}
|
|
431
|
+
async getCollection(name) {
|
|
432
|
+
return this.collections.get(name) || null;
|
|
433
|
+
}
|
|
434
|
+
async listCollections(tenantId) {
|
|
435
|
+
const collections = Array.from(this.collections.values());
|
|
436
|
+
return collections;
|
|
437
|
+
}
|
|
438
|
+
async deleteCollection(name) {
|
|
439
|
+
for (const [id, doc] of this.documents) {
|
|
440
|
+
if (doc.collection === name) {
|
|
441
|
+
for (const [chunkId, chunk] of this.chunks) {
|
|
442
|
+
if (chunk.documentId === id) {
|
|
443
|
+
this.chunks.delete(chunkId);
|
|
444
|
+
}
|
|
445
|
+
}
|
|
446
|
+
this.documents.delete(id);
|
|
447
|
+
}
|
|
448
|
+
}
|
|
449
|
+
this.collections.delete(name);
|
|
450
|
+
}
|
|
451
|
+
async getCollectionStats(name) {
|
|
452
|
+
const collection = await this.getCollection(name);
|
|
453
|
+
if (!collection) {
|
|
454
|
+
throw new Error(`Collection not found: ${name}`);
|
|
455
|
+
}
|
|
456
|
+
const docs = Array.from(this.documents.values()).filter(
|
|
457
|
+
(d) => d.collection === name
|
|
458
|
+
);
|
|
459
|
+
const docChunks = Array.from(this.chunks.values()).filter(
|
|
460
|
+
(c) => c.collection === name
|
|
461
|
+
);
|
|
462
|
+
const totalTokens = docChunks.reduce((sum, c) => sum + c.tokenCount, 0);
|
|
463
|
+
return {
|
|
464
|
+
documentCount: docs.length,
|
|
465
|
+
chunkCount: docChunks.length,
|
|
466
|
+
totalTokens,
|
|
467
|
+
averageChunkSize: docChunks.length > 0 ? totalTokens / docChunks.length : 0,
|
|
468
|
+
storageBytes: docChunks.reduce((sum, c) => sum + c.content.length, 0)
|
|
469
|
+
};
|
|
470
|
+
}
|
|
471
|
+
// ─────────────────────────────────────────────────────────────
|
|
472
|
+
// Document Management
|
|
473
|
+
// ─────────────────────────────────────────────────────────────
|
|
474
|
+
async ingest(collection, documents, options) {
|
|
475
|
+
const startTime = Date.now();
|
|
476
|
+
const results = [];
|
|
477
|
+
for (const doc of documents) {
|
|
478
|
+
const result = await this.ingestOne(
|
|
479
|
+
collection,
|
|
480
|
+
doc,
|
|
481
|
+
options
|
|
482
|
+
);
|
|
483
|
+
results.push(result);
|
|
484
|
+
}
|
|
485
|
+
return {
|
|
486
|
+
total: documents.length,
|
|
487
|
+
successful: results.filter((r) => r.status === "indexed").length,
|
|
488
|
+
failed: results.filter((r) => r.status === "failed").length,
|
|
489
|
+
results,
|
|
490
|
+
totalProcessingTimeMs: Date.now() - startTime
|
|
491
|
+
};
|
|
492
|
+
}
|
|
493
|
+
async ingestOne(collection, document, options) {
|
|
494
|
+
const startTime = Date.now();
|
|
495
|
+
const docId = `doc_${Date.now()}_${Math.random().toString(36).substring(7)}`;
|
|
496
|
+
const now = /* @__PURE__ */ new Date();
|
|
497
|
+
try {
|
|
498
|
+
const col = await this.getCollection(collection);
|
|
499
|
+
if (!col) {
|
|
500
|
+
throw new Error(`Collection not found: ${collection}`);
|
|
501
|
+
}
|
|
502
|
+
const doc = {
|
|
503
|
+
...document,
|
|
504
|
+
id: docId,
|
|
505
|
+
collection,
|
|
506
|
+
status: "processing",
|
|
507
|
+
tenantId: options?.tenantId || document.tenantId,
|
|
508
|
+
metadata: { ...document.metadata, ...options?.metadata },
|
|
509
|
+
createdAt: now,
|
|
510
|
+
updatedAt: now
|
|
511
|
+
};
|
|
512
|
+
this.documents.set(docId, doc);
|
|
513
|
+
const chunkingConfig = { ...col.chunkingConfig, ...options?.chunking };
|
|
514
|
+
const docChunks = this.chunkDocument(doc, chunkingConfig);
|
|
515
|
+
if (options?.generateEmbeddings !== false) {
|
|
516
|
+
for (const chunk of docChunks) {
|
|
517
|
+
chunk.embedding = await this.generateMockEmbedding(
|
|
518
|
+
chunk.content,
|
|
519
|
+
col.dimensions
|
|
520
|
+
);
|
|
521
|
+
this.embeddings.set(chunk.id, chunk.embedding);
|
|
522
|
+
}
|
|
523
|
+
}
|
|
524
|
+
for (const chunk of docChunks) {
|
|
525
|
+
this.chunks.set(chunk.id, chunk);
|
|
526
|
+
}
|
|
527
|
+
doc.status = "indexed";
|
|
528
|
+
doc.chunkCount = docChunks.length;
|
|
529
|
+
doc.tokenCount = docChunks.reduce((sum, c) => sum + c.tokenCount, 0);
|
|
530
|
+
this.documents.set(docId, doc);
|
|
531
|
+
col.documentCount++;
|
|
532
|
+
col.chunkCount += docChunks.length;
|
|
533
|
+
col.totalTokens += doc.tokenCount;
|
|
534
|
+
col.updatedAt = /* @__PURE__ */ new Date();
|
|
535
|
+
return {
|
|
536
|
+
documentId: docId,
|
|
537
|
+
status: "indexed",
|
|
538
|
+
chunkCount: doc.chunkCount,
|
|
539
|
+
tokenCount: doc.tokenCount,
|
|
540
|
+
processingTimeMs: Date.now() - startTime
|
|
541
|
+
};
|
|
542
|
+
} catch (error) {
|
|
543
|
+
const doc = this.documents.get(docId);
|
|
544
|
+
if (doc) {
|
|
545
|
+
doc.status = "failed";
|
|
546
|
+
doc.error = error instanceof Error ? error.message : "Unknown error";
|
|
547
|
+
}
|
|
548
|
+
return {
|
|
549
|
+
documentId: docId,
|
|
550
|
+
status: "failed",
|
|
551
|
+
chunkCount: 0,
|
|
552
|
+
tokenCount: 0,
|
|
553
|
+
error: error instanceof Error ? error.message : "Unknown error",
|
|
554
|
+
processingTimeMs: Date.now() - startTime
|
|
555
|
+
};
|
|
556
|
+
}
|
|
557
|
+
}
|
|
558
|
+
async getDocument(documentId) {
|
|
559
|
+
return this.documents.get(documentId) || null;
|
|
560
|
+
}
|
|
561
|
+
async listDocuments(collection, options) {
|
|
562
|
+
let docs = Array.from(this.documents.values()).filter(
|
|
563
|
+
(d) => d.collection === collection
|
|
564
|
+
);
|
|
565
|
+
if (options?.tenantId) {
|
|
566
|
+
docs = docs.filter((d) => d.tenantId === options.tenantId);
|
|
567
|
+
}
|
|
568
|
+
if (options?.status) {
|
|
569
|
+
docs = docs.filter((d) => d.status === options.status);
|
|
570
|
+
}
|
|
571
|
+
const total = docs.length;
|
|
572
|
+
const offset = options?.offset || 0;
|
|
573
|
+
const limit = options?.limit || 50;
|
|
574
|
+
return {
|
|
575
|
+
documents: docs.slice(offset, offset + limit),
|
|
576
|
+
total
|
|
577
|
+
};
|
|
578
|
+
}
|
|
579
|
+
async deleteDocument(documentId) {
|
|
580
|
+
const doc = await this.getDocument(documentId);
|
|
581
|
+
if (doc) {
|
|
582
|
+
for (const [chunkId, chunk] of this.chunks) {
|
|
583
|
+
if (chunk.documentId === documentId) {
|
|
584
|
+
this.chunks.delete(chunkId);
|
|
585
|
+
this.embeddings.delete(chunkId);
|
|
586
|
+
}
|
|
587
|
+
}
|
|
588
|
+
const collection = await this.getCollection(doc.collection);
|
|
589
|
+
if (collection) {
|
|
590
|
+
collection.documentCount--;
|
|
591
|
+
collection.chunkCount -= doc.chunkCount || 0;
|
|
592
|
+
collection.totalTokens -= doc.tokenCount || 0;
|
|
593
|
+
}
|
|
594
|
+
this.documents.delete(documentId);
|
|
595
|
+
}
|
|
596
|
+
}
|
|
597
|
+
async reprocessDocument(documentId, options) {
|
|
598
|
+
const doc = await this.getDocument(documentId);
|
|
599
|
+
if (!doc) {
|
|
600
|
+
throw new Error(`Document not found: ${documentId}`);
|
|
601
|
+
}
|
|
602
|
+
for (const [chunkId, chunk] of this.chunks) {
|
|
603
|
+
if (chunk.documentId === documentId) {
|
|
604
|
+
this.chunks.delete(chunkId);
|
|
605
|
+
this.embeddings.delete(chunkId);
|
|
606
|
+
}
|
|
607
|
+
}
|
|
608
|
+
return this.ingestOne(
|
|
609
|
+
doc.collection,
|
|
610
|
+
{
|
|
611
|
+
source: doc.source,
|
|
612
|
+
type: doc.type,
|
|
613
|
+
content: doc.content,
|
|
614
|
+
title: doc.title,
|
|
615
|
+
metadata: doc.metadata,
|
|
616
|
+
tenantId: doc.tenantId
|
|
617
|
+
},
|
|
618
|
+
options
|
|
619
|
+
);
|
|
620
|
+
}
|
|
621
|
+
// ─────────────────────────────────────────────────────────────
|
|
622
|
+
// Chunk Management
|
|
623
|
+
// ─────────────────────────────────────────────────────────────
|
|
624
|
+
async getChunks(documentId) {
|
|
625
|
+
return Array.from(this.chunks.values()).filter((c) => c.documentId === documentId).sort((a, b) => a.index - b.index);
|
|
626
|
+
}
|
|
627
|
+
async getChunk(chunkId) {
|
|
628
|
+
return this.chunks.get(chunkId) || null;
|
|
629
|
+
}
|
|
630
|
+
async updateChunkMetadata(chunkId, metadata) {
|
|
631
|
+
const chunk = await this.getChunk(chunkId);
|
|
632
|
+
if (!chunk) {
|
|
633
|
+
throw new Error(`Chunk not found: ${chunkId}`);
|
|
634
|
+
}
|
|
635
|
+
chunk.metadata = { ...chunk.metadata, ...metadata };
|
|
636
|
+
this.chunks.set(chunkId, chunk);
|
|
637
|
+
return chunk;
|
|
638
|
+
}
|
|
639
|
+
// ─────────────────────────────────────────────────────────────
|
|
640
|
+
// Search & Retrieval
|
|
641
|
+
// ─────────────────────────────────────────────────────────────
|
|
642
|
+
async search(query) {
|
|
643
|
+
const startTime = Date.now();
|
|
644
|
+
const mode = query.mode || this.config.defaultSearchMode || "vector";
|
|
645
|
+
const limit = query.limit || this.config.defaultLimit || 10;
|
|
646
|
+
const queryEmbedding = await this.generateMockEmbedding(query.query, 1536);
|
|
647
|
+
let chunks = Array.from(this.chunks.values()).filter(
|
|
648
|
+
(c) => c.collection === query.collection
|
|
649
|
+
);
|
|
650
|
+
if (query.tenantId) {
|
|
651
|
+
chunks = chunks.filter((c) => c.tenantId === query.tenantId);
|
|
652
|
+
}
|
|
653
|
+
if (query.filters) {
|
|
654
|
+
chunks = chunks.filter(
|
|
655
|
+
(c) => this.matchesFilters(c.metadata, query.filters)
|
|
656
|
+
);
|
|
657
|
+
}
|
|
658
|
+
let results = chunks.map((chunk) => {
|
|
659
|
+
const embedding = this.embeddings.get(chunk.id) || [];
|
|
660
|
+
const vectorScore = embedding.length > 0 ? this.cosineSimilarity(queryEmbedding, embedding) : 0;
|
|
661
|
+
const keywordScore = this.keywordScore(query.query, chunk.content);
|
|
662
|
+
let score;
|
|
663
|
+
switch (mode) {
|
|
664
|
+
case "vector":
|
|
665
|
+
score = vectorScore;
|
|
666
|
+
break;
|
|
667
|
+
case "keyword":
|
|
668
|
+
score = keywordScore;
|
|
669
|
+
break;
|
|
670
|
+
case "hybrid":
|
|
671
|
+
score = 0.7 * vectorScore + 0.3 * keywordScore;
|
|
672
|
+
break;
|
|
673
|
+
default:
|
|
674
|
+
score = vectorScore;
|
|
675
|
+
}
|
|
676
|
+
return { chunk, score };
|
|
677
|
+
});
|
|
678
|
+
if (query.minScore) {
|
|
679
|
+
results = results.filter((r) => r.score >= query.minScore);
|
|
680
|
+
}
|
|
681
|
+
results.sort((a, b) => b.score - a.score);
|
|
682
|
+
if (query.rerank) {
|
|
683
|
+
const candidates = results.slice(0, query.rerankCandidates || limit * 3);
|
|
684
|
+
results = candidates.sort((a, b) => {
|
|
685
|
+
const aRelevance = this.keywordScore(query.query, a.chunk.content);
|
|
686
|
+
const bRelevance = this.keywordScore(query.query, b.chunk.content);
|
|
687
|
+
return b.score + bRelevance - (a.score + aRelevance);
|
|
688
|
+
});
|
|
689
|
+
}
|
|
690
|
+
results = results.slice(0, limit);
|
|
691
|
+
if (query.includeDocumentContent) {
|
|
692
|
+
for (const result of results) {
|
|
693
|
+
result.document = await this.getDocument(result.chunk.documentId) || void 0;
|
|
694
|
+
}
|
|
695
|
+
}
|
|
696
|
+
for (const result of results) {
|
|
697
|
+
result.highlights = this.generateHighlights(
|
|
698
|
+
query.query,
|
|
699
|
+
result.chunk.content
|
|
700
|
+
);
|
|
701
|
+
}
|
|
702
|
+
return {
|
|
703
|
+
results,
|
|
704
|
+
query: query.query,
|
|
705
|
+
totalMatches: results.length,
|
|
706
|
+
searchTimeMs: Date.now() - startTime,
|
|
707
|
+
mode
|
|
708
|
+
};
|
|
709
|
+
}
|
|
710
|
+
async findSimilar(chunkId, options) {
|
|
711
|
+
const chunk = await this.getChunk(chunkId);
|
|
712
|
+
if (!chunk) {
|
|
713
|
+
throw new Error(`Chunk not found: ${chunkId}`);
|
|
714
|
+
}
|
|
715
|
+
const embedding = this.embeddings.get(chunkId);
|
|
716
|
+
if (!embedding) {
|
|
717
|
+
return [];
|
|
718
|
+
}
|
|
719
|
+
const collection = options?.collection || chunk.collection;
|
|
720
|
+
let chunks = Array.from(this.chunks.values()).filter(
|
|
721
|
+
(c) => c.collection === collection && c.id !== chunkId
|
|
722
|
+
);
|
|
723
|
+
const results = chunks.map((c) => {
|
|
724
|
+
const otherEmbedding = this.embeddings.get(c.id) || [];
|
|
725
|
+
const score = otherEmbedding.length > 0 ? this.cosineSimilarity(embedding, otherEmbedding) : 0;
|
|
726
|
+
return { chunk: c, score };
|
|
727
|
+
});
|
|
728
|
+
let filteredResults = results;
|
|
729
|
+
if (options?.minScore) {
|
|
730
|
+
filteredResults = results.filter((r) => r.score >= options.minScore);
|
|
731
|
+
}
|
|
732
|
+
filteredResults.sort((a, b) => b.score - a.score);
|
|
733
|
+
return filteredResults.slice(0, options?.limit || 10);
|
|
734
|
+
}
|
|
735
|
+
async multiSearch(queries) {
|
|
736
|
+
return Promise.all(queries.map((q) => this.search(q)));
|
|
737
|
+
}
|
|
738
|
+
// ─────────────────────────────────────────────────────────────
|
|
739
|
+
// Context Assembly
|
|
740
|
+
// ─────────────────────────────────────────────────────────────
|
|
741
|
+
// Build an LLM-ready context string from search results.
// Accepts either a raw result array or a response object carrying a
// `results` property. Optionally deduplicates near-identical chunks,
// re-sorts them, packs chunks up to a token budget, and renders them
// through chunk/context templates with optional [n] citations.
// Returns { context, chunks, tokenCount, sources, truncated }.
async assembleContext(results, config) {
  // Unwrap a SearchResponse-shaped object; plain arrays pass through.
  const resultArray = "results" in results ? results.results : results;
  // NOTE(review): `||` means an explicit maxTokens of 0 falls back to the
  // default 4000 — presumably intentional for this mock; confirm.
  const maxTokens = config?.maxTokens || 4e3;
  const chunkTemplate = config?.chunkTemplate || "{{content}}";
  const contextTemplate = config?.contextTemplate || "{{chunks}}";
  let chunks = [];
  let totalTokens = 0;
  const sources = [];
  // Tracks documents already credited so each appears once in `sources`.
  const seenDocs = /* @__PURE__ */ new Set();
  let processedResults = resultArray;
  if (config?.deduplicate) {
    const threshold = config.dedupeThreshold || 0.9;
    processedResults = this.deduplicateResults(resultArray, threshold);
  }
  if (config?.sortBy) {
    // Copy before sorting so the caller's array is not mutated.
    processedResults = [...processedResults].sort((a, b) => {
      switch (config.sortBy) {
        case "score":
          return b.score - a.score;
        case "document":
          return a.chunk.documentId.localeCompare(b.chunk.documentId);
        case "position":
          return a.chunk.index - b.chunk.index;
        default:
          return 0;
      }
    });
  }
  let truncated = false;
  // Greedy packing: take results in order until the next chunk would
  // overflow the token budget, then stop and flag truncation.
  for (const result of processedResults) {
    if (totalTokens + result.chunk.tokenCount > maxTokens) {
      truncated = true;
      break;
    }
    chunks.push(result.chunk);
    totalTokens += result.chunk.tokenCount;
    if (!seenDocs.has(result.chunk.documentId)) {
      seenDocs.add(result.chunk.documentId);
      // Fetch the parent document once per documentId for citation data.
      const doc = await this.getDocument(result.chunk.documentId);
      sources.push({
        documentId: result.chunk.documentId,
        title: doc?.title,
        source: doc?.source || ""
      });
    }
  }
  // Render each chunk through the template. Note String.replace swaps
  // only the FIRST occurrence of each placeholder per chunk.
  const formattedChunks = chunks.map((chunk, i) => {
    let formatted = chunkTemplate.replace("{{content}}", chunk.content).replace("{{index}}", String(i + 1)).replace("{{documentId}}", chunk.documentId);
    if (config?.includeCitations) {
      // Prefix with a [n] marker matching the numbered sources list below.
      formatted = `[${i + 1}] ${formatted}`;
    }
    return formatted;
  });
  let context = contextTemplate.replace(
    "{{chunks}}",
    formattedChunks.join("\n\n")
  );
  if (config?.includeCitations && sources.length > 0) {
    const citations = sources.map((s, i) => `[${i + 1}] ${s.title || s.source}`).join("\n");
    context += `

Sources:
${citations}`;
  }
  return {
    context,
    chunks,
    tokenCount: totalTokens,
    sources,
    truncated
  };
}
|
|
813
|
+
async queryWithContext(query, contextConfig) {
|
|
814
|
+
const searchResponse = await this.search(query);
|
|
815
|
+
const context = await this.assembleContext(searchResponse, contextConfig);
|
|
816
|
+
return { searchResponse, context };
|
|
817
|
+
}
|
|
818
|
+
// ─────────────────────────────────────────────────────────────
|
|
819
|
+
// Embedding Management
|
|
820
|
+
// ─────────────────────────────────────────────────────────────
|
|
821
|
+
async embed(texts, model) {
|
|
822
|
+
const textArray = Array.isArray(texts) ? texts : [texts];
|
|
823
|
+
return Promise.all(
|
|
824
|
+
textArray.map((text) => this.generateMockEmbedding(text, 1536))
|
|
825
|
+
);
|
|
826
|
+
}
|
|
827
|
+
async reembed(collection, model, batchSize) {
|
|
828
|
+
const col = await this.getCollection(collection);
|
|
829
|
+
if (!col) {
|
|
830
|
+
throw new Error(`Collection not found: ${collection}`);
|
|
831
|
+
}
|
|
832
|
+
let updated = 0;
|
|
833
|
+
let errors = 0;
|
|
834
|
+
const chunks = Array.from(this.chunks.values()).filter(
|
|
835
|
+
(c) => c.collection === collection
|
|
836
|
+
);
|
|
837
|
+
for (const chunk of chunks) {
|
|
838
|
+
try {
|
|
839
|
+
const embedding = await this.generateMockEmbedding(
|
|
840
|
+
chunk.content,
|
|
841
|
+
col.dimensions
|
|
842
|
+
);
|
|
843
|
+
chunk.embedding = embedding;
|
|
844
|
+
this.embeddings.set(chunk.id, embedding);
|
|
845
|
+
updated++;
|
|
846
|
+
} catch {
|
|
847
|
+
errors++;
|
|
848
|
+
}
|
|
849
|
+
}
|
|
850
|
+
return { updated, errors };
|
|
851
|
+
}
|
|
852
|
+
// ─────────────────────────────────────────────────────────────
|
|
853
|
+
// Pipeline Management
|
|
854
|
+
// ─────────────────────────────────────────────────────────────
|
|
855
|
+
async createPipeline(pipeline) {
|
|
856
|
+
const id = `pipeline_${Date.now()}`;
|
|
857
|
+
const now = /* @__PURE__ */ new Date();
|
|
858
|
+
const newPipeline = {
|
|
859
|
+
...pipeline,
|
|
860
|
+
id,
|
|
861
|
+
createdAt: now,
|
|
862
|
+
updatedAt: now
|
|
863
|
+
};
|
|
864
|
+
this.pipelines.set(id, newPipeline);
|
|
865
|
+
return newPipeline;
|
|
866
|
+
}
|
|
867
|
+
async getPipeline(pipelineId) {
|
|
868
|
+
return this.pipelines.get(pipelineId) || null;
|
|
869
|
+
}
|
|
870
|
+
async runPipeline(pipelineId, documentIds) {
|
|
871
|
+
const pipeline = await this.getPipeline(pipelineId);
|
|
872
|
+
if (!pipeline) {
|
|
873
|
+
throw new Error(`Pipeline not found: ${pipelineId}`);
|
|
874
|
+
}
|
|
875
|
+
const results = [];
|
|
876
|
+
for (const docId of documentIds) {
|
|
877
|
+
const result = await this.reprocessDocument(docId);
|
|
878
|
+
results.push(result);
|
|
879
|
+
}
|
|
880
|
+
return {
|
|
881
|
+
total: documentIds.length,
|
|
882
|
+
successful: results.filter((r) => r.status === "indexed").length,
|
|
883
|
+
failed: results.filter((r) => r.status === "failed").length,
|
|
884
|
+
results,
|
|
885
|
+
totalProcessingTimeMs: results.reduce(
|
|
886
|
+
(sum, r) => sum + r.processingTimeMs,
|
|
887
|
+
0
|
|
888
|
+
)
|
|
889
|
+
};
|
|
890
|
+
}
|
|
891
|
+
// ─────────────────────────────────────────────────────────────
|
|
892
|
+
// Private Helpers
|
|
893
|
+
// ─────────────────────────────────────────────────────────────
|
|
894
|
+
chunkDocument(doc, config) {
|
|
895
|
+
const chunks = [];
|
|
896
|
+
const content = doc.content;
|
|
897
|
+
const chunkSize = config.chunkSize;
|
|
898
|
+
const overlap = config.chunkOverlap;
|
|
899
|
+
let startOffset = 0;
|
|
900
|
+
let index = 0;
|
|
901
|
+
while (startOffset < content.length) {
|
|
902
|
+
const endOffset = Math.min(startOffset + chunkSize * 4, content.length);
|
|
903
|
+
const chunkContent = content.slice(startOffset, endOffset);
|
|
904
|
+
chunks.push({
|
|
905
|
+
id: `chunk_${doc.id}_${index}`,
|
|
906
|
+
documentId: doc.id,
|
|
907
|
+
index,
|
|
908
|
+
content: chunkContent,
|
|
909
|
+
metadata: config.includeMetadata ? doc.metadata : {},
|
|
910
|
+
startOffset,
|
|
911
|
+
endOffset,
|
|
912
|
+
tokenCount: Math.ceil(chunkContent.length / 4),
|
|
913
|
+
collection: doc.collection,
|
|
914
|
+
tenantId: doc.tenantId
|
|
915
|
+
});
|
|
916
|
+
startOffset = endOffset - overlap * 4;
|
|
917
|
+
index++;
|
|
918
|
+
}
|
|
919
|
+
return chunks;
|
|
920
|
+
}
|
|
921
|
+
async generateMockEmbedding(text, dimensions) {
|
|
922
|
+
const embedding = [];
|
|
923
|
+
let hash = 0;
|
|
924
|
+
for (let i = 0; i < text.length; i++) {
|
|
925
|
+
hash = (hash << 5) - hash + text.charCodeAt(i);
|
|
926
|
+
hash = hash & hash;
|
|
927
|
+
}
|
|
928
|
+
for (let i = 0; i < dimensions; i++) {
|
|
929
|
+
const seed = hash + i * 31;
|
|
930
|
+
embedding.push(Math.sin(seed) * 0.5);
|
|
931
|
+
}
|
|
932
|
+
const magnitude = Math.sqrt(embedding.reduce((sum, v) => sum + v * v, 0));
|
|
933
|
+
return embedding.map((v) => v / magnitude);
|
|
934
|
+
}
|
|
935
|
+
cosineSimilarity(a, b) {
|
|
936
|
+
if (a.length !== b.length) return 0;
|
|
937
|
+
let dotProduct = 0;
|
|
938
|
+
let normA = 0;
|
|
939
|
+
let normB = 0;
|
|
940
|
+
for (let i = 0; i < a.length; i++) {
|
|
941
|
+
dotProduct += a[i] * b[i];
|
|
942
|
+
normA += a[i] * a[i];
|
|
943
|
+
normB += b[i] * b[i];
|
|
944
|
+
}
|
|
945
|
+
const denominator = Math.sqrt(normA) * Math.sqrt(normB);
|
|
946
|
+
return denominator > 0 ? dotProduct / denominator : 0;
|
|
947
|
+
}
|
|
948
|
+
keywordScore(query, content) {
|
|
949
|
+
const queryWords = query.toLowerCase().split(/\s+/);
|
|
950
|
+
const contentLower = content.toLowerCase();
|
|
951
|
+
let matches = 0;
|
|
952
|
+
for (const word of queryWords) {
|
|
953
|
+
if (contentLower.includes(word)) {
|
|
954
|
+
matches++;
|
|
955
|
+
}
|
|
956
|
+
}
|
|
957
|
+
return queryWords.length > 0 ? matches / queryWords.length : 0;
|
|
958
|
+
}
|
|
959
|
+
// Test a metadata record against a list of filters (implicit AND):
// every filter must pass for the record to match.
// Numeric comparisons (gt/gte/lt/lte) require BOTH sides to be numbers
// and fail otherwise; `in`/`nin` require an array filter value;
// `contains` requires both sides to be strings.
// NOTE: an unrecognized operator falls through the switch and is treated
// as a pass — filters are permissive by design here.
matchesFilters(metadata, filters) {
  for (const filter of filters) {
    const value = metadata[filter.field];
    switch (filter.operator) {
      case "eq":
        // Strict equality — no type coercion.
        if (value !== filter.value) return false;
        break;
      case "ne":
        if (value === filter.value) return false;
        break;
      case "gt":
        if (typeof value !== "number" || typeof filter.value !== "number" || value <= filter.value)
          return false;
        break;
      case "gte":
        if (typeof value !== "number" || typeof filter.value !== "number" || value < filter.value)
          return false;
        break;
      case "lt":
        if (typeof value !== "number" || typeof filter.value !== "number" || value >= filter.value)
          return false;
        break;
      case "lte":
        if (typeof value !== "number" || typeof filter.value !== "number" || value > filter.value)
          return false;
        break;
      case "in":
        // Value must be one of the listed options.
        if (!Array.isArray(filter.value) || !filter.value.includes(value))
          return false;
        break;
      case "nin":
        // Value must NOT be one of the listed options.
        if (!Array.isArray(filter.value) || filter.value.includes(value))
          return false;
        break;
      case "contains":
        // Case-sensitive substring match.
        if (typeof value !== "string" || typeof filter.value !== "string" || !value.includes(filter.value))
          return false;
        break;
    }
  }
  return true;
}
|
|
1001
|
+
generateHighlights(query, content) {
|
|
1002
|
+
const words = query.toLowerCase().split(/\s+/);
|
|
1003
|
+
const highlights = [];
|
|
1004
|
+
for (const word of words) {
|
|
1005
|
+
const index = content.toLowerCase().indexOf(word);
|
|
1006
|
+
if (index !== -1) {
|
|
1007
|
+
const start = Math.max(0, index - 30);
|
|
1008
|
+
const end = Math.min(content.length, index + word.length + 30);
|
|
1009
|
+
highlights.push(`...${content.slice(start, end)}...`);
|
|
1010
|
+
}
|
|
1011
|
+
}
|
|
1012
|
+
return highlights.slice(0, 3);
|
|
1013
|
+
}
|
|
1014
|
+
deduplicateResults(results, threshold) {
|
|
1015
|
+
const deduplicated = [];
|
|
1016
|
+
for (const result of results) {
|
|
1017
|
+
const isDuplicate = deduplicated.some((r) => {
|
|
1018
|
+
const embedding1 = this.embeddings.get(result.chunk.id);
|
|
1019
|
+
const embedding2 = this.embeddings.get(r.chunk.id);
|
|
1020
|
+
if (!embedding1 || !embedding2) return false;
|
|
1021
|
+
return this.cosineSimilarity(embedding1, embedding2) > threshold;
|
|
1022
|
+
});
|
|
1023
|
+
if (!isDuplicate) {
|
|
1024
|
+
deduplicated.push(result);
|
|
1025
|
+
}
|
|
1026
|
+
}
|
|
1027
|
+
return deduplicated;
|
|
1028
|
+
}
|
|
1029
|
+
};
|
|
1030
|
+
}
|
|
1031
|
+
});
|
|
1032
|
+
|
|
20
1033
|
// src/testing.ts
|
|
21
1034
|
var testing_exports = {};
|
|
22
1035
|
__export(testing_exports, {
|
|
@@ -156,7 +1169,11 @@ var MemoryQueryBuilder = class {
|
|
|
156
1169
|
const updated = [];
|
|
157
1170
|
const newTable = table.map((item) => {
|
|
158
1171
|
if (this.matchesWhere(item)) {
|
|
159
|
-
const updatedItem = {
|
|
1172
|
+
const updatedItem = {
|
|
1173
|
+
...item,
|
|
1174
|
+
...this._updateData,
|
|
1175
|
+
updated_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
1176
|
+
};
|
|
160
1177
|
updated.push(updatedItem);
|
|
161
1178
|
return updatedItem;
|
|
162
1179
|
}
|
|
@@ -171,8 +1188,10 @@ var MemoryQueryBuilder = class {
|
|
|
171
1188
|
result.sort((a, b) => {
|
|
172
1189
|
const aVal = a[column];
|
|
173
1190
|
const bVal = b[column];
|
|
174
|
-
if (aVal === null || aVal === void 0)
|
|
175
|
-
|
|
1191
|
+
if (aVal === null || aVal === void 0)
|
|
1192
|
+
return direction === "asc" ? 1 : -1;
|
|
1193
|
+
if (bVal === null || bVal === void 0)
|
|
1194
|
+
return direction === "asc" ? -1 : 1;
|
|
176
1195
|
const cmp = aVal < bVal ? -1 : aVal > bVal ? 1 : 0;
|
|
177
1196
|
return direction === "asc" ? cmp : -cmp;
|
|
178
1197
|
});
|
|
@@ -253,7 +1272,9 @@ var MemoryCache = class {
|
|
|
253
1272
|
return Promise.all(keys.map((key) => this.get(key)));
|
|
254
1273
|
}
|
|
255
1274
|
async mset(entries) {
|
|
256
|
-
await Promise.all(
|
|
1275
|
+
await Promise.all(
|
|
1276
|
+
entries.map(({ key, value, ttl }) => this.set(key, value, ttl))
|
|
1277
|
+
);
|
|
257
1278
|
}
|
|
258
1279
|
async incr(key, by = 1) {
|
|
259
1280
|
const current = await this.get(key) || 0;
|
|
@@ -302,7 +1323,10 @@ var MemoryCache = class {
|
|
|
302
1323
|
var MemoryStorage = class {
|
|
303
1324
|
files = /* @__PURE__ */ new Map();
|
|
304
1325
|
async upload(key, data, options) {
|
|
305
|
-
this.files.set(key, {
|
|
1326
|
+
this.files.set(key, {
|
|
1327
|
+
data: Buffer.from("mock"),
|
|
1328
|
+
contentType: options?.contentType
|
|
1329
|
+
});
|
|
306
1330
|
return { url: "memory://" + key };
|
|
307
1331
|
}
|
|
308
1332
|
async download(key) {
|
|
@@ -1012,7 +2036,9 @@ var ConsoleEmail = class {
|
|
|
1012
2036
|
console.log("=".repeat(60));
|
|
1013
2037
|
console.log(`ID: ${id}`);
|
|
1014
2038
|
console.log(`To: ${this.formatAddresses(message.to)}`);
|
|
1015
|
-
console.log(
|
|
2039
|
+
console.log(
|
|
2040
|
+
`From: ${message.from ? this.formatAddress(message.from) : "(default)"}`
|
|
2041
|
+
);
|
|
1016
2042
|
console.log(`Subject: ${message.subject}`);
|
|
1017
2043
|
if (message.replyTo) {
|
|
1018
2044
|
console.log(`Reply-To: ${this.formatAddress(message.replyTo)}`);
|
|
@@ -1021,15 +2047,21 @@ var ConsoleEmail = class {
|
|
|
1021
2047
|
console.log(`Tags: ${message.tags.join(", ")}`);
|
|
1022
2048
|
}
|
|
1023
2049
|
if (message.attachments && message.attachments.length > 0) {
|
|
1024
|
-
console.log(
|
|
2050
|
+
console.log(
|
|
2051
|
+
`Attachments: ${message.attachments.map((a) => a.filename).join(", ")}`
|
|
2052
|
+
);
|
|
1025
2053
|
}
|
|
1026
2054
|
console.log("-".repeat(60));
|
|
1027
2055
|
if (message.text) {
|
|
1028
2056
|
console.log("TEXT BODY:");
|
|
1029
|
-
console.log(
|
|
2057
|
+
console.log(
|
|
2058
|
+
message.text.slice(0, 500) + (message.text.length > 500 ? "\n...(truncated)" : "")
|
|
2059
|
+
);
|
|
1030
2060
|
}
|
|
1031
2061
|
if (message.html) {
|
|
1032
|
-
console.log(
|
|
2062
|
+
console.log(
|
|
2063
|
+
"HTML BODY: [HTML content - " + message.html.length + " chars]"
|
|
2064
|
+
);
|
|
1033
2065
|
}
|
|
1034
2066
|
console.log("=".repeat(60) + "\n");
|
|
1035
2067
|
this.sentEmails.push(message);
|
|
@@ -1297,13 +2329,35 @@ var MemorySecrets = class {
|
|
|
1297
2329
|
|
|
1298
2330
|
// src/config.ts
|
|
1299
2331
|
var import_zod = require("zod");
|
|
1300
|
-
var DatabaseProviderSchema = import_zod.z.enum([
|
|
2332
|
+
var DatabaseProviderSchema = import_zod.z.enum([
|
|
2333
|
+
"memory",
|
|
2334
|
+
"postgres",
|
|
2335
|
+
"supabase"
|
|
2336
|
+
]);
|
|
1301
2337
|
var CacheProviderSchema = import_zod.z.enum(["memory", "redis", "upstash"]);
|
|
1302
|
-
var StorageProviderSchema = import_zod.z.enum([
|
|
1303
|
-
|
|
2338
|
+
var StorageProviderSchema = import_zod.z.enum([
|
|
2339
|
+
"memory",
|
|
2340
|
+
"s3",
|
|
2341
|
+
"minio",
|
|
2342
|
+
"r2",
|
|
2343
|
+
"supabase"
|
|
2344
|
+
]);
|
|
2345
|
+
var EmailProviderSchema = import_zod.z.enum([
|
|
2346
|
+
"memory",
|
|
2347
|
+
"console",
|
|
2348
|
+
"smtp",
|
|
2349
|
+
"resend"
|
|
2350
|
+
]);
|
|
1304
2351
|
var QueueProviderSchema = import_zod.z.enum(["memory", "bullmq"]);
|
|
1305
2352
|
var TracingProviderSchema = import_zod.z.enum(["noop", "memory", "otlp"]);
|
|
1306
2353
|
var LogLevelSchema = import_zod.z.enum(["debug", "info", "warn", "error"]);
|
|
2354
|
+
var AIProviderSchema = import_zod.z.enum([
|
|
2355
|
+
"memory",
|
|
2356
|
+
"openai",
|
|
2357
|
+
"anthropic",
|
|
2358
|
+
"google"
|
|
2359
|
+
]);
|
|
2360
|
+
var RAGProviderSchema = import_zod.z.enum(["memory", "pinecone", "weaviate"]);
|
|
1307
2361
|
var DatabaseConfigSchema = import_zod.z.object({
|
|
1308
2362
|
provider: DatabaseProviderSchema.default("memory"),
|
|
1309
2363
|
url: import_zod.z.string().optional().describe("PostgreSQL connection URL"),
|
|
@@ -1313,7 +2367,10 @@ var DatabaseConfigSchema = import_zod.z.object({
|
|
|
1313
2367
|
supabaseServiceRoleKey: import_zod.z.string().optional().describe("Supabase service role key"),
|
|
1314
2368
|
poolSize: import_zod.z.number().int().min(1).max(100).default(10).describe("Connection pool size"),
|
|
1315
2369
|
connectionTimeout: import_zod.z.number().int().min(1e3).max(6e4).default(5e3).describe("Connection timeout in ms"),
|
|
1316
|
-
ssl: import_zod.z.union([
|
|
2370
|
+
ssl: import_zod.z.union([
|
|
2371
|
+
import_zod.z.boolean(),
|
|
2372
|
+
import_zod.z.object({ rejectUnauthorized: import_zod.z.boolean().optional() })
|
|
2373
|
+
]).optional().describe("SSL configuration")
|
|
1317
2374
|
}).refine(
|
|
1318
2375
|
(data) => {
|
|
1319
2376
|
if (data.provider === "supabase") {
|
|
@@ -1416,6 +2473,53 @@ var QueueConfigSchema = import_zod.z.object({
|
|
|
1416
2473
|
message: "BullMQ requires redisUrl"
|
|
1417
2474
|
}
|
|
1418
2475
|
);
|
|
2476
|
+
// Zod schema for the AI adapter configuration block.
// Cross-field rule: when AI is enabled with a non-memory provider, an
// apiKey must be present (the refine returns the apiKey string, which
// zod evaluates for truthiness).
var AIConfigSchema = import_zod.z.object({
  enabled: import_zod.z.boolean().default(false).describe("Enable AI capabilities"),
  provider: AIProviderSchema.default("memory"),
  apiKey: import_zod.z.string().optional().describe("API key for the AI provider"),
  model: import_zod.z.string().optional().describe("Default model to use"),
  maxTokens: import_zod.z.number().int().min(1).max(2e5).default(4096).describe("Default max tokens"),
  temperature: import_zod.z.number().min(0).max(2).default(0.7).describe("Default temperature"),
  timeout: import_zod.z.number().int().min(1e3).max(3e5).default(6e4).describe("Request timeout in ms"),
  baseUrl: import_zod.z.string().url().optional().describe("Custom base URL for API")
}).refine(
  (data) => {
    if (data.enabled && data.provider !== "memory") {
      // Truthy only when a non-empty apiKey was supplied.
      return data.apiKey;
    }
    return true;
  },
  {
    message: "Production AI providers require an API key"
  }
);
|
|
2496
|
+
// Zod schema for the RAG adapter configuration block.
// Cross-field rules: when enabled, Pinecone needs apiKey + indexName and
// Weaviate needs a host URL; the memory provider needs nothing.
var RAGConfigSchema = import_zod.z.object({
  enabled: import_zod.z.boolean().default(false).describe("Enable RAG capabilities"),
  provider: RAGProviderSchema.default("memory"),
  apiKey: import_zod.z.string().optional().describe("API key for the RAG provider"),
  environment: import_zod.z.string().optional().describe("Pinecone environment"),
  indexName: import_zod.z.string().optional().describe("Pinecone index name or Weaviate class"),
  namespace: import_zod.z.string().optional().describe("Default namespace"),
  host: import_zod.z.string().url().optional().describe("Weaviate host URL"),
  embeddingProvider: AIProviderSchema.default("memory").describe(
    "Provider for generating embeddings"
  ),
  embeddingApiKey: import_zod.z.string().optional().describe("API key for embedding provider"),
  embeddingModel: import_zod.z.string().optional().describe("Model for generating embeddings")
}).refine(
  (data) => {
    if (data.enabled && data.provider === "pinecone") {
      // Truthy only when both required fields are non-empty strings.
      return data.apiKey && data.indexName;
    }
    if (data.enabled && data.provider === "weaviate") {
      return data.host;
    }
    return true;
  },
  {
    message: "Pinecone requires apiKey and indexName; Weaviate requires host"
  }
);
|
|
1419
2523
|
var RetryConfigSchema = import_zod.z.object({
|
|
1420
2524
|
enabled: import_zod.z.boolean().default(true).describe("Enable retry for failed operations"),
|
|
1421
2525
|
maxAttempts: import_zod.z.number().int().min(1).max(10).default(3).describe("Maximum retry attempts"),
|
|
@@ -1502,6 +2606,9 @@ var PlatformConfigSchema = import_zod.z.object({
|
|
|
1502
2606
|
storage: StorageConfigSchema.default({ provider: "memory" }),
|
|
1503
2607
|
email: EmailConfigSchema.default({ provider: "memory" }),
|
|
1504
2608
|
queue: QueueConfigSchema.default({ provider: "memory" }),
|
|
2609
|
+
// AI configurations
|
|
2610
|
+
ai: AIConfigSchema.default({ enabled: false }),
|
|
2611
|
+
rag: RAGConfigSchema.default({ enabled: false }),
|
|
1505
2612
|
// Resilience configuration
|
|
1506
2613
|
resilience: ResilienceConfigSchema.default({}),
|
|
1507
2614
|
// Observability configuration
|
|
@@ -1557,6 +2664,28 @@ function loadConfig() {
|
|
|
1557
2664
|
concurrency: process.env.QUEUE_CONCURRENCY ? parseInt(process.env.QUEUE_CONCURRENCY) : void 0,
|
|
1558
2665
|
maxRetries: process.env.QUEUE_MAX_RETRIES ? parseInt(process.env.QUEUE_MAX_RETRIES) : void 0
|
|
1559
2666
|
},
|
|
2667
|
+
ai: {
|
|
2668
|
+
enabled: process.env.AI_ENABLED === "true",
|
|
2669
|
+
provider: process.env.AI_PROVIDER || "memory",
|
|
2670
|
+
apiKey: process.env.OPENAI_API_KEY || process.env.ANTHROPIC_API_KEY || process.env.GOOGLE_AI_API_KEY,
|
|
2671
|
+
model: process.env.AI_MODEL,
|
|
2672
|
+
maxTokens: process.env.AI_MAX_TOKENS ? parseInt(process.env.AI_MAX_TOKENS) : void 0,
|
|
2673
|
+
temperature: process.env.AI_TEMPERATURE ? parseFloat(process.env.AI_TEMPERATURE) : void 0,
|
|
2674
|
+
timeout: process.env.AI_TIMEOUT ? parseInt(process.env.AI_TIMEOUT) : void 0,
|
|
2675
|
+
baseUrl: process.env.AI_BASE_URL
|
|
2676
|
+
},
|
|
2677
|
+
rag: {
|
|
2678
|
+
enabled: process.env.RAG_ENABLED === "true",
|
|
2679
|
+
provider: process.env.RAG_PROVIDER || "memory",
|
|
2680
|
+
apiKey: process.env.PINECONE_API_KEY,
|
|
2681
|
+
environment: process.env.PINECONE_ENVIRONMENT,
|
|
2682
|
+
indexName: process.env.PINECONE_INDEX || process.env.RAG_INDEX_NAME,
|
|
2683
|
+
namespace: process.env.RAG_NAMESPACE,
|
|
2684
|
+
host: process.env.WEAVIATE_HOST,
|
|
2685
|
+
embeddingProvider: process.env.EMBEDDING_PROVIDER || "memory",
|
|
2686
|
+
embeddingApiKey: process.env.EMBEDDING_API_KEY || process.env.OPENAI_API_KEY,
|
|
2687
|
+
embeddingModel: process.env.EMBEDDING_MODEL
|
|
2688
|
+
},
|
|
1560
2689
|
resilience: {
|
|
1561
2690
|
retry: {
|
|
1562
2691
|
enabled: process.env.RESILIENCE_RETRY_ENABLED !== "false",
|
|
@@ -2124,6 +3253,8 @@ var NoopMetrics = class {
|
|
|
2124
3253
|
};
|
|
2125
3254
|
|
|
2126
3255
|
// src/factory.ts
|
|
3256
|
+
init_IAI();
|
|
3257
|
+
init_IRAG();
|
|
2127
3258
|
function createLogger(config) {
|
|
2128
3259
|
if (!config.observability.logging) {
|
|
2129
3260
|
return new NoopLogger();
|
|
@@ -2142,7 +3273,7 @@ function createMetrics(config) {
|
|
|
2142
3273
|
}
|
|
2143
3274
|
function createPlatform(config) {
|
|
2144
3275
|
const finalConfig = config ? deepMerge(loadConfig(), config) : loadConfig();
|
|
2145
|
-
const hasProductionAdapters = finalConfig.database.provider !== "memory" || finalConfig.cache.provider !== "memory" || finalConfig.storage.provider !== "memory" || finalConfig.email.provider !== "memory" && finalConfig.email.provider !== "console" || finalConfig.observability.tracing.provider === "otlp";
|
|
3276
|
+
const hasProductionAdapters = finalConfig.database.provider !== "memory" || finalConfig.cache.provider !== "memory" || finalConfig.storage.provider !== "memory" || finalConfig.email.provider !== "memory" && finalConfig.email.provider !== "console" || finalConfig.observability.tracing.provider === "otlp" || finalConfig.ai.enabled && finalConfig.ai.provider !== "memory" || finalConfig.rag.enabled && finalConfig.rag.provider !== "memory";
|
|
2146
3277
|
if (hasProductionAdapters) {
|
|
2147
3278
|
console.warn(
|
|
2148
3279
|
"createPlatform() is synchronous and cannot initialize production adapters. Use createPlatformAsync() for production adapters, or use memory/console adapters."
|
|
@@ -2156,10 +3287,23 @@ function createPlatform(config) {
|
|
|
2156
3287
|
const logger = createLogger(finalConfig);
|
|
2157
3288
|
const metrics = createMetrics(finalConfig);
|
|
2158
3289
|
const tracing = finalConfig.observability.tracing.provider === "memory" ? new MemoryTracing() : new NoopTracing();
|
|
2159
|
-
|
|
3290
|
+
const ai = finalConfig.ai.enabled ? new MemoryAI() : null;
|
|
3291
|
+
const rag = finalConfig.rag.enabled ? new MemoryRAG() : null;
|
|
3292
|
+
return createPlatformFromAdapters(
|
|
3293
|
+
db,
|
|
3294
|
+
cache,
|
|
3295
|
+
storage,
|
|
3296
|
+
email,
|
|
3297
|
+
queue,
|
|
3298
|
+
logger,
|
|
3299
|
+
metrics,
|
|
3300
|
+
tracing,
|
|
3301
|
+
ai,
|
|
3302
|
+
rag
|
|
3303
|
+
);
|
|
2160
3304
|
}
|
|
2161
|
-
function createPlatformFromAdapters(db, cache, storage, email, queue, logger, metrics, tracing) {
|
|
2162
|
-
|
|
3305
|
+
function createPlatformFromAdapters(db, cache, storage, email, queue, logger, metrics, tracing, ai, rag) {
|
|
3306
|
+
const platform = {
|
|
2163
3307
|
db,
|
|
2164
3308
|
cache,
|
|
2165
3309
|
storage,
|
|
@@ -2169,7 +3313,14 @@ function createPlatformFromAdapters(db, cache, storage, email, queue, logger, me
|
|
|
2169
3313
|
metrics,
|
|
2170
3314
|
tracing,
|
|
2171
3315
|
async healthCheck() {
|
|
2172
|
-
const [
|
|
3316
|
+
const [
|
|
3317
|
+
dbHealth,
|
|
3318
|
+
cacheHealth,
|
|
3319
|
+
storageHealth,
|
|
3320
|
+
emailHealth,
|
|
3321
|
+
queueHealth,
|
|
3322
|
+
tracingHealth
|
|
3323
|
+
] = await Promise.all([
|
|
2173
3324
|
db.healthCheck(),
|
|
2174
3325
|
cache.healthCheck(),
|
|
2175
3326
|
storage.healthCheck(),
|
|
@@ -2191,9 +3342,21 @@ function createPlatformFromAdapters(db, cache, storage, email, queue, logger, me
|
|
|
2191
3342
|
};
|
|
2192
3343
|
},
|
|
2193
3344
|
async close() {
|
|
2194
|
-
await Promise.all([
|
|
3345
|
+
await Promise.all([
|
|
3346
|
+
db.close(),
|
|
3347
|
+
cache.close(),
|
|
3348
|
+
queue.close(),
|
|
3349
|
+
tracing.close()
|
|
3350
|
+
]);
|
|
2195
3351
|
}
|
|
2196
3352
|
};
|
|
3353
|
+
if (ai) {
|
|
3354
|
+
platform.ai = ai;
|
|
3355
|
+
}
|
|
3356
|
+
if (rag) {
|
|
3357
|
+
platform.rag = rag;
|
|
3358
|
+
}
|
|
3359
|
+
return platform;
|
|
2197
3360
|
}
|
|
2198
3361
|
function deepMerge(target, source) {
|
|
2199
3362
|
const result = { ...target };
|
|
@@ -2251,7 +3414,11 @@ function createTestPlatformWithInternals() {
|
|
|
2251
3414
|
};
|
|
2252
3415
|
},
|
|
2253
3416
|
async close() {
|
|
2254
|
-
await Promise.all([
|
|
3417
|
+
await Promise.all([
|
|
3418
|
+
memoryDb.close(),
|
|
3419
|
+
memoryCache.close(),
|
|
3420
|
+
memoryQueue.close()
|
|
3421
|
+
]);
|
|
2255
3422
|
}
|
|
2256
3423
|
};
|
|
2257
3424
|
return {
|
|
@@ -2291,15 +3458,20 @@ function assertEmailSent(memoryEmail, expected) {
|
|
|
2291
3458
|
const matchesTo = (emailTo, expectedTo) => {
|
|
2292
3459
|
if (!emailTo) return false;
|
|
2293
3460
|
if (Array.isArray(emailTo)) {
|
|
2294
|
-
return emailTo.some(
|
|
3461
|
+
return emailTo.some(
|
|
3462
|
+
(addr) => addr === expectedTo || typeof addr === "object" && addr.email === expectedTo
|
|
3463
|
+
);
|
|
2295
3464
|
}
|
|
2296
3465
|
return emailTo === expectedTo || typeof emailTo === "object" && emailTo.email === expectedTo;
|
|
2297
3466
|
};
|
|
2298
3467
|
const found = emails.find((email) => {
|
|
2299
|
-
if (expected.to && !matchesTo(email.to, expected.to))
|
|
3468
|
+
if (expected.to && !matchesTo(email.to, expected.to))
|
|
3469
|
+
return false;
|
|
2300
3470
|
if (expected.subject) {
|
|
2301
|
-
if (typeof expected.subject === "string" && email.subject !== expected.subject)
|
|
2302
|
-
|
|
3471
|
+
if (typeof expected.subject === "string" && email.subject !== expected.subject)
|
|
3472
|
+
return false;
|
|
3473
|
+
if (expected.subject instanceof RegExp && !expected.subject.test(email.subject))
|
|
3474
|
+
return false;
|
|
2303
3475
|
}
|
|
2304
3476
|
if (expected.bodyContains) {
|
|
2305
3477
|
const body = email.html || email.text || "";
|