agentdb 2.0.0-alpha.2.6 → 2.0.0-alpha.2.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (156)
  1. package/dist/agentdb.browser.js +1685 -0
  2. package/dist/agentdb.browser.js.map +7 -0
  3. package/dist/agentdb.browser.min.js +3 -0
  4. package/dist/agentdb.browser.min.js.map +7 -0
  5. package/dist/agentdb.min.js +4 -4
  6. package/dist/agentdb.wasm-loader.js +207 -0
  7. package/dist/src/backends/VectorBackend.d.ts +3 -1
  8. package/dist/src/backends/VectorBackend.d.ts.map +1 -1
  9. package/dist/src/backends/factory.d.ts.map +1 -1
  10. package/dist/src/backends/factory.js +31 -2
  11. package/dist/src/backends/factory.js.map +1 -1
  12. package/dist/src/backends/hnswlib/HNSWLibBackend.js +1 -1
  13. package/dist/src/backends/hnswlib/HNSWLibBackend.js.map +1 -1
  14. package/dist/src/backends/ruvector/RuVectorBackend.d.ts.map +1 -1
  15. package/dist/src/backends/ruvector/RuVectorBackend.js +31 -6
  16. package/dist/src/backends/ruvector/RuVectorBackend.js.map +1 -1
  17. package/dist/src/browser/AttentionBrowser.d.ts +98 -0
  18. package/dist/src/browser/AttentionBrowser.d.ts.map +1 -0
  19. package/dist/src/browser/AttentionBrowser.js +299 -0
  20. package/dist/src/browser/AttentionBrowser.js.map +1 -0
  21. package/dist/src/browser/index.d.ts +3 -0
  22. package/dist/src/browser/index.d.ts.map +1 -1
  23. package/dist/src/browser/index.js +6 -0
  24. package/dist/src/browser/index.js.map +1 -1
  25. package/dist/src/cli/agentdb-cli.d.ts.map +1 -1
  26. package/dist/src/cli/agentdb-cli.js +45 -7
  27. package/dist/src/cli/agentdb-cli.js.map +1 -1
  28. package/dist/src/cli/attention-cli-integration.d.ts +25 -0
  29. package/dist/src/cli/attention-cli-integration.d.ts.map +1 -0
  30. package/dist/src/cli/attention-cli-integration.js +85 -0
  31. package/dist/src/cli/attention-cli-integration.js.map +1 -0
  32. package/dist/src/cli/commands/attention.d.ts +10 -0
  33. package/dist/src/cli/commands/attention.d.ts.map +1 -0
  34. package/dist/src/cli/commands/attention.js +536 -0
  35. package/dist/src/cli/commands/attention.js.map +1 -0
  36. package/dist/src/cli/commands/doctor.d.ts +11 -0
  37. package/dist/src/cli/commands/doctor.d.ts.map +1 -0
  38. package/dist/src/cli/commands/doctor.js +308 -0
  39. package/dist/src/cli/commands/doctor.js.map +1 -0
  40. package/dist/src/cli/lib/attention-config.d.ts +80 -0
  41. package/dist/src/cli/lib/attention-config.d.ts.map +1 -0
  42. package/dist/src/cli/lib/attention-config.js +230 -0
  43. package/dist/src/cli/lib/attention-config.js.map +1 -0
  44. package/dist/src/controllers/AttentionService.d.ts +201 -0
  45. package/dist/src/controllers/AttentionService.d.ts.map +1 -0
  46. package/dist/src/controllers/AttentionService.js +518 -0
  47. package/dist/src/controllers/AttentionService.js.map +1 -0
  48. package/dist/src/controllers/CausalMemoryGraph.d.ts +55 -3
  49. package/dist/src/controllers/CausalMemoryGraph.d.ts.map +1 -1
  50. package/dist/src/controllers/CausalMemoryGraph.js +178 -5
  51. package/dist/src/controllers/CausalMemoryGraph.js.map +1 -1
  52. package/dist/src/controllers/CausalRecall.d.ts.map +1 -1
  53. package/dist/src/controllers/CausalRecall.js +4 -4
  54. package/dist/src/controllers/CausalRecall.js.map +1 -1
  55. package/dist/src/controllers/EmbeddingService.d.ts.map +1 -1
  56. package/dist/src/controllers/EmbeddingService.js +7 -1
  57. package/dist/src/controllers/EmbeddingService.js.map +1 -1
  58. package/dist/src/controllers/ExplainableRecall.d.ts +32 -2
  59. package/dist/src/controllers/ExplainableRecall.d.ts.map +1 -1
  60. package/dist/src/controllers/ExplainableRecall.js +35 -2
  61. package/dist/src/controllers/ExplainableRecall.js.map +1 -1
  62. package/dist/src/controllers/NightlyLearner.d.ts +37 -0
  63. package/dist/src/controllers/NightlyLearner.d.ts.map +1 -1
  64. package/dist/src/controllers/NightlyLearner.js +141 -1
  65. package/dist/src/controllers/NightlyLearner.js.map +1 -1
  66. package/dist/src/controllers/index.d.ts +2 -0
  67. package/dist/src/controllers/index.d.ts.map +1 -1
  68. package/dist/src/controllers/index.js +1 -0
  69. package/dist/src/controllers/index.js.map +1 -1
  70. package/dist/src/core/AgentDB.d.ts +28 -0
  71. package/dist/src/core/AgentDB.d.ts.map +1 -0
  72. package/dist/src/core/AgentDB.js +91 -0
  73. package/dist/src/core/AgentDB.js.map +1 -0
  74. package/dist/src/core/backends/GraphBackend.d.ts +195 -0
  75. package/dist/src/core/backends/GraphBackend.js +32 -0
  76. package/dist/src/core/backends/LearningBackend.d.ts +147 -0
  77. package/dist/src/core/backends/LearningBackend.js +26 -0
  78. package/dist/src/core/backends/VectorBackend.d.ts +120 -0
  79. package/dist/src/core/backends/VectorBackend.js +13 -0
  80. package/dist/src/core/backends/factory.d.ts +49 -0
  81. package/dist/src/core/backends/factory.js +160 -0
  82. package/dist/src/core/backends/graph/GraphDatabaseAdapter.d.ts +150 -0
  83. package/dist/src/core/backends/graph/GraphDatabaseAdapter.js +220 -0
  84. package/dist/src/core/backends/hnswlib/HNSWLibBackend.d.ts +91 -0
  85. package/dist/src/core/backends/hnswlib/HNSWLibBackend.js +314 -0
  86. package/dist/src/core/backends/ruvector/RuVectorBackend.d.ts +74 -0
  87. package/dist/src/core/backends/ruvector/RuVectorBackend.js +211 -0
  88. package/dist/src/core/controllers/CausalMemoryGraph.d.ts +178 -0
  89. package/dist/src/core/controllers/CausalMemoryGraph.js +521 -0
  90. package/dist/src/core/controllers/EmbeddingService.d.ts +36 -0
  91. package/dist/src/core/controllers/EmbeddingService.js +138 -0
  92. package/dist/src/core/controllers/ReflexionMemory.d.ts +124 -0
  93. package/dist/src/core/controllers/ReflexionMemory.js +648 -0
  94. package/dist/src/core/controllers/SkillLibrary.d.ts +149 -0
  95. package/dist/src/core/controllers/SkillLibrary.js +603 -0
  96. package/dist/src/core/core/AgentDB.d.ts +27 -0
  97. package/dist/src/core/core/AgentDB.js +83 -0
  98. package/dist/src/core/services/AttentionService.d.ts +235 -0
  99. package/dist/src/core/services/AttentionService.js +402 -0
  100. package/dist/src/core/utils/NodeIdMapper.d.ts +37 -0
  101. package/dist/src/core/utils/NodeIdMapper.js +56 -0
  102. package/dist/src/index.d.ts +4 -0
  103. package/dist/src/index.d.ts.map +1 -1
  104. package/dist/src/index.js +6 -0
  105. package/dist/src/index.js.map +1 -1
  106. package/dist/src/mcp/attention-mcp-integration.d.ts +55 -0
  107. package/dist/src/mcp/attention-mcp-integration.d.ts.map +1 -0
  108. package/dist/src/mcp/attention-mcp-integration.js +133 -0
  109. package/dist/src/mcp/attention-mcp-integration.js.map +1 -0
  110. package/dist/src/mcp/attention-tools-handlers.d.ts +197 -0
  111. package/dist/src/mcp/attention-tools-handlers.d.ts.map +1 -0
  112. package/dist/src/mcp/attention-tools-handlers.js +579 -0
  113. package/dist/src/mcp/attention-tools-handlers.js.map +1 -0
  114. package/dist/src/services/AttentionService.d.ts +236 -0
  115. package/dist/src/services/AttentionService.d.ts.map +1 -0
  116. package/dist/src/services/AttentionService.js +408 -0
  117. package/dist/src/services/AttentionService.js.map +1 -0
  118. package/dist/src/types/attention.d.ts +249 -0
  119. package/dist/src/types/attention.d.ts.map +1 -0
  120. package/dist/src/types/attention.js +43 -0
  121. package/dist/src/types/attention.js.map +1 -0
  122. package/dist/src/utils/attention-metrics.d.ts +76 -0
  123. package/dist/src/utils/attention-metrics.d.ts.map +1 -0
  124. package/dist/src/utils/attention-metrics.js +178 -0
  125. package/dist/src/utils/attention-metrics.js.map +1 -0
  126. package/examples/browser/attention-demo.html +499 -0
  127. package/examples/browser/flash-consolidation.html +598 -0
  128. package/examples/browser/hyperbolic-hierarchy.html +497 -0
  129. package/package.json +16 -6
  130. package/src/backends/VectorBackend.ts +3 -1
  131. package/src/backends/factory.ts +30 -2
  132. package/src/backends/hnswlib/HNSWLibBackend.ts +1 -1
  133. package/src/backends/ruvector/RuVectorBackend.ts +37 -6
  134. package/src/browser/AttentionBrowser.ts +387 -0
  135. package/src/browser/index.ts +16 -0
  136. package/src/cli/agentdb-cli.ts +44 -6
  137. package/src/cli/attention-cli-integration.ts +91 -0
  138. package/src/cli/commands/attention.ts +666 -0
  139. package/src/cli/commands/doctor.ts +322 -0
  140. package/src/cli/lib/attention-config.ts +326 -0
  141. package/src/cli/tests/attention-cli.test.ts +335 -0
  142. package/src/controllers/AttentionService.ts +770 -0
  143. package/src/controllers/CausalMemoryGraph.ts +235 -6
  144. package/src/controllers/CausalRecall.ts +5 -5
  145. package/src/controllers/EmbeddingService.ts +7 -1
  146. package/src/controllers/ExplainableRecall.ts +53 -3
  147. package/src/controllers/NightlyLearner.ts +175 -1
  148. package/src/controllers/index.ts +2 -0
  149. package/src/core/AgentDB.ts +111 -0
  150. package/src/index.ts +8 -0
  151. package/src/mcp/attention-mcp-integration.ts +145 -0
  152. package/src/mcp/attention-tools-handlers.ts +586 -0
  153. package/src/services/AttentionService.ts +656 -0
  154. package/src/tests/attention-service.test.ts +492 -0
  155. package/src/types/attention.ts +392 -0
  156. package/src/utils/attention-metrics.ts +253 -0
@@ -0,0 +1,1685 @@
1
/*! AgentDB Browser Bundle v2.0.0-alpha.2.8 | MIT License | https://agentdb.ruv.io */
// esbuild-style module-system helpers used throughout the bundle.
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
// Wraps a one-entry module-init record so its initializer runs at most
// once; later calls return the cached `res`. `fn` is zeroed after the
// first run so the initializer can be garbage-collected.
var __esm = (fn, res) => function __init() {
  if (fn) {
    const init = fn[__getOwnPropNames(fn)[0]];
    res = init(fn = 0);
  }
  return res;
};
// Defines live, getter-based exports on `target` for every key in `all`.
var __export = (target, all) => {
  for (const name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
11
+
12
// dist/agentdb.wasm-loader.js
// Lazy ESM wrapper for the WASM loader. The exports are live getters,
// so consumers observe updates to wasmModule/wasmLoadError after init.
var agentdb_wasm_loader_exports = {};
__export(agentdb_wasm_loader_exports, {
  initWASM: () => initWASM,
  wasmLoadError: () => wasmLoadError,
  wasmModule: () => wasmModule
});
19
/**
 * Initializes the WASM attention module exactly once.
 *
 * Concurrent callers share the same in-flight promise; on failure the
 * pure-JS mock implementations are used as a fallback, so callers always
 * receive a usable module. The failure is recorded in `wasmLoadError`.
 *
 * @returns {Promise<object>} the shared attention module.
 */
async function initWASM() {
  // Fast paths: already loaded, or a load is already in flight.
  if (wasmModule) return wasmModule;
  if (wasmLoading) return wasmLoading;
  // Both the success and fallback paths build the same mock-backed module;
  // only the reported SIMD capability differs.
  const buildModule = (simdSupported) => ({
    flashAttention: createFlashAttentionMock(),
    hyperbolicAttention: createHyperbolicAttentionMock(),
    memoryConsolidation: createMemoryConsolidationMock(),
    simdSupported
  });
  wasmLoading = (async () => {
    try {
      if (typeof WebAssembly === "undefined") {
        throw new Error("WebAssembly not supported in this browser");
      }
      const simdSupported = await detectWasmSIMD();
      console.log(`WASM SIMD support: ${simdSupported}`);
      wasmModule = buildModule(simdSupported);
      console.log("\u2705 WASM attention module loaded");
      return wasmModule;
    } catch (error) {
      wasmLoadError = error;
      console.warn("\u26A0\uFE0F WASM loading failed, using fallback:", error.message);
      wasmModule = buildModule(false);
      return wasmModule;
    } finally {
      // Clear the in-flight marker; wasmModule now serves later callers.
      wasmLoading = null;
    }
  })();
  return wasmLoading;
}
53
/**
 * Feature-detects WebAssembly SIMD support.
 *
 * Validates a minimal module whose function body uses 128-bit SIMD
 * opcodes (0xfd-prefixed); engines without SIMD reject it at validation
 * time, so `WebAssembly.validate` returning true implies SIMD support.
 *
 * Fixes two defects in the previous implementation:
 * 1. `WebAssembly.instantiate(bytes)` resolves to a `{ module, instance }`
 *    wrapper, not an Instance, so `result instanceof WebAssembly.Instance`
 *    was always false — the function always reported no SIMD.
 * 2. The old byte sequence encoded `v128.const` (0xfd 0x0c) without its
 *    required 16 immediate bytes, so the test module never validated in
 *    any engine. Replaced with the canonical detection module used by
 *    wasm-feature-detect.
 *
 * @returns {Promise<boolean>} true when the runtime accepts SIMD opcodes.
 */
async function detectWasmSIMD() {
  try {
    // (module (func (result v128) i32.const 0 i8x16.splat i8x16.popcnt))
    const simdTest = new Uint8Array([
      0, 97, 115, 109, 1, 0, 0, 0, // "\0asm" magic + version 1
      1, 5, 1, 96, 0, 1, 123,      // type section: () -> v128
      3, 2, 1, 0,                  // function section: one func of type 0
      10, 10, 1, 8, 0,             // code section: one body, no locals
      65, 0,                       //   i32.const 0
      253, 15,                     //   i8x16.splat
      253, 98,                     //   i8x16.popcnt
      11                           //   end
    ]);
    return WebAssembly.validate(simdTest);
  } catch {
    return false;
  }
}
94
/**
 * Builds a pure-JS stand-in for the WASM flash-attention kernel.
 *
 * The returned function computes standard scaled-dot-product attention,
 * softmax(q . k / sqrt(dim)), applied to `values`. `numHeads` and
 * `blockSize` options are accepted for API compatibility but this mock
 * ignores them (it performs plain single-head attention).
 *
 * Improvement: the softmax now subtracts the maximum score before
 * exponentiating, so large dot products no longer overflow Math.exp into
 * Infinity/NaN. The result is mathematically identical.
 *
 * @returns {(query: Float32Array, keys: Float32Array, values: Float32Array,
 *            options?: {dim?: number}) => Float32Array}
 */
function createFlashAttentionMock() {
  return (query, keys, values, options = {}) => {
    const { dim = 384 } = options; // numHeads/blockSize intentionally unused
    const seqLen = keys.length / dim;
    const invSqrtDim = 1 / Math.sqrt(dim);
    const output = new Float32Array(query.length);
    for (let i = 0; i < query.length; i += dim) {
      const q = query.slice(i, i + dim);
      // Scaled dot-product scores for this query row.
      const scores = new Float32Array(seqLen);
      let maxScore = -Infinity;
      for (let j = 0; j < seqLen; j++) {
        const k = keys.slice(j * dim, (j + 1) * dim);
        let dot = 0;
        for (let d = 0; d < dim; d++) {
          dot += q[d] * k[d];
        }
        scores[j] = dot * invSqrtDim;
        if (scores[j] > maxScore) maxScore = scores[j];
      }
      // Numerically stable softmax: exp(score - max) cannot overflow.
      let sumWeights = 0;
      for (let j = 0; j < seqLen; j++) {
        scores[j] = Math.exp(scores[j] - maxScore);
        sumWeights += scores[j];
      }
      // Weighted sum of value rows.
      for (let j = 0; j < seqLen; j++) {
        const w = scores[j] / sumWeights;
        const v = values.slice(j * dim, (j + 1) * dim);
        for (let d = 0; d < dim; d++) {
          output[i + d] += w * v[d];
        }
      }
    }
    return output;
  };
}
123
/**
 * Builds a pure-JS stand-in for the WASM hyperbolic-attention kernel.
 *
 * For each key row, computes a similarity derived from a Poincare-style
 * distance: sim = 1 / (1 + acosh(1 + 2|c| * d^2)) where d is the
 * Euclidean distance between query and key and c is the curvature.
 *
 * Fix: the squared Euclidean distance (normQ + normK - 2*dot) can come
 * out marginally negative from floating-point cancellation when query
 * and key are near-identical, which made Math.sqrt return NaN. It is now
 * clamped at zero.
 *
 * @returns {(query: Float32Array, keys: Float32Array,
 *            options?: {curvature?: number}) => Float32Array}
 */
function createHyperbolicAttentionMock() {
  return (query, keys, options = {}) => {
    const { curvature = -1 } = options;
    const k = Math.abs(curvature);
    const similarities = new Float32Array(keys.length / query.length);
    for (let i = 0; i < similarities.length; i++) {
      const offset = i * query.length;
      let dotProduct = 0;
      let normQ = 0;
      let normK = 0;
      for (let j = 0; j < query.length; j++) {
        dotProduct += query[j] * keys[offset + j];
        normQ += query[j] * query[j];
        normK += keys[offset + j] * keys[offset + j];
      }
      // Clamp at 0 to avoid NaN from FP cancellation (see doc comment).
      const sqDist = Math.max(0, normQ + normK - 2 * dotProduct);
      const euclidean = Math.sqrt(sqDist);
      // acosh argument is >= 1 by construction, so this is always defined.
      const poincare = Math.acosh(1 + 2 * k * euclidean * euclidean);
      similarities[i] = 1 / (1 + poincare);
    }
    return similarities;
  };
}
145
/**
 * Builds a pure-JS stand-in for the WASM memory-consolidation kernel.
 *
 * The returned function greedily clusters `memories` (equal-length
 * Float32Arrays) by cosine similarity: each not-yet-used memory seeds a
 * cluster that absorbs every remaining memory whose similarity to the
 * seed exceeds `threshold`. Each cluster is summarized by the
 * element-wise mean of its members. At most `maxClusters` clusters are
 * produced; remaining memories are left unconsolidated.
 *
 * Fix: the previous version reported `count: cluster.size`, but
 * `cluster` is an Array (Sets have .size, Arrays have .length), so
 * `count` was always undefined. It now correctly reports the member
 * count via `cluster.length`.
 *
 * @returns {(memories: Float32Array[],
 *            options?: {threshold?: number, maxClusters?: number}) =>
 *            Array<{memory: Float32Array, count: number, members: Float32Array[]}>}
 */
function createMemoryConsolidationMock() {
  return (memories, options = {}) => {
    const { threshold = 0.8, maxClusters = 10 } = options;
    const consolidated = [];
    const used = /* @__PURE__ */ new Set();
    for (let i = 0; i < memories.length; i++) {
      if (used.has(i)) continue;
      const cluster = [memories[i]];
      used.add(i);
      for (let j = i + 1; j < memories.length; j++) {
        if (used.has(j)) continue;
        let dot = 0;
        let norm1 = 0;
        let norm2 = 0;
        for (let k = 0; k < memories[i].length; k++) {
          dot += memories[i][k] * memories[j][k];
          norm1 += memories[i][k] * memories[i][k];
          norm2 += memories[j][k] * memories[j][k];
        }
        // `|| 1` guards the zero-vector case (similarity treated as 0).
        const similarity = dot / (Math.sqrt(norm1 * norm2) || 1);
        if (similarity > threshold) {
          cluster.push(memories[j]);
          used.add(j);
        }
      }
      // Element-wise mean of the cluster members.
      const avg = new Float32Array(memories[i].length);
      for (const mem of cluster) {
        for (let k = 0; k < avg.length; k++) {
          avg[k] += mem[k] / cluster.length;
        }
      }
      consolidated.push({
        memory: avg,
        count: cluster.length, // was `cluster.size` (always undefined)
        members: cluster
      });
      if (consolidated.length >= maxClusters) break;
    }
    return consolidated;
  };
}
186
// Shared lazy-loader state; assigned inside init_agentdb_wasm_loader.
var wasmModule;    // loaded attention module, or null before initWASM()
var wasmLoading;   // in-flight init promise, or null when idle
var wasmLoadError; // last load failure, or null
var init_agentdb_wasm_loader = __esm({
  "dist/agentdb.wasm-loader.js"() {
    "use strict";
    wasmModule = null;
    wasmLoading = null;
    wasmLoadError = null;
  }
});
195
+
196
// src/browser/ProductQuantization.ts
/**
 * Product Quantization (PQ) codec for compact vector storage and
 * approximate distance computation.
 *
 * Each `dimension`-d vector is split into `numSubvectors` contiguous
 * chunks; every chunk is quantized against its own k-means codebook of
 * `numCentroids` centroids. A vector compresses to one Uint8 code per
 * subvector plus its L2 norm (4 bytes), for roughly
 * `dimension * 4 / (numSubvectors + 4)` : 1 compression.
 */
var ProductQuantization = class {
  config;          // resolved configuration with defaults applied
  codebook = null; // trained centroids; null until train()/importCodebook()
  trained = false; // whether compress()/asymmetricDistance() may be used
  /**
   * @param {object} config - requires dimension, numSubvectors,
   *   numCentroids; optional maxIterations (default 50) and
   *   convergenceThreshold (default 1e-4).
   * @throws when dimension is not divisible by numSubvectors, or when
   *   numCentroids cannot be represented by a Uint8 code.
   */
  constructor(config) {
    this.config = {
      dimension: config.dimension,
      numSubvectors: config.numSubvectors,
      numCentroids: config.numCentroids,
      maxIterations: config.maxIterations || 50,
      convergenceThreshold: config.convergenceThreshold || 1e-4
    };
    if (this.config.dimension % this.config.numSubvectors !== 0) {
      throw new Error(`Dimension ${this.config.dimension} must be divisible by numSubvectors ${this.config.numSubvectors}`);
    }
    // Codes are stored in a Uint8Array; more than 256 centroids would
    // silently wrap around and corrupt the index.
    if (this.config.numCentroids < 1 || this.config.numCentroids > 256) {
      throw new Error(`numCentroids ${this.config.numCentroids} must be in [1, 256] to fit a Uint8 code`);
    }
  }
  /**
   * Trains one k-means codebook per subvector slice.
   * @param {Float32Array[]} vectors - training set; each entry must have
   *   length `config.dimension`.
   * @throws on an empty training set or mismatched vector dimension.
   */
  async train(vectors) {
    if (vectors.length === 0) {
      throw new Error("Training requires at least one vector");
    }
    if (vectors[0].length !== this.config.dimension) {
      throw new Error(`Training vectors have dimension ${vectors[0].length}, expected ${this.config.dimension}`);
    }
    const subvectorDim = this.config.dimension / this.config.numSubvectors;
    const centroids = [];
    console.log(`[PQ] Training ${this.config.numSubvectors} subvectors with ${this.config.numCentroids} centroids each...`);
    for (let s = 0; s < this.config.numSubvectors; s++) {
      const startDim = s * subvectorDim;
      const endDim = startDim + subvectorDim;
      // Each subvector slice is quantized independently.
      const subvectors = vectors.map((v) => v.slice(startDim, endDim));
      const subCentroids = await this.kMeans(subvectors, this.config.numCentroids);
      centroids.push(...subCentroids);
      if ((s + 1) % 4 === 0 || s === this.config.numSubvectors - 1) {
        console.log(`[PQ] Trained ${s + 1}/${this.config.numSubvectors} subvectors`);
      }
    }
    // Centroids are stored flat: subvector s owns
    // centroids[s * numCentroids .. (s + 1) * numCentroids - 1].
    this.codebook = {
      subvectorDim,
      numSubvectors: this.config.numSubvectors,
      numCentroids: this.config.numCentroids,
      centroids
    };
    this.trained = true;
    console.log("[PQ] Training complete");
  }
  /**
   * Lloyd's k-means seeded with k-means++. Stops after
   * `config.maxIterations` or when the inertia change drops below
   * `config.convergenceThreshold`.
   * @returns {Promise<Float32Array[]>} k centroids.
   */
  async kMeans(vectors, k) {
    const dim = vectors[0].length;
    const n = vectors.length;
    const centroids = this.kMeansPlusPlus(vectors, k);
    const assignments = new Uint32Array(n);
    let prevInertia = Infinity;
    for (let iter = 0; iter < this.config.maxIterations; iter++) {
      // Assignment step: nearest centroid per vector.
      let inertia = 0;
      for (let i = 0; i < n; i++) {
        let minDist = Infinity;
        let minIdx = 0;
        for (let j = 0; j < k; j++) {
          const dist = this.squaredDistance(vectors[i], centroids[j]);
          if (dist < minDist) {
            minDist = dist;
            minIdx = j;
          }
        }
        assignments[i] = minIdx;
        inertia += minDist;
      }
      if (Math.abs(prevInertia - inertia) < this.config.convergenceThreshold) {
        break;
      }
      prevInertia = inertia;
      // Update step: move each centroid to the mean of its members.
      const counts = new Uint32Array(k);
      const sums = Array.from({ length: k }, () => new Float32Array(dim));
      for (let i = 0; i < n; i++) {
        const cluster = assignments[i];
        counts[cluster]++;
        for (let d = 0; d < dim; d++) {
          sums[cluster][d] += vectors[i][d];
        }
      }
      for (let j = 0; j < k; j++) {
        // Empty clusters keep their previous position.
        if (counts[j] > 0) {
          for (let d = 0; d < dim; d++) {
            centroids[j][d] = sums[j][d] / counts[j];
          }
        }
      }
    }
    return centroids;
  }
  /**
   * K-means++ seeding: chooses initial centroids with probability
   * proportional to squared distance from the already-chosen centroids.
   */
  kMeansPlusPlus(vectors, k) {
    const n = vectors.length;
    const centroids = [];
    const firstIdx = Math.floor(Math.random() * n);
    centroids.push(new Float32Array(vectors[firstIdx]));
    for (let i = 1; i < k; i++) {
      const distances = new Float32Array(n);
      let sumDistances = 0;
      for (let j = 0; j < n; j++) {
        let minDist = Infinity;
        for (const centroid of centroids) {
          const dist = this.squaredDistance(vectors[j], centroid);
          minDist = Math.min(minDist, dist);
        }
        distances[j] = minDist;
        sumDistances += minDist;
      }
      let r = Math.random() * sumDistances;
      let chosen = false;
      for (let j = 0; j < n; j++) {
        r -= distances[j];
        if (r <= 0) {
          centroids.push(new Float32Array(vectors[j]));
          chosen = true;
          break;
        }
      }
      // Floating-point rounding can leave r marginally positive after the
      // scan, and sumDistances is 0 when every vector coincides with an
      // existing centroid. Previously nothing was pushed in those cases,
      // leaving the centroid list short and crashing kMeans; fall back to
      // the last vector so exactly k centroids are always returned.
      if (!chosen) {
        centroids.push(new Float32Array(vectors[n - 1]));
      }
    }
    return centroids;
  }
  /**
   * Compresses a vector to per-subvector centroid codes plus its L2 norm.
   * The norm is stored for callers (it is not used internally here).
   * @returns {{codes: Uint8Array, norm: number}}
   * @throws when the codebook has not been trained.
   */
  compress(vector) {
    if (!this.trained || !this.codebook) {
      throw new Error("Codebook must be trained before compression");
    }
    const codes = new Uint8Array(this.config.numSubvectors);
    const subvectorDim = this.codebook.subvectorDim;
    let norm = 0;
    for (let i = 0; i < vector.length; i++) {
      norm += vector[i] * vector[i];
    }
    norm = Math.sqrt(norm);
    for (let s = 0; s < this.config.numSubvectors; s++) {
      const startDim = s * subvectorDim;
      const subvector = vector.slice(startDim, startDim + subvectorDim);
      // Pick the nearest centroid within this subvector's codebook.
      let minDist = Infinity;
      let minIdx = 0;
      const centroidOffset = s * this.config.numCentroids;
      for (let c = 0; c < this.config.numCentroids; c++) {
        const centroid = this.codebook.centroids[centroidOffset + c];
        const dist = this.squaredDistance(subvector, centroid);
        if (dist < minDist) {
          minDist = dist;
          minIdx = c;
        }
      }
      codes[s] = minIdx;
    }
    return { codes, norm };
  }
  /**
   * Reconstructs an approximation of the original vector by
   * concatenating the coded centroids.
   * @returns {Float32Array}
   */
  decompress(compressed) {
    if (!this.codebook) {
      throw new Error("Codebook not available");
    }
    const vector = new Float32Array(this.config.dimension);
    const subvectorDim = this.codebook.subvectorDim;
    for (let s = 0; s < this.config.numSubvectors; s++) {
      const code = compressed.codes[s];
      const centroidOffset = s * this.config.numCentroids;
      const centroid = this.codebook.centroids[centroidOffset + code];
      const startDim = s * subvectorDim;
      for (let d = 0; d < subvectorDim; d++) {
        vector[startDim + d] = centroid[d];
      }
    }
    return vector;
  }
  /**
   * Asymmetric Distance Computation (ADC): Euclidean distance from an
   * uncompressed query to a compressed vector, summed per subvector
   * against the coded centroids.
   * @returns {number}
   */
  asymmetricDistance(query, compressed) {
    if (!this.codebook) {
      throw new Error("Codebook not available");
    }
    let distance = 0;
    const subvectorDim = this.codebook.subvectorDim;
    for (let s = 0; s < this.config.numSubvectors; s++) {
      const code = compressed.codes[s];
      const centroidOffset = s * this.config.numCentroids;
      const centroid = this.codebook.centroids[centroidOffset + code];
      const startDim = s * subvectorDim;
      const querySubvector = query.slice(startDim, startDim + subvectorDim);
      distance += this.squaredDistance(querySubvector, centroid);
    }
    return Math.sqrt(distance);
  }
  /** Compresses multiple vectors. */
  batchCompress(vectors) {
    return vectors.map((v) => this.compress(v));
  }
  /** Ratio of raw float32 bytes to compressed bytes (codes + norm). */
  getCompressionRatio() {
    const originalBytes = this.config.dimension * 4;
    const compressedBytes = this.config.numSubvectors + 4;
    return originalBytes / compressedBytes;
  }
  /**
   * Serializes config + codebook to JSON for persistence.
   * @throws when no codebook has been trained or imported.
   */
  exportCodebook() {
    if (!this.codebook) {
      throw new Error("No codebook to export");
    }
    return JSON.stringify({
      config: this.config,
      codebook: {
        subvectorDim: this.codebook.subvectorDim,
        numSubvectors: this.codebook.numSubvectors,
        numCentroids: this.codebook.numCentroids,
        centroids: this.codebook.centroids.map((c) => Array.from(c))
      }
    });
  }
  /**
   * Restores config + codebook from exportCodebook() output and marks
   * the quantizer as trained.
   */
  importCodebook(json) {
    const data = JSON.parse(json);
    this.config = data.config;
    this.codebook = {
      subvectorDim: data.codebook.subvectorDim,
      numSubvectors: data.codebook.numSubvectors,
      numCentroids: data.codebook.numCentroids,
      centroids: data.codebook.centroids.map((c) => new Float32Array(c))
    };
    this.trained = true;
  }
  /** Squared Euclidean distance between two equal-length vectors. */
  squaredDistance(a, b) {
    let sum = 0;
    for (let i = 0; i < a.length; i++) {
      const diff = a[i] - b[i];
      sum += diff * diff;
    }
    return sum;
  }
  /** Summary statistics: training state and memory accounting (bytes). */
  getStats() {
    const compressionRatio = this.getCompressionRatio();
    const memoryPerVector = this.config.numSubvectors + 4;
    const codebookSize = this.codebook ? this.config.numSubvectors * this.config.numCentroids * (this.config.dimension / this.config.numSubvectors) * 4 : 0;
    return {
      trained: this.trained,
      compressionRatio,
      memoryPerVector,
      codebookSize
    };
  }
};
464
// Convenience factories for common PQ configurations. All use 256
// centroids (one full Uint8 code per subvector) and 50 k-means
// iterations; they differ only in subvector count.
function createPQWithSubvectors(dimension, numSubvectors) {
  return new ProductQuantization({
    dimension,
    numSubvectors,
    numCentroids: 256,
    maxIterations: 50
  });
}
/** PQ with 8 subvectors. */
function createPQ8(dimension) {
  return createPQWithSubvectors(dimension, 8);
}
/** PQ with 16 subvectors. */
function createPQ16(dimension) {
  return createPQWithSubvectors(dimension, 16);
}
/** PQ with 32 subvectors. */
function createPQ32(dimension) {
  return createPQWithSubvectors(dimension, 32);
}
488
+
489
// src/browser/HNSWIndex.ts
// Binary min-heap keyed by a numeric priority. The backing array stores
// { item, priority } pairs with the smallest priority at index 0.
var MinHeap = class {
  items = [];
  /** Inserts an item with the given priority. */
  push(item, priority) {
    this.items.push({ item, priority });
    this.bubbleUp(this.items.length - 1);
  }
  /** Removes and returns the lowest-priority item; undefined when empty. */
  pop() {
    const heap = this.items;
    if (heap.length === 0) return void 0;
    const top = heap[0].item;
    const tail = heap.pop();
    if (heap.length > 0) {
      heap[0] = tail;
      this.bubbleDown(0);
    }
    return top;
  }
  /** Returns the lowest-priority item without removing it. */
  peek() {
    const head = this.items[0];
    return head === void 0 ? void 0 : head.item;
  }
  /** Number of stored items. */
  size() {
    return this.items.length;
  }
  // Restores the heap invariant upward from `index` after a push.
  bubbleUp(index) {
    const heap = this.items;
    while (index > 0) {
      const parent = (index - 1) >> 1;
      if (heap[index].priority >= heap[parent].priority) return;
      const tmp = heap[index];
      heap[index] = heap[parent];
      heap[parent] = tmp;
      index = parent;
    }
  }
  // Restores the heap invariant downward from `index` after a pop.
  bubbleDown(index) {
    const heap = this.items;
    for (;;) {
      const left = 2 * index + 1;
      const right = left + 1;
      let smallest = index;
      if (left < heap.length && heap[left].priority < heap[smallest].priority) {
        smallest = left;
      }
      if (right < heap.length && heap[right].priority < heap[smallest].priority) {
        smallest = right;
      }
      if (smallest === index) return;
      const tmp = heap[index];
      heap[index] = heap[smallest];
      heap[smallest] = tmp;
      index = smallest;
    }
  }
};
538
+ var HNSWIndex = class {
539
+ config;
540
+ nodes = /* @__PURE__ */ new Map();
541
+ entryPoint = null;
542
+ currentId = 0;
543
+ ml;
544
+ constructor(config = {}) {
545
+ this.config = {
546
+ dimension: config.dimension || 384,
547
+ M: config.M || 16,
548
+ efConstruction: config.efConstruction || 200,
549
+ efSearch: config.efSearch || 50,
550
+ ml: config.ml || 1 / Math.log(2),
551
+ maxLayers: config.maxLayers || 16,
552
+ distanceFunction: config.distanceFunction || "cosine"
553
+ };
554
+ this.ml = this.config.ml;
555
+ }
556
+ /**
557
+ * Add vector to index
558
+ */
559
+ add(vector, id) {
560
+ const nodeId = id !== void 0 ? id : this.currentId++;
561
+ const level = this.randomLevel();
562
+ const node = {
563
+ id: nodeId,
564
+ vector,
565
+ level,
566
+ connections: /* @__PURE__ */ new Map()
567
+ };
568
+ for (let l = 0; l <= level; l++) {
569
+ node.connections.set(l, []);
570
+ }
571
+ if (this.entryPoint === null) {
572
+ this.entryPoint = nodeId;
573
+ this.nodes.set(nodeId, node);
574
+ return nodeId;
575
+ }
576
+ const ep = this.entryPoint;
577
+ let nearest = ep;
578
+ for (let lc = this.nodes.get(ep).level; lc > level; lc--) {
579
+ nearest = this.searchLayer(vector, nearest, 1, lc)[0];
580
+ }
581
+ for (let lc = Math.min(level, this.nodes.get(ep).level); lc >= 0; lc--) {
582
+ const candidates = this.searchLayer(vector, nearest, this.config.efConstruction, lc);
583
+ const M = lc === 0 ? this.config.M * 2 : this.config.M;
584
+ const neighbors = this.selectNeighbors(vector, candidates, M);
585
+ for (const neighbor of neighbors) {
586
+ this.connect(nodeId, neighbor, lc);
587
+ this.connect(neighbor, nodeId, lc);
588
+ const neighborNode = this.nodes.get(neighbor);
589
+ const neighborConnections = neighborNode.connections.get(lc);
590
+ if (neighborConnections.length > M) {
591
+ const newNeighbors = this.selectNeighbors(
592
+ neighborNode.vector,
593
+ neighborConnections,
594
+ M
595
+ );
596
+ neighborNode.connections.set(lc, newNeighbors);
597
+ }
598
+ }
599
+ nearest = candidates[0];
600
+ }
601
+ if (level > this.nodes.get(this.entryPoint).level) {
602
+ this.entryPoint = nodeId;
603
+ }
604
+ this.nodes.set(nodeId, node);
605
+ return nodeId;
606
+ }
607
+ /**
608
+ * Search for k nearest neighbors
609
+ */
610
+ search(query, k, ef) {
611
+ if (this.entryPoint === null) return [];
612
+ ef = ef || Math.max(this.config.efSearch, k);
613
+ let ep = this.entryPoint;
614
+ let nearest = ep;
615
+ for (let lc = this.nodes.get(ep).level; lc > 0; lc--) {
616
+ nearest = this.searchLayer(query, nearest, 1, lc)[0];
617
+ }
618
+ const candidates = this.searchLayer(query, nearest, ef, 0);
619
+ return candidates.slice(0, k).map((id) => ({
620
+ id,
621
+ distance: this.distance(query, this.nodes.get(id).vector),
622
+ vector: this.nodes.get(id).vector
623
+ }));
624
+ }
625
+ /**
626
+ * Search at specific layer
627
+ */
628
+ searchLayer(query, ep, ef, layer) {
629
+ const visited = /* @__PURE__ */ new Set();
630
+ const candidates = new MinHeap();
631
+ const w = new MinHeap();
632
+ const dist = this.distance(query, this.nodes.get(ep).vector);
633
+ candidates.push(ep, dist);
634
+ w.push(ep, -dist);
635
+ visited.add(ep);
636
+ while (candidates.size() > 0) {
637
+ const c = candidates.pop();
638
+ const fDist = -w.peek();
639
+ const cDist = this.distance(query, this.nodes.get(c).vector);
640
+ if (cDist > fDist) break;
641
+ const neighbors = this.nodes.get(c).connections.get(layer) || [];
642
+ for (const e of neighbors) {
643
+ if (visited.has(e)) continue;
644
+ visited.add(e);
645
+ const eDist = this.distance(query, this.nodes.get(e).vector);
646
+ const fDist2 = -w.peek();
647
+ if (eDist < fDist2 || w.size() < ef) {
648
+ candidates.push(e, eDist);
649
+ w.push(e, -eDist);
650
+ if (w.size() > ef) {
651
+ w.pop();
652
+ }
653
+ }
654
+ }
655
+ }
656
+ const result = [];
657
+ while (w.size() > 0) {
658
+ result.unshift(w.pop());
659
+ }
660
+ return result;
661
+ }
662
+ /**
663
+ * Select best neighbors using heuristic
664
+ */
665
+ selectNeighbors(base, candidates, M) {
666
+ if (candidates.length <= M) return candidates;
667
+ const sorted = candidates.map((id) => ({
668
+ id,
669
+ distance: this.distance(base, this.nodes.get(id).vector)
670
+ })).sort((a, b) => a.distance - b.distance);
671
+ return sorted.slice(0, M).map((x) => x.id);
672
+ }
673
+ /**
674
+ * Connect two nodes at layer
675
+ */
676
+ connect(from, to, layer) {
677
+ const node = this.nodes.get(from);
678
+ const connections = node.connections.get(layer);
679
+ if (!connections.includes(to)) {
680
+ connections.push(to);
681
+ }
682
+ }
683
+ /**
684
+ * Random level assignment
685
+ */
686
+ randomLevel() {
687
+ let level = 0;
688
+ while (Math.random() < this.ml && level < this.config.maxLayers - 1) {
689
+ level++;
690
+ }
691
+ return level;
692
+ }
693
+ /**
694
+ * Distance function
695
+ */
696
+ distance(a, b) {
697
+ switch (this.config.distanceFunction) {
698
+ case "cosine":
699
+ return 1 - this.cosineSimilarity(a, b);
700
+ case "euclidean":
701
+ return this.euclideanDistance(a, b);
702
+ case "manhattan":
703
+ return this.manhattanDistance(a, b);
704
+ default:
705
+ return 1 - this.cosineSimilarity(a, b);
706
+ }
707
+ }
708
+ cosineSimilarity(a, b) {
709
+ let dotProduct = 0;
710
+ let normA = 0;
711
+ let normB = 0;
712
+ for (let i = 0; i < a.length; i++) {
713
+ dotProduct += a[i] * b[i];
714
+ normA += a[i] * a[i];
715
+ normB += b[i] * b[i];
716
+ }
717
+ return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
718
+ }
719
+ euclideanDistance(a, b) {
720
+ let sum = 0;
721
+ for (let i = 0; i < a.length; i++) {
722
+ const diff = a[i] - b[i];
723
+ sum += diff * diff;
724
+ }
725
+ return Math.sqrt(sum);
726
+ }
727
+ manhattanDistance(a, b) {
728
+ let sum = 0;
729
+ for (let i = 0; i < a.length; i++) {
730
+ sum += Math.abs(a[i] - b[i]);
731
+ }
732
+ return sum;
733
+ }
734
+ /**
735
+ * Get index statistics
736
+ */
737
+ getStats() {
738
+ if (this.nodes.size === 0) {
739
+ return {
740
+ numNodes: 0,
741
+ numLayers: 0,
742
+ avgConnections: 0,
743
+ entryPointLevel: 0,
744
+ memoryBytes: 0
745
+ };
746
+ }
747
+ const maxLevel = Math.max(...Array.from(this.nodes.values()).map((n) => n.level));
748
+ let totalConnections = 0;
749
+ for (const node of this.nodes.values()) {
750
+ for (const connections of node.connections.values()) {
751
+ totalConnections += connections.length;
752
+ }
753
+ }
754
+ const avgConnections = totalConnections / this.nodes.size;
755
+ const vectorBytes = this.config.dimension * 4;
756
+ const connectionBytes = avgConnections * 4;
757
+ const metadataBytes = 100;
758
+ const memoryBytes = this.nodes.size * (vectorBytes + connectionBytes + metadataBytes);
759
+ return {
760
+ numNodes: this.nodes.size,
761
+ numLayers: maxLevel + 1,
762
+ avgConnections,
763
+ entryPointLevel: this.entryPoint ? this.nodes.get(this.entryPoint).level : 0,
764
+ memoryBytes
765
+ };
766
+ }
767
+ /**
768
+ * Export index for persistence
769
+ */
770
+ export() {
771
+ const data = {
772
+ config: this.config,
773
+ entryPoint: this.entryPoint,
774
+ currentId: this.currentId,
775
+ nodes: Array.from(this.nodes.entries()).map(([id, node]) => ({
776
+ id,
777
+ vector: Array.from(node.vector),
778
+ level: node.level,
779
+ connections: Array.from(node.connections.entries())
780
+ }))
781
+ };
782
+ return JSON.stringify(data);
783
+ }
784
+ /**
785
+ * Import index from JSON
786
+ */
787
+ import(json) {
788
+ const data = JSON.parse(json);
789
+ this.config = data.config;
790
+ this.entryPoint = data.entryPoint;
791
+ this.currentId = data.currentId;
792
+ this.nodes.clear();
793
+ for (const nodeData of data.nodes) {
794
+ const node = {
795
+ id: nodeData.id,
796
+ vector: new Float32Array(nodeData.vector),
797
+ level: nodeData.level,
798
+ connections: new Map(nodeData.connections)
799
+ };
800
+ this.nodes.set(nodeData.id, node);
801
+ }
802
+ }
803
+ /**
804
+ * Clear index
805
+ */
806
+ clear() {
807
+ this.nodes.clear();
808
+ this.entryPoint = null;
809
+ this.currentId = 0;
810
+ }
811
+ /**
812
+ * Get number of nodes
813
+ */
814
+ size() {
815
+ return this.nodes.size;
816
+ }
817
+ };
818
/**
 * Create an HNSW index with balanced defaults (good recall/speed trade-off).
 * @param {number} dimension - vector dimensionality
 */
function createHNSW(dimension) {
  const config = { dimension, M: 16, efConstruction: 200, efSearch: 50 };
  return new HNSWIndex(config);
}
826
/**
 * Create an HNSW index tuned for speed (fewer connections, smaller ef).
 * @param {number} dimension - vector dimensionality
 */
function createFastHNSW(dimension) {
  const config = { dimension, M: 8, efConstruction: 100, efSearch: 30 };
  return new HNSWIndex(config);
}
834
/**
 * Create an HNSW index tuned for recall (more connections, larger ef).
 * @param {number} dimension - vector dimensionality
 */
function createAccurateHNSW(dimension) {
  const config = { dimension, M: 32, efConstruction: 400, efSearch: 100 };
  return new HNSWIndex(config);
}
842
+
843
+ // src/browser/AdvancedFeatures.ts
844
var GraphNeuralNetwork = class {
  config;
  nodes = /* @__PURE__ */ new Map();
  edges = [];
  attentionWeights = /* @__PURE__ */ new Map();
  /**
   * @param {object} [config] - hiddenDim, numHeads, dropout, learningRate,
   *   attentionType. Defaults use ?? (not ||) so an explicit 0 for dropout
   *   or learningRate is honored instead of silently becoming the default.
   */
  constructor(config = {}) {
    this.config = {
      hiddenDim: config.hiddenDim ?? 64,
      numHeads: config.numHeads ?? 4,
      dropout: config.dropout ?? 0.1,
      learningRate: config.learningRate ?? 0.01,
      attentionType: config.attentionType ?? "gat"
    };
  }
  /** Register a node with its feature vector (empty adjacency list). */
  addNode(id, features) {
    this.nodes.set(id, {
      id,
      features,
      neighbors: []
    });
  }
  /**
   * Record an undirected edge. Adjacency lists stay duplicate-free; an
   * endpoint that has not been added as a node is simply not linked back.
   */
  addEdge(from, to, weight = 1) {
    this.edges.push({ from, to, weight });
    const fromNode = this.nodes.get(from);
    const toNode = this.nodes.get(to);
    if (fromNode && !fromNode.neighbors.includes(to)) {
      fromNode.neighbors.push(to);
    }
    if (toNode && !toNode.neighbors.includes(from)) {
      toNode.neighbors.push(from);
    }
  }
  /**
   * Multi-head GAT-style aggregation over a node's neighborhood.
   * Output has length hiddenDim (numHeads blocks of headDim each) and is
   * passed through a leaky-ReLU-style activation (slope 0.01 on negatives).
   * Nodes without neighbors return their own features unchanged.
   * @throws {Error} if nodeId is unknown
   */
  graphAttention(nodeId) {
    const node = this.nodes.get(nodeId);
    if (!node) throw new Error(`Node ${nodeId} not found`);
    const neighbors = node.neighbors;
    if (neighbors.length === 0) {
      return node.features;
    }
    const headDim = Math.floor(this.config.hiddenDim / this.config.numHeads);
    const aggregated = new Float32Array(this.config.hiddenDim);
    for (let h = 0; h < this.config.numHeads; h++) {
      let attentionSum = 0;
      const headOutput = new Float32Array(headDim);
      for (const neighborId of neighbors) {
        const neighbor = this.nodes.get(neighborId);
        // addEdge can record an id whose node was never added; skip such
        // dangling neighbors instead of crashing on neighbor.features.
        if (!neighbor) continue;
        const score = this.computeAttentionScore(
          node.features,
          neighbor.features,
          h
        );
        attentionSum += score;
        for (let i = 0; i < headDim && i < neighbor.features.length; i++) {
          headOutput[i] += score * neighbor.features[i];
        }
      }
      if (attentionSum > 0) {
        for (let i = 0; i < headDim; i++) {
          headOutput[i] /= attentionSum;
        }
      }
      const offset = h * headDim;
      for (let i = 0; i < headDim; i++) {
        aggregated[offset + i] = headOutput[i];
      }
    }
    for (let i = 0; i < aggregated.length; i++) {
      aggregated[i] = aggregated[i] > 0 ? aggregated[i] : 0.01 * aggregated[i];
    }
    return aggregated;
  }
  /**
   * Unnormalized exponential dot-product attention between two feature
   * vectors, scaled by sqrt of the shared length. (`head` is currently
   * unused; all heads share the same score.)
   */
  computeAttentionScore(features1, features2, head) {
    let score = 0;
    const len = Math.min(features1.length, features2.length);
    for (let i = 0; i < len; i++) {
      score += features1[i] * features2[i];
    }
    return Math.exp(score / Math.sqrt(len));
  }
  /** One round of message passing: new features for every node. */
  messagePass() {
    const newFeatures = /* @__PURE__ */ new Map();
    for (const [nodeId] of this.nodes) {
      newFeatures.set(nodeId, this.graphAttention(nodeId));
    }
    return newFeatures;
  }
  /** Commit features produced by messagePass() onto the stored nodes. */
  update(newFeatures) {
    for (const [nodeId, features] of newFeatures) {
      const node = this.nodes.get(nodeId);
      if (node) {
        node.features = features;
      }
    }
  }
  /**
   * Run `hops` rounds of message passing and return the node's resulting
   * features. NOTE: mutates every node's features as a side effect.
   * @throws {Error} if nodeId is unknown
   */
  computeGraphEmbedding(nodeId, hops = 2) {
    if (!this.nodes.has(nodeId)) throw new Error(`Node ${nodeId} not found`);
    for (let h = 0; h < hops; h++) {
      this.update(this.messagePass());
    }
    return this.nodes.get(nodeId).features;
  }
  /** Basic size/degree statistics plus the active configuration. */
  getStats() {
    return {
      numNodes: this.nodes.size,
      numEdges: this.edges.length,
      avgDegree: this.edges.length / Math.max(this.nodes.size, 1),
      config: this.config
    };
  }
};
980
var MaximalMarginalRelevance = class {
  config;
  /**
   * @param {object} [config] - lambda (relevance weight, default 0.7) and
   *   metric ("cosine" | anything-else => inverse-euclidean). lambda uses
   *   ?? so an explicit 0 (pure diversity) is honored; `|| 0.7` made it
   *   impossible to request lambda = 0.
   */
  constructor(config = {}) {
    this.config = {
      lambda: config.lambda ?? 0.7,
      metric: config.metric ?? "cosine"
    };
  }
  /**
   * Rerank results for diversity with Maximal Marginal Relevance.
   * Seeds with the highest pre-computed score, then greedily adds the
   * candidate maximizing lambda*relevance - (1-lambda)*maxSimToSelected.
   * @param query Query vector
   * @param candidates Candidate objects: { id, vector, score }
   * @param k Number of results to return (k <= 0 yields [])
   * @returns Selected candidate ids in pick order
   */
  rerank(query, candidates, k) {
    if (candidates.length === 0 || k <= 0) return [];
    const selected = [];
    // Track selected vectors directly; the original re-searched candidates
    // by id (O(k*n) finds, and wrong on duplicate ids).
    const selectedVectors = [];
    const remaining = new Set(candidates.map((_, i) => i));
    let bestIdx = 0;
    let bestScore = -Infinity;
    for (let i = 0; i < candidates.length; i++) {
      if (candidates[i].score > bestScore) {
        bestScore = candidates[i].score;
        bestIdx = i;
      }
    }
    selected.push(candidates[bestIdx].id);
    selectedVectors.push(candidates[bestIdx].vector);
    remaining.delete(bestIdx);
    while (selected.length < k && remaining.size > 0) {
      let bestMMR = -Infinity;
      let bestCandidate = -1;
      for (const idx of remaining) {
        const candidate = candidates[idx];
        const relevance = this.similarity(query, candidate.vector);
        let maxSimilarity = -Infinity;
        for (const vec of selectedVectors) {
          maxSimilarity = Math.max(maxSimilarity, this.similarity(candidate.vector, vec));
        }
        const mmr = this.config.lambda * relevance - (1 - this.config.lambda) * maxSimilarity;
        if (mmr > bestMMR) {
          bestMMR = mmr;
          bestCandidate = idx;
        }
      }
      if (bestCandidate === -1) break;
      selected.push(candidates[bestCandidate].id);
      selectedVectors.push(candidates[bestCandidate].vector);
      remaining.delete(bestCandidate);
    }
    return selected;
  }
  /**
   * Similarity under the configured metric: cosine, or 1/(1+euclidean)
   * for any other metric value.
   */
  similarity(a, b) {
    if (this.config.metric === "cosine") {
      return this.cosineSimilarity(a, b);
    }
    const dist = this.euclideanDistance(a, b);
    return 1 / (1 + dist);
  }
  /**
   * Cosine similarity; returns 0 for zero-norm input instead of NaN
   * (the original divided by zero).
   */
  cosineSimilarity(a, b) {
    let dotProduct = 0;
    let normA = 0;
    let normB = 0;
    for (let i = 0; i < a.length; i++) {
      dotProduct += a[i] * b[i];
      normA += a[i] * a[i];
      normB += b[i] * b[i];
    }
    const denom = Math.sqrt(normA) * Math.sqrt(normB);
    return denom > 0 ? dotProduct / denom : 0;
  }
  /** Euclidean (L2) distance. */
  euclideanDistance(a, b) {
    let sum = 0;
    for (let i = 0; i < a.length; i++) {
      const diff = a[i] - b[i];
      sum += diff * diff;
    }
    return Math.sqrt(sum);
  }
  /** Set lambda (relevance vs diversity trade-off), clamped to [0, 1]. */
  setLambda(lambda) {
    this.config.lambda = Math.max(0, Math.min(1, lambda));
  }
};
1073
var TensorCompression = class {
  /**
   * PCA-style dimensionality reduction: center the data, estimate the top
   * `targetDim` principal directions via power iteration, and project.
   * Returns [] for empty input and the original array untouched when
   * targetDim does not actually reduce the dimensionality.
   * @param vectors Array of vectors to compress
   * @param targetDim Target dimension
   * @returns Compressed vectors
   */
  static compress(vectors, targetDim) {
    if (vectors.length === 0) return [];
    const originalDim = vectors[0].length;
    if (targetDim >= originalDim) return vectors;
    const matrix = vectors.map((v) => Array.from(v));
    const mean = this.computeMean(matrix);
    const centered = matrix.map(
      (row) => row.map((value, i) => value - mean[i])
    );
    const covariance = this.computeCovariance(centered);
    const basis = this.powerIteration(covariance, targetDim);
    return centered.map((row) => {
      const projected = new Float32Array(targetDim);
      for (let axis = 0; axis < targetDim; axis++) {
        let dot = 0;
        for (let j = 0; j < originalDim; j++) {
          dot += row[j] * basis[axis][j];
        }
        projected[axis] = dot;
      }
      return projected;
    });
  }
  /** Column-wise mean of a row-major matrix. */
  static computeMean(matrix) {
    const dim = matrix[0].length;
    const totals = new Array(dim).fill(0);
    for (const row of matrix) {
      for (let i = 0; i < dim; i++) {
        totals[i] += row[i];
      }
    }
    const n = matrix.length;
    return totals.map((total) => total / n);
  }
  /** Symmetric covariance matrix of already-centered rows (divided by n). */
  static computeCovariance(matrix) {
    const n = matrix.length;
    const dim = matrix[0].length;
    const cov = Array.from(
      { length: dim },
      () => new Array(dim).fill(0)
    );
    for (let i = 0; i < dim; i++) {
      for (let j = 0; j <= i; j++) {
        let acc = 0;
        for (const row of matrix) {
          acc += row[i] * row[j];
        }
        cov[i][j] = cov[j][i] = acc / n;
      }
    }
    return cov;
  }
  /**
   * Top-k eigenvectors via power iteration with Gram-Schmidt deflation.
   * Starting vectors are random, so the sign/orientation of each
   * eigenvector is not deterministic.
   */
  static powerIteration(matrix, k, iterations = 100) {
    const dim = matrix.length;
    const eigenvectors = [];
    for (let count = 0; count < k; count++) {
      let v = new Array(dim).fill(0).map(() => Math.random() - 0.5);
      for (let iter = 0; iter < iterations; iter++) {
        // candidate = matrix * v
        const candidate = new Array(dim).fill(0);
        for (let r = 0; r < dim; r++) {
          for (let c = 0; c < dim; c++) {
            candidate[r] += matrix[r][c] * v[c];
          }
        }
        // Deflate against previously found eigenvectors.
        for (const prev of eigenvectors) {
          let dot = 0;
          for (let j = 0; j < dim; j++) {
            dot += candidate[j] * prev[j];
          }
          for (let j = 0; j < dim; j++) {
            candidate[j] -= dot * prev[j];
          }
        }
        let norm = 0;
        for (const value of candidate) {
          norm += value * value;
        }
        norm = Math.sqrt(norm);
        if (norm < 1e-10) break;
        v = candidate.map((value) => value / norm);
      }
      eigenvectors.push(v);
    }
    return eigenvectors;
  }
};
1176
var BatchProcessor = class {
  /**
   * Cosine similarity of `query` against every vector in `vectors`.
   * Zero-norm inputs yield 0 instead of NaN (the original divided by zero).
   * @returns {Float32Array} one similarity per input vector
   */
  static batchCosineSimilarity(query, vectors) {
    const similarities = new Float32Array(vectors.length);
    let queryNorm = 0;
    for (let i = 0; i < query.length; i++) {
      queryNorm += query[i] * query[i];
    }
    queryNorm = Math.sqrt(queryNorm);
    for (let v = 0; v < vectors.length; v++) {
      const vector = vectors[v];
      let dotProduct = 0;
      let vectorNorm = 0;
      for (let i = 0; i < query.length; i++) {
        dotProduct += query[i] * vector[i];
        vectorNorm += vector[i] * vector[i];
      }
      vectorNorm = Math.sqrt(vectorNorm);
      const denom = queryNorm * vectorNorm;
      similarities[v] = denom > 0 ? dotProduct / denom : 0;
    }
    return similarities;
  }
  /**
   * L2-normalize each vector. Zero vectors come back all-zero instead of
   * all-NaN (the original divided by a zero norm).
   */
  static batchNormalize(vectors) {
    return vectors.map((v) => {
      let norm = 0;
      for (let i = 0; i < v.length; i++) {
        norm += v[i] * v[i];
      }
      norm = Math.sqrt(norm);
      const normalized = new Float32Array(v.length);
      if (norm > 0) {
        for (let i = 0; i < v.length; i++) {
          normalized[i] = v[i] / norm;
        }
      }
      return normalized;
    });
  }
};
1218
+
1219
+ // src/browser/AttentionBrowser.ts
1220
var AttentionBrowser = class {
  wasmModule = null;
  loadingState = "idle";
  loadError = null;
  // Single in-flight load shared by concurrent initialize() callers; this
  // replaces the previous setTimeout(50ms) polling loop.
  loadPromise = null;
  config;
  /**
   * @param {object} [config] - dimension, numHeads, blockSize, curvature,
   *   useWASM (set false to force the pure-JS fallbacks).
   */
  constructor(config = {}) {
    this.config = {
      dimension: 384,
      numHeads: 4,
      blockSize: 64,
      curvature: -1,
      useWASM: true,
      ...config
    };
  }
  /** Current loading state: "idle" | "loading" | "loaded" | "error". */
  getLoadingState() {
    return this.loadingState;
  }
  /** Error captured by the last failed load, or null. */
  getError() {
    return this.loadError;
  }
  /**
   * Initialize the WASM module (lazy loaded). Concurrent callers await the
   * same load promise instead of busy-polling. Failures are recorded
   * (state "error", getError()) rather than thrown, so the JS fallbacks
   * keep working.
   */
  async initialize() {
    if (this.loadingState === "loaded") return;
    if (this.loadingState === "loading" && this.loadPromise) {
      await this.loadPromise;
      return;
    }
    this.loadingState = "loading";
    this.loadPromise = (async () => {
      try {
        if (!this.config.useWASM) {
          this.loadingState = "loaded";
          return;
        }
        const wasmLoader = await Promise.resolve().then(() => (init_agentdb_wasm_loader(), agentdb_wasm_loader_exports));
        this.wasmModule = await wasmLoader.initWASM();
        this.loadingState = "loaded";
      } catch (error) {
        this.loadError = error instanceof Error ? error : new Error(String(error));
        this.loadingState = "error";
        console.warn("WASM initialization failed, using fallback:", this.loadError.message);
      }
    })();
    await this.loadPromise;
  }
  /**
   * Flash Attention - optimized attention mechanism, O(N) memory instead
   * of O(N²). Uses the WASM kernel when loaded; falls back to pure JS.
   * @param query - Query vectors (flat, dimension-sized rows)
   * @param keys - Key vectors
   * @param values - Value vectors
   * @returns Attention output
   */
  async flashAttention(query, keys, values) {
    await this.initialize();
    if (this.wasmModule && this.wasmModule.flashAttention) {
      try {
        return this.wasmModule.flashAttention(query, keys, values, this.config);
      } catch (error) {
        console.warn("WASM flash attention failed, using fallback:", error);
      }
    }
    return this.flashAttentionFallback(query, keys, values);
  }
  /**
   * Hyperbolic Attention - attention scores in hyperbolic (Poincaré)
   * space; better for hierarchical relationships. WASM with JS fallback.
   * @param query - Query vector
   * @param keys - Key vectors (flat)
   * @returns Similarity scores in hyperbolic space
   */
  async hyperbolicAttention(query, keys) {
    await this.initialize();
    if (this.wasmModule && this.wasmModule.hyperbolicAttention) {
      try {
        return this.wasmModule.hyperbolicAttention(query, keys, this.config);
      } catch (error) {
        console.warn("WASM hyperbolic attention failed, using fallback:", error);
      }
    }
    return this.hyperbolicAttentionFallback(query, keys);
  }
  /**
   * Memory consolidation - cluster similar memories and return normalized
   * centroids. WASM with JS fallback.
   * @param memories - Array of memory vectors
   * @param config - threshold, maxClusters, minClusterSize
   * @returns Consolidated memory clusters ({ memory, count, members })
   */
  async consolidateMemories(memories, config = {}) {
    await this.initialize();
    const fullConfig = {
      threshold: 0.8,
      maxClusters: 10,
      minClusterSize: 1,
      ...config
    };
    if (this.wasmModule && this.wasmModule.memoryConsolidation) {
      try {
        return this.wasmModule.memoryConsolidation(memories, fullConfig);
      } catch (error) {
        console.warn("WASM memory consolidation failed, using fallback:", error);
      }
    }
    return this.consolidateMemoriesFallback(memories, fullConfig);
  }
  /** Release the WASM module and reset to the initial state. */
  dispose() {
    this.wasmModule = null;
    this.loadingState = "idle";
    this.loadError = null;
    this.loadPromise = null;
  }
  // ========================================================================
  // Fallback Implementations (Pure JavaScript)
  // ========================================================================
  // Row-by-row softmax attention; assumes query/keys/values are flat arrays
  // of `dimension`-sized rows.
  flashAttentionFallback(query, keys, values) {
    const { dimension = 384 } = this.config;
    const seqLen = keys.length / dimension;
    const output = new Float32Array(query.length);
    for (let i = 0; i < query.length; i += dimension) {
      const q = query.slice(i, i + dimension);
      let sumWeights = 0;
      const weights = new Float32Array(seqLen);
      for (let j = 0; j < seqLen; j++) {
        const k = keys.slice(j * dimension, (j + 1) * dimension);
        let dot = 0;
        for (let d = 0; d < dimension; d++) {
          dot += q[d] * k[d];
        }
        weights[j] = Math.exp(dot / Math.sqrt(dimension));
        sumWeights += weights[j];
      }
      for (let j = 0; j < seqLen; j++) {
        weights[j] /= sumWeights || 1;
        const v = values.slice(j * dimension, (j + 1) * dimension);
        for (let d = 0; d < dimension; d++) {
          output[i + d] += weights[j] * v[d];
        }
      }
    }
    return output;
  }
  // Distance in the Poincaré model mapped into a (0, 1] similarity.
  hyperbolicAttentionFallback(query, keys) {
    const { curvature = -1 } = this.config;
    const k = Math.abs(curvature);
    const similarities = new Float32Array(keys.length / query.length);
    for (let i = 0; i < similarities.length; i++) {
      const offset = i * query.length;
      let dotProduct = 0;
      let normQ = 0;
      let normK = 0;
      for (let j = 0; j < query.length; j++) {
        dotProduct += query[j] * keys[offset + j];
        normQ += query[j] * query[j];
        normK += keys[offset + j] * keys[offset + j];
      }
      const euclidean = Math.sqrt(normQ + normK - 2 * dotProduct);
      const poincare = Math.acosh(1 + 2 * k * euclidean * euclidean);
      similarities[i] = 1 / (1 + poincare);
    }
    return similarities;
  }
  // Greedy single-link clustering; each kept cluster yields an L2-normalized
  // centroid plus its member vectors.
  consolidateMemoriesFallback(memories, config) {
    const { threshold = 0.8, maxClusters = 10, minClusterSize = 1 } = config;
    const consolidated = [];
    const used = /* @__PURE__ */ new Set();
    for (let i = 0; i < memories.length; i++) {
      if (used.has(i)) continue;
      const cluster = [memories[i]];
      used.add(i);
      for (let j = i + 1; j < memories.length; j++) {
        if (used.has(j)) continue;
        const similarity = this.cosineSimilarity(memories[i], memories[j]);
        if (similarity > threshold) {
          cluster.push(memories[j]);
          used.add(j);
        }
      }
      if (cluster.length >= minClusterSize) {
        const centroid = new Float32Array(memories[i].length);
        for (const mem of cluster) {
          for (let k = 0; k < centroid.length; k++) {
            centroid[k] += mem[k] / cluster.length;
          }
        }
        let norm = 0;
        for (let k = 0; k < centroid.length; k++) {
          norm += centroid[k] * centroid[k];
        }
        norm = Math.sqrt(norm);
        if (norm > 0) {
          for (let k = 0; k < centroid.length; k++) {
            centroid[k] /= norm;
          }
        }
        consolidated.push({
          memory: centroid,
          count: cluster.length,
          members: cluster
        });
      }
      if (consolidated.length >= maxClusters) break;
    }
    return consolidated;
  }
  // Zero-safe cosine similarity.
  cosineSimilarity(a, b) {
    let dot = 0;
    let normA = 0;
    let normB = 0;
    for (let i = 0; i < a.length; i++) {
      dot += a[i] * b[i];
      normA += a[i] * a[i];
      normB += b[i] * b[i];
    }
    const denominator = Math.sqrt(normA * normB);
    return denominator > 0 ? dot / denominator : 0;
  }
};
1453
/** Construct an AttentionBrowser with the caller-supplied configuration. */
function createAttention(config) {
  return new AttentionBrowser(config);
}
1456
/** AttentionBrowser preset tuned for speed (small dimension, 2 heads). */
function createFastAttention() {
  const preset = { dimension: 256, numHeads: 2, blockSize: 32, useWASM: true };
  return new AttentionBrowser(preset);
}
1464
/** AttentionBrowser preset tuned for quality (large dimension, 8 heads). */
function createAccurateAttention() {
  const preset = { dimension: 768, numHeads: 8, blockSize: 128, useWASM: true };
  return new AttentionBrowser(preset);
}
1472
+
1473
+ // src/browser/index.ts
1474
/**
 * Probe the runtime for browser features used by AgentDB.
 * All fields are plain booleans. The previous implementation stored the
 * Promise returned by an async SIMD probe in `wasmSIMD`, which is always
 * truthy; WebAssembly.validate is synchronous, so a real boolean is used.
 */
function detectFeatures() {
  let wasmSIMD = false;
  try {
    if (typeof globalThis.WebAssembly !== "undefined") {
      // Tiny module using v128 opcodes; validate() returns true only when
      // the engine accepts it (i.e. SIMD is supported).
      wasmSIMD = globalThis.WebAssembly.validate(new Uint8Array([
        0, 97, 115, 109, 1, 0, 0, 0, 1, 5, 1, 96, 0, 1, 123,
        3, 2, 1, 0, 10, 10, 1, 8, 0, 253, 12, 253, 12, 253, 84, 11
      ]));
    }
  } catch {
    wasmSIMD = false;
  }
  return {
    indexedDB: "indexedDB" in globalThis,
    broadcastChannel: "BroadcastChannel" in globalThis,
    webWorkers: typeof globalThis.Worker !== "undefined",
    wasmSIMD,
    sharedArrayBuffer: typeof SharedArrayBuffer !== "undefined"
  };
}
1483
/**
 * Detect WebAssembly SIMD support.
 * Uses WebAssembly.validate (synchronous, no instantiation cost). The old
 * code checked `await WebAssembly.instantiate(bytes) instanceof
 * WebAssembly.Instance`, but instantiate(BufferSource) resolves to a
 * { module, instance } pair — never an Instance — so it always returned
 * false even when SIMD works. Still async for caller compatibility.
 * @returns {Promise<boolean>}
 */
async function detectWasmSIMD2() {
  try {
    if (typeof globalThis.WebAssembly === "undefined") {
      return false;
    }
    // Minimal module containing v128 opcodes.
    const simdTest = new Uint8Array([
      0, 97, 115, 109, 1, 0, 0, 0, 1, 5, 1, 96, 0, 1, 123,
      3, 2, 1, 0, 10, 10, 1, 8, 0, 253, 12, 253, 12, 253, 84, 11
    ]);
    return globalThis.WebAssembly.validate(simdTest);
  } catch {
    return false;
  }
}
1528
// Preset feature configurations, keyed by dataset size / optimization goal.
// Under ~1k vectors linear scan is fast enough, so no PQ/HNSW.
var SMALL_DATASET_CONFIG = {
  pq: { enabled: false },
  hnsw: { enabled: false },
  gnn: { enabled: true, numHeads: 2 },
  mmr: { enabled: true, lambda: 0.7 },
  svd: { enabled: false }
};
// Up to ~10k vectors: HNSW plus moderate product quantization.
var MEDIUM_DATASET_CONFIG = {
  pq: { enabled: true, subvectors: 8 },
  hnsw: { enabled: true, M: 16 },
  gnn: { enabled: true, numHeads: 4 },
  mmr: { enabled: true, lambda: 0.7 },
  svd: { enabled: false }
};
// Beyond 10k vectors: heavier compression and denser HNSW graph.
var LARGE_DATASET_CONFIG = {
  pq: { enabled: true, subvectors: 16 },
  hnsw: { enabled: true, M: 32 },
  gnn: { enabled: true, numHeads: 4 },
  mmr: { enabled: true, lambda: 0.7 },
  svd: { enabled: true, targetDim: 128 }
};
// Minimum footprint: 16x PQ compression, sparse graph, aggressive SVD,
// all optional features off.
var MEMORY_OPTIMIZED_CONFIG = {
  pq: { enabled: true, subvectors: 32 },
  hnsw: { enabled: true, M: 8 },
  gnn: { enabled: false },
  mmr: { enabled: false },
  svd: { enabled: true, targetDim: 64 }
};
// Lowest latency: no compression overhead, well-connected HNSW only.
var SPEED_OPTIMIZED_CONFIG = {
  pq: { enabled: false },
  hnsw: { enabled: true, M: 32, efSearch: 100 },
  gnn: { enabled: false },
  mmr: { enabled: false },
  svd: { enabled: false }
};
// Best recall/diversity: lossless vectors, densest graph, more attention
// heads, stronger MMR diversification.
var QUALITY_OPTIMIZED_CONFIG = {
  pq: { enabled: false },
  hnsw: { enabled: true, M: 48, efConstruction: 400 },
  gnn: { enabled: true, numHeads: 8 },
  mmr: { enabled: true, lambda: 0.8 },
  svd: { enabled: false }
};
// Semantic-version descriptor for this build of the browser bundle.
var VERSION = {
  major: 2,
  minor: 0,
  patch: 0,
  prerelease: "alpha.2",
  features: "advanced",
  full: "2.0.0-alpha.2+advanced"
};
1588
/**
 * Rough memory estimate for a dataset under a given feature configuration.
 * Vector storage assumes raw float32 unless PQ or SVD is enabled; when both
 * are enabled the SVD estimate takes precedence (matching the original
 * evaluation order). HNSW adds ~M*1.5 connection ids (4 bytes each) per
 * vector.
 * @returns {{vectors: number, index: number, total: number, totalMB: number}}
 */
function estimateMemoryUsage(numVectors, dimension, config) {
  let vectorBytes = numVectors * dimension * 4;
  if (config.pq && config.pq.enabled) {
    const subvectors = config.pq.subvectors || 8;
    vectorBytes = numVectors * (subvectors + 4);
  }
  if (config.svd && config.svd.enabled) {
    const targetDim = config.svd.targetDim || dimension / 2;
    vectorBytes = numVectors * targetDim * 4;
  }
  let indexBytes = 0;
  if (config.hnsw && config.hnsw.enabled) {
    const M = config.hnsw.M || 16;
    indexBytes = numVectors * (M * 1.5) * 4;
  }
  const total = vectorBytes + indexBytes;
  return {
    vectors: vectorBytes,
    index: indexBytes,
    total,
    totalMB: total / (1024 * 1024)
  };
}
1613
/**
 * Recommend a preset configuration from the dataset size.
 * Under 1k vectors: linear-friendly small preset; under 10k: medium
 * (HNSW + PQ8); otherwise the large preset. `dimension` is accepted for
 * interface stability but does not influence the choice.
 */
function recommendConfig(numVectors, dimension) {
  if (numVectors < 1e3) {
    return {
      name: "SMALL_DATASET",
      config: SMALL_DATASET_CONFIG,
      reason: "Small dataset, linear search is fast enough"
    };
  }
  if (numVectors < 1e4) {
    return {
      name: "MEDIUM_DATASET",
      config: MEDIUM_DATASET_CONFIG,
      reason: "Medium dataset, HNSW + PQ8 recommended"
    };
  }
  return {
    name: "LARGE_DATASET",
    config: LARGE_DATASET_CONFIG,
    reason: "Large dataset, aggressive compression + HNSW recommended"
  };
}
1634
/**
 * Benchmark a search function with random queries and report latency
 * percentiles in milliseconds. `searchFn` is now awaited, so async search
 * implementations are timed correctly (previously the promise was dropped
 * and only the synchronous dispatch was measured); awaiting a plain value
 * is a no-op, so synchronous functions are unaffected.
 * @param searchFn - (query: Float32Array, k: number) => results | Promise
 * @param numQueries - number of timed queries
 * @param k - result count passed to searchFn
 * @param dimension - random query dimensionality
 */
async function benchmarkSearch(searchFn, numQueries = 100, k = 10, dimension = 384) {
  const times = [];
  for (let i = 0; i < numQueries; i++) {
    const query = new Float32Array(dimension);
    for (let d = 0; d < dimension; d++) {
      query[d] = Math.random() - 0.5;
    }
    const start = performance.now();
    await searchFn(query, k);
    const end = performance.now();
    times.push(end - start);
  }
  times.sort((a, b) => a - b);
  return {
    avgTimeMs: times.reduce((a, b) => a + b, 0) / times.length,
    minTimeMs: times[0],
    maxTimeMs: times[times.length - 1],
    p50Ms: times[Math.floor(times.length * 0.5)],
    p95Ms: times[Math.floor(times.length * 0.95)],
    p99Ms: times[Math.floor(times.length * 0.99)]
  };
}
1656
+ export {
1657
+ AttentionBrowser,
1658
+ BatchProcessor,
1659
+ GraphNeuralNetwork,
1660
+ HNSWIndex,
1661
+ LARGE_DATASET_CONFIG,
1662
+ MEDIUM_DATASET_CONFIG,
1663
+ MEMORY_OPTIMIZED_CONFIG,
1664
+ MaximalMarginalRelevance,
1665
+ ProductQuantization,
1666
+ QUALITY_OPTIMIZED_CONFIG,
1667
+ SMALL_DATASET_CONFIG,
1668
+ SPEED_OPTIMIZED_CONFIG,
1669
+ TensorCompression,
1670
+ VERSION,
1671
+ benchmarkSearch,
1672
+ createAccurateAttention,
1673
+ createAccurateHNSW,
1674
+ createAttention,
1675
+ createFastAttention,
1676
+ createFastHNSW,
1677
+ createHNSW,
1678
+ createPQ16,
1679
+ createPQ32,
1680
+ createPQ8,
1681
+ detectFeatures,
1682
+ estimateMemoryUsage,
1683
+ recommendConfig
1684
+ };
1685
+ //# sourceMappingURL=agentdb.browser.js.map