brainbank 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/LICENSE +21 -0
  2. package/README.md +1059 -0
  3. package/assets/architecture.png +0 -0
  4. package/bin/brainbank +11 -0
  5. package/dist/chunk-2P3EGY6S.js +37 -0
  6. package/dist/chunk-2P3EGY6S.js.map +1 -0
  7. package/dist/chunk-3GAIDXRW.js +105 -0
  8. package/dist/chunk-3GAIDXRW.js.map +1 -0
  9. package/dist/chunk-4ZKBQ33J.js +56 -0
  10. package/dist/chunk-4ZKBQ33J.js.map +1 -0
  11. package/dist/chunk-7QVYU63E.js +7 -0
  12. package/dist/chunk-7QVYU63E.js.map +1 -0
  13. package/dist/chunk-EDKSKLX4.js +490 -0
  14. package/dist/chunk-EDKSKLX4.js.map +1 -0
  15. package/dist/chunk-GOUBW7UA.js +373 -0
  16. package/dist/chunk-GOUBW7UA.js.map +1 -0
  17. package/dist/chunk-MJ3Y24H6.js +185 -0
  18. package/dist/chunk-MJ3Y24H6.js.map +1 -0
  19. package/dist/chunk-N6ZMBFDE.js +224 -0
  20. package/dist/chunk-N6ZMBFDE.js.map +1 -0
  21. package/dist/chunk-YGSEUWLV.js +2053 -0
  22. package/dist/chunk-YGSEUWLV.js.map +1 -0
  23. package/dist/chunk-Z5SU54HP.js +171 -0
  24. package/dist/chunk-Z5SU54HP.js.map +1 -0
  25. package/dist/cli.d.ts +1 -0
  26. package/dist/cli.js +731 -0
  27. package/dist/cli.js.map +1 -0
  28. package/dist/code.d.ts +31 -0
  29. package/dist/code.js +8 -0
  30. package/dist/code.js.map +1 -0
  31. package/dist/docs.d.ts +19 -0
  32. package/dist/docs.js +8 -0
  33. package/dist/docs.js.map +1 -0
  34. package/dist/git.d.ts +31 -0
  35. package/dist/git.js +8 -0
  36. package/dist/git.js.map +1 -0
  37. package/dist/index.d.ts +845 -0
  38. package/dist/index.js +80 -0
  39. package/dist/index.js.map +1 -0
  40. package/dist/memory.d.ts +19 -0
  41. package/dist/memory.js +146 -0
  42. package/dist/memory.js.map +1 -0
  43. package/dist/notes.d.ts +19 -0
  44. package/dist/notes.js +57 -0
  45. package/dist/notes.js.map +1 -0
  46. package/dist/openai-PCTYLOWI.js +8 -0
  47. package/dist/openai-PCTYLOWI.js.map +1 -0
  48. package/dist/types-Da_zLLOl.d.ts +474 -0
  49. package/package.json +91 -0
Binary file
package/bin/brainbank ADDED
@@ -0,0 +1,11 @@
1
#!/bin/sh
# Launcher for the BrainBank CLI.
#
# npm installs bin entries as symlinks, so $0 may be a link (or a chain of
# links). Follow the chain to the real script location, then run the
# TypeScript entry point relative to that directory via tsx.
self="$0"
while [ -L "$self" ]; do
  base="$(cd "$(dirname "$self")" && pwd)"
  self="$(readlink "$self")"
  # readlink may return a relative target; anchor it to the link's directory.
  case "$self" in
    /*) ;;
    *) self="$base/$self" ;;
  esac
done
base="$(cd "$(dirname "$self")" && pwd)"

exec npx tsx "$base/../src/integrations/cli.ts" "$@"
@@ -0,0 +1,37 @@
1
+ import {
2
+ __name
3
+ } from "./chunk-7QVYU63E.js";
4
+
5
+ // src/embeddings/math.ts
6
/**
 * Cosine similarity of two vectors that are assumed to be pre-normalized
 * (unit length), which reduces to a plain dot product. Returns a value
 * in [-1, 1]; an empty pair scores 0. Throws on dimension mismatch.
 */
function cosineSimilarity(a, b) {
  if (a.length !== b.length) {
    throw new Error(`Vector dimension mismatch: ${a.length} vs ${b.length}`);
  }
  if (a.length === 0) return 0;
  return a.reduce((acc, value, i) => acc + value * b[i], 0);
}
17
+ __name(cosineSimilarity, "cosineSimilarity");
18
/**
 * L2-normalize a vector to unit length without mutating the input.
 * A zero vector yields a fresh all-zero Float32Array of the same length.
 */
function normalize(vec) {
  const magnitude = Math.sqrt(vec.reduce((acc, v) => acc + v * v, 0));
  if (magnitude === 0) return new Float32Array(vec.length);
  return Float32Array.from(vec, (v) => v / magnitude);
}
31
+ __name(normalize, "normalize");
32
+
33
+ export {
34
+ cosineSimilarity,
35
+ normalize
36
+ };
37
+ //# sourceMappingURL=chunk-2P3EGY6S.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/embeddings/math.ts"],"sourcesContent":["/**\n * BrainBank — Math Utilities\n * \n * Pure vector math functions for similarity calculations.\n * No dependencies — works on Float32Array directly.\n */\n\n/**\n * Cosine similarity between two vectors.\n * Assumes vectors are already normalized (unit length).\n * Returns value between -1.0 and 1.0.\n */\nexport function cosineSimilarity(a: Float32Array, b: Float32Array): number {\n if (a.length !== b.length) {\n throw new Error(`Vector dimension mismatch: ${a.length} vs ${b.length}`);\n }\n if (a.length === 0) return 0;\n\n let dot = 0;\n for (let i = 0; i < a.length; i++) {\n dot += a[i] * b[i];\n }\n return dot;\n}\n\n/**\n * Full cosine similarity (normalizes first).\n * Use this when vectors may not be pre-normalized.\n */\nexport function cosineSimilarityFull(a: Float32Array, b: Float32Array): number {\n if (a.length !== b.length) {\n throw new Error(`Vector dimension mismatch: ${a.length} vs ${b.length}`);\n }\n if (a.length === 0) return 0;\n\n let dot = 0, normA = 0, normB = 0;\n for (let i = 0; i < a.length; i++) {\n dot += a[i] * b[i];\n normA += a[i] * a[i];\n normB += b[i] * b[i];\n }\n const denom = Math.sqrt(normA) * Math.sqrt(normB);\n return denom === 0 ? 
0 : dot / denom;\n}\n\n/**\n * L2-normalize a vector to unit length.\n * Returns a new Float32Array.\n */\nexport function normalize(vec: Float32Array): Float32Array {\n let norm = 0;\n for (let i = 0; i < vec.length; i++) {\n norm += vec[i] * vec[i];\n }\n norm = Math.sqrt(norm);\n if (norm === 0) return new Float32Array(vec.length);\n\n const result = new Float32Array(vec.length);\n for (let i = 0; i < vec.length; i++) {\n result[i] = vec[i] / norm;\n }\n return result;\n}\n\n/**\n * Euclidean distance between two vectors.\n */\nexport function euclideanDistance(a: Float32Array, b: Float32Array): number {\n if (a.length !== b.length) {\n throw new Error(`Vector dimension mismatch: ${a.length} vs ${b.length}`);\n }\n let sum = 0;\n for (let i = 0; i < a.length; i++) {\n const d = a[i] - b[i];\n sum += d * d;\n }\n return Math.sqrt(sum);\n}\n"],"mappings":";;;;;AAYO,SAAS,iBAAiB,GAAiB,GAAyB;AACvE,MAAI,EAAE,WAAW,EAAE,QAAQ;AACvB,UAAM,IAAI,MAAM,8BAA8B,EAAE,MAAM,OAAO,EAAE,MAAM,EAAE;AAAA,EAC3E;AACA,MAAI,EAAE,WAAW,EAAG,QAAO;AAE3B,MAAI,MAAM;AACV,WAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AAC/B,WAAO,EAAE,CAAC,IAAI,EAAE,CAAC;AAAA,EACrB;AACA,SAAO;AACX;AAXgB;AAqCT,SAAS,UAAU,KAAiC;AACvD,MAAI,OAAO;AACX,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACjC,YAAQ,IAAI,CAAC,IAAI,IAAI,CAAC;AAAA,EAC1B;AACA,SAAO,KAAK,KAAK,IAAI;AACrB,MAAI,SAAS,EAAG,QAAO,IAAI,aAAa,IAAI,MAAM;AAElD,QAAM,SAAS,IAAI,aAAa,IAAI,MAAM;AAC1C,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACjC,WAAO,CAAC,IAAI,IAAI,CAAC,IAAI;AAAA,EACzB;AACA,SAAO;AACX;AAbgB;","names":[]}
@@ -0,0 +1,105 @@
1
+ import {
2
+ __name
3
+ } from "./chunk-7QVYU63E.js";
4
+
5
// src/embeddings/openai.ts
// Model used when the caller does not pick one.
var DEFAULT_MODEL = "text-embedding-3-small";
// Native output widths per model; the text-embedding-3 family also
// accepts a custom "dimensions" request field (handled in the class below).
var DEFAULT_DIMS = {
  "text-embedding-3-small": 1536,
  "text-embedding-3-large": 3072,
  "text-embedding-ada-002": 1536
};
var API_URL = "https://api.openai.com/v1/embeddings";
// OpenAI's per-request cap on the number of inputs.
var MAX_BATCH = 100;
14
// OpenAI embedding provider (fetch-based, no SDK dependency).
// Talks to the /v1/embeddings endpoint, batching requests and retrying
// with progressively harder truncation when the API rejects input length.
var OpenAIEmbedding = class {
  static {
    __name(this, "OpenAIEmbedding");
  }
  // Vector width this provider produces (model default or custom).
  dims;
  _apiKey;
  _model;
  _baseUrl;
  _requestDims;
  _retrying = false;
  /**
   * @param options.apiKey  Falls back to the OPENAI_API_KEY env var.
   * @param options.model   Defaults to text-embedding-3-small.
   * @param options.dims    Custom width; honored only by text-embedding-3-*.
   * @param options.baseUrl Endpoint override (Azure, proxies, ...).
   */
  constructor(options = {}) {
    const { apiKey, model, baseUrl, dims } = options;
    this._apiKey = apiKey ?? process.env.OPENAI_API_KEY ?? "";
    this._model = model ?? DEFAULT_MODEL;
    this._baseUrl = baseUrl ?? API_URL;
    // Only the text-embedding-3 family accepts a "dimensions" request field.
    if (dims && this._model.startsWith("text-embedding-3")) {
      this._requestDims = dims;
      this.dims = dims;
    } else {
      this.dims = dims ?? DEFAULT_DIMS[this._model] ?? 1536;
    }
  }
  // Embed a single text; resolves to one Float32Array of length `dims`.
  async embed(text) {
    const [vector] = await this._request([text]);
    return vector;
  }
  // Embed many texts, splitting into API-sized chunks of MAX_BATCH.
  async embedBatch(texts) {
    if (texts.length === 0) return [];
    const vectors = [];
    let offset = 0;
    while (offset < texts.length) {
      const chunk = texts.slice(offset, offset + MAX_BATCH);
      vectors.push(...await this._request(chunk));
      offset += MAX_BATCH;
    }
    return vectors;
  }
  // Nothing to release; present to satisfy the provider interface.
  async close() {
  }
  _isTokenLimitError(errText) {
    const markers = ["maximum input length", "maximum context length", "too many tokens"];
    return markers.some((marker) => errText.includes(marker));
  }
  // POST one embeddings request. On token-limit rejections, fall back to
  // per-item requests and/or shorter truncations before giving up.
  async _request(input) {
    if (!this._apiKey) {
      throw new Error("OpenAI API key required. Set OPENAI_API_KEY env var or pass apiKey option.");
    }
    // ~4 chars/token against an 8192-token cap: pre-trim obviously-long texts.
    const MAX_CHARS = 24e3;
    const safeInput = input.map((text) => text.length > MAX_CHARS ? text.slice(0, MAX_CHARS) : text);
    const body = {
      model: this._model,
      input: safeInput
    };
    if (this._requestDims) {
      body.dimensions = this._requestDims;
    }
    const response = await fetch(this._baseUrl, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${this._apiKey}`
      },
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      const errText = await response.text();
      const tokenLimited = response.status === 400 && this._isTokenLimitError(errText);
      if (tokenLimited && safeInput.length > 1) {
        // Batch rejected as a whole: retry one item at a time, trimmed harder.
        const recovered = [];
        for (const text of safeInput) {
          const [vector] = await this._request([text.slice(0, 8e3)]);
          recovered.push(vector);
        }
        return recovered;
      }
      if (tokenLimited && safeInput.length === 1 && !this._retrying) {
        // Last resort for a single stubborn item: one shorter retry only
        // (the _retrying flag prevents unbounded recursion).
        this._retrying = true;
        try {
          return await this._request([safeInput[0].slice(0, 6e3)]);
        } finally {
          this._retrying = false;
        }
      }
      throw new Error(`OpenAI embedding API error (${response.status}): ${errText}`);
    }
    const payload = await response.json();
    // The API may return items out of order; restore the request order.
    const ordered = payload.data.sort((a, b) => a.index - b.index);
    return ordered.map((item) => new Float32Array(item.embedding));
  }
};
101
+
102
+ export {
103
+ OpenAIEmbedding
104
+ };
105
+ //# sourceMappingURL=chunk-3GAIDXRW.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/embeddings/openai.ts"],"sourcesContent":["/**\n * BrainBank — OpenAI Embedding Provider\n * \n * Uses OpenAI's embedding API via fetch (no SDK dependency).\n * Supports text-embedding-3-small, text-embedding-3-large, and ada-002.\n * \n * Usage:\n * const brain = new BrainBank({\n * embeddingProvider: new OpenAIEmbedding({ model: 'text-embedding-3-small' }),\n * });\n */\n\nimport type { EmbeddingProvider } from '../types.ts';\n\nconst DEFAULT_MODEL = 'text-embedding-3-small';\nconst DEFAULT_DIMS: Record<string, number> = {\n 'text-embedding-3-small': 1536,\n 'text-embedding-3-large': 3072,\n 'text-embedding-ada-002': 1536,\n};\nconst API_URL = 'https://api.openai.com/v1/embeddings';\nconst MAX_BATCH = 100; // OpenAI limit per request\n\nexport interface OpenAIEmbeddingOptions {\n /** OpenAI API key. Falls back to OPENAI_API_KEY env var. */\n apiKey?: string;\n /** Model name. Default: 'text-embedding-3-small' */\n model?: string;\n /** Vector dimensions. If omitted, uses model default. text-embedding-3-* supports custom dims. */\n dims?: number;\n /** Base URL override (for Azure, proxies, etc.) */\n baseUrl?: string;\n}\n\nexport class OpenAIEmbedding implements EmbeddingProvider {\n readonly dims: number;\n\n private _apiKey: string;\n private _model: string;\n private _baseUrl: string;\n private _requestDims: number | undefined;\n private _retrying = false;\n\n constructor(options: OpenAIEmbeddingOptions = {}) {\n this._apiKey = options.apiKey ?? process.env.OPENAI_API_KEY ?? '';\n this._model = options.model ?? DEFAULT_MODEL;\n this._baseUrl = options.baseUrl ?? API_URL;\n\n // Custom dims only supported by text-embedding-3-*\n if (options.dims && this._model.startsWith('text-embedding-3')) {\n this._requestDims = options.dims;\n this.dims = options.dims;\n } else {\n this.dims = options.dims ?? DEFAULT_DIMS[this._model] ?? 
1536;\n }\n }\n\n async embed(text: string): Promise<Float32Array> {\n const results = await this._request([text]);\n return results[0];\n }\n\n async embedBatch(texts: string[]): Promise<Float32Array[]> {\n if (texts.length === 0) return [];\n\n const results: Float32Array[] = [];\n\n // Split into chunks of MAX_BATCH\n for (let i = 0; i < texts.length; i += MAX_BATCH) {\n const batch = texts.slice(i, i + MAX_BATCH);\n const embeddings = await this._request(batch);\n results.push(...embeddings);\n }\n\n return results;\n }\n\n async close(): Promise<void> {\n // No resources to release\n }\n\n private _isTokenLimitError(errText: string): boolean {\n return errText.includes('maximum input length') ||\n errText.includes('maximum context length') ||\n errText.includes('too many tokens');\n }\n\n private async _request(input: string[]): Promise<Float32Array[]> {\n if (!this._apiKey) {\n throw new Error('OpenAI API key required. Set OPENAI_API_KEY env var or pass apiKey option.');\n }\n\n // Truncate texts that would exceed token limit (~4 chars per token, 8192 max)\n const MAX_CHARS = 24_000;\n const safeInput = input.map(t => t.length > MAX_CHARS ? 
t.slice(0, MAX_CHARS) : t);\n\n const body: Record<string, any> = {\n model: this._model,\n input: safeInput,\n };\n\n if (this._requestDims) {\n body.dimensions = this._requestDims;\n }\n\n const res = await fetch(this._baseUrl, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${this._apiKey}`,\n },\n body: JSON.stringify(body),\n });\n\n if (!res.ok) {\n const err = await res.text();\n const isTokenLimit = res.status === 400 && this._isTokenLimitError(err);\n\n // If token limit error in a batch, retry each item individually with more aggressive truncation\n if (isTokenLimit && safeInput.length > 1) {\n const results: Float32Array[] = [];\n for (const text of safeInput) {\n const r = await this._request([text.slice(0, 8_000)]);\n results.push(r[0]);\n }\n return results;\n }\n // Last resort: if single item still fails, truncate to ~2k tokens\n if (isTokenLimit && safeInput.length === 1 && !this._retrying) {\n this._retrying = true;\n try {\n return await this._request([safeInput[0].slice(0, 6_000)]);\n } finally {\n this._retrying = false;\n }\n }\n throw new Error(`OpenAI embedding API error (${res.status}): ${err}`);\n }\n\n const json = await res.json() as {\n data: Array<{ embedding: number[]; index: number }>;\n };\n\n // Sort by index (API may return out of order)\n const sorted = json.data.sort((a, b) => a.index - b.index);\n\n return sorted.map(d => new Float32Array(d.embedding));\n 
}\n}\n"],"mappings":";;;;;AAcA,IAAM,gBAAgB;AACtB,IAAM,eAAuC;AAAA,EACzC,0BAA0B;AAAA,EAC1B,0BAA0B;AAAA,EAC1B,0BAA0B;AAC9B;AACA,IAAM,UAAU;AAChB,IAAM,YAAY;AAaX,IAAM,kBAAN,MAAmD;AAAA,EAlC1D,OAkC0D;AAAA;AAAA;AAAA,EAC7C;AAAA,EAED;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EAEpB,YAAY,UAAkC,CAAC,GAAG;AAC9C,SAAK,UAAU,QAAQ,UAAU,QAAQ,IAAI,kBAAkB;AAC/D,SAAK,SAAS,QAAQ,SAAS;AAC/B,SAAK,WAAW,QAAQ,WAAW;AAGnC,QAAI,QAAQ,QAAQ,KAAK,OAAO,WAAW,kBAAkB,GAAG;AAC5D,WAAK,eAAe,QAAQ;AAC5B,WAAK,OAAO,QAAQ;AAAA,IACxB,OAAO;AACH,WAAK,OAAO,QAAQ,QAAQ,aAAa,KAAK,MAAM,KAAK;AAAA,IAC7D;AAAA,EACJ;AAAA,EAEA,MAAM,MAAM,MAAqC;AAC7C,UAAM,UAAU,MAAM,KAAK,SAAS,CAAC,IAAI,CAAC;AAC1C,WAAO,QAAQ,CAAC;AAAA,EACpB;AAAA,EAEA,MAAM,WAAW,OAA0C;AACvD,QAAI,MAAM,WAAW,EAAG,QAAO,CAAC;AAEhC,UAAM,UAA0B,CAAC;AAGjC,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AAC9C,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,SAAS;AAC1C,YAAM,aAAa,MAAM,KAAK,SAAS,KAAK;AAC5C,cAAQ,KAAK,GAAG,UAAU;AAAA,IAC9B;AAEA,WAAO;AAAA,EACX;AAAA,EAEA,MAAM,QAAuB;AAAA,EAE7B;AAAA,EAEQ,mBAAmB,SAA0B;AACjD,WAAO,QAAQ,SAAS,sBAAsB,KACvC,QAAQ,SAAS,wBAAwB,KACzC,QAAQ,SAAS,iBAAiB;AAAA,EAC7C;AAAA,EAEA,MAAc,SAAS,OAA0C;AAC7D,QAAI,CAAC,KAAK,SAAS;AACf,YAAM,IAAI,MAAM,4EAA4E;AAAA,IAChG;AAGA,UAAM,YAAY;AAClB,UAAM,YAAY,MAAM,IAAI,OAAK,EAAE,SAAS,YAAY,EAAE,MAAM,GAAG,SAAS,IAAI,CAAC;AAEjF,UAAM,OAA4B;AAAA,MAC9B,OAAO,KAAK;AAAA,MACZ,OAAO;AAAA,IACX;AAEA,QAAI,KAAK,cAAc;AACnB,WAAK,aAAa,KAAK;AAAA,IAC3B;AAEA,UAAM,MAAM,MAAM,MAAM,KAAK,UAAU;AAAA,MACnC,QAAQ;AAAA,MACR,SAAS;AAAA,QACL,gBAAgB;AAAA,QAChB,iBAAiB,UAAU,KAAK,OAAO;AAAA,MAC3C;AAAA,MACA,MAAM,KAAK,UAAU,IAAI;AAAA,IAC7B,CAAC;AAED,QAAI,CAAC,IAAI,IAAI;AACT,YAAM,MAAM,MAAM,IAAI,KAAK;AAC3B,YAAM,eAAe,IAAI,WAAW,OAAO,KAAK,mBAAmB,GAAG;AAGtE,UAAI,gBAAgB,UAAU,SAAS,GAAG;AACtC,cAAM,UAA0B,CAAC;AACjC,mBAAW,QAAQ,WAAW;AAC1B,gBAAM,IAAI,MAAM,KAAK,SAAS,CAAC,KAAK,MAAM,GAAG,GAAK,CAAC,CAAC;AACpD,kBAAQ,KAAK,EAAE,CAAC,CAAC;AAAA,QACrB;AACA,eAAO;AAAA,MACX;AAEA,UAAI,gBAAgB,UAAU,WAAW,KAAK,CAAC,KAAK,WAAW;AAC3D,aAAK,YAAY;AACjB,YAAI;AACA,iBAAO,MAAM,KAAK,SAAS,CAAC,UAAU,CAAC,EAAE,MAAM,GAAG,GAAK,CAAC,CAAC;A
AAA,QAC7D,UAAE;AACE,eAAK,YAAY;AAAA,QACrB;AAAA,MACJ;AACA,YAAM,IAAI,MAAM,+BAA+B,IAAI,MAAM,MAAM,GAAG,EAAE;AAAA,IACxE;AAEA,UAAM,OAAO,MAAM,IAAI,KAAK;AAK5B,UAAM,SAAS,KAAK,KAAK,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAEzD,WAAO,OAAO,IAAI,OAAK,IAAI,aAAa,EAAE,SAAS,CAAC;AAAA,EACxD;AACJ;","names":[]}
@@ -0,0 +1,56 @@
1
+ import {
2
+ __name
3
+ } from "./chunk-7QVYU63E.js";
4
+
5
+ // src/query/rrf.ts
6
/**
 * Reciprocal Rank Fusion: merge ranked lists from several search systems
 * into one. Each appearance of a result contributes 1/(k + rank + 1) to
 * its fused score (Cormack et al., 2009). Output scores are normalized so
 * the top result is 1.0; the raw RRF score lands in metadata.rrfScore.
 */
function reciprocalRankFusion(resultSets, k = 60, maxResults = 15) {
  const fused = /* @__PURE__ */ new Map();
  for (const results of resultSets) {
    results.forEach((r, rank) => {
      const key = resultKey(r);
      const contribution = 1 / (k + rank + 1);
      const entry = fused.get(key);
      if (!entry) {
        fused.set(key, { result: { ...r }, rrfScore: contribution });
        return;
      }
      entry.rrfScore += contribution;
      // Keep whichever duplicate carried the higher original score.
      if (r.score > entry.result.score) {
        entry.result = { ...r };
      }
    });
  }
  const ranked = Array.from(fused.values());
  ranked.sort((a, b) => b.rrfScore - a.rrfScore);
  const top = ranked.slice(0, maxResults);
  const maxRRF = top[0]?.rrfScore ?? 1;
  return top.map(({ result, rrfScore }) => ({
    ...result,
    score: rrfScore / maxRRF,
    metadata: { ...result.metadata, rrfScore }
  }));
}
38
+ __name(reciprocalRankFusion, "reciprocalRankFusion");
39
/**
 * Stable dedupe key for a search result, so the same hit surfaced by
 * different systems (vector, BM25) fuses into a single entry.
 */
function resultKey(r) {
  if (r.type === "code") {
    return `code:${r.filePath}:${r.metadata.startLine}-${r.metadata.endLine}`;
  }
  if (r.type === "commit") {
    return `commit:${r.metadata.hash || r.metadata.shortHash}`;
  }
  if (r.type === "pattern") {
    return `pattern:${r.metadata.taskType}:${r.content?.slice(0, 60)}`;
  }
  return `${r.type}:${r.content?.slice(0, 80)}`;
}
51
+ __name(resultKey, "resultKey");
52
+
53
+ export {
54
+ reciprocalRankFusion
55
+ };
56
+ //# sourceMappingURL=chunk-4ZKBQ33J.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/query/rrf.ts"],"sourcesContent":["/**\n * BrainBank — Reciprocal Rank Fusion (RRF)\n * \n * Combines results from multiple search systems (vector + BM25)\n * using the RRF algorithm: score = Σ 1/(k + rank_i)\n * \n * This is the same algorithm used by Elasticsearch, QMD, and most\n * production hybrid search systems. Simple but very effective.\n * \n * Reference: Cormack et al., \"Reciprocal Rank Fusion outperforms\n * Condorcet and individual Rank Learning Methods\" (2009)\n */\n\nimport type { SearchResult } from '../types.ts';\n\n/**\n * Fuse ranked lists from different search systems into a single ranked list.\n * \n * @param resultSets - Arrays of SearchResult from different systems (e.g. vector, BM25)\n * @param k - Smoothing constant. Default: 60 (standard value). Higher = less emphasis on top ranks.\n * @param maxResults - Maximum results to return.\n */\nexport function reciprocalRankFusion(\n resultSets: SearchResult[][],\n k: number = 60,\n maxResults: number = 15,\n): SearchResult[] {\n // Build a map: unique key → { bestResult, rrfScore }\n const fused = new Map<string, { result: SearchResult; rrfScore: number }>();\n\n for (const results of resultSets) {\n for (let rank = 0; rank < results.length; rank++) {\n const r = results[rank];\n const key = resultKey(r);\n const rrfContribution = 1.0 / (k + rank + 1);\n\n const existing = fused.get(key);\n if (existing) {\n existing.rrfScore += rrfContribution;\n // Keep the result with the higher original score\n if (r.score > existing.result.score) {\n existing.result = { ...r };\n }\n } else {\n fused.set(key, {\n result: { ...r },\n rrfScore: rrfContribution,\n });\n }\n }\n }\n\n // Sort by RRF score descending, normalize, and return\n const sorted = Array.from(fused.values())\n .sort((a, b) => b.rrfScore - a.rrfScore)\n .slice(0, maxResults);\n\n // Normalize RRF scores to 0..1 range\n const maxRRF = sorted[0]?.rrfScore ?? 
1;\n return sorted.map(entry => ({\n ...entry.result,\n score: entry.rrfScore / maxRRF,\n metadata: {\n ...entry.result.metadata,\n rrfScore: entry.rrfScore,\n },\n }));\n}\n\n/**\n * Generate a unique key for a search result to detect duplicates across systems.\n */\nfunction resultKey(r: SearchResult): string {\n switch (r.type) {\n case 'code':\n return `code:${r.filePath}:${r.metadata.startLine}-${r.metadata.endLine}`;\n case 'commit':\n return `commit:${r.metadata.hash || r.metadata.shortHash}`;\n case 'pattern':\n return `pattern:${r.metadata.taskType}:${r.content?.slice(0, 60)}`;\n default:\n return `${r.type}:${r.content?.slice(0, 80)}`;\n }\n}\n"],"mappings":";;;;;AAsBO,SAAS,qBACZ,YACA,IAAY,IACZ,aAAqB,IACP;AAEd,QAAM,QAAQ,oBAAI,IAAwD;AAE1E,aAAW,WAAW,YAAY;AAC9B,aAAS,OAAO,GAAG,OAAO,QAAQ,QAAQ,QAAQ;AAC9C,YAAM,IAAI,QAAQ,IAAI;AACtB,YAAM,MAAM,UAAU,CAAC;AACvB,YAAM,kBAAkB,KAAO,IAAI,OAAO;AAE1C,YAAM,WAAW,MAAM,IAAI,GAAG;AAC9B,UAAI,UAAU;AACV,iBAAS,YAAY;AAErB,YAAI,EAAE,QAAQ,SAAS,OAAO,OAAO;AACjC,mBAAS,SAAS,EAAE,GAAG,EAAE;AAAA,QAC7B;AAAA,MACJ,OAAO;AACH,cAAM,IAAI,KAAK;AAAA,UACX,QAAQ,EAAE,GAAG,EAAE;AAAA,UACf,UAAU;AAAA,QACd,CAAC;AAAA,MACL;AAAA,IACJ;AAAA,EACJ;AAGA,QAAM,SAAS,MAAM,KAAK,MAAM,OAAO,CAAC,EACnC,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,QAAQ,EACtC,MAAM,GAAG,UAAU;AAGxB,QAAM,SAAS,OAAO,CAAC,GAAG,YAAY;AACtC,SAAO,OAAO,IAAI,YAAU;AAAA,IACxB,GAAG,MAAM;AAAA,IACT,OAAO,MAAM,WAAW;AAAA,IACxB,UAAU;AAAA,MACN,GAAG,MAAM,OAAO;AAAA,MAChB,UAAU,MAAM;AAAA,IACpB;AAAA,EACJ,EAAE;AACN;AA7CgB;AAkDhB,SAAS,UAAU,GAAyB;AACxC,UAAQ,EAAE,MAAM;AAAA,IACZ,KAAK;AACD,aAAO,QAAQ,EAAE,QAAQ,IAAI,EAAE,SAAS,SAAS,IAAI,EAAE,SAAS,OAAO;AAAA,IAC3E,KAAK;AACD,aAAO,UAAU,EAAE,SAAS,QAAQ,EAAE,SAAS,SAAS;AAAA,IAC5D,KAAK;AACD,aAAO,WAAW,EAAE,SAAS,QAAQ,IAAI,EAAE,SAAS,MAAM,GAAG,EAAE,CAAC;AAAA,IACpE;AACI,aAAO,GAAG,EAAE,IAAI,IAAI,EAAE,SAAS,MAAM,GAAG,EAAE,CAAC;AAAA,EACnD;AACJ;AAXS;","names":[]}
@@ -0,0 +1,7 @@
1
// esbuild keep-names helper: restore the original .name on a function or
// class that bundling renamed. The property stays configurable so it can
// be redefined again later.
var __defProp = Object.defineProperty;
var __name = (target, value) => {
  return __defProp(target, "name", { value, configurable: true });
};
3
+
4
+ export {
5
+ __name
6
+ };
7
+ //# sourceMappingURL=chunk-7QVYU63E.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}