@astrocyteai/local 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,118 @@
1
+ /**
2
+ * LLM-curated retain for local Context Tree.
3
+ *
4
+ * When an LLM provider is available, the LLM decides:
5
+ * - What action to take: ADD, UPDATE, MERGE, SKIP, DELETE
6
+ * - Which domain to store in (instead of just using the first tag)
7
+ * - What memory_layer to assign (fact, observation, model)
8
+ *
9
+ * Falls back to simple mechanical retain when no LLM is configured.
10
+ */
11
// Prompt template sent to the curation LLM. The placeholders {existing},
// {domains}, and {content} are substituted by curateLocalRetain() before the
// completion call. The final line pins the expected JSON response shape.
const CURATION_PROMPT = `You are a memory curation agent for a local Context Tree. Analyze the new content and decide how to store it.

## Existing memories (most similar):
{existing}

## Context Tree domains currently in use:
{domains}

## New content:
{content}

## Decide:
1. action: "add" (new info), "update" (replace existing), "merge" (combine with existing), "skip" (redundant), "delete" (contradicts old)
2. domain: Which Context Tree directory to store in (e.g., "preferences", "architecture", "decisions"). Use an existing domain if appropriate, or suggest a new one.
3. memory_layer: "fact" (raw info), "observation" (pattern/insight), "model" (consolidated understanding)
4. content: The processed text to store (may rewrite for clarity)
5. reasoning: Brief explanation

Respond with JSON:
{"action": "add", "domain": "preferences", "content": "...", "memory_layer": "fact", "reasoning": "...", "target_id": null}`;
31
/**
 * LLM-curated retain: ask the configured LLM how to store `content`.
 *
 * Gathers the most similar existing memories and the current domain list,
 * fills them into CURATION_PROMPT, and parses the model's JSON decision.
 * Any failure of the LLM call degrades to a plain ADD into "general".
 *
 * @param options.content      Text to memorize.
 * @param options.bankId       Memory bank to store into.
 * @param options.tree         ContextTree instance (for listDomains).
 * @param options.search       SearchEngine instance (for similarity context).
 * @param options.llmProvider  Provider with an async complete() method.
 * @param options.contextLimit Max similar memories shown to the LLM (default 5).
 * @returns A curation decision (see parseResponse for the shape).
 */
export async function curateLocalRetain(options) {
    const { content, bankId, tree, search, llmProvider, contextLimit = 5 } = options;
    // Show the LLM the closest existing memories so it can detect duplicates
    // and contradictions.
    const existingHits = search.search(content, bankId, { limit: contextLimit });
    const existingText = existingHits.length > 0
        ? existingHits
            .map((h) => `- [${h.id}] (${h.domain}/${h.file_path}) score=${h.score.toFixed(2)}: ${h.text.slice(0, 200)}`)
            .join("\n")
        : "(no existing memories)";
    const domains = tree.listDomains(bankId);
    const domainsText = domains.length > 0 ? domains.join(", ") : "(none yet)";
    // Use replacer FUNCTIONS: with a plain string replacement, `$`-sequences
    // inside user content (e.g. "$&", "$'") would be expanded by
    // String.prototype.replace and corrupt the prompt.
    const prompt = CURATION_PROMPT.replace("{existing}", () => existingText)
        .replace("{domains}", () => domainsText)
        .replace("{content}", () => content);
    try {
        const completion = await llmProvider.complete({
            messages: [{ role: "user", content: prompt }],
            maxTokens: 500,
            temperature: 0,
        });
        return parseResponse(completion.text, content);
    }
    catch {
        // Best-effort: if the provider errors out, store the raw content.
        return {
            action: "add",
            domain: "general",
            content,
            memory_layer: "fact",
            reasoning: "LLM curation failed, defaulting to ADD",
        };
    }
}
68
/**
 * Parse the LLM curation response into a decision object.
 *
 * Accepts raw JSON or JSON wrapped in a ``` / ```json fenced code block.
 * Unknown actions fall back to "add"; unknown layers fall back to "fact".
 * Any parse failure returns a safe ADD of the original content.
 *
 * @param response        Raw text returned by the LLM.
 * @param originalContent Content to fall back to when parsing fails.
 */
export function parseResponse(response, originalContent) {
    try {
        let text = response.trim();
        // Extract the payload from a fenced code block if the model used one.
        if (text.includes("```")) {
            const start = text.indexOf("```") + 3;
            let contentStart = start;
            if (text.slice(start).startsWith("json")) {
                contentStart = start + 4;
            }
            const end = text.indexOf("```", contentStart);
            if (end > contentStart) {
                text = text.slice(contentStart, end).trim();
            }
        }
        const data = JSON.parse(text);
        if (typeof data !== "object" || data === null) {
            throw new Error("Expected JSON object");
        }
        let action = (data.action || "add").toLowerCase();
        if (!["add", "update", "merge", "skip", "delete"].includes(action)) {
            action = "add";
        }
        let memoryLayer = (data.memory_layer || "fact").toLowerCase();
        if (!["fact", "observation", "model"].includes(memoryLayer)) {
            memoryLayer = "fact";
        }
        // Sanitize the domain: it becomes a directory name under the Context
        // Tree root. Collapse ALL whitespace (not just spaces) and both path
        // separators to "-", and strip leading dots so values like ".." or
        // ".hidden" cannot escape or hide inside the tree.
        let domain = (data.domain || "general").toLowerCase().trim();
        domain = domain.replace(/[\s/\\]+/g, "-").replace(/^\.+/, "");
        if (!domain)
            domain = "general";
        return {
            action: action,
            domain,
            content: data.content || originalContent,
            memory_layer: memoryLayer,
            reasoning: data.reasoning || "",
            target_id: data.target_id || undefined,
        };
    }
    catch {
        // Malformed response: degrade to storing the caller's content as-is.
        return {
            action: "add",
            domain: "general",
            content: originalContent,
            memory_layer: "fact",
            reasoning: "Failed to parse LLM response",
        };
    }
}
@@ -0,0 +1,13 @@
1
+ /**
2
+ * @astrocyteai/local — Zero-infrastructure memory for AI coding agents.
3
+ *
4
+ * Context Tree + SQLite FTS5 search. No database, no embeddings, no API keys.
5
+ */
6
+ export { ContextTree } from "./context-tree.js";
7
+ export { SearchEngine } from "./search.js";
8
+ export { createMcpServer, startMcpServer } from "./mcp-server.js";
9
+ export type { McpServerOptions } from "./mcp-server.js";
10
+ export { curateLocalRetain, parseResponse } from "./curated-retain.js";
11
+ export type { LLMProvider, CurationDecision } from "./curated-retain.js";
12
+ export { LocalRecallCache, LocalTieredRetriever } from "./tiered-retrieval.js";
13
+ export type { MemoryEntry, SearchHit, RetainResult, RecallResult, BrowseResult, LocalConfig, } from "./types.js";
package/dist/index.js ADDED
@@ -0,0 +1,10 @@
1
+ /**
2
+ * @astrocyteai/local — Zero-infrastructure memory for AI coding agents.
3
+ *
4
+ * Context Tree + SQLite FTS5 search. No database, no embeddings, no API keys.
5
+ */
6
+ export { ContextTree } from "./context-tree.js";
7
+ export { SearchEngine } from "./search.js";
8
+ export { createMcpServer, startMcpServer } from "./mcp-server.js";
9
+ export { curateLocalRetain, parseResponse } from "./curated-retain.js";
10
+ export { LocalRecallCache, LocalTieredRetriever } from "./tiered-retrieval.js";
@@ -0,0 +1,18 @@
1
+ /**
2
+ * MCP server — exposes Context Tree as MCP tools.
3
+ *
4
+ * See docs/mcp-tools.md for tool schemas.
5
+ *
6
+ * Usage:
7
+ * npx @astrocyteai/local --root .astrocyte
8
+ * astrocyte-local-mcp --root .astrocyte
9
+ */
10
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
11
export interface McpServerOptions {
    /** Root directory of the Context Tree (e.g. ".astrocyte"); the FTS index lives at `<root>/_search.db`. */
    root: string;
    /** Bank used when a tool call omits bank_id. Defaults to "project". */
    defaultBank?: string;
    /** Transport to serve on. Only "stdio" is implemented; "sse" currently throws. */
    transport?: "stdio" | "sse";
    /** Port for the SSE transport — unused while only stdio is supported. */
    port?: number;
}
/** Build (but do not start) an MCP server exposing the memory_* tools. */
export declare function createMcpServer(options: McpServerOptions): McpServer;
/** Build the server and connect it to the chosen transport (stdio only). */
export declare function startMcpServer(options: McpServerOptions): Promise<void>;
@@ -0,0 +1,212 @@
1
+ /**
2
+ * MCP server — exposes Context Tree as MCP tools.
3
+ *
4
+ * See docs/mcp-tools.md for tool schemas.
5
+ *
6
+ * Usage:
7
+ * npx @astrocyteai/local --root .astrocyte
8
+ * astrocyte-local-mcp --root .astrocyte
9
+ */
10
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
11
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
12
+ import { z } from "zod";
13
+ import { ContextTree } from "./context-tree.js";
14
+ import { SearchEngine } from "./search.js";
15
/**
 * Build the MCP server and register the memory_* tools against a Context
 * Tree rooted at options.root. The FTS index is (re)built eagerly so the
 * server starts with search in sync with the files on disk.
 *
 * @param options.root        Context Tree root directory.
 * @param options.defaultBank Bank used when a call omits bank_id ("project").
 * @returns The configured (not yet connected) McpServer.
 */
export function createMcpServer(options) {
    const { root, defaultBank = "project" } = options;
    const tree = new ContextTree(root);
    const search = new SearchEngine(`${root}/_search.db`);
    search.buildIndex(tree);
    const server = new McpServer({
        name: "astrocyte-local",
        version: "0.1.0",
    }, {
        instructions: "Local memory server. Use memory_retain to store information, " +
            "memory_recall to search memories, memory_browse to explore the " +
            "Context Tree hierarchy, and memory_forget to remove memories.",
    });
    // ── memory_retain ──
    server.tool("memory_retain", "Store content into local memory.", {
        content: z.string().describe("The text to memorize."),
        bank_id: z.string().optional().describe("Memory bank (default: project)."),
        tags: z.array(z.string()).optional().describe("Optional tags for filtering."),
        domain: z.string().optional().describe("Context Tree domain (auto-inferred if omitted)."),
    }, async (args) => {
        const bankId = args.bank_id || defaultBank;
        // Without an explicit domain, fall back to the first tag, then "general".
        const domain = args.domain || (args.tags?.[0] ?? "general");
        const entry = tree.store({
            content: args.content,
            bank_id: bankId,
            domain,
            tags: args.tags || [],
        });
        // Incremental index update keeps search consistent without a rebuild.
        search.addDocument(entry);
        return {
            content: [
                {
                    type: "text",
                    text: JSON.stringify({
                        stored: true,
                        memory_id: entry.id,
                        domain: entry.domain,
                        file: entry.file_path,
                    }),
                },
            ],
        };
    });
    // ── memory_recall ──
    server.tool("memory_recall", "Search local memory for content relevant to a query.", {
        query: z.string().describe("Natural language search query."),
        bank_id: z.string().optional().describe("Memory bank (default: project)."),
        max_results: z.number().optional().describe("Maximum results (default: 10)."),
        tags: z.array(z.string()).optional().describe("Filter by tags."),
    }, async (args) => {
        const bankId = args.bank_id || defaultBank;
        const hits = search.search(args.query, bankId, {
            limit: args.max_results || 10,
            tags: args.tags,
        });
        // Track recall counts so the tree can surface frequently-used memories.
        for (const h of hits) {
            tree.recordRecall(h.id);
        }
        return {
            content: [
                {
                    type: "text",
                    text: JSON.stringify({
                        hits: hits.map((h) => ({
                            text: h.text,
                            score: Math.round(h.score * 10000) / 10000,
                            domain: h.domain,
                            file: h.file_path,
                            memory_id: h.id,
                        })),
                        total: hits.length,
                    }),
                },
            ],
        };
    });
    // ── memory_browse ──
    server.tool("memory_browse", "Browse the Context Tree hierarchy.", {
        path: z.string().optional().describe("Path to browse (empty for root, 'preferences' for a domain)."),
        bank_id: z.string().optional().describe("Memory bank (default: project)."),
    }, async (args) => {
        const bankId = args.bank_id || defaultBank;
        const browsePath = args.path || "";
        // Root listing: domains only, no individual entries.
        if (!browsePath) {
            const domains = tree.listDomains(bankId);
            const total = tree.count(bankId);
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({
                            path: "",
                            domains,
                            entries: [],
                            total_memories: total,
                        }),
                    },
                ],
            };
        }
        const entries = tree.listEntries(bankId, browsePath);
        return {
            content: [
                {
                    type: "text",
                    text: JSON.stringify({
                        path: browsePath,
                        domains: [],
                        entries: entries.map((e) => ({
                            file: e.file_path,
                            title: e.text.slice(0, 80),
                            memory_id: e.id,
                            recall_count: e.recall_count,
                        })),
                        total_memories: entries.length,
                    }),
                },
            ],
        };
    });
    // ── memory_forget ──
    server.tool("memory_forget", "Remove memories from local storage.", {
        memory_ids: z.array(z.string()).describe("IDs of memories to delete."),
        bank_id: z.string().optional().describe("Memory bank (default: project)."),
    }, async (args) => {
        let deleted = 0;
        const filesRemoved = [];
        for (const mid of args.memory_ids) {
            // Read first (delete destroys the entry), but only report the
            // file as removed when the delete actually succeeds.
            const entry = tree.read(mid);
            if (tree.delete(mid)) {
                search.removeDocument(mid);
                deleted++;
                if (entry) {
                    filesRemoved.push(entry.file_path);
                }
            }
        }
        return {
            content: [
                {
                    type: "text",
                    text: JSON.stringify({
                        deleted_count: deleted,
                        files_removed: filesRemoved,
                    }),
                },
            ],
        };
    });
    // ── memory_banks ──
    server.tool("memory_banks", "List available memory banks.", {}, async () => {
        const allEntries = tree.scanAll();
        const bankIds = [...new Set(allEntries.map((e) => e.bank_id))].sort();
        // The default bank is always listed, even before it has any entries.
        if (!bankIds.includes(defaultBank)) {
            bankIds.unshift(defaultBank);
        }
        return {
            content: [
                {
                    type: "text",
                    text: JSON.stringify({
                        banks: bankIds,
                        default: defaultBank,
                        root,
                    }),
                },
            ],
        };
    });
    // ── memory_health ──
    server.tool("memory_health", "Check local memory system health.", {}, async () => {
        const total = tree.count();
        return {
            content: [
                {
                    type: "text",
                    text: JSON.stringify({
                        healthy: true,
                        total_memories: total,
                        index_status: "current",
                        root,
                    }),
                },
            ],
        };
    });
    return server;
}
204
/**
 * Construct the MCP server for the given options and serve it.
 * Only the stdio transport is wired up; asking for "sse" throws after the
 * server (and its search index) has been built.
 */
export async function startMcpServer(options) {
    const server = createMcpServer(options);
    if (options.transport === "sse") {
        // SSE would need an HTTP layer (express); only stdio is implemented.
        throw new Error("SSE transport not yet implemented in TypeScript. Use stdio.");
    }
    await server.connect(new StdioServerTransport());
}
@@ -0,0 +1,45 @@
1
+ /**
2
+ * Search engine — SQLite FTS5 full-text search.
3
+ *
4
+ * See docs/search-contract.md for behavior specification.
5
+ * All operations are sync. Uses better-sqlite3 for SQLite access.
6
+ */
7
+ import type { MemoryEntry, SearchHit } from "./types.js";
8
+ import type { ContextTree } from "./context-tree.js";
9
export declare class SearchEngine {
    /** Path of the SQLite database file backing the FTS index. */
    private dbPath;
    /** Open better-sqlite3 connection (sync API). */
    private db;
    /** Opens (creating if needed) the database at dbPath. */
    constructor(dbPath: string);
    private createTables;
    /**
     * Rebuild the FTS index from the Context Tree. Returns count indexed.
     * When bankId is given, only that bank's rows are replaced; otherwise
     * the whole index is rebuilt.
     */
    buildIndex(tree: ContextTree, bankId?: string): number;
    /**
     * Full-text search. Returns scored hits sorted by relevance.
     * The special query "*" returns entries from the bank without ranking.
     * tags/layers are applied as post-filters on the FTS results.
     */
    search(query: string, bankId: string, options?: {
        limit?: number;
        tags?: string[];
        layers?: string[];
    }): SearchHit[];
    /**
     * Add a single entry to the index (incremental update).
     */
    addDocument(entry: MemoryEntry): void;
    /**
     * Remove a single entry from the index.
     */
    removeDocument(entryId: string): void;
    /**
     * Close the database connection.
     */
    close(): void;
    private searchAll;
    private rowsToHits;
    /**
     * Escape special FTS5 characters for safe querying.
     * Does NOT quote individual tokens — quoting disables stemming.
     */
    static escapeFtsQuery(query: string): string;
}
package/dist/search.js ADDED
@@ -0,0 +1,174 @@
1
+ /**
2
+ * Search engine — SQLite FTS5 full-text search.
3
+ *
4
+ * See docs/search-contract.md for behavior specification.
5
+ * All operations are sync. Uses better-sqlite3 for SQLite access.
6
+ */
7
+ import Database from "better-sqlite3";
8
+ import path from "node:path";
9
+ import fs from "node:fs";
10
/**
 * SQLite FTS5 search engine over Context Tree entries.
 *
 * All operations are synchronous (better-sqlite3). One virtual table,
 * memory_fts, holds every indexed field; BM25 rank drives scoring.
 */
export class SearchEngine {
    dbPath;
    db;
    /**
     * Open (creating if needed) the database at dbPath. Parent directories
     * are created automatically.
     */
    constructor(dbPath) {
        this.dbPath = dbPath;
        const dir = path.dirname(dbPath);
        fs.mkdirSync(dir, { recursive: true });
        this.db = new Database(dbPath);
        // WAL improves concurrent read behavior while writing.
        this.db.pragma("journal_mode = WAL");
        this.createTables();
    }
    // Create the FTS5 virtual table. 'porter unicode61' gives case-folded,
    // stemmed matching.
    createTables() {
        this.db.exec(`
      CREATE VIRTUAL TABLE IF NOT EXISTS memory_fts USING fts5(
        id,
        bank_id,
        text,
        tags,
        domain,
        memory_layer,
        fact_type,
        file_path,
        tokenize='porter unicode61'
      );
    `);
    }
    /**
     * Rebuild the FTS index from the Context Tree. Returns count indexed.
     * When bankId is given only that bank is cleared and re-indexed.
     */
    buildIndex(tree, bankId) {
        if (bankId) {
            this.db.prepare("DELETE FROM memory_fts WHERE bank_id = ?").run(bankId);
        }
        else {
            this.db.exec("DELETE FROM memory_fts");
        }
        const entries = tree.scanAll(bankId);
        const insert = this.db.prepare(`INSERT INTO memory_fts (id, bank_id, text, tags, domain, memory_layer, fact_type, file_path)
       VALUES (?, ?, ?, ?, ?, ?, ?, ?)`);
        // Single transaction: one fsync for the whole rebuild.
        const insertMany = this.db.transaction((entries) => {
            for (const entry of entries) {
                insert.run(entry.id, entry.bank_id, entry.text, entry.tags.join(" "), entry.domain, entry.memory_layer, entry.fact_type, entry.file_path);
            }
        });
        insertMany(entries);
        return entries.length;
    }
    /**
     * Full-text search. Returns scored hits sorted by relevance.
     * "*" returns all entries in the bank (unranked). tags/layers are
     * applied as post-filters, so 3x the limit is fetched up front.
     */
    search(query, bankId, options) {
        const limit = options?.limit ?? 10;
        const tags = options?.tags;
        const layers = options?.layers;
        if (query.trim() === "*") {
            return this.searchAll(bankId, { limit, tags, layers });
        }
        const ftsQuery = SearchEngine.escapeFtsQuery(query);
        if (!ftsQuery)
            return [];
        let rows;
        try {
            rows = this.db
                .prepare(`SELECT id, bank_id, text, tags, domain, memory_layer, fact_type, file_path, rank
           FROM memory_fts
           WHERE memory_fts MATCH ? AND bank_id = ?
           ORDER BY rank
           LIMIT ?`)
                .all(ftsQuery, bankId, limit * 3);
        }
        catch {
            // Defensive: any residual FTS5 syntax error yields no results
            // rather than an exception.
            return [];
        }
        let hits = this.rowsToHits(rows);
        // Post-filter: every requested tag must be present on the hit.
        if (tags && tags.length > 0) {
            const tagSet = new Set(tags);
            hits = hits.filter((h) => h.tags && [...tagSet].every((t) => h.tags.includes(t)));
        }
        // Post-filter: memory_layer must be one of the requested layers.
        if (layers && layers.length > 0) {
            hits = hits.filter((h) => h.memory_layer && layers.includes(h.memory_layer));
        }
        return hits.slice(0, limit);
    }
    /**
     * Add a single entry to the index (incremental update).
     */
    addDocument(entry) {
        this.db
            .prepare(`INSERT INTO memory_fts (id, bank_id, text, tags, domain, memory_layer, fact_type, file_path)
         VALUES (?, ?, ?, ?, ?, ?, ?, ?)`)
            .run(entry.id, entry.bank_id, entry.text, entry.tags.join(" "), entry.domain, entry.memory_layer, entry.fact_type, entry.file_path);
    }
    /**
     * Remove a single entry from the index.
     */
    removeDocument(entryId) {
        this.db.prepare("DELETE FROM memory_fts WHERE id = ?").run(entryId);
    }
    /**
     * Close the database connection.
     */
    close() {
        this.db.close();
    }
    // ── Internal ──
    // Unranked listing for the "*" query; every hit gets score 1.0.
    searchAll(bankId, options) {
        const rows = this.db
            .prepare(`SELECT id, bank_id, text, tags, domain, memory_layer, fact_type, file_path, 0 as rank
         FROM memory_fts WHERE bank_id = ? LIMIT ?`)
            .all(bankId, options.limit);
        let hits = this.rowsToHits(rows, 1.0);
        if (options.tags && options.tags.length > 0) {
            const tagSet = new Set(options.tags);
            hits = hits.filter((h) => h.tags && [...tagSet].every((t) => h.tags.includes(t)));
        }
        if (options.layers && options.layers.length > 0) {
            hits = hits.filter((h) => h.memory_layer && options.layers.includes(h.memory_layer));
        }
        return hits;
    }
    // Convert FTS rows into SearchHit objects. BM25 ranks are negative and
    // more negative = more relevant, so normalize |rank| by the largest
    // magnitude: the best hit scores 1.0. When defaultScore is supplied
    // (unranked listings) it is used verbatim and the rank scan is skipped.
    rowsToHits(rows, defaultScore) {
        if (rows.length === 0)
            return [];
        let maxScore = 1.0;
        if (defaultScore === undefined) {
            const rawScores = rows.map((r) => Math.abs(Number(r.rank)));
            maxScore = Math.max(...rawScores) || 1.0;
        }
        return rows.map((row) => {
            const score = defaultScore !== undefined
                ? defaultScore
                : maxScore > 0
                    ? Math.abs(Number(row.rank)) / maxScore
                    : 0.5;
            const tagStr = row.tags || "";
            const tags = tagStr.split(/\s+/).filter(Boolean);
            return {
                id: row.id,
                text: row.text,
                score,
                bank_id: row.bank_id,
                domain: row.domain,
                file_path: row.file_path,
                memory_layer: row.memory_layer || undefined,
                fact_type: row.fact_type || undefined,
                tags,
            };
        });
    }
    /**
     * Escape special FTS5 characters for safe querying.
     * Every character that is not a letter, digit, or underscore is replaced
     * with a space, so punctuation FTS5 treats as syntax ("-", "+", ".", "*",
     * parentheses, quotes, ":", "^", …) can never produce a syntax error —
     * previously a query like "state-of-the-art" errored and returned no
     * results. Does NOT quote individual tokens — quoting disables stemming.
     */
    static escapeFtsQuery(query) {
        const cleaned = query.replace(/[^\p{L}\p{N}_]+/gu, " ");
        const tokens = cleaned.split(/\s+/).filter(Boolean);
        return tokens.join(" ");
    }
}
@@ -0,0 +1,53 @@
1
+ /**
2
+ * Tiered retrieval for local Context Tree — cache → FTS5 → LLM-guided.
3
+ *
4
+ * 3-tier progressive escalation adapted for file-based storage:
5
+ * Tier 0: In-memory result cache (exact/fuzzy query match)
6
+ * Tier 1: FTS5 keyword search (standard)
7
+ * Tier 2: LLM-guided query reformulation + FTS5 retry
8
+ *
9
+ * Stops when sufficient results are found. No embeddings needed.
10
+ */
11
+ import type { SearchHit } from "./types.js";
12
+ import type { SearchEngine } from "./search.js";
13
+ import type { LLMProvider } from "./curated-retain.js";
14
/** Tier-0 in-memory cache of recall results, keyed by (query, bankId). */
export declare class LocalRecallCache {
    /** Maximum number of cached entries (capacity). */
    private maxEntries;
    /** Entry lifetime in milliseconds — presumably checked on get(); confirm in tiered-retrieval.js. */
    private ttlMs;
    private cache;
    constructor(maxEntries?: number, ttlMs?: number);
    /** Cached hits for (query, bankId), or null on a miss. */
    get(query: string, bankId: string): SearchHit[] | null;
    /** Store hits for (query, bankId). */
    put(query: string, bankId: string, hits: SearchHit[]): void;
    /** Drop every cached result belonging to one bank. */
    invalidateBank(bankId: string): void;
    /** Drop all cached results. */
    invalidateAll(): void;
    /** Current number of cached entries. */
    size(): number;
}
/**
 * 3-tier progressive retrieval: cache → FTS5 → LLM query reformulation.
 * Escalation stops as soon as results are deemed sufficient.
 */
export declare class LocalTieredRetriever {
    private search;
    private cache;
    /** Optional LLM used only by tier 2 (query reformulation). */
    private llmProvider;
    /** Thresholds governing escalation — NOTE(review): exact semantics live in tiered-retrieval.js. */
    private minResults;
    private minScore;
    /** Highest tier this retriever is allowed to escalate to. */
    readonly maxTier: number;
    constructor(search: SearchEngine, cache?: LocalRecallCache | null, llmProvider?: LLMProvider | null, minResults?: number, minScore?: number, maxTier?: number);
    /**
     * Run tiered retrieval. Returns [hits, tierUsed].
     */
    retrieve(query: string, bankId: string, options?: {
        limit?: number;
        tags?: string[];
    }): [SearchHit[], number];
    /**
     * Async version of retrieve — supports LLM reformulation natively.
     */
    aretrieve(query: string, bankId: string, options?: {
        limit?: number;
        tags?: string[];
    }): Promise<[SearchHit[], number]>;
    private sufficient;
    private reformulate;
    /**
     * Merge two hit lists, deduplicate by ID, keep highest score.
     */
    static mergeHits(hitsA: SearchHit[], hitsB: SearchHit[]): SearchHit[];
}