@unclick/mcp-server 0.3.0 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (130)
  1. package/README.md +34 -13
  2. package/dist/abn-tool.js +1 -1
  3. package/dist/bgg-tool.js +1 -1
  4. package/dist/carboninterface-tool.js +1 -1
  5. package/dist/cards/card.d.ts +9 -0
  6. package/dist/cards/card.d.ts.map +1 -0
  7. package/dist/cards/card.js +4 -0
  8. package/dist/cards/card.js.map +1 -0
  9. package/dist/cards/search-memory-card.d.ts +11 -0
  10. package/dist/cards/search-memory-card.d.ts.map +1 -0
  11. package/dist/cards/search-memory-card.js +75 -0
  12. package/dist/cards/search-memory-card.js.map +1 -0
  13. package/dist/cards/search-memory-card.test.d.ts +2 -0
  14. package/dist/cards/search-memory-card.test.d.ts.map +1 -0
  15. package/dist/cards/search-memory-card.test.js +59 -0
  16. package/dist/cards/search-memory-card.test.js.map +1 -0
  17. package/dist/catalog.js +36 -36
  18. package/dist/catalog.js.map +1 -1
  19. package/dist/client.d.ts.map +1 -1
  20. package/dist/client.js +96 -6
  21. package/dist/client.js.map +1 -1
  22. package/dist/converter-tools.js +1 -1
  23. package/dist/crews-tool.d.ts +12 -0
  24. package/dist/crews-tool.d.ts.map +1 -0
  25. package/dist/crews-tool.js +125 -0
  26. package/dist/crews-tool.js.map +1 -0
  27. package/dist/gdelt-tool.js +4 -4
  28. package/dist/hackernews-tool.js +1 -1
  29. package/dist/line-tool.js +1 -1
  30. package/dist/local-catalog-handlers.js +1 -1
  31. package/dist/local-catalog-handlers.js.map +1 -1
  32. package/dist/local-tools.js +7 -7
  33. package/dist/local-tools.js.map +1 -1
  34. package/dist/memory/__tests__/bitemporal.test.d.ts +8 -0
  35. package/dist/memory/__tests__/bitemporal.test.d.ts.map +1 -0
  36. package/dist/memory/__tests__/bitemporal.test.js +148 -0
  37. package/dist/memory/__tests__/bitemporal.test.js.map +1 -0
  38. package/dist/memory/__tests__/hybrid-search.test.d.ts +14 -0
  39. package/dist/memory/__tests__/hybrid-search.test.d.ts.map +1 -0
  40. package/dist/memory/__tests__/hybrid-search.test.js +304 -0
  41. package/dist/memory/__tests__/hybrid-search.test.js.map +1 -0
  42. package/dist/memory/agent.d.ts +34 -0
  43. package/dist/memory/agent.d.ts.map +1 -0
  44. package/dist/memory/agent.js +69 -0
  45. package/dist/memory/agent.js.map +1 -0
  46. package/dist/memory/conflicts.d.ts +48 -0
  47. package/dist/memory/conflicts.d.ts.map +1 -0
  48. package/dist/memory/conflicts.js +209 -0
  49. package/dist/memory/conflicts.js.map +1 -0
  50. package/dist/memory/db.d.ts +18 -3
  51. package/dist/memory/db.d.ts.map +1 -1
  52. package/dist/memory/db.js +133 -11
  53. package/dist/memory/db.js.map +1 -1
  54. package/dist/memory/device.d.ts +20 -0
  55. package/dist/memory/device.d.ts.map +1 -0
  56. package/dist/memory/device.js +48 -0
  57. package/dist/memory/device.js.map +1 -0
  58. package/dist/memory/embeddings.d.ts +10 -0
  59. package/dist/memory/embeddings.d.ts.map +1 -0
  60. package/dist/memory/embeddings.js +40 -0
  61. package/dist/memory/embeddings.js.map +1 -0
  62. package/dist/memory/handlers.d.ts.map +1 -1
  63. package/dist/memory/handlers.js +98 -4
  64. package/dist/memory/handlers.js.map +1 -1
  65. package/dist/memory/instrumentation.d.ts +38 -0
  66. package/dist/memory/instrumentation.d.ts.map +1 -0
  67. package/dist/memory/instrumentation.js +97 -0
  68. package/dist/memory/instrumentation.js.map +1 -0
  69. package/dist/memory/load-events.d.ts +18 -0
  70. package/dist/memory/load-events.d.ts.map +1 -0
  71. package/dist/memory/load-events.js +61 -0
  72. package/dist/memory/load-events.js.map +1 -0
  73. package/dist/memory/local.d.ts +4 -1
  74. package/dist/memory/local.d.ts.map +1 -1
  75. package/dist/memory/local.js +14 -0
  76. package/dist/memory/local.js.map +1 -1
  77. package/dist/memory/session-state.d.ts +37 -0
  78. package/dist/memory/session-state.d.ts.map +1 -0
  79. package/dist/memory/session-state.js +82 -0
  80. package/dist/memory/session-state.js.map +1 -0
  81. package/dist/memory/supabase.d.ts +75 -5
  82. package/dist/memory/supabase.d.ts.map +1 -1
  83. package/dist/memory/supabase.js +584 -83
  84. package/dist/memory/supabase.js.map +1 -1
  85. package/dist/memory/tenant-settings.d.ts +33 -0
  86. package/dist/memory/tenant-settings.d.ts.map +1 -0
  87. package/dist/memory/tenant-settings.js +79 -0
  88. package/dist/memory/tenant-settings.js.map +1 -0
  89. package/dist/memory/tool-awareness.d.ts +66 -0
  90. package/dist/memory/tool-awareness.d.ts.map +1 -0
  91. package/dist/memory/tool-awareness.js +307 -0
  92. package/dist/memory/tool-awareness.js.map +1 -0
  93. package/dist/memory/types.d.ts +18 -2
  94. package/dist/memory/types.d.ts.map +1 -1
  95. package/dist/numbers-tool.js +2 -2
  96. package/dist/openfoodfacts-tool.js +1 -1
  97. package/dist/openmeteo-tool.js +1 -1
  98. package/dist/radiobrowser-tool.js +2 -2
  99. package/dist/server.d.ts.map +1 -1
  100. package/dist/server.js +779 -55
  101. package/dist/server.js.map +1 -1
  102. package/dist/signals/emit.d.ts +11 -0
  103. package/dist/signals/emit.d.ts.map +1 -0
  104. package/dist/signals/emit.js +26 -0
  105. package/dist/signals/emit.js.map +1 -0
  106. package/dist/testpass-tool.d.ts +12 -0
  107. package/dist/testpass-tool.d.ts.map +1 -0
  108. package/dist/testpass-tool.js +121 -0
  109. package/dist/testpass-tool.js.map +1 -0
  110. package/dist/tool-wiring.d.ts +320 -4
  111. package/dist/tool-wiring.d.ts.map +1 -1
  112. package/dist/tool-wiring.js +246 -5
  113. package/dist/tool-wiring.js.map +1 -1
  114. package/dist/trivia-tool.js +5 -5
  115. package/dist/usgs-tool.js +1 -1
  116. package/dist/uxpass-tool.d.ts +24 -0
  117. package/dist/uxpass-tool.d.ts.map +1 -0
  118. package/dist/uxpass-tool.js +165 -0
  119. package/dist/uxpass-tool.js.map +1 -0
  120. package/dist/vault-bridge.js +7 -7
  121. package/dist/vercel-tool.d.ts +3 -0
  122. package/dist/vercel-tool.d.ts.map +1 -1
  123. package/dist/vercel-tool.js +198 -7
  124. package/dist/vercel-tool.js.map +1 -1
  125. package/dist/web-tools.d.ts +62 -0
  126. package/dist/web-tools.d.ts.map +1 -0
  127. package/dist/web-tools.js +271 -0
  128. package/dist/web-tools.js.map +1 -0
  129. package/package.json +6 -3
  130. package/server.json +1 -1
@@ -1,65 +1,357 @@
1
1
  /**
2
2
  * Supabase backend for UnClick Memory.
3
3
  *
4
- * Cloud mode: data lives in the user's own Supabase project (BYOD).
5
- * Requires SUPABASE_URL and SUPABASE_SERVICE_ROLE_KEY env vars.
4
+ * Two tenancy modes:
5
+ *
6
+ * BYOD - data lives in the user's own Supabase project. Single-tenant
7
+ * tables (business_context, extracted_facts, ...) and the
8
+ * original RPC names. This is what the wizard (memory-admin
9
+ * setup) installs into a user's Supabase.
10
+ *
11
+ * managed - data lives in UnClick's central Supabase. Multi-tenant
12
+ * tables (mc_business_context, mc_extracted_facts, ...) where
13
+ * every row is tagged with api_key_hash. RPCs are mc_-prefixed
14
+ * and take p_api_key_hash as their first parameter. The backend
15
+ * is responsible for filtering / inserting api_key_hash on
16
+ * every operation.
6
17
  */
7
18
  import { createClient } from "@supabase/supabase-js";
8
- let client = null;
9
- function getSupabase() {
10
- if (client)
11
- return client;
12
- const url = process.env.SUPABASE_URL;
13
- const key = process.env.SUPABASE_SERVICE_ROLE_KEY || process.env.SUPABASE_ANON_KEY;
14
- if (!url || !key) {
15
- throw new Error("Missing SUPABASE_URL and SUPABASE_SERVICE_ROLE_KEY (or SUPABASE_ANON_KEY) environment variables. " +
16
- "Set these in your MCP config's env block.");
17
- }
18
- client = createClient(url, key, {
19
- auth: { persistSession: false, autoRefreshToken: false },
20
- });
21
- return client;
19
+ import { createHash } from "node:crypto";
20
+ function pgError(context, err) {
21
+ if (err instanceof Error)
22
+ return err;
23
+ const e = (err ?? {});
24
+ const parts = [`${context} failed`];
25
+ if (e.message)
26
+ parts.push(e.message);
27
+ if (e.code)
28
+ parts.push(`(code: ${e.code})`);
29
+ if (e.details)
30
+ parts.push(`details: ${e.details}`);
31
+ if (e.hint)
32
+ parts.push(`hint: ${e.hint}`);
33
+ return new Error(parts.join(" "));
22
34
  }
23
- async function rpc(fn, params = {}) {
24
- const sb = getSupabase();
25
- const { data, error } = await sb.rpc(fn, params);
26
- if (error)
27
- throw new Error(`rpc(${fn}) failed: ${error.message}`);
28
- return data;
35
+ function contentHash(text) {
36
+ return createHash("sha256").update(text.toLowerCase().trim(), "utf8").digest("hex");
37
+ }
38
+ async function extractAtomicFacts(text) {
39
+ const apiKey = process.env.OPENAI_API_KEY;
40
+ if (!apiKey)
41
+ return [text];
42
+ try {
43
+ const res = await fetch("https://api.openai.com/v1/chat/completions", {
44
+ method: "POST",
45
+ headers: { Authorization: `Bearer ${apiKey}`, "Content-Type": "application/json" },
46
+ body: JSON.stringify({
47
+ model: "gpt-4o-mini",
48
+ messages: [
49
+ {
50
+ role: "system",
51
+ content: 'Extract 3-10 atomic facts from the following text. Each fact must be a single, self-contained statement. Return ONLY a JSON object: {"facts": ["fact1", "fact2", ...]}',
52
+ },
53
+ { role: "user", content: text.slice(0, 4000) },
54
+ ],
55
+ response_format: { type: "json_object" },
56
+ max_tokens: 600,
57
+ }),
58
+ });
59
+ if (!res.ok)
60
+ return [text];
61
+ const data = (await res.json());
62
+ const raw = data.choices?.[0]?.message?.content ?? "{}";
63
+ const parsed = JSON.parse(raw);
64
+ if (Array.isArray(parsed.facts) && parsed.facts.length > 0) {
65
+ return parsed.facts.map(String).filter(Boolean);
66
+ }
67
+ return [text];
68
+ }
69
+ catch {
70
+ return [text];
71
+ }
29
72
  }
73
+ const BYOD_TABLES = {
74
+ business_context: "business_context",
75
+ knowledge_library: "knowledge_library",
76
+ knowledge_library_history: "knowledge_library_history",
77
+ session_summaries: "session_summaries",
78
+ extracted_facts: "extracted_facts",
79
+ conversation_log: "conversation_log",
80
+ code_dumps: "code_dumps",
81
+ };
82
+ const MANAGED_TABLES = {
83
+ business_context: "mc_business_context",
84
+ knowledge_library: "mc_knowledge_library",
85
+ knowledge_library_history: "mc_knowledge_library_history",
86
+ session_summaries: "mc_session_summaries",
87
+ extracted_facts: "mc_extracted_facts",
88
+ conversation_log: "mc_conversation_log",
89
+ code_dumps: "mc_code_dumps",
90
+ };
30
91
  function now() {
31
92
  return new Date().toISOString();
32
93
  }
33
94
  function truncate(s, max = 8000) {
34
95
  return s.length > max ? s.slice(0, max) + "\n...[truncated]" : s;
35
96
  }
97
+ // ─── Free-tier caps ──────────────────────────────────────────────────────
98
+ // Starting values from the v2 build plan. Adjust with real data later.
99
+ // Pro tier removes all caps. Caps only apply in managed cloud mode (BYOD
100
+ // users own their database, so they manage their own quota).
101
+ export const FREE_TIER_CAPS = {
102
+ storage_bytes: 50 * 1024 * 1024, // 50 MB
103
+ facts: 5000,
104
+ };
105
+ /**
106
+ * Thrown when a free-tier user tries to write past their cap. The MCP
107
+ * handlers surface the message verbatim back to the agent so the user
108
+ * sees an actionable upgrade path.
109
+ */
110
+ export class CapExceededError extends Error {
111
+ constructor(message) {
112
+ super(message);
113
+ this.name = "CapExceededError";
114
+ }
115
+ }
36
116
  export class SupabaseBackend {
37
- constructor() {
38
- // Verify connection on creation
39
- getSupabase();
40
- console.error("UnClick Memory: Supabase cloud mode");
117
+ client;
118
+ tenancy;
119
+ tables;
120
+ constructor(config) {
121
+ if (!config.url || !config.serviceRoleKey) {
122
+ throw new Error("SupabaseBackend requires url and serviceRoleKey");
123
+ }
124
+ this.client = createClient(config.url, config.serviceRoleKey, {
125
+ auth: { persistSession: false, autoRefreshToken: false },
126
+ });
127
+ this.tenancy = config.tenancy;
128
+ this.tables = config.tenancy.mode === "managed" ? MANAGED_TABLES : BYOD_TABLES;
129
+ console.error(`UnClick Memory: Supabase ${config.tenancy.mode === "managed" ? "managed cloud" : "BYOD"} mode`);
130
+ }
131
+ // ─── Tenancy helpers ─────────────────────────────────────────────────────
132
+ /** Adds api_key_hash to a row in managed mode; passes through in BYOD. */
133
+ withTenancy(row) {
134
+ if (this.tenancy.mode === "managed") {
135
+ return { ...row, api_key_hash: this.tenancy.apiKeyHash };
136
+ }
137
+ return row;
138
+ }
139
+ /**
140
+ * Enforce free-tier caps on writes. Only runs in managed cloud mode.
141
+ * BYOD users own their database, so caps don't apply. Pro tier (or any
142
+ * non-free tier) skips the check.
143
+ *
144
+ * `kind` selects which cap to check first. Storage is always verified;
145
+ * `kind: "fact"` additionally verifies the fact-count cap because
146
+ * extracted_facts has a separate row count limit.
147
+ */
148
+ async enforceCaps(kind) {
149
+ if (this.tenancy.mode !== "managed")
150
+ return;
151
+ const tier = (process.env.UNCLICK_TIER || "free").toLowerCase();
152
+ if (tier !== "free")
153
+ return;
154
+ if (kind === "fact") {
155
+ const { data, error } = await this.client.rpc("mc_get_fact_count", {
156
+ p_api_key_hash: this.tenancy.apiKeyHash,
157
+ });
158
+ if (error) {
159
+ // Fail open on counter errors so a transient DB hiccup doesn't
160
+ // break legitimate writes. Log to stderr for observability.
161
+ console.error("[memory] mc_get_fact_count failed:", error.message);
162
+ }
163
+ else if (typeof data === "number" && data >= FREE_TIER_CAPS.facts) {
164
+ throw new CapExceededError(`Free tier limit reached: ${FREE_TIER_CAPS.facts.toLocaleString()} active ` +
165
+ `facts. Upgrade to Pro for unlimited facts, or prune old facts ` +
166
+ `via the Memory surface. Current count: ${data}.`);
167
+ }
168
+ }
169
+ const { data: bytes, error: bytesErr } = await this.client.rpc("mc_get_storage_bytes", { p_api_key_hash: this.tenancy.apiKeyHash });
170
+ if (bytesErr) {
171
+ console.error("[memory] mc_get_storage_bytes failed:", bytesErr.message);
172
+ return;
173
+ }
174
+ if (typeof bytes === "number" && bytes >= FREE_TIER_CAPS.storage_bytes) {
175
+ const usedMb = (bytes / (1024 * 1024)).toFixed(1);
176
+ throw new CapExceededError(`Free tier limit reached: ${usedMb} MB used of ` +
177
+ `${FREE_TIER_CAPS.storage_bytes / (1024 * 1024)} MB. ` +
178
+ `Upgrade to Pro for unlimited storage, or prune memory via ` +
179
+ `the Memory surface.`);
180
+ }
181
+ }
182
+ /** Calls an RPC, choosing the BYOD or managed name based on tenancy. */
183
+ async rpc(byodName, byodParams, managedName, managedParams) {
184
+ const fn = this.tenancy.mode === "managed" ? managedName : byodName;
185
+ const params = this.tenancy.mode === "managed"
186
+ ? { p_api_key_hash: this.tenancy.apiKeyHash, ...managedParams }
187
+ : byodParams;
188
+ const { data, error } = await this.client.rpc(fn, params);
189
+ if (error)
190
+ throw new Error(`rpc(${fn}) failed: ${error.message}`);
191
+ return data;
41
192
  }
193
+ // ─── Memory operations ───────────────────────────────────────────────────
42
194
  async getStartupContext(numSessions) {
43
- return rpc("get_startup_context", { num_sessions: numSessions });
195
+ const data = await this.rpc("get_startup_context", { num_sessions: numSessions }, "mc_get_startup_context", { p_num_sessions: numSessions });
196
+ return {
197
+ agent_instructions: [
198
+ "You are connected to UnClick Memory - a persistent memory system that works across all sessions and devices.",
199
+ "ALWAYS use this memory as your primary knowledge source. It has the user's rules, preferences, projects, and history.",
200
+ "When the user says something ambiguous or short, SEARCH memory first - it may be a stored keyword or trigger.",
201
+ "When you learn something new (preferences, projects, contacts, decisions), store it using add_fact.",
202
+ "At the end of significant conversations, write a session summary using write_session_summary.",
203
+ "Business context entries (loaded below) are standing rules. Follow them as if the user said them right now.",
204
+ "Never say 'I don't have access to your previous conversations' - you DO, through this memory system."
205
+ ].join("\n"),
206
+ ...data,
207
+ };
208
+ }
209
+ async searchMemory(query, maxResults, asOf) {
210
+ // Hybrid lane: BM25 + pgvector RRF over mc_extracted_facts and
211
+ // mc_session_summaries. Two well-known failure modes turn this into a
212
+ // black hole and force a fallback:
213
+ //
214
+ // 1. Per-row embeddings are NULL (legacy facts, BYOD installs without
215
+ // backfill, or facts written before embedding wiring) so the vector
216
+ // lane drops them.
217
+ // 2. plainto_tsquery('english', ...) tokenizes proper nouns and short
218
+ // identifiers ("Chris", "Bailey", "UnClick") in ways that don't
219
+ // align with the matching to_tsvector lexemes, so the keyword lane
220
+ // misses too. Both branches fail, hybrid returns [].
221
+ //
222
+ // To stop returning [] when matching content exists, we run a robust
223
+ // ILIKE keyword fallback over the same tables whenever the hybrid call
224
+ // throws OR returns an empty result.
225
+ try {
226
+ const { embedText } = await import("./embeddings.js");
227
+ const embedding = await embedText(query);
228
+ if (embedding) {
229
+ const results = await this.rpc("search_memory_hybrid", { search_query: query, query_embedding: embedding, max_results: maxResults, as_of: asOf ?? null }, "mc_search_memory_hybrid", { p_search_query: query, p_query_embedding: embedding, p_max_results: maxResults, p_as_of: asOf ?? null });
230
+ if (Array.isArray(results) && results.length > 0)
231
+ return results;
232
+ }
233
+ }
234
+ catch (err) {
235
+ console.error("[search_memory] hybrid search failed, falling back to keyword:", err);
236
+ }
237
+ return this.keywordFallback(query, maxResults);
44
238
  }
45
- async searchMemory(query, maxResults) {
46
- return rpc("search_memory", { search_query: query, max_results: maxResults });
239
+ /**
240
+ * ILIKE-based keyword fallback over mc_extracted_facts +
241
+ * mc_session_summaries. Used when hybrid retrieval returns []. Returns
242
+ * rows shaped to mirror mc_search_memory_hybrid so callers don't branch.
243
+ * Never widens RLS: tenant scoping via api_key_hash is preserved.
244
+ *
245
+ * Phrase support: the query is tokenized on whitespace. Tokens shorter
246
+ * than 2 chars or containing PostgREST .or() metacharacters are dropped.
247
+ * We try AND-of-tokens first (every token must appear, in any order); if
248
+ * that returns nothing we degrade to OR-of-tokens and rank rows by how
249
+ * many tokens they contain so partial matches at least surface something.
250
+ */
251
+ async keywordFallback(query, maxResults) {
252
+ const tokens = query
253
+ .toLowerCase()
254
+ .split(/\s+/)
255
+ .filter((t) => t.length >= 2 && !/[,():]/.test(t));
256
+ if (tokens.length === 0)
257
+ return [];
258
+ const patterns = tokens.map((t) => `%${t.replace(/[\\%_]/g, (c) => `\\${c}`)}%`);
259
+ const score = (text) => {
260
+ const lower = text.toLowerCase();
261
+ let n = 0;
262
+ for (const t of tokens)
263
+ if (lower.includes(t))
264
+ n++;
265
+ return n;
266
+ };
267
+ const runScan = async (mode) => {
268
+ let factQ = this.client
269
+ .from(this.tables.extracted_facts)
270
+ .select("id, fact, category, confidence, created_at")
271
+ .eq("status", "active")
272
+ .is("invalidated_at", null);
273
+ let sessQ = this.client
274
+ .from(this.tables.session_summaries)
275
+ .select("id, summary, created_at");
276
+ if (mode === "and") {
277
+ for (const p of patterns) {
278
+ factQ = factQ.ilike("fact", p);
279
+ sessQ = sessQ.ilike("summary", p);
280
+ }
281
+ }
282
+ else {
283
+ factQ = factQ.or(patterns.map((p) => `fact.ilike.${p}`).join(","));
284
+ sessQ = sessQ.or(patterns.map((p) => `summary.ilike.${p}`).join(","));
285
+ }
286
+ if (this.tenancy.mode === "managed") {
287
+ factQ = factQ.eq("api_key_hash", this.tenancy.apiKeyHash);
288
+ sessQ = sessQ.eq("api_key_hash", this.tenancy.apiKeyHash);
289
+ }
290
+ factQ = factQ
291
+ .order("confidence", { ascending: false })
292
+ .order("created_at", { ascending: false })
293
+ .limit(maxResults);
294
+ sessQ = sessQ.order("created_at", { ascending: false }).limit(maxResults);
295
+ const [factsRes, sessRes] = await Promise.all([factQ, sessQ]);
296
+ const facts = (factsRes.data ?? []).map((r) => {
297
+ const s = score(r.fact);
298
+ return {
299
+ id: r.id,
300
+ source: "fact",
301
+ content: r.fact,
302
+ category: r.category,
303
+ confidence: r.confidence,
304
+ created_at: r.created_at,
305
+ final_score: (s / tokens.length) * (r.confidence ?? 0),
306
+ rrf_score: 0,
307
+ kw_score: s,
308
+ cosine_score: 0,
309
+ };
310
+ });
311
+ const sessions = (sessRes.data ?? []).map((r) => {
312
+ const s = score(r.summary);
313
+ return {
314
+ id: r.id,
315
+ source: "session",
316
+ content: r.summary,
317
+ category: "session",
318
+ confidence: 1,
319
+ created_at: r.created_at,
320
+ final_score: (s / tokens.length) * 0.5,
321
+ rrf_score: 0,
322
+ kw_score: s,
323
+ cosine_score: 0,
324
+ };
325
+ });
326
+ return [...facts, ...sessions]
327
+ .sort((a, b) => {
328
+ const d = (b.final_score ?? 0) - (a.final_score ?? 0);
329
+ return d !== 0 ? d : (b.created_at ?? "").localeCompare(a.created_at ?? "");
330
+ })
331
+ .slice(0, maxResults);
332
+ };
333
+ const andResults = await runScan("and");
334
+ if (andResults.length > 0 || tokens.length < 2)
335
+ return andResults;
336
+ return runScan("or");
47
337
  }
48
338
  async searchFacts(query) {
49
- return rpc("search_facts", { search_query: query });
339
+ return this.rpc("search_facts", { search_query: query }, "mc_search_facts", { p_search_query: query });
50
340
  }
51
341
  async searchLibrary(query) {
52
- return rpc("search_library", { search_query: query });
342
+ return this.rpc("search_library", { search_query: query }, "mc_search_library", { p_search_query: query });
53
343
  }
54
344
  async getLibraryDoc(slug) {
55
- return rpc("get_library_doc", { doc_slug: slug });
345
+ return this.rpc("get_library_doc", { doc_slug: slug }, "mc_get_library_doc", { p_doc_slug: slug });
56
346
  }
57
347
  async listLibrary() {
58
- return rpc("list_library");
348
+ return this.rpc("list_library", {}, "mc_list_library", {});
59
349
  }
60
350
  async writeSessionSummary(data) {
61
- const sb = getSupabase();
62
- const { data: row, error } = await sb.from("session_summaries").insert({
351
+ await this.enforceCaps("general");
352
+ const { data: row, error } = await this.client
353
+ .from(this.tables.session_summaries)
354
+ .insert(this.withTenancy({
63
355
  session_id: data.session_id,
64
356
  summary: data.summary,
65
357
  topics: data.topics,
@@ -67,14 +359,40 @@ export class SupabaseBackend {
67
359
  decisions: data.decisions,
68
360
  platform: data.platform,
69
361
  duration_minutes: data.duration_minutes,
70
- }).select().single();
362
+ }))
363
+ .select()
364
+ .single();
71
365
  if (error)
72
- throw error;
366
+ throw pgError("writeSessionSummary insert", error);
367
+ // Embed the summary so it joins the vector lane immediately (same
368
+ // motivation as addFact above). Fire-and-forget.
369
+ this.embedAndStore(this.tables.session_summaries, row.id, data.summary).catch(() => { });
73
370
  return { id: row.id };
74
371
  }
75
372
  async addFact(data) {
76
- const sb = getSupabase();
77
- const { data: row, error } = await sb.from("extracted_facts").insert({
373
+ // preserve_as_blob: write raw body to canonical_docs, then extract+store atomic facts
374
+ if (data.preserve_as_blob) {
375
+ return this.saveBlob(data);
376
+ }
377
+ await this.enforceCaps("fact");
378
+ const hash = contentHash(data.fact);
379
+ // Exact-hash dedup: if a live fact with this hash already exists, return it
380
+ const dupTable = this.tables.extracted_facts;
381
+ let dupQuery = this.client
382
+ .from(dupTable)
383
+ .select("id")
384
+ .eq("content_hash", hash)
385
+ .is("invalidated_at", null)
386
+ .limit(1);
387
+ if (this.tenancy.mode === "managed") {
388
+ dupQuery = dupQuery.eq("api_key_hash", this.tenancy.apiKeyHash);
389
+ }
390
+ const { data: existing } = await dupQuery.maybeSingle();
391
+ if (existing)
392
+ return { id: existing.id };
393
+ const { data: row, error } = await this.client
394
+ .from(this.tables.extracted_facts)
395
+ .insert(this.withTenancy({
78
396
  fact: data.fact,
79
397
  category: data.category,
80
398
  confidence: data.confidence,
@@ -83,96 +401,258 @@ export class SupabaseBackend {
83
401
  status: "active",
84
402
  decay_tier: "hot",
85
403
  last_accessed: now(),
86
- }).select().single();
404
+ content_hash: hash,
405
+ valid_from: data.valid_from ?? now(),
406
+ recorded_at: now(),
407
+ extractor_id: data.extractor_id ?? "manual",
408
+ prompt_version: data.prompt_version ?? null,
409
+ model_id: data.model_id ?? null,
410
+ commit_sha: data.commit_sha ?? null,
411
+ pr_number: data.pr_number ?? null,
412
+ }))
413
+ .select()
414
+ .single();
87
415
  if (error)
88
- throw error;
416
+ throw pgError("addFact insert", error);
417
+ // Append audit row (fire-and-forget; never blocks the main insert)
418
+ this.writeFactAudit(row.id, "insert", { category: data.category }).catch(() => { });
419
+ // Embed the fact so it joins the vector lane immediately. Without this,
420
+ // every newly inserted fact has NULL embedding and only the keyword lane
421
+ // can find it. Fire-and-forget so embedding latency / OpenAI outages
422
+ // never block the primary insert.
423
+ this.embedAndStore(this.tables.extracted_facts, row.id, data.fact).catch(() => { });
89
424
  return { id: row.id };
90
425
  }
426
+ async embedAndStore(table, id, text) {
427
+ const { embedText, EMBEDDING_MODEL } = await import("./embeddings.js");
428
+ const vec = await embedText(text);
429
+ if (!vec)
430
+ return;
431
+ await this.client
432
+ .from(table)
433
+ .update({
434
+ embedding: JSON.stringify(vec),
435
+ embedding_model: EMBEDDING_MODEL,
436
+ embedding_created_at: now(),
437
+ })
438
+ .eq("id", id);
439
+ }
440
+ async saveBlob(data) {
441
+ await this.enforceCaps("general");
442
+ const hash = contentHash(data.fact);
443
+ const docTable = this.tenancy.mode === "managed" ? "mc_canonical_docs" : "canonical_docs";
444
+ // Upsert canonical_doc (idempotent by content_hash)
445
+ let docId;
446
+ {
447
+ let q = this.client.from(docTable).select("id").eq("content_hash", hash).limit(1);
448
+ if (this.tenancy.mode === "managed") {
449
+ q = q.eq("api_key_hash", this.tenancy.apiKeyHash);
450
+ }
451
+ const { data: existing } = await q.maybeSingle();
452
+ if (existing) {
453
+ docId = existing.id;
454
+ }
455
+ else {
456
+ const insertRow = this.tenancy.mode === "managed"
457
+ ? { api_key_hash: this.tenancy.apiKeyHash, title: data.category, body: data.fact, content_hash: hash }
458
+ : { title: data.category, body: data.fact, content_hash: hash };
459
+ const { data: doc, error } = await this.client.from(docTable).insert(insertRow).select().single();
460
+ if (error)
461
+ throw pgError("saveBlob canonical_docs insert", error);
462
+ docId = doc.id;
463
+ }
464
+ }
465
+ // Extract atomic facts (minimal extractor; Chunk 4 replaces with full pipeline)
466
+ const atomicFacts = await extractAtomicFacts(data.fact);
467
+ const factIds = [];
468
+ for (const factText of atomicFacts) {
469
+ const factHash = contentHash(factText);
470
+ // Skip if already live
471
+ let dupQ = this.client
472
+ .from(this.tables.extracted_facts)
473
+ .select("id")
474
+ .eq("content_hash", factHash)
475
+ .is("invalidated_at", null)
476
+ .limit(1);
477
+ if (this.tenancy.mode === "managed") {
478
+ dupQ = dupQ.eq("api_key_hash", this.tenancy.apiKeyHash);
479
+ }
480
+ const { data: dup } = await dupQ.maybeSingle();
481
+ if (dup) {
482
+ factIds.push(dup.id);
483
+ continue;
484
+ }
485
+ const { data: frow, error: ferr } = await this.client
486
+ .from(this.tables.extracted_facts)
487
+ .insert(this.withTenancy({
488
+ fact: factText,
489
+ category: data.category,
490
+ confidence: Math.max(0, data.confidence - 0.05), // slight confidence discount
491
+ source_session_id: data.source_session_id ?? null,
492
+ source_type: "auto_extract",
493
+ status: "active",
494
+ decay_tier: "hot",
495
+ last_accessed: now(),
496
+ content_hash: factHash,
497
+ valid_from: now(),
498
+ recorded_at: now(),
499
+ extractor_id: "auto-extract-v1",
500
+ derived_from_doc_id: docId,
501
+ }))
502
+ .select()
503
+ .single();
504
+ if (ferr && ferr.code !== "23505")
505
+ throw pgError("saveBlob extracted_facts insert", ferr);
506
+ if (!ferr && frow)
507
+ factIds.push(frow.id);
508
+ }
509
+ return { id: docId, fact_ids: factIds };
510
+ }
511
+ async writeFactAudit(factId, op, payload) {
512
+ const auditTable = this.tenancy.mode === "managed" ? "mc_facts_audit" : "facts_audit";
513
+ await this.client.from(auditTable).insert({ fact_id: factId, op, payload, actor: "agent", at: now() });
514
+ }
515
+ async invalidateFact(input) {
516
+ const result = await this.rpc("invalidate_fact", { p_fact_id: input.fact_id, p_reason: input.reason ?? null, p_session_id: input.session_id ?? null }, "mc_invalidate_fact", { p_fact_id: input.fact_id, p_reason: input.reason ?? null, p_session_id: input.session_id ?? null });
517
+ const row = Array.isArray(result) ? result[0] : result;
518
+ return { invalidated_at: row.invalidated_at };
519
+ }
91
520
  async supersedeFact(oldId, newText, category, confidence) {
92
- const params = { old_fact_id: oldId, new_fact_text: newText };
521
+ if (this.tenancy.mode === "managed") {
522
+ const params = {
523
+ p_api_key_hash: this.tenancy.apiKeyHash,
524
+ p_old_fact_id: oldId,
525
+ p_new_fact_text: newText,
526
+ };
527
+ if (category !== undefined)
528
+ params.p_new_category = category;
529
+ if (confidence !== undefined)
530
+ params.p_new_confidence = confidence;
531
+ const { data, error } = await this.client.rpc("mc_supersede_fact", params);
532
+ if (error)
533
+ throw new Error(`rpc(mc_supersede_fact) failed: ${error.message}`);
534
+ return String(data);
535
+ }
536
+ const params = {
537
+ old_fact_id: oldId,
538
+ new_fact_text: newText,
539
+ };
93
540
  if (category !== undefined)
94
541
  params.new_category = category;
95
542
  if (confidence !== undefined)
96
543
  params.new_confidence = confidence;
97
- const data = await rpc("supersede_fact", params);
544
+ const { data, error } = await this.client.rpc("supersede_fact", params);
545
+ if (error)
546
+ throw new Error(`rpc(supersede_fact) failed: ${error.message}`);
98
547
  return String(data);
99
548
  }
100
549
  async logConversation(data) {
101
- const sb = getSupabase();
102
- const { error } = await sb.from("conversation_log").insert({
550
+ await this.enforceCaps("general");
551
+ const { error } = await this.client
552
+ .from(this.tables.conversation_log)
553
+ .insert(this.withTenancy({
103
554
  session_id: data.session_id,
104
555
  role: data.role,
105
556
  content: truncate(data.content),
106
557
  has_code: data.has_code,
107
- });
558
+ }));
108
559
  if (error)
109
- throw error;
560
+ throw pgError("logConversation insert", error);
110
561
  }
111
562
  async getConversationDetail(sessionId) {
112
- return rpc("get_conversation_detail", { sid: sessionId });
563
+ return this.rpc("get_conversation_detail", { sid: sessionId }, "mc_get_conversation_detail", { p_session_id: sessionId });
113
564
  }
114
565
  async storeCode(data) {
115
- const sb = getSupabase();
116
- const { data: row, error } = await sb.from("code_dumps").insert({
566
+ await this.enforceCaps("general");
567
+ const { data: row, error } = await this.client
568
+ .from(this.tables.code_dumps)
569
+ .insert(this.withTenancy({
117
570
  session_id: data.session_id,
118
571
  language: data.language,
119
572
  filename: data.filename ?? null,
120
573
  content: truncate(data.content, 50000),
121
574
  description: data.description ?? null,
122
- }).select().single();
575
+ }))
576
+ .select()
577
+ .single();
123
578
  if (error)
124
- throw error;
579
+ throw pgError("storeCode insert", error);
125
580
  return { id: row.id };
126
581
  }
127
582
  async getBusinessContext() {
128
- const sb = getSupabase();
129
- const { data, error } = await sb.from("business_context").select("*").order("category").order("key");
583
+ let query = this.client
584
+ .from(this.tables.business_context)
585
+ .select("*")
586
+ .order("category")
587
+ .order("key");
588
+ if (this.tenancy.mode === "managed") {
589
+ query = query.eq("api_key_hash", this.tenancy.apiKeyHash);
590
+ }
591
+ const { data, error } = await query;
130
592
  if (error)
131
- throw error;
593
+ throw pgError("getBusinessContext select", error);
132
594
  return data ?? [];
133
595
  }
134
596
  async setBusinessContext(category, key, value, priority) {
135
- const sb = getSupabase();
597
+ await this.enforceCaps("general");
136
598
  const row = {
137
599
  category,
138
600
  key,
139
- value: typeof value === "string" ? (() => { try {
140
- return JSON.parse(value);
141
- }
142
- catch {
143
- return value;
144
- } })() : value,
601
+ value: typeof value === "string"
602
+ ? (() => {
603
+ try {
604
+ return JSON.parse(value);
605
+ }
606
+ catch {
607
+ return value;
608
+ }
609
+ })()
610
+ : value,
145
611
  last_accessed: now(),
146
612
  decay_tier: "hot",
147
613
  };
148
614
  if (priority !== undefined)
149
615
  row.priority = priority;
150
- const { error } = await sb.from("business_context")
151
- .upsert(row, { onConflict: "category,key" })
152
- .select().single();
616
+ const onConflict = this.tenancy.mode === "managed" ? "api_key_hash,category,key" : "category,key";
617
+ const { error } = await this.client
618
+ .from(this.tables.business_context)
619
+ .upsert(this.withTenancy(row), { onConflict })
620
+ .select()
621
+ .single();
153
622
  if (error)
154
- throw error;
623
+ throw pgError("setBusinessContext upsert", error);
155
624
  }
156
625
  async upsertLibraryDoc(data) {
157
- const sb = getSupabase();
158
- const { data: existing } = await sb.from("knowledge_library")
159
- .select("id, version").eq("slug", data.slug).single();
626
+ await this.enforceCaps("general");
627
+ let existingQuery = this.client
628
+ .from(this.tables.knowledge_library)
629
+ .select("id, version")
630
+ .eq("slug", data.slug);
631
+ if (this.tenancy.mode === "managed") {
632
+ existingQuery = existingQuery.eq("api_key_hash", this.tenancy.apiKeyHash);
633
+ }
634
+ const { data: existing } = await existingQuery.maybeSingle();
160
635
  if (existing) {
161
636
  // DB trigger auto-archives old content and bumps version
162
- const { error } = await sb.from("knowledge_library").update({
637
+ const { error } = await this.client
638
+ .from(this.tables.knowledge_library)
639
+ .update({
163
640
  title: data.title,
164
641
  category: data.category,
165
642
  content: data.content,
166
643
  tags: data.tags,
167
644
  last_accessed: now(),
168
645
  decay_tier: "hot",
169
- }).eq("id", existing.id);
646
+ })
647
+ .eq("id", existing.id);
170
648
  if (error)
171
- throw error;
649
+ throw pgError("upsertLibraryDoc update", error);
172
650
  return `Library doc updated: "${data.title}" (v${existing.version + 1})`;
173
651
  }
174
652
  else {
175
- const { error } = await sb.from("knowledge_library").insert({
653
+ const { error } = await this.client
654
+ .from(this.tables.knowledge_library)
655
+ .insert(this.withTenancy({
176
656
  slug: data.slug,
177
657
  title: data.title,
178
658
  category: data.category,
@@ -181,29 +661,50 @@ export class SupabaseBackend {
181
661
  version: 1,
182
662
  decay_tier: "hot",
183
663
  last_accessed: now(),
184
- });
664
+ }));
185
665
  if (error)
186
- throw error;
666
+ throw pgError("upsertLibraryDoc insert", error);
187
667
  return `Library doc created: "${data.title}" (v1)`;
188
668
  }
189
669
  }
190
670
  async manageDecay() {
191
- return rpc("manage_decay");
671
+ return this.rpc("manage_decay", {}, "mc_manage_decay", {});
192
672
  }
193
673
  async getMemoryStatus() {
194
- const sb = getSupabase();
195
- const tables = ["business_context", "knowledge_library", "session_summaries", "extracted_facts", "conversation_log", "code_dumps"];
674
+ const tableKeys = [
675
+ "business_context",
676
+ "knowledge_library",
677
+ "session_summaries",
678
+ "extracted_facts",
679
+ "conversation_log",
680
+ "code_dumps",
681
+ ];
196
682
  const counts = {};
197
- for (const table of tables) {
198
- const { count } = await sb.from(table).select("*", { count: "exact", head: true });
199
- counts[table] = count;
683
+ for (const tk of tableKeys) {
684
+ let q = this.client.from(this.tables[tk]).select("*", { count: "exact", head: true });
685
+ if (this.tenancy.mode === "managed") {
686
+ q = q.eq("api_key_hash", this.tenancy.apiKeyHash);
687
+ }
688
+ const { count } = await q;
689
+ counts[tk] = count;
200
690
  }
201
- const { data: factTiers } = await sb.from("extracted_facts").select("decay_tier").eq("status", "active");
691
+ let factTiersQuery = this.client
692
+ .from(this.tables.extracted_facts)
693
+ .select("decay_tier")
694
+ .eq("status", "active");
695
+ if (this.tenancy.mode === "managed") {
696
+ factTiersQuery = factTiersQuery.eq("api_key_hash", this.tenancy.apiKeyHash);
697
+ }
698
+ const { data: factTiers } = await factTiersQuery;
202
699
  const tiers = { hot: 0, warm: 0, cold: 0 };
203
700
  for (const row of factTiers ?? []) {
204
701
  tiers[row.decay_tier]++;
205
702
  }
206
- return { mode: "supabase", table_counts: counts, fact_decay_tiers: tiers };
703
+ return {
704
+ mode: this.tenancy.mode === "managed" ? "supabase-managed" : "supabase-byod",
705
+ table_counts: counts,
706
+ fact_decay_tiers: tiers,
707
+ };
207
708
  }
208
709
  }
209
710
  //# sourceMappingURL=supabase.js.map