@unclick/mcp-server 0.2.5 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142) hide show
  1. package/README.md +160 -139
  2. package/dist/abn-tool.js +1 -1
  3. package/dist/bgg-tool.js +1 -1
  4. package/dist/carboninterface-tool.js +1 -1
  5. package/dist/cards/card.d.ts +9 -0
  6. package/dist/cards/card.d.ts.map +1 -0
  7. package/dist/cards/card.js +4 -0
  8. package/dist/cards/card.js.map +1 -0
  9. package/dist/cards/search-memory-card.d.ts +11 -0
  10. package/dist/cards/search-memory-card.d.ts.map +1 -0
  11. package/dist/cards/search-memory-card.js +75 -0
  12. package/dist/cards/search-memory-card.js.map +1 -0
  13. package/dist/cards/search-memory-card.test.d.ts +2 -0
  14. package/dist/cards/search-memory-card.test.d.ts.map +1 -0
  15. package/dist/cards/search-memory-card.test.js +59 -0
  16. package/dist/cards/search-memory-card.test.js.map +1 -0
  17. package/dist/catalog.d.ts.map +1 -1
  18. package/dist/catalog.js +265 -4
  19. package/dist/catalog.js.map +1 -1
  20. package/dist/client.d.ts.map +1 -1
  21. package/dist/client.js +96 -6
  22. package/dist/client.js.map +1 -1
  23. package/dist/converter-tools.js +1 -1
  24. package/dist/crews-tool.d.ts +12 -0
  25. package/dist/crews-tool.d.ts.map +1 -0
  26. package/dist/crews-tool.js +125 -0
  27. package/dist/crews-tool.js.map +1 -0
  28. package/dist/gdelt-tool.js +4 -4
  29. package/dist/hackernews-tool.js +1 -1
  30. package/dist/index.js +0 -0
  31. package/dist/keychain-secure-input.js +42 -42
  32. package/dist/line-tool.js +1 -1
  33. package/dist/linear-tool.js +73 -73
  34. package/dist/local-catalog-handlers.js +1 -1
  35. package/dist/local-catalog-handlers.js.map +1 -1
  36. package/dist/local-tools.js +7 -7
  37. package/dist/local-tools.js.map +1 -1
  38. package/dist/memory/__tests__/bitemporal.test.d.ts +8 -0
  39. package/dist/memory/__tests__/bitemporal.test.d.ts.map +1 -0
  40. package/dist/memory/__tests__/bitemporal.test.js +148 -0
  41. package/dist/memory/__tests__/bitemporal.test.js.map +1 -0
  42. package/dist/memory/__tests__/hybrid-search.test.d.ts +14 -0
  43. package/dist/memory/__tests__/hybrid-search.test.d.ts.map +1 -0
  44. package/dist/memory/__tests__/hybrid-search.test.js +304 -0
  45. package/dist/memory/__tests__/hybrid-search.test.js.map +1 -0
  46. package/dist/memory/agent.d.ts +34 -0
  47. package/dist/memory/agent.d.ts.map +1 -0
  48. package/dist/memory/agent.js +69 -0
  49. package/dist/memory/agent.js.map +1 -0
  50. package/dist/memory/conflicts.d.ts +48 -0
  51. package/dist/memory/conflicts.d.ts.map +1 -0
  52. package/dist/memory/conflicts.js +209 -0
  53. package/dist/memory/conflicts.js.map +1 -0
  54. package/dist/memory/db.d.ts +25 -0
  55. package/dist/memory/db.d.ts.map +1 -0
  56. package/dist/memory/db.js +144 -0
  57. package/dist/memory/db.js.map +1 -0
  58. package/dist/memory/device.d.ts +20 -0
  59. package/dist/memory/device.d.ts.map +1 -0
  60. package/dist/memory/device.js +48 -0
  61. package/dist/memory/device.js.map +1 -0
  62. package/dist/memory/embeddings.d.ts +10 -0
  63. package/dist/memory/embeddings.d.ts.map +1 -0
  64. package/dist/memory/embeddings.js +40 -0
  65. package/dist/memory/embeddings.js.map +1 -0
  66. package/dist/memory/handlers.d.ts +11 -0
  67. package/dist/memory/handlers.d.ts.map +1 -0
  68. package/dist/memory/handlers.js +219 -0
  69. package/dist/memory/handlers.js.map +1 -0
  70. package/dist/memory/instrumentation.d.ts +38 -0
  71. package/dist/memory/instrumentation.d.ts.map +1 -0
  72. package/dist/memory/instrumentation.js +97 -0
  73. package/dist/memory/instrumentation.js.map +1 -0
  74. package/dist/memory/load-events.d.ts +18 -0
  75. package/dist/memory/load-events.d.ts.map +1 -0
  76. package/dist/memory/load-events.js +61 -0
  77. package/dist/memory/load-events.js.map +1 -0
  78. package/dist/memory/local.d.ts +40 -0
  79. package/dist/memory/local.d.ts.map +1 -0
  80. package/dist/memory/local.js +400 -0
  81. package/dist/memory/local.js.map +1 -0
  82. package/dist/memory/session-state.d.ts +37 -0
  83. package/dist/memory/session-state.d.ts.map +1 -0
  84. package/dist/memory/session-state.js +82 -0
  85. package/dist/memory/session-state.js.map +1 -0
  86. package/dist/memory/supabase.d.ts +104 -0
  87. package/dist/memory/supabase.d.ts.map +1 -0
  88. package/dist/memory/supabase.js +710 -0
  89. package/dist/memory/supabase.js.map +1 -0
  90. package/dist/memory/tenant-settings.d.ts +33 -0
  91. package/dist/memory/tenant-settings.d.ts.map +1 -0
  92. package/dist/memory/tenant-settings.js +79 -0
  93. package/dist/memory/tenant-settings.js.map +1 -0
  94. package/dist/memory/tool-awareness.d.ts +66 -0
  95. package/dist/memory/tool-awareness.d.ts.map +1 -0
  96. package/dist/memory/tool-awareness.js +307 -0
  97. package/dist/memory/tool-awareness.js.map +1 -0
  98. package/dist/memory/types.d.ts +97 -0
  99. package/dist/memory/types.d.ts.map +1 -0
  100. package/dist/memory/types.js +5 -0
  101. package/dist/memory/types.js.map +1 -0
  102. package/dist/monday-tool.js +46 -46
  103. package/dist/musicbrainz-tool.js +1 -1
  104. package/dist/musicbrainz-tool.js.map +1 -1
  105. package/dist/numbers-tool.js +2 -2
  106. package/dist/openfoodfacts-tool.js +1 -1
  107. package/dist/openmeteo-tool.js +1 -1
  108. package/dist/radiobrowser-tool.js +2 -2
  109. package/dist/server.d.ts.map +1 -1
  110. package/dist/server.js +838 -15
  111. package/dist/server.js.map +1 -1
  112. package/dist/signals/emit.d.ts +11 -0
  113. package/dist/signals/emit.d.ts.map +1 -0
  114. package/dist/signals/emit.js +26 -0
  115. package/dist/signals/emit.js.map +1 -0
  116. package/dist/testpass-tool.d.ts +12 -0
  117. package/dist/testpass-tool.d.ts.map +1 -0
  118. package/dist/testpass-tool.js +121 -0
  119. package/dist/testpass-tool.js.map +1 -0
  120. package/dist/toilets-tool.js +2 -2
  121. package/dist/tool-wiring.d.ts +320 -4
  122. package/dist/tool-wiring.d.ts.map +1 -1
  123. package/dist/tool-wiring.js +246 -5
  124. package/dist/tool-wiring.js.map +1 -1
  125. package/dist/trivia-tool.js +5 -5
  126. package/dist/usgs-tool.js +1 -1
  127. package/dist/uxpass-tool.d.ts +24 -0
  128. package/dist/uxpass-tool.d.ts.map +1 -0
  129. package/dist/uxpass-tool.js +165 -0
  130. package/dist/uxpass-tool.js.map +1 -0
  131. package/dist/vault-bridge.js +7 -7
  132. package/dist/vercel-tool.d.ts +3 -0
  133. package/dist/vercel-tool.d.ts.map +1 -1
  134. package/dist/vercel-tool.js +198 -7
  135. package/dist/vercel-tool.js.map +1 -1
  136. package/dist/web-tools.d.ts +62 -0
  137. package/dist/web-tools.d.ts.map +1 -0
  138. package/dist/web-tools.js +271 -0
  139. package/dist/web-tools.js.map +1 -0
  140. package/package.json +69 -65
  141. package/public/icon.svg +15 -15
  142. package/server.json +37 -37
@@ -0,0 +1,710 @@
1
+ /**
2
+ * Supabase backend for UnClick Memory.
3
+ *
4
+ * Two tenancy modes:
5
+ *
6
+ * BYOD - data lives in the user's own Supabase project. Single-tenant
7
+ * tables (business_context, extracted_facts, ...) and the
8
+ * original RPC names. This is what the wizard (memory-admin
9
+ * setup) installs into a user's Supabase.
10
+ *
11
+ * managed - data lives in UnClick's central Supabase. Multi-tenant
12
+ * tables (mc_business_context, mc_extracted_facts, ...) where
13
+ * every row is tagged with api_key_hash. RPCs are mc_-prefixed
14
+ * and take p_api_key_hash as their first parameter. The backend
15
+ * is responsible for filtering / inserting api_key_hash on
16
+ * every operation.
17
+ */
18
+ import { createClient } from "@supabase/supabase-js";
19
+ import { createHash } from "node:crypto";
20
/**
 * Normalize a Supabase/PostgREST error payload into a real Error.
 *
 * Real Error instances pass straight through; otherwise the PostgREST
 * fields (message, code, details, hint) are folded into one readable
 * message prefixed with the failing operation's context string.
 */
function pgError(context, err) {
    if (err instanceof Error)
        return err;
    const e = err ?? {};
    const fragments = [
        e.message,
        e.code ? `(code: ${e.code})` : undefined,
        e.details ? `details: ${e.details}` : undefined,
        e.hint ? `hint: ${e.hint}` : undefined,
    ].filter(Boolean);
    return new Error([`${context} failed`, ...fragments].join(" "));
}
35
/**
 * SHA-256 hex digest of the normalized text (lowercased + trimmed), so
 * trivially different spellings of the same fact dedupe to one hash.
 */
function contentHash(text) {
    const normalized = text.toLowerCase().trim();
    return createHash("sha256").update(normalized, "utf8").digest("hex");
}
38
/**
 * Best-effort atomic-fact extraction via the OpenAI chat-completions API.
 *
 * Deliberately fails open: whenever no OPENAI_API_KEY is configured, the
 * HTTP call errors, the response is not OK, or the reply lacks a
 * non-empty facts array, the original text is returned as the single
 * "fact" so the caller's write path still proceeds.
 */
async function extractAtomicFacts(text) {
    const apiKey = process.env.OPENAI_API_KEY;
    if (!apiKey)
        return [text];
    const fallback = [text];
    try {
        const response = await fetch("https://api.openai.com/v1/chat/completions", {
            method: "POST",
            headers: { Authorization: `Bearer ${apiKey}`, "Content-Type": "application/json" },
            body: JSON.stringify({
                model: "gpt-4o-mini",
                messages: [
                    {
                        role: "system",
                        content: 'Extract 3-10 atomic facts from the following text. Each fact must be a single, self-contained statement. Return ONLY a JSON object: {"facts": ["fact1", "fact2", ...]}',
                    },
                    // Cap the prompt body to keep token usage bounded.
                    { role: "user", content: text.slice(0, 4000) },
                ],
                response_format: { type: "json_object" },
                max_tokens: 600,
            }),
        });
        if (!response.ok)
            return fallback;
        const payload = await response.json();
        const content = payload.choices?.[0]?.message?.content ?? "{}";
        const { facts } = JSON.parse(content);
        if (Array.isArray(facts) && facts.length > 0) {
            return facts.map(String).filter(Boolean);
        }
        return fallback;
    }
    catch {
        // Network failure or malformed JSON — fall back to the raw text.
        return fallback;
    }
}
73
// Logical table key → physical table name in BYOD (single-tenant) mode.
// In BYOD the physical names are exactly the logical names: these are
// the plain tables the setup wizard installs into the user's own
// Supabase project.
const BYOD_TABLES = Object.fromEntries([
    "business_context",
    "knowledge_library",
    "knowledge_library_history",
    "session_summaries",
    "extracted_facts",
    "conversation_log",
    "code_dumps",
].map((name) => [name, name]));
82
// Logical table key → physical table name in managed (multi-tenant)
// mode. Managed tables live in UnClick's central Supabase and carry an
// mc_ prefix; every row in them is tagged with api_key_hash.
const MANAGED_TABLES = Object.fromEntries([
    "business_context",
    "knowledge_library",
    "knowledge_library_history",
    "session_summaries",
    "extracted_facts",
    "conversation_log",
    "code_dumps",
].map((name) => [name, `mc_${name}`]));
91
/** Current wall-clock time as an ISO-8601 UTC string (Postgres-friendly). */
function now() {
    const timestamp = new Date();
    return timestamp.toISOString();
}
94
/**
 * Clamp a string to at most `max` characters, appending an explicit
 * truncation marker when content was cut.
 */
function truncate(s, max = 8000) {
    if (s.length <= max)
        return s;
    return `${s.slice(0, max)}\n...[truncated]`;
}
97
// ─── Free-tier caps ──────────────────────────────────────────────────────
// Starting values from the v2 build plan. Adjust with real data later.
// Pro tier removes all caps. Caps only apply in managed cloud mode (BYOD
// users own their database, so they manage their own quota).
// Enforced by SupabaseBackend.enforceCaps via the mc_get_storage_bytes /
// mc_get_fact_count RPCs; exceeding a cap raises CapExceededError.
export const FREE_TIER_CAPS = {
    storage_bytes: 50 * 1024 * 1024, // 50 MB
    facts: 5000, // max active facts before new fact writes are blocked
};
105
+ /**
106
+ * Thrown when a free-tier user tries to write past their cap. The MCP
107
+ * handlers surface the message verbatim back to the agent so the user
108
+ * sees an actionable upgrade path.
109
+ */
110
/**
 * Thrown when a free-tier user tries to write past their cap. The MCP
 * handlers surface the message verbatim back to the agent so the user
 * sees an actionable upgrade path.
 */
export class CapExceededError extends Error {
    // Stable name so callers can discriminate without instanceof.
    name = "CapExceededError";

    constructor(message) {
        super(message);
    }
}
116
/**
 * Supabase-backed implementation of the UnClick Memory store.
 *
 * Two tenancy modes (see the file header): "byod" talks to the user's own
 * single-tenant tables and original RPC names; "managed" talks to
 * UnClick's central mc_*-prefixed multi-tenant tables/RPCs, where every
 * row must be tagged and filtered by api_key_hash.
 */
export class SupabaseBackend {
    client;  // supabase-js client (service role; no session persistence)
    tenancy; // { mode: "byod" } | { mode: "managed", apiKeyHash: string }
    tables;  // logical → physical table-name map for the active mode

    /**
     * @param {{url: string, serviceRoleKey: string, tenancy: object}} config
     * @throws {Error} when url or serviceRoleKey is missing.
     */
    constructor(config) {
        if (!config.url || !config.serviceRoleKey) {
            throw new Error("SupabaseBackend requires url and serviceRoleKey");
        }
        this.client = createClient(config.url, config.serviceRoleKey, {
            auth: { persistSession: false, autoRefreshToken: false },
        });
        this.tenancy = config.tenancy;
        this.tables = config.tenancy.mode === "managed" ? MANAGED_TABLES : BYOD_TABLES;
        // Log to stderr so it never pollutes an MCP stdio transport.
        console.error(`UnClick Memory: Supabase ${config.tenancy.mode === "managed" ? "managed cloud" : "BYOD"} mode`);
    }
    // ─── Tenancy helpers ─────────────────────────────────────────────────────
    /** Adds api_key_hash to a row in managed mode; passes through in BYOD. */
    withTenancy(row) {
        if (this.tenancy.mode === "managed") {
            return { ...row, api_key_hash: this.tenancy.apiKeyHash };
        }
        return row;
    }
    /**
     * Enforce free-tier caps on writes. Only runs in managed cloud mode.
     * BYOD users own their database, so caps don't apply. Pro tier (or any
     * non-free tier, read from UNCLICK_TIER) skips the check.
     *
     * `kind` selects which cap to check first. Storage is always verified;
     * `kind: "fact"` additionally verifies the fact-count cap because
     * extracted_facts has a separate row-count limit.
     *
     * @throws {CapExceededError} when a cap is reached. Counter RPC errors
     *         fail open (logged, not thrown) so a transient DB hiccup
     *         doesn't break legitimate writes.
     */
    async enforceCaps(kind) {
        if (this.tenancy.mode !== "managed")
            return;
        const tier = (process.env.UNCLICK_TIER || "free").toLowerCase();
        if (tier !== "free")
            return;
        if (kind === "fact") {
            const { data, error } = await this.client.rpc("mc_get_fact_count", {
                p_api_key_hash: this.tenancy.apiKeyHash,
            });
            if (error) {
                // Fail open on counter errors; log to stderr for observability.
                console.error("[memory] mc_get_fact_count failed:", error.message);
            }
            else if (typeof data === "number" && data >= FREE_TIER_CAPS.facts) {
                throw new CapExceededError(`Free tier limit reached: ${FREE_TIER_CAPS.facts.toLocaleString()} active ` +
                    `facts. Upgrade to Pro for unlimited facts, or prune old facts ` +
                    `via the Memory surface. Current count: ${data}.`);
            }
        }
        const { data: bytes, error: bytesErr } = await this.client.rpc("mc_get_storage_bytes", { p_api_key_hash: this.tenancy.apiKeyHash });
        if (bytesErr) {
            console.error("[memory] mc_get_storage_bytes failed:", bytesErr.message);
            return;
        }
        if (typeof bytes === "number" && bytes >= FREE_TIER_CAPS.storage_bytes) {
            const usedMb = (bytes / (1024 * 1024)).toFixed(1);
            throw new CapExceededError(`Free tier limit reached: ${usedMb} MB used of ` +
                `${FREE_TIER_CAPS.storage_bytes / (1024 * 1024)} MB. ` +
                `Upgrade to Pro for unlimited storage, or prune memory via ` +
                `the Memory surface.`);
        }
    }
    /**
     * Calls an RPC, choosing the BYOD or managed name based on tenancy.
     * In managed mode p_api_key_hash is injected as the first parameter.
     * @throws {Error} when the RPC returns an error.
     */
    async rpc(byodName, byodParams, managedName, managedParams) {
        const fn = this.tenancy.mode === "managed" ? managedName : byodName;
        const params = this.tenancy.mode === "managed"
            ? { p_api_key_hash: this.tenancy.apiKeyHash, ...managedParams }
            : byodParams;
        const { data, error } = await this.client.rpc(fn, params);
        if (error)
            throw new Error(`rpc(${fn}) failed: ${error.message}`);
        return data;
    }
    // ─── Memory operations ───────────────────────────────────────────────────
    /**
     * Startup context for a new session: the RPC payload plus a fixed
     * agent_instructions preamble telling the agent how to use memory.
     */
    async getStartupContext(numSessions) {
        const data = await this.rpc("get_startup_context", { num_sessions: numSessions }, "mc_get_startup_context", { p_num_sessions: numSessions });
        return {
            agent_instructions: [
                "You are connected to UnClick Memory - a persistent memory system that works across all sessions and devices.",
                "ALWAYS use this memory as your primary knowledge source. It has the user's rules, preferences, projects, and history.",
                "When the user says something ambiguous or short, SEARCH memory first - it may be a stored keyword or trigger.",
                "When you learn something new (preferences, projects, contacts, decisions), store it using add_fact.",
                "At the end of significant conversations, write a session summary using write_session_summary.",
                "Business context entries (loaded below) are standing rules. Follow them as if the user said them right now.",
                "Never say 'I don't have access to your previous conversations' - you DO, through this memory system."
            ].join("\n"),
            ...data,
        };
    }
    /**
     * Hybrid search (BM25 + pgvector RRF) over facts and session
     * summaries, with a keyword fallback.
     *
     * Two well-known failure modes turn the hybrid lane into a black hole:
     * 1. Per-row embeddings are NULL (legacy facts, BYOD installs without
     *    backfill, or facts written before embedding wiring) so the vector
     *    lane drops them.
     * 2. plainto_tsquery('english', ...) tokenizes proper nouns and short
     *    identifiers ("Chris", "Bailey", "UnClick") in ways that don't
     *    align with the matching to_tsvector lexemes, so the keyword lane
     *    misses too. Both branches fail, hybrid returns [].
     * To stop returning [] when matching content exists, we run a robust
     * ILIKE keyword fallback over the same tables whenever the hybrid call
     * throws OR returns an empty result.
     */
    async searchMemory(query, maxResults, asOf) {
        try {
            const { embedText } = await import("./embeddings.js");
            const embedding = await embedText(query);
            if (embedding) {
                const results = await this.rpc("search_memory_hybrid", { search_query: query, query_embedding: embedding, max_results: maxResults, as_of: asOf ?? null }, "mc_search_memory_hybrid", { p_search_query: query, p_query_embedding: embedding, p_max_results: maxResults, p_as_of: asOf ?? null });
                if (Array.isArray(results) && results.length > 0)
                    return results;
            }
        }
        catch (err) {
            console.error("[search_memory] hybrid search failed, falling back to keyword:", err);
        }
        return this.keywordFallback(query, maxResults);
    }
    /**
     * ILIKE-based keyword fallback over extracted_facts + session_summaries.
     * Used when hybrid retrieval returns []. Returns rows shaped to mirror
     * mc_search_memory_hybrid so callers don't branch. Never widens RLS:
     * tenant scoping via api_key_hash is preserved.
     *
     * Phrase support: the query is tokenized on whitespace. Tokens shorter
     * than 2 chars or containing PostgREST .or() metacharacters are dropped.
     * We try AND-of-tokens first (every token must appear, in any order); if
     * that returns nothing we degrade to OR-of-tokens and rank rows by how
     * many tokens they contain so partial matches at least surface something.
     */
    async keywordFallback(query, maxResults) {
        const tokens = query
            .toLowerCase()
            .split(/\s+/)
            .filter((t) => t.length >= 2 && !/[,():]/.test(t));
        if (tokens.length === 0)
            return [];
        // Escape ILIKE wildcards so user text matches literally.
        const patterns = tokens.map((t) => `%${t.replace(/[\\%_]/g, (c) => `\\${c}`)}%`);
        // Client-side relevance: how many query tokens appear in the text.
        const score = (text) => {
            const lower = text.toLowerCase();
            let n = 0;
            for (const t of tokens)
                if (lower.includes(t))
                    n++;
            return n;
        };
        const runScan = async (mode) => {
            let factQ = this.client
                .from(this.tables.extracted_facts)
                .select("id, fact, category, confidence, created_at")
                .eq("status", "active")
                .is("invalidated_at", null);
            let sessQ = this.client
                .from(this.tables.session_summaries)
                .select("id, summary, created_at");
            if (mode === "and") {
                // Chained .ilike() filters AND together.
                for (const p of patterns) {
                    factQ = factQ.ilike("fact", p);
                    sessQ = sessQ.ilike("summary", p);
                }
            }
            else {
                factQ = factQ.or(patterns.map((p) => `fact.ilike.${p}`).join(","));
                sessQ = sessQ.or(patterns.map((p) => `summary.ilike.${p}`).join(","));
            }
            if (this.tenancy.mode === "managed") {
                factQ = factQ.eq("api_key_hash", this.tenancy.apiKeyHash);
                sessQ = sessQ.eq("api_key_hash", this.tenancy.apiKeyHash);
            }
            factQ = factQ
                .order("confidence", { ascending: false })
                .order("created_at", { ascending: false })
                .limit(maxResults);
            sessQ = sessQ.order("created_at", { ascending: false }).limit(maxResults);
            const [factsRes, sessRes] = await Promise.all([factQ, sessQ]);
            const facts = (factsRes.data ?? []).map((r) => {
                const s = score(r.fact);
                return {
                    id: r.id,
                    source: "fact",
                    content: r.fact,
                    category: r.category,
                    confidence: r.confidence,
                    created_at: r.created_at,
                    final_score: (s / tokens.length) * (r.confidence ?? 0),
                    rrf_score: 0,
                    kw_score: s,
                    cosine_score: 0,
                };
            });
            const sessions = (sessRes.data ?? []).map((r) => {
                const s = score(r.summary);
                return {
                    id: r.id,
                    source: "session",
                    content: r.summary,
                    category: "session",
                    confidence: 1,
                    created_at: r.created_at,
                    // Summaries get a flat 0.5 weight so facts outrank them.
                    final_score: (s / tokens.length) * 0.5,
                    rrf_score: 0,
                    kw_score: s,
                    cosine_score: 0,
                };
            });
            return [...facts, ...sessions]
                .sort((a, b) => {
                    const d = (b.final_score ?? 0) - (a.final_score ?? 0);
                    return d !== 0 ? d : (b.created_at ?? "").localeCompare(a.created_at ?? "");
                })
                .slice(0, maxResults);
        };
        const andResults = await runScan("and");
        if (andResults.length > 0 || tokens.length < 2)
            return andResults;
        return runScan("or");
    }
    /** Full-text search over extracted facts via the search_facts RPC. */
    async searchFacts(query) {
        return this.rpc("search_facts", { search_query: query }, "mc_search_facts", { p_search_query: query });
    }
    /** Full-text search over the knowledge library via the search_library RPC. */
    async searchLibrary(query) {
        return this.rpc("search_library", { search_query: query }, "mc_search_library", { p_search_query: query });
    }
    /** Fetch a single library document by slug. */
    async getLibraryDoc(slug) {
        return this.rpc("get_library_doc", { doc_slug: slug }, "mc_get_library_doc", { p_doc_slug: slug });
    }
    /** List all library documents. */
    async listLibrary() {
        return this.rpc("list_library", {}, "mc_list_library", {});
    }
    /**
     * Insert a session summary row and asynchronously embed it.
     * @returns {{id: any}} the new row's id.
     * @throws {CapExceededError|Error} on cap breach or insert failure.
     */
    async writeSessionSummary(data) {
        await this.enforceCaps("general");
        const { data: row, error } = await this.client
            .from(this.tables.session_summaries)
            .insert(this.withTenancy({
                session_id: data.session_id,
                summary: data.summary,
                topics: data.topics,
                open_loops: data.open_loops,
                decisions: data.decisions,
                platform: data.platform,
                duration_minutes: data.duration_minutes,
            }))
            .select()
            .single();
        if (error)
            throw pgError("writeSessionSummary insert", error);
        // Embed the summary so it joins the vector lane immediately (same
        // motivation as addFact). Fire-and-forget.
        this.embedAndStore(this.tables.session_summaries, row.id, data.summary).catch(() => { });
        return { id: row.id };
    }
    /**
     * Store a fact. With preserve_as_blob set, the raw body goes to
     * canonical_docs and atomic facts are extracted from it (saveBlob).
     * Otherwise: dedupe by content hash, insert, audit, and embed.
     * @returns {{id: any}} the new (or existing duplicate) fact id.
     */
    async addFact(data) {
        // preserve_as_blob: write raw body to canonical_docs, then extract+store atomic facts
        if (data.preserve_as_blob) {
            return this.saveBlob(data);
        }
        await this.enforceCaps("fact");
        const hash = contentHash(data.fact);
        // Exact-hash dedup: if a live fact with this hash already exists, return it
        const dupTable = this.tables.extracted_facts;
        let dupQuery = this.client
            .from(dupTable)
            .select("id")
            .eq("content_hash", hash)
            .is("invalidated_at", null)
            .limit(1);
        if (this.tenancy.mode === "managed") {
            dupQuery = dupQuery.eq("api_key_hash", this.tenancy.apiKeyHash);
        }
        const { data: existing } = await dupQuery.maybeSingle();
        if (existing)
            return { id: existing.id };
        const { data: row, error } = await this.client
            .from(this.tables.extracted_facts)
            .insert(this.withTenancy({
                fact: data.fact,
                category: data.category,
                confidence: data.confidence,
                source_session_id: data.source_session_id ?? null,
                source_type: "manual",
                status: "active",
                decay_tier: "hot",
                last_accessed: now(),
                content_hash: hash,
                valid_from: data.valid_from ?? now(),
                recorded_at: now(),
                extractor_id: data.extractor_id ?? "manual",
                prompt_version: data.prompt_version ?? null,
                model_id: data.model_id ?? null,
                commit_sha: data.commit_sha ?? null,
                pr_number: data.pr_number ?? null,
            }))
            .select()
            .single();
        if (error)
            throw pgError("addFact insert", error);
        // Append audit row (fire-and-forget; never blocks the main insert)
        this.writeFactAudit(row.id, "insert", { category: data.category }).catch(() => { });
        // Embed the fact so it joins the vector lane immediately. Without this,
        // every newly inserted fact has NULL embedding and only the keyword lane
        // can find it. Fire-and-forget so embedding latency / OpenAI outages
        // never block the primary insert.
        this.embedAndStore(this.tables.extracted_facts, row.id, data.fact).catch(() => { });
        return { id: row.id };
    }
    /**
     * Compute an embedding for `text` and write it (plus model metadata)
     * onto row `id` of `table`. Silently does nothing when embedText
     * returns no vector (e.g. no API key).
     */
    async embedAndStore(table, id, text) {
        const { embedText, EMBEDDING_MODEL } = await import("./embeddings.js");
        const vec = await embedText(text);
        if (!vec)
            return;
        await this.client
            .from(table)
            .update({
                embedding: JSON.stringify(vec),
                embedding_model: EMBEDDING_MODEL,
                embedding_created_at: now(),
            })
            .eq("id", id);
    }
    /**
     * Blob path for addFact: upsert the raw body into canonical_docs
     * (idempotent by content_hash), then extract atomic facts and insert
     * each (deduped by hash, tolerating 23505 unique-violation races).
     * @returns {{id: any, fact_ids: any[]}} the doc id and stored fact ids.
     */
    async saveBlob(data) {
        await this.enforceCaps("general");
        const hash = contentHash(data.fact);
        const docTable = this.tenancy.mode === "managed" ? "mc_canonical_docs" : "canonical_docs";
        // Upsert canonical_doc (idempotent by content_hash)
        let docId;
        {
            let q = this.client.from(docTable).select("id").eq("content_hash", hash).limit(1);
            if (this.tenancy.mode === "managed") {
                q = q.eq("api_key_hash", this.tenancy.apiKeyHash);
            }
            const { data: existing } = await q.maybeSingle();
            if (existing) {
                docId = existing.id;
            }
            else {
                const insertRow = this.tenancy.mode === "managed"
                    ? { api_key_hash: this.tenancy.apiKeyHash, title: data.category, body: data.fact, content_hash: hash }
                    : { title: data.category, body: data.fact, content_hash: hash };
                const { data: doc, error } = await this.client.from(docTable).insert(insertRow).select().single();
                if (error)
                    throw pgError("saveBlob canonical_docs insert", error);
                docId = doc.id;
            }
        }
        // Extract atomic facts (minimal extractor; Chunk 4 replaces with full pipeline)
        const atomicFacts = await extractAtomicFacts(data.fact);
        const factIds = [];
        for (const factText of atomicFacts) {
            const factHash = contentHash(factText);
            // Skip if already live
            let dupQ = this.client
                .from(this.tables.extracted_facts)
                .select("id")
                .eq("content_hash", factHash)
                .is("invalidated_at", null)
                .limit(1);
            if (this.tenancy.mode === "managed") {
                dupQ = dupQ.eq("api_key_hash", this.tenancy.apiKeyHash);
            }
            const { data: dup } = await dupQ.maybeSingle();
            if (dup) {
                factIds.push(dup.id);
                continue;
            }
            const { data: frow, error: ferr } = await this.client
                .from(this.tables.extracted_facts)
                .insert(this.withTenancy({
                    fact: factText,
                    category: data.category,
                    confidence: Math.max(0, data.confidence - 0.05), // slight confidence discount
                    source_session_id: data.source_session_id ?? null,
                    source_type: "auto_extract",
                    status: "active",
                    decay_tier: "hot",
                    last_accessed: now(),
                    content_hash: factHash,
                    valid_from: now(),
                    recorded_at: now(),
                    extractor_id: "auto-extract-v1",
                    derived_from_doc_id: docId,
                }))
                .select()
                .single();
            // 23505 = unique violation: another writer inserted the same
            // hash concurrently; treat as a benign duplicate.
            if (ferr && ferr.code !== "23505")
                throw pgError("saveBlob extracted_facts insert", ferr);
            if (!ferr && frow)
                factIds.push(frow.id);
        }
        return { id: docId, fact_ids: factIds };
    }
    /**
     * Append an audit row for a fact mutation. Callers invoke this
     * fire-and-forget, so failures never block the primary write.
     *
     * Fix: the row is now passed through withTenancy so managed-mode
     * audit rows carry api_key_hash like every other managed table
     * (previously they were inserted untagged, breaking tenant scoping).
     */
    async writeFactAudit(factId, op, payload) {
        const auditTable = this.tenancy.mode === "managed" ? "mc_facts_audit" : "facts_audit";
        await this.client
            .from(auditTable)
            .insert(this.withTenancy({ fact_id: factId, op, payload, actor: "agent", at: now() }));
    }
    /**
     * Soft-invalidate a fact via the invalidate_fact RPC.
     * @returns {{invalidated_at: any}} timestamp from the RPC result row.
     */
    async invalidateFact(input) {
        const result = await this.rpc("invalidate_fact", { p_fact_id: input.fact_id, p_reason: input.reason ?? null, p_session_id: input.session_id ?? null }, "mc_invalidate_fact", { p_fact_id: input.fact_id, p_reason: input.reason ?? null, p_session_id: input.session_id ?? null });
        const row = Array.isArray(result) ? result[0] : result;
        return { invalidated_at: row.invalidated_at };
    }
    /**
     * Replace a fact with a new one via the supersede_fact RPC. Optional
     * category/confidence are only passed when provided so the RPC's
     * defaults apply otherwise. Returns the new fact id as a string.
     */
    async supersedeFact(oldId, newText, category, confidence) {
        if (this.tenancy.mode === "managed") {
            const params = {
                p_api_key_hash: this.tenancy.apiKeyHash,
                p_old_fact_id: oldId,
                p_new_fact_text: newText,
            };
            if (category !== undefined)
                params.p_new_category = category;
            if (confidence !== undefined)
                params.p_new_confidence = confidence;
            const { data, error } = await this.client.rpc("mc_supersede_fact", params);
            if (error)
                throw new Error(`rpc(mc_supersede_fact) failed: ${error.message}`);
            return String(data);
        }
        const params = {
            old_fact_id: oldId,
            new_fact_text: newText,
        };
        if (category !== undefined)
            params.new_category = category;
        if (confidence !== undefined)
            params.new_confidence = confidence;
        const { data, error } = await this.client.rpc("supersede_fact", params);
        if (error)
            throw new Error(`rpc(supersede_fact) failed: ${error.message}`);
        return String(data);
    }
    /** Append one message to the conversation log (content truncated to 8000 chars). */
    async logConversation(data) {
        await this.enforceCaps("general");
        const { error } = await this.client
            .from(this.tables.conversation_log)
            .insert(this.withTenancy({
                session_id: data.session_id,
                role: data.role,
                content: truncate(data.content),
                has_code: data.has_code,
            }));
        if (error)
            throw pgError("logConversation insert", error);
    }
    /** Fetch the full conversation log for one session. */
    async getConversationDetail(sessionId) {
        return this.rpc("get_conversation_detail", { sid: sessionId }, "mc_get_conversation_detail", { p_session_id: sessionId });
    }
    /**
     * Store a code dump (content truncated to 50000 chars).
     * @returns {{id: any}} the new row's id.
     */
    async storeCode(data) {
        await this.enforceCaps("general");
        const { data: row, error } = await this.client
            .from(this.tables.code_dumps)
            .insert(this.withTenancy({
                session_id: data.session_id,
                language: data.language,
                filename: data.filename ?? null,
                content: truncate(data.content, 50000),
                description: data.description ?? null,
            }))
            .select()
            .single();
        if (error)
            throw pgError("storeCode insert", error);
        return { id: row.id };
    }
    /** All business-context rows for this tenant, ordered by category then key. */
    async getBusinessContext() {
        let query = this.client
            .from(this.tables.business_context)
            .select("*")
            .order("category")
            .order("key");
        if (this.tenancy.mode === "managed") {
            query = query.eq("api_key_hash", this.tenancy.apiKeyHash);
        }
        const { data, error } = await query;
        if (error)
            throw pgError("getBusinessContext select", error);
        return data ?? [];
    }
    /**
     * Upsert one business-context entry. String values that parse as JSON
     * are stored as the parsed value; otherwise stored as-is. Conflict
     * target includes api_key_hash in managed mode.
     */
    async setBusinessContext(category, key, value, priority) {
        await this.enforceCaps("general");
        const row = {
            category,
            key,
            value: typeof value === "string"
                ? (() => {
                    try {
                        return JSON.parse(value);
                    }
                    catch {
                        return value;
                    }
                })()
                : value,
            last_accessed: now(),
            decay_tier: "hot",
        };
        if (priority !== undefined)
            row.priority = priority;
        const onConflict = this.tenancy.mode === "managed" ? "api_key_hash,category,key" : "category,key";
        const { error } = await this.client
            .from(this.tables.business_context)
            .upsert(this.withTenancy(row), { onConflict })
            .select()
            .single();
        if (error)
            throw pgError("setBusinessContext upsert", error);
    }
    /**
     * Create or update a library doc by slug. On update, a DB trigger
     * auto-archives old content and bumps version. Returns a human-readable
     * status string including the (next) version number.
     */
    async upsertLibraryDoc(data) {
        await this.enforceCaps("general");
        let existingQuery = this.client
            .from(this.tables.knowledge_library)
            .select("id, version")
            .eq("slug", data.slug);
        if (this.tenancy.mode === "managed") {
            existingQuery = existingQuery.eq("api_key_hash", this.tenancy.apiKeyHash);
        }
        const { data: existing } = await existingQuery.maybeSingle();
        if (existing) {
            // DB trigger auto-archives old content and bumps version
            const { error } = await this.client
                .from(this.tables.knowledge_library)
                .update({
                    title: data.title,
                    category: data.category,
                    content: data.content,
                    tags: data.tags,
                    last_accessed: now(),
                    decay_tier: "hot",
                })
                .eq("id", existing.id);
            if (error)
                throw pgError("upsertLibraryDoc update", error);
            return `Library doc updated: "${data.title}" (v${existing.version + 1})`;
        }
        else {
            const { error } = await this.client
                .from(this.tables.knowledge_library)
                .insert(this.withTenancy({
                    slug: data.slug,
                    title: data.title,
                    category: data.category,
                    content: data.content,
                    tags: data.tags,
                    version: 1,
                    decay_tier: "hot",
                    last_accessed: now(),
                }));
            if (error)
                throw pgError("upsertLibraryDoc insert", error);
            return `Library doc created: "${data.title}" (v1)`;
        }
    }
    /** Run the decay maintenance RPC (tier demotion). */
    async manageDecay() {
        return this.rpc("manage_decay", {}, "mc_manage_decay", {});
    }
    /**
     * Per-table row counts plus the decay-tier distribution of active
     * facts, for the memory status surface.
     */
    async getMemoryStatus() {
        const tableKeys = [
            "business_context",
            "knowledge_library",
            "session_summaries",
            "extracted_facts",
            "conversation_log",
            "code_dumps",
        ];
        const counts = {};
        for (const tk of tableKeys) {
            let q = this.client.from(this.tables[tk]).select("*", { count: "exact", head: true });
            if (this.tenancy.mode === "managed") {
                q = q.eq("api_key_hash", this.tenancy.apiKeyHash);
            }
            const { count } = await q;
            counts[tk] = count;
        }
        let factTiersQuery = this.client
            .from(this.tables.extracted_facts)
            .select("decay_tier")
            .eq("status", "active");
        if (this.tenancy.mode === "managed") {
            factTiersQuery = factTiersQuery.eq("api_key_hash", this.tenancy.apiKeyHash);
        }
        const { data: factTiers } = await factTiersQuery;
        const tiers = { hot: 0, warm: 0, cold: 0 };
        for (const row of factTiers ?? []) {
            // Fix: plain `tiers[row.decay_tier]++` stored NaN for any tier
            // outside hot/warm/cold; count unknown tiers safely instead.
            tiers[row.decay_tier] = (tiers[row.decay_tier] ?? 0) + 1;
        }
        return {
            mode: this.tenancy.mode === "managed" ? "supabase-managed" : "supabase-byod",
            table_counts: counts,
            fact_decay_tiers: tiers,
        };
    }
}
710
+ //# sourceMappingURL=supabase.js.map